├── .github └── workflows │ ├── build_doc.yml │ ├── build_on_prerelease.yml │ ├── build_on_release.yml │ └── check_on_PR.yml ├── .gitignore ├── .travis.yml ├── CITATION.cff ├── Dockerfile ├── Dockerfile_hdbet ├── Dockerfile_macapype_env ├── Dockerfile_macapype_env_spm ├── Dockerfile_spm_hdbet ├── Dockerfile_testpypi ├── Dockerfile_testpypi_spm_hdbet ├── LICENCE.txt ├── MANIFEST.in ├── README.md ├── TODO.txt ├── _config.yml ├── build └── lib │ └── workflows │ └── which_spm.py ├── docs ├── Makefile ├── command.rst ├── conf.py ├── derivatives.rst ├── docker_install.rst ├── img │ ├── images │ │ └── BIDS_orga.jpg │ └── logo │ │ └── logo_macapype_0.3.jpg ├── index.rst ├── indiv_params.rst ├── params.rst ├── quick_install.rst └── quick_test.rst ├── examples_doc ├── README.txt ├── indiv_params_preparation.json ├── indiv_params_segment_ants.json ├── indiv_params_segment_spm.json ├── params_general_preparation.json ├── params_segment_ants.json ├── params_segment_spm.json └── plot_segment_sphinx_macaque_ants_based.py ├── initial_scripts ├── Macaque Segmentation Steps.docx ├── extract_macapype_outputs.sh ├── init_script_kepkee.sh ├── init_script_regis │ ├── ANTS_N4.m │ ├── brain_segmentation.m │ ├── create_seg_masks.m │ ├── erode_or_dilate.m │ ├── fsl_fast.m │ ├── parameters_Apache.m │ ├── set_parameters.m │ ├── spm_old_segment.m │ └── spm_sanlm.m └── seg_pipe.sh ├── macapype-conda └── macapype │ └── meta.yaml ├── macapype ├── __init__.py ├── _version.py ├── bash │ ├── CropVolume.sh │ ├── IterREGBET.sh │ ├── NMT_subject_align │ ├── NMT_subject_align.csh │ ├── T1xT2BET.sh │ ├── T1xT2BiasFieldCorrection.sh │ └── atlasBREX.sh ├── nodes │ ├── __init__.py │ ├── correct_bias.py │ ├── denoise.py │ ├── extract_brain.py │ ├── pad.py │ ├── prepare.py │ ├── register.py │ ├── segment.py │ ├── surface.py │ └── tests │ │ ├── test_segment.py │ │ └── test_surface.py ├── pipelines │ ├── __init__.py │ ├── correct_bias.py │ ├── extract_brain.py │ ├── full_pipelines.py │ ├── prepare.py │ ├── register.py │ ├── rename.py │ ├── segment.py │ ├── surface.py │ └── tests │ │ ├── test_full_pipelines.py │ │ ├── test_prepare.py │ │ └── test_surface_pipelines.py └── utils │ ├── __init__.py │ ├── data_test_servers.json │ ├── misc.py │ ├── regex_subs.json │ ├── subs.json │ ├── templates.json │ ├── tests │ ├── test_misc.py │ ├── test_utils_bids.py │ ├── test_utils_nodes.py │ └── test_utils_tests.py │ ├── utils_bids.py │ ├── utils_nodes.py │ ├── utils_params.py │ ├── utils_spm.py │ └── utils_tests.py ├── pyproject.toml ├── setup.py └── workflows ├── __init__.py ├── params_segment_baboon0_ants.json ├── params_segment_baboon0_ants_4animal.json ├── params_segment_baboon1_0p6_ants.json ├── params_segment_baboon1_0p6_ants_4animal.json ├── params_segment_baboon1_ants.json ├── params_segment_baboon1_ants_4animal.json ├── params_segment_baboon2_0p6_ants.json ├── params_segment_baboon2_0p6_ants_4animal.json ├── params_segment_baboon2_ants.json ├── params_segment_baboon2_ants_4animal.json ├── params_segment_baboon2_ants_quick.json ├── params_segment_baboon3_0p6_ants.json ├── params_segment_baboon3_0p6_ants_4animal.json ├── params_segment_baboon3_ants.json ├── params_segment_baboon3_ants_4animal.json ├── params_segment_baboon3_ants_quick.json ├── params_segment_baboon_ants.json ├── params_segment_baboon_ants_4animal.json ├── params_segment_baboon_ants_quick.json ├── params_segment_baboon_spm.json ├── params_segment_chimp_ants.json ├── params_segment_human_1mm_ants_4animal.json ├── params_segment_human_2mm_ants_4animal.json ├── 
params_segment_macaque_0p5_ants.json ├── params_segment_macaque_0p5_ants_4animal.json ├── params_segment_macaque_0p5_spm.json ├── params_segment_macaque_ants.json ├── params_segment_macaque_ants_4animal.json ├── params_segment_macaque_ants_quick.json ├── params_segment_macaque_spm.json ├── params_segment_marmo_ants.json ├── params_segment_marmo_ants_4animal.json ├── params_segment_marmo_spm.json ├── params_segment_marmot2_ants.json ├── segment_from_mask_pnh_ants_based.py ├── segment_pnh.py ├── segment_pnh_regis.py └── which_spm.py /.github/workflows/build_doc.yml: -------------------------------------------------------------------------------- 1 | name: "Test and deploy" 2 | 3 | on: 4 | push: 5 | branches: 6 | [ master ] 7 | 8 | jobs: 9 | build: 10 | 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - uses: actions/checkout@master 15 | - name: Set up Python 3.10.5 16 | uses: actions/setup-python@v2 17 | with: 18 | python-version: 3.10.5 19 | 20 | - name: Install dependencies 21 | run: | 22 | pip install -e .[doc] 23 | sudo apt-get install build-essential graphviz libgraphviz-dev 24 | pip install --upgrade pygraphviz graphviz 25 | 26 | - name: Test with pytest 27 | run: 28 | py.test --cov macapype --ignore=examples/ --ignore=run_examples/ 29 | 30 | - name: Build the Doc 🔧 31 | run: | 32 | cd docs 33 | make clean 34 | make html 35 | touch _build/html/.nojekyll 36 | 37 | - name: Deploy Github Pages 🚀 38 | uses: JamesIves/github-pages-deploy-action@v4.4.3 39 | with: 40 | branch: gh-pages 41 | folder: docs/_build/html/ 42 | clean: true 43 | ssh-key: ${{ secrets.DEPLOY_KEY }} 44 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /local* 2 | /data 3 | /venv 4 | *.pyc 5 | *~ 6 | *.pklz 7 | *trace* 8 | *egg* 9 | .idea 10 | doc/_build 11 | doc/auto_examples 12 | doc/generated 13 | doc/img 14 | .coverage 15 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | 3 | cache: 4 | pip: true 5 | apt: true 6 | directories: 7 | - ~/mne_data/ 8 | 9 | sudo: false 10 | dist: trusty 11 | 12 | 13 | matrix: 14 | include: 15 | - env: TEST=standard 16 | os: linux 17 | python: '3.6' 18 | 19 | 20 | before_install: 21 | # Instal miniconda 22 | - if [ "${TRAVIS_OS_NAME}" == "linux" ]; then 23 | wget http://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; 24 | else 25 | wget http://repo.continuum.io/miniconda/Miniconda3-latest-MacOSX-x86_64.sh -O miniconda.sh; 26 | fi; 27 | - bash miniconda.sh -b -p $HOME/miniconda 28 | - export PATH="$HOME/miniconda/bin:$PATH" 29 | - hash -r 30 | - conda config --set always_yes yes --set changeps1 no 31 | - conda update -q conda 32 | - conda info -a 33 | - SRC_DIR=$(pwd) 34 | 35 | 36 | install: 37 | # Create the python environment ; 38 | - conda create -q -n testenv python=$TRAVIS_PYTHON_VERSION numpy pip cython matplotlib; 39 | - source activate testenv 40 | # Testing dependencies 41 | - pip install codecov pytest pytest-cov; 42 | # Install macapype 43 | - cd ${SRC_DIR} 44 | - pip install -e . 
45 | - pip install flake8
46 | - sudo apt-get install graphviz libgraphviz-dev
47 | - pip install --upgrade pygraphviz graphviz
48 | 
49 | script:
50 | 
51 |   # Run test with coverage
52 |   - if [[ "${TEST}" == "standard" ]]; then
53 |       py.test --cov macapype --ignore=examples/ --ignore=run_examples/;
54 |     fi;
55 | 
56 |   - flake8 --count macapype
57 | 
58 | notifications:
59 |   email: false
60 | 
61 | 
62 | after_success:
63 |   # Code coverage
64 |   - if [ "${TEST}" == "standard" ]; then
65 |       codecov;
66 |     fi
67 | 
--------------------------------------------------------------------------------
/CITATION.cff:
--------------------------------------------------------------------------------
1 | # This CITATION.cff file was generated with cffinit.
2 | # Visit https://bit.ly/cffinit to generate yours today!
3 | 
4 | cff-version: 1.2.0
5 | title: macapype
6 | message: >-
7 |   If you use this software, please cite it using the
8 |   metadata from this file.
9 | type: software
10 | authors:
11 |   - given-names: David
12 |     family-names: Meunier
13 |     email: david.meunier@univ-amu.fr
14 |     affiliation: Institut de Neurosciences de la Timone (AMU/CNRS)
15 |     orcid: 'https://orcid.org/0000-0002-5812-6138'
16 | 
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # Generated by: Neurodocker version 0.7.0+15.ga4940e3.dirty
2 | # Latest release: Neurodocker version 0.7.0
3 | #
4 | # Thank you for using Neurodocker. If you discover any issues
5 | # or ways to improve this software, please submit an issue or
6 | # pull request on our GitHub repository:
7 | #
8 | #     https://github.com/ReproNim/neurodocker
9 | #
10 | # Timestamp: 2020/12/02 18:33:44 UTC
11 | 
12 | FROM macatools/macapype_env:v0.3.2
13 | 
14 | USER root
15 | 
16 | ARG DEBIAN_FRONTEND="noninteractive"
17 | 
18 | MAINTAINER David Meunier "david.meunier@univ-amu.fr"
19 | ######################## Python packages
20 | 
21 | RUN apt-get update && apt-get install -y git libpng-dev libfreetype6-dev libxft-dev libblas-dev liblapack-dev libatlas-base-dev gfortran libxml2-dev libxslt1-dev wget graphviz
22 | 
23 | RUN python -m pip install xvfbwrapper \
24 |     psutil \
25 |     numpy \
26 |     scipy \
27 |     matplotlib \
28 |     statsmodels \
29 |     pandas \
30 |     networkx \
31 |     mock \
32 |     prov \
33 |     click \
34 |     funcsigs \
35 |     pydotplus \
36 |     pydot \
37 |     rdflib \
38 |     pbr \
39 |     nibabel \
40 |     packaging \
41 |     pytest
42 | 
43 | RUN python -m pip install graphviz \
44 |     pybids \
45 |     nipype==1.8.6 \
46 |     nilearn \
47 |     scikit-image \
48 |     brain-slam
49 | 
50 | RUN python -m pip install SimpleITK
51 | 
52 | ENV OPENBLAS_NUM_THREADS=1
53 | 
54 | ############################################# install macapype
55 | 
56 | RUN python -m pip install --pre macapype
57 | RUN python -c "import macapype; print(macapype.__version__)"
58 | 
59 | ################################################## Finishing
60 | RUN apt-get clean \
61 |     && rm -rf /var/lib/apt/lists/*
62 | 
63 | RUN rm -rf \
64 |     /tmp/hsperfdata* \
65 |     /var/*/apt/*/partial \
66 |     /var/log/apt/term*
67 | 
--------------------------------------------------------------------------------
/Dockerfile_hdbet:
--------------------------------------------------------------------------------
1 | # Generated by: Neurodocker version 0.7.0+15.ga4940e3.dirty
2 | # Latest release: Neurodocker version 0.7.0
3 | #
4 | # Thank you for using Neurodocker. 
If you discover any issues 5 | # or ways to improve this software, please submit an issue or 6 | # pull request on our GitHub repository: 7 | # 8 | # https://github.com/ReproNim/neurodocker 9 | # 10 | # Timestamp: 2020/12/02 18:33:44 UTC 11 | 12 | FROM macatools/macapype_env:v0.3.2 13 | 14 | USER root 15 | 16 | ARG DEBIAN_FRONTEND="noninteractive" 17 | 18 | MAINTAINER David Meunier "david.meunier@univ-amu.fr" 19 | ######################## Python packages 20 | 21 | RUN apt-get update && apt-get install -y git libpng-dev libfreetype6-dev libxft-dev libblas-dev liblapack-dev libatlas-base-dev gfortran libxml2-dev libxslt1-dev wget graphviz 22 | 23 | RUN python -m pip install xvfbwrapper \ 24 | psutil \ 25 | numpy \ 26 | scipy \ 27 | matplotlib \ 28 | statsmodels \ 29 | pandas \ 30 | networkx\ 31 | mock \ 32 | prov \ 33 | click \ 34 | funcsigs \ 35 | pydotplus \ 36 | pydot \ 37 | rdflib \ 38 | pbr \ 39 | nibabel \ 40 | packaging \ 41 | pytest 42 | 43 | RUN python -m pip install graphviz \ 44 | pybids \ 45 | nipype \ 46 | nilearn \ 47 | scikit-image \ 48 | brain-slam 49 | 50 | RUN python -m pip install SimpleITK 51 | 52 | RUN python -m pip install HD-BET 53 | 54 | ############################################# install macapype 55 | 56 | RUN python -m pip install --pre macapype 57 | RUN python -c "import macapype; print(macapype.__version__)" 58 | 59 | ################################################## Finishing 60 | RUN apt-get clean \ 61 | && rm -rf /var/lib/apt/lists/* 62 | 63 | RUN rm -rf \ 64 | /tmp/hsperfdata* \ 65 | /var/*/apt/*/partial \ 66 | /var/log/apt/term* 67 | -------------------------------------------------------------------------------- /Dockerfile_macapype_env_spm: -------------------------------------------------------------------------------- 1 | # Generated by: Neurodocker version 0.7.0+15.ga4940e3.dirty 2 | # Latest release: Neurodocker version 0.7.0 3 | # 4 | # Thank you for using Neurodocker. 
If you discover any issues 5 | # or ways to improve this software, please submit an issue or 6 | # pull request on our GitHub repository: 7 | # 8 | # https://github.com/ReproNim/neurodocker 9 | # 10 | # Timestamp: 2020/12/02 18:33:44 UTC 11 | 12 | FROM macatools/macapype_env:v0.3.2 13 | 14 | USER root 15 | 16 | ARG DEBIAN_FRONTEND="noninteractive" 17 | 18 | MAINTAINER David Meunier "david.meunier@univ-amu.fr" 19 | 20 | RUN apt-get update -qq 21 | 22 | ################################################## Install SPM12 as MCR 23 | RUN apt-get -y install unzip xorg wget 24 | 25 | # Install MATLAB MCR in /opt/mcr/ 26 | ENV MATLAB_VERSION R2019b 27 | ENV MCR_VERSION v97 28 | RUN mkdir /opt/mcr_install \ 29 | && mkdir /opt/mcr \ 30 | && wget --progress=bar:force -P /opt/mcr_install https://ssd.mathworks.com/supportfiles/downloads/${MATLAB_VERSION}/Release/3/deployment_files/installer/complete/glnxa64/MATLAB_Runtime_${MATLAB_VERSION}_Update_3_glnxa64.zip \ 31 | && unzip -q /opt/mcr_install/MATLAB_Runtime_${MATLAB_VERSION}_Update_3_glnxa64.zip -d /opt/mcr_install \ 32 | && /opt/mcr_install/install -destinationFolder /opt/mcr -agreeToLicense yes -mode silent \ 33 | && rm -rf /opt/mcr_install /tmp/* 34 | 35 | # Install SPM Standalone in /opt/spm12/ 36 | ENV SPM_VERSION 12 37 | ENV SPM_REVISION r7771 38 | 39 | ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/mcr/${MCR_VERSION}/runtime/glnxa64:/opt/mcr/${MCR_VERSION}/bin/glnxa64:/opt/mcr/${MCR_VERSION}/sys/os/glnxa64:/opt/mcr/${MCR_VERSION}/sys/opengl/lib/glnxa64:/opt/mcr/${MCR_VERSION}/extern/bin/glnxa64 40 | 41 | ENV MCR_INHIBIT_CTF_LOCK 1 42 | ENV SPM_HTML_BROWSER 0 43 | # Running SPM once with "function exit" tests the succesfull installation *and* 44 | # extracts the ctf archive which is necessary if singularity is going to be 45 | # used later on, because singularity containers are read-only. 46 | # Also, set +x on the entrypoint for non-root container invocations 47 | RUN wget --no-check-certificate --progress=bar:force -P /opt https://www.fil.ion.ucl.ac.uk/spm/download/restricted/bids/spm${SPM_VERSION}_${SPM_REVISION}_Linux_${MATLAB_VERSION}.zip \ 48 | && unzip -q /opt/spm${SPM_VERSION}_${SPM_REVISION}_Linux_${MATLAB_VERSION}.zip -d /opt \ 49 | && rm -f /opt/spm${SPM_VERSION}_${SPM_REVISION}_Linux_${MATLAB_VERSION}.zip \ 50 | && /opt/spm${SPM_VERSION}/spm${SPM_VERSION} function exit \ 51 | && chmod +x /opt/spm${SPM_VERSION}/spm${SPM_VERSION} 52 | ENV SPM_DIR /opt/spm${SPM_VERSION} 53 | 54 | 55 | ################################################## Finishing 56 | RUN apt-get clean \ 57 | && rm -rf /var/lib/apt/lists/* 58 | 59 | RUN rm -rf \ 60 | /tmp/hsperfdata* \ 61 | /var/*/apt/*/partial \ 62 | /var/log/apt/term* 63 | 64 | -------------------------------------------------------------------------------- /Dockerfile_spm_hdbet: -------------------------------------------------------------------------------- 1 | # Generated by: Neurodocker version 0.7.0+15.ga4940e3.dirty 2 | # Latest release: Neurodocker version 0.7.0 3 | # 4 | # Thank you for using Neurodocker. 
If you discover any issues 5 | # or ways to improve this software, please submit an issue or 6 | # pull request on our GitHub repository: 7 | # 8 | # https://github.com/ReproNim/neurodocker 9 | # 10 | # Timestamp: 2020/12/02 18:33:44 UTC 11 | 12 | FROM macatools/macapype_env:v0.3.2-spm 13 | 14 | USER root 15 | 16 | ARG DEBIAN_FRONTEND="noninteractive" 17 | 18 | MAINTAINER David Meunier "david.meunier@univ-amu.fr" 19 | ######################## Python packages 20 | 21 | RUN apt-get update && apt-get install -y git libpng-dev libfreetype6-dev libxft-dev libblas-dev liblapack-dev libatlas-base-dev gfortran libxml2-dev libxslt1-dev wget graphviz 22 | 23 | RUN python -m pip install xvfbwrapper \ 24 | psutil \ 25 | numpy \ 26 | scipy \ 27 | matplotlib \ 28 | statsmodels \ 29 | pandas \ 30 | networkx\ 31 | mock \ 32 | prov \ 33 | click \ 34 | funcsigs \ 35 | pydotplus \ 36 | pydot \ 37 | rdflib \ 38 | pbr \ 39 | nibabel\ 40 | packaging \ 41 | pytest 42 | 43 | RUN python -m pip install graphviz \ 44 | pybids \ 45 | nipype==1.8.6 \ 46 | nilearn \ 47 | scikit-image \ 48 | brain-slam 49 | 50 | RUN python -m pip install SimpleITK 51 | 52 | RUN python -m pip install HD-BET 53 | 54 | # should be added in macapype_env 55 | ENV OPENBLAS_NUM_THREADS=1 56 | 57 | ############################################# install macapype 58 | 59 | RUN python -m pip install --pre macapype --upgrade 60 | RUN python -c "import macapype; print(macapype.__version__)" 61 | 62 | ################################################## Finishing 63 | RUN apt-get clean \ 64 | && rm -rf /var/lib/apt/lists/* 65 | 66 | RUN rm -rf \ 67 | /tmp/hsperfdata* \ 68 | /var/*/apt/*/partial \ 69 | /var/log/apt/term* 70 | 71 | ENV LD_LIBRARY_PATH=/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH 72 | -------------------------------------------------------------------------------- /Dockerfile_testpypi: -------------------------------------------------------------------------------- 1 | # Generated by: Neurodocker version 0.7.0+15.ga4940e3.dirty 2 | # Latest release: Neurodocker version 0.7.0 3 | # 4 | # Thank you for using Neurodocker. 
If you discover any issues 5 | # or ways to improve this software, please submit an issue or 6 | # pull request on our GitHub repository: 7 | # 8 | # https://github.com/ReproNim/neurodocker 9 | # 10 | # Timestamp: 2020/12/02 18:33:44 UTC 11 | 12 | FROM macatools/macapype_env:v0.3.2 13 | 14 | USER root 15 | 16 | ARG DEBIAN_FRONTEND="noninteractive" 17 | 18 | MAINTAINER David Meunier "david.meunier@univ-amu.fr" 19 | ######################## Python packages 20 | 21 | RUN apt-get update && apt-get install -y git libpng-dev libfreetype6-dev libxft-dev libblas-dev liblapack-dev libatlas-base-dev gfortran libxml2-dev libxslt1-dev wget graphviz 22 | 23 | RUN python -m pip install xvfbwrapper \ 24 | psutil \ 25 | numpy \ 26 | scipy \ 27 | matplotlib \ 28 | statsmodels \ 29 | pandas \ 30 | networkx\ 31 | mock \ 32 | prov \ 33 | click \ 34 | funcsigs \ 35 | pydotplus \ 36 | pydot \ 37 | rdflib \ 38 | pbr \ 39 | nibabel \ 40 | packaging \ 41 | pytest 42 | 43 | RUN python -m pip install graphviz \ 44 | pybids \ 45 | nipype==1.8.6 \ 46 | nilearn \ 47 | scikit-image \ 48 | brain-slam 49 | 50 | RUN python -m pip install SimpleITK 51 | 52 | ENV OPENBLAS_NUM_THREADS=1 # should be added in macapype_env 53 | 54 | ############################################# install macapype 55 | 56 | RUN python -m pip install --no-deps --index-url https://test.pypi.org/simple/ --pre macapype 57 | RUN python -c "import macapype; print(macapype.__version__)" 58 | 59 | ################################################## Finishing 60 | RUN apt-get clean \ 61 | && rm -rf /var/lib/apt/lists/* 62 | 63 | RUN rm -rf \ 64 | /tmp/hsperfdata* \ 65 | /var/*/apt/*/partial \ 66 | /var/log/apt/term* 67 | 68 | ENV LD_LIBRARY_PATH=/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH # to check if useful 69 | -------------------------------------------------------------------------------- /Dockerfile_testpypi_spm_hdbet: -------------------------------------------------------------------------------- 1 | # Generated by: Neurodocker version 0.7.0+15.ga4940e3.dirty 2 | # Latest release: Neurodocker version 0.7.0 3 | # 4 | # Thank you for using Neurodocker. 
If you discover any issues 5 | # or ways to improve this software, please submit an issue or 6 | # pull request on our GitHub repository: 7 | # 8 | # https://github.com/ReproNim/neurodocker 9 | # 10 | # Timestamp: 2020/12/02 18:33:44 UTC 11 | 12 | FROM macatools/macapype_env:v0.3.2-spm 13 | 14 | USER root 15 | 16 | ARG DEBIAN_FRONTEND="noninteractive" 17 | 18 | MAINTAINER David Meunier "david.meunier@univ-amu.fr" 19 | ######################## Python packages 20 | 21 | RUN apt-get update && apt-get install -y git libpng-dev libfreetype6-dev libxft-dev libblas-dev liblapack-dev libatlas-base-dev gfortran libxml2-dev libxslt1-dev wget graphviz 22 | 23 | RUN python -m pip install xvfbwrapper \ 24 | psutil \ 25 | numpy \ 26 | scipy \ 27 | matplotlib \ 28 | statsmodels \ 29 | pandas \ 30 | networkx\ 31 | mock \ 32 | prov \ 33 | click \ 34 | funcsigs \ 35 | pydotplus \ 36 | pydot \ 37 | rdflib \ 38 | pbr \ 39 | nibabel\ 40 | packaging \ 41 | pytest 42 | 43 | RUN python -m pip install graphviz \ 44 | pybids \ 45 | nipype==1.8.6 \ 46 | nilearn \ 47 | scikit-image \ 48 | brain-slam 49 | 50 | RUN python -m pip install SimpleITK 51 | 52 | RUN python -m pip install HD-BET 53 | 54 | ENV OPENBLAS_NUM_THREADS=1 # should be added in macapype_env 55 | 56 | ############################################# install macapype 57 | 58 | RUN python -m pip install --no-deps --index-url https://test.pypi.org/simple/ --pre macapype 59 | RUN python -c "import macapype; print(macapype.__version__)" 60 | 61 | ################################################## Finishing 62 | RUN apt-get clean \ 63 | && rm -rf /var/lib/apt/lists/* 64 | 65 | RUN rm -rf \ 66 | /tmp/hsperfdata* \ 67 | /var/*/apt/*/partial \ 68 | /var/log/apt/term* 69 | 70 | ENV LD_LIBRARY_PATH=/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH # to check if useful 71 | -------------------------------------------------------------------------------- /LICENCE.txt: -------------------------------------------------------------------------------- 1 | Copyright 2021 Macatools 2 | 3 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 4 | 5 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 6 | 7 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 8 | 9 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 10 | 11 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
12 | 
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | 
2 | include macapype/utils/*.json
3 | include macapype/bash/*
4 | include workflows/params*.json
5 | 
6 | include macapype/_version.py
7 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [![Downloads](https://pepy.tech/badge/macapype)](https://pepy.tech/project/macapype)
2 | [![Build Status](https://travis-ci.com/Macatools/macapype.svg?branch=master)](https://travis-ci.com/Macatools/macapype)
3 | 
4 | # Quick Install
5 | To use it:
6 | ```shell
7 | pip install macapype
8 | ```
9 | 
10 | To develop:
11 | ```shell
12 | git clone https://github.com/Macatools/macapype.git
13 | cd macapype
14 | python setup.py develop
15 | ```
16 | 
17 | # Docker image
18 | ```shell
19 | docker pull macatools/macapype:latest
20 | ```
21 | 
22 | # Documentation
23 | 
24 | https://macatools.github.io/macapype/
25 | 
26 | # Related to project:
27 | 
28 | [A guide for PNH MRI processing](https://github.com/PRIME-RE/prime-re.github.io/wiki), and in particular, the section on [structural MRI processing](https://github.com/PRIME-RE/prime-re.github.io/wiki/Structural-processing)
29 | 
30 | [Open Science Room OSR 2020 of OHBM congress presentation of macapype](https://docs.google.com/presentation/d/11RrcZW25MyLbc0_9T2zzhwy5RyUvcjYG4UAgzjuuv8M/edit?usp=sharing)
31 | 
32 | 
33 | # Forum macapype_users
34 | 
35 | The macapype_users forum can be found [here](https://framateam.org/signup_user_complete/?id=ebtxf45nmiyqmkm8e6oh9qjsoe) (requires a framateam/framagit account, but should be accessible with a github or bitbucket account)
36 | 
37 | 
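# Run example

An illustrative invocation of the main processing script through docker (adjust the bindings and options to your setup; see the [documentation](https://macatools.github.io/macapype/) for the full list of arguments):

```shell
docker run -v /path/to/data:/data -v /path/to/output:/out macatools/macapype:latest segment_pnh -data /data -out /out -soft ANTS -species macaque
```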
--------------------------------------------------------------------------------
/TODO.txt:
--------------------------------------------------------------------------------
1 | 19/05/2025
2 | 
3 | debug due to singularity calling old version in home
4 | to avoid, use:
5 | singularity run --cleanenv --no-home
6 | 
7 | 
8 | 16/04/2025
9 | Speed test: using downsampled version in brainmask (otherwise atlasbrex takes too long)
10 | 
11 | baboon Prune 3:
12 | - with Baba01_0p6 (stereo + brainmask + seg): time = 250414-16:18:26,610 -> 250414-19:12:22,411 = ~3h
13 | (atlas brex 250414-16:48:26,554 -> 250414-18:43:39,994 = ~2h)
14 | - stereo0p4_brainmask0p6_seg0p4: time: 250416-11:57:29,946 -> 250416-15:21:49,870 = ~3h20
15 | (atlas brex 250414-16:48:26,554 -> 250416-14:55:33,43 = ~2h as well)
16 | - hdbet: 250423-12:08:52,603 -> 250423-13:06:36,235 = 1h
17 | - hdbet 0p6: 250423-13:21:44,124 -> 250423-13:42:01,401 = 20min
18 | 
19 | macaque Stevie:
20 | - full0p5: 250418-11:51:23,979 -> 250418-12:32:33,123 : ~40min (atlas_brex: 250418-12:01:14,602 -> 250418-12:27:20,613: ~25min)
21 | - 0p5: 250417-17:00:14,13 -> 250417-22:58:18,298 : ~6h (atlas_brex : 250417-17:39:27,819 -> 250417-22:07:53,96: 4h30)
22 | - orig: 250418-09:47:36,282 -> (atlas_brex: 250418-10:23:48,572 -> )
23 | orig2 (warning: preproc already done, but crashed... 250422-10:22:57,51 -> 250422-15:44:05,719 (atlas_brex 250422-10:23:21,79 -> 250422-14:53:24,27 4h30)
24 | - hdbet: 250422-16:13:33,463 250422-17:03:00,507 (prep = av, crop, denoise, debias 250422-16:23:30,75 -> 250422-16:59:20,278)
25 | and then: 250422-17:05:39,547 -> (hdbet 250422-17:06:01,574 -> 250422-17:07:16,945 = ~1min and
26 | - hdbet+full0p5: 250422-16:48:28,170 -> 250422-17:07:59,387
27 | 
28 | spm_full: 250428-14:13:15,872 -> 250428-14:34:55,249 (without N4debias)
29 | 
30 | 
31 | 29/08/2022
32 | Bug report
33 | - Issue when using _spm docker versions (at least the latest rc7_spm and rc8_spm versions) with the -soft ANTS option
34 | 
35 | Road map to v0.3.2:
36 | - replace in default params (used with -species, located in workflows) short_preparation_pipe -> preparation_pipe, and modify params only if -params is not used
37 | 
38 | 
39 | 24/08/2022
40 | Bug report
41 | Issues with padding: works only if short_preparation_pipe / crop_T1 (not with long preparation_prep / prep_T1 / crop in params.json)
42 | 
43 | 29/06/2022 Added the following issues:
44 | 
45 | Road map to v0.3.2
46 | - -template to provide a user template.
47 | 
48 | 19/05/2022: with params, if long_prep, does not work with ANTS_T1.
49 | also with -species, problem if crop_T1 and crop_T2 is available, -soft ANTS_T1 will crash
50 | 
51 | 17/05/2022 Added the following issues
52 | 
53 | Road map to v0.3.2
54 | - make use of macapype_CI.zip in the examples + build the doc at every release
55 | - reframe automated definition of short_preparation_pipe based on the content of indiv_params.json
56 | - add more command line options (in the default, but interfering with the params):
57 | -no_crop if data are already cropped (see previous point)
58 | -export_5tt for exporting 5tt
59 | - 5tt with 6 priors -> gm cortical and subcortical as 2 first tissues
60 | 
61 | 
62 | Road map to v0.4
63 | - better reorientation tools
64 | - better bet-cropping
65 | 
66 | 
67 | 16/05/2022 Release v0.3.1
68 | 
--------------------------------------------------------------------------------
/_config.yml:
--------------------------------------------------------------------------------
1 | theme: jekyll-theme-cayman
--------------------------------------------------------------------------------
/build/lib/workflows/which_spm.py:
--------------------------------------------------------------------------------
1 | import os
2 | 
3 | from macapype.utils.utils_spm import set_spm
4 | 
5 | assert set_spm(), "Error, SPM was not found"
6 | 
7 | 
8 | 
9 | 
10 | 
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 | 
4 | # You can set these variables from the command line.
5 | SPHINXOPTS = 
6 | SPHINXBUILD = sphinx-build
7 | PAPER = 
8 | BUILDDIR = _build
9 | 
10 | # User-friendly check for sphinx-build
11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
13 | endif
14 | 
15 | # Internal variables. 
16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 21 | 22 | .PHONY: help 23 | help: 24 | @echo "Please use \`make ' where is one of" 25 | @echo " html-noplot to make standalone HTML files, without plotting anything" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " htmlhelp to make HTML files and a HTML help project" 31 | @echo " qthelp to make HTML files and a qthelp project" 32 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 33 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 34 | @echo " changes to make an overview of all changed/added/deprecated items" 35 | @echo " linkcheck to check all external links for integrity" 36 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 37 | @echo " coverage to run coverage check of the documentation (if enabled)" 38 | @echo " install to make the html and push it online" 39 | 40 | .PHONY: clean 41 | 42 | clean: 43 | rm -rf $(BUILDDIR)/* 44 | rm -rf auto_examples/ 45 | rm -rf generated/* 46 | rm -rf modules/* 47 | 48 | clean_no_plot: 49 | rm -rf $(BUILDDIR)/* 50 | rm -rf generated/* 51 | rm -rf modules/* 52 | 53 | 54 | html-noplot: 55 | $(SPHINXBUILD) -D plot_gallery=0 -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 56 | @echo 57 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 58 | 59 | .PHONY: html 60 | html: 61 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 62 | @echo 63 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 64 | 65 | .PHONY: dirhtml 66 | dirhtml: 67 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 68 | @echo 69 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 70 | 71 | .PHONY: singlehtml 72 | singlehtml: 73 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 74 | @echo 75 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 76 | 77 | .PHONY: pickle 78 | pickle: 79 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 80 | @echo 81 | @echo "Build finished; now you can process the pickle files." 82 | 83 | .PHONY: htmlhelp 84 | htmlhelp: 85 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 86 | @echo 87 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 88 | ".hhp project file in $(BUILDDIR)/htmlhelp." 89 | 90 | .PHONY: qthelp 91 | qthelp: 92 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 93 | @echo 94 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 95 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 96 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/macapype.qhcp" 97 | @echo "To view the help file:" 98 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/macapype.qhc" 99 | 100 | .PHONY: latex 101 | latex: 102 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 103 | @echo 104 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 105 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 106 | "(use \`make latexpdf' here to do that automatically)." 
107 | 
108 | .PHONY: latexpdf
109 | latexpdf:
110 | 	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
111 | 	@echo "Running LaTeX files through pdflatex..."
112 | 	$(MAKE) -C $(BUILDDIR)/latex all-pdf
113 | 	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
114 | 
115 | .PHONY: changes
116 | changes:
117 | 	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
118 | 	@echo
119 | 	@echo "The overview file is in $(BUILDDIR)/changes."
120 | 
121 | .PHONY: linkcheck
122 | linkcheck:
123 | 	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
124 | 	@echo
125 | 	@echo "Link check complete; look for any errors in the above output " \
126 | 	"or in $(BUILDDIR)/linkcheck/output.txt."
127 | 
128 | .PHONY: doctest
129 | doctest:
130 | 	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
131 | 	@echo "Testing of doctests in the sources finished, look at the " \
132 | 	"results in $(BUILDDIR)/doctest/output.txt."
133 | 
134 | .PHONY: coverage
135 | coverage:
136 | 	$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
137 | 	@echo "Testing of coverage in the sources finished, look at the " \
138 | 	"results in $(BUILDDIR)/coverage/python.txt."
139 | 
140 | 
--------------------------------------------------------------------------------
/docs/command.rst:
--------------------------------------------------------------------------------
1 | :orphan:
2 | 
3 | .. _command:
4 | 
5 | ~~~~~~~~~~~~~~~~~~~~~~
6 | Launching a processing
7 | ~~~~~~~~~~~~~~~~~~~~~~
8 | 
9 | Commands
10 | ********
11 | 
12 | The main script is located in the workflows directory, is called segment_pnh.py, and can be run as a python script:
13 | 
14 | .. code:: bash
15 | 
16 |     $ python workflows/segment_pnh.py
17 | 
18 | **N.B. if you have installed the pypi version (e.g. using pip install macapype) or a docker/singularity version, you can replace the previous command by the following one:**
19 | 
20 | .. code:: bash
21 | 
22 |     $ segment_pnh
23 | 
24 | 
25 | 
26 | For containers (docker and singularity), here are some examples - adapt the bindings to your setup:
27 | 
28 | .. code:: bash
29 | 
30 |     $ docker run -v binding_to_host:binding_guest macatools/macapype:latest segment_pnh
31 | 
32 | .. code:: bash
33 | 
34 |     $ singularity run -B binding_to_host:binding_guest /path/to/containers/macapype_v0.6.sif segment_pnh
35 | 
36 | Expected input data
37 | *******************
38 | 
39 | 
40 | All the data have to be in BIDS format to run properly (see `BIDS specification `_ for more details)
41 | 
42 | In particular:
43 | 
44 | * the ``_T1w`` extension is expected for T1-weighted images (BIDS)
45 | * the ``_T2w`` extension is expected for T2-weighted images (BIDS)
46 | 
47 | .. image:: ./img/images/BIDS_orga.jpg
48 |     :width: 600
49 |     :align: center
50 | 
51 | **Note** : All files with the same extension (T1w or T2w) will be aligned to the first one and averaged
52 | 
53 | 
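As an illustration, a minimal macapype-ready dataset could be organized as follows (subject and session names are only examples):

.. code:: none

    Data_maca/
    ├── sub-Apache/
    │   └── ses-01/
    │       └── anat/
    │           ├── sub-Apache_ses-01_T1w.nii.gz
    │           └── sub-Apache_ses-01_T2w.nii.gz
    └── sub-Baron/
        └── ses-01/
            └── anat/
                ├── sub-Baron_ses-01_T1w.nii.gz
                └── sub-Baron_ses-01_T2w.nii.gz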
54 | Command line parameters
55 | ***********************
56 | 
57 | --------------------
58 | mandatory parameters
59 | --------------------
60 | 
61 | * ``-data`` : path to your dataset (an existing BIDS format directory)
62 | * ``-out`` : path to the output results (an existing path)
63 | * ``-soft`` : can be either SPM or ANTS (**NB:** SPM requires a specific version of macapype/skullTo3d, not available by default)
64 | 
65 | Keywords can be appended to the ``-soft`` value (e.g. ``-soft ANTS_robustreg_prep``). The following options (placed after SPM or ANTS) will change the brain extraction:
66 | 
67 | * ``_4animal`` : will use bet4animal (FSL) for brain extraction, for faster computation (by default atlas_brex is used)
68 | * ``_quick`` : will use hd-bet (Deep Learning) for brain extraction, for faster computation (by default atlas_brex is used)
69 |   **NB:** hd-bet requires a specific version of macapype/skullTo3d, not available by default
70 | 
71 | The following option should be used if the coregistration to the template during preparation is not performed correctly:
72 | 
73 | * ``_robustreg`` (at the end) to have a more robust registration (in two steps)
74 | 
75 | Finally, these options (also placed after SPM or ANTS) modify which parts of the pipeline are run, and can be launched in sequence:
76 | 
77 | * ``_test`` : (at the end) to check if the full pipeline is coherent (will only generate the graph.dot and graph.png)
78 | * ``_prep`` (at the end) will only perform data preparation (no brain extraction and segmentation)
79 | * ``_noseg`` (at the end) will perform data preparation and brain extraction (no segmentation)
80 | 
81 | --------------------
82 | exclusive parameters
83 | --------------------
84 | *(but one is mandatory)*
85 | 
86 | * ``-params`` : *(mandatory if -species is omitted)* a json file specifying the global parameters of the analysis. See :ref:`Parameters <params>` for more details
87 | * ``-species`` : *(mandatory if -params is omitted)* followed by the NHP species corresponding to the image, e.g. {macaque | marmo | baboon | chimp}
88 | 
89 | **NB** : marmoT2 can be used for segmenting from the T2w image (by default, T1w is used for marmo)
90 | 
91 | **NB** : baboon0, baboon1, baboon2, baboon3 can be used for the template `Baba21 `_
92 | and matching ages
93 | 
94 | **NB** : some templates are available in downgraded versions: baboon1_0p6, baboon2_0p6, baboon3_0p6 and macaque_0p5, and show a significant decrease in processing time with a low reduction in quality. However, not all combinations are available
95 | 
96 | -------------------
97 | optional parameters
98 | -------------------
99 | *(but highly recommended)*
100 | 
101 | * ``-dt`` : specifies the datatypes available to perform brain segmentation (can be "T1", or "T1 T2")
102 | 
103 |   **Note** : default is T1 if the attribute is omitted
104 | 
105 | * ``-deriv`` : creates a derivatives directory, with all important files, properly named following the BIDS derivatives convention. See :ref:`Derivatives <derivatives>` for a description of the outputs
106 | 
107 | * ``-padback`` : exports the most important files in native (original) space
108 | 
109 | ------------------------
110 | more optional parameters
111 | ------------------------
112 | 
113 | * ``-indiv`` or ``-indiv_params`` : a json file overwriting the default parameters (both the macapype defaults and the parameters specified in the -params json file) for specific subjects/sessions. See :ref:`Individual Parameters <indiv_params>` for more details
114 | * ``-sub`` (-subjects), ``-ses`` (-sessions), ``-acq`` (-acquisitions), ``-rec`` (-reconstructions) allow restricting processing to a subset of the BIDS dataset: respectively a list of subjects, sessions, acquisition types and reconstruction types. Multiple values can be listed, separated by spaces. **Note** if not specified, the full BIDS dataset will be processed
115 | * ``-nprocs`` : an integer, to specify the number of processes allocated by the parallel engine of macapype
116 | 
117 |   * typically equal to the number of subjects*sessions (i.e. iterables)
118 |   * can be multiplied by 2 if T1*T2 pipelines are run (the first steps at least will benefit from it)
119 |   * default = 4 if unspecified; if set to 1, sequential processing is used
120 | 
121 | * ``-mask`` allows specifying a precomputed binary mask file (skipping brain extraction). The typical usage is: run the pipeline up to brain_extraction_pipe, fix the mask by hand, and reuse it for segmentation. Best used when only one subject*session is specified (only one file can be given at a time)
122 | 
123 | **Warning:** the mask should be in the same space as the data, and only works with -soft ANTS so far
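As an illustration, a hypothetical re-run reusing a hand-corrected mask could look like this (the mask path is made up):

.. code:: bash

    $ segment_pnh -data ~/Data_maca -out ./local_test -soft ANTS -species macaque -sub Apache -ses 01 -mask /path/to/corrected_brain_mask.nii.gz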
124 | 
125 | Command line examples
126 | *********************
127 | 
128 | .. code:: bash
129 | 
130 |     $ python workflows/segment_pnh.py -data ~/Data_maca -out ./local_test -soft ANTS -params params.json
131 | 
132 | .. code:: bash
133 | 
134 |     $ python workflows/segment_pnh.py -data ~/Data_maca -out ./local_test -soft ANTS_robustreg -species macaque
135 | 
136 | .. code:: bash
137 | 
138 |     $ python workflows/segment_pnh.py -data ~/Data_maca -out ./local_test -soft ANTS -params params.json -sub Apache Baron -ses 01 -rec mean -deriv -padback
139 | 
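For a containerized installation, the same processing arguments can be appended to the container invocation shown above; an illustrative singularity call (bindings, image path and options are to be adapted to your setup):

.. code:: bash

    $ singularity run -B ~/Data_maca:/data -B ./local_test:/out /path/to/containers/macapype_v0.6.sif segment_pnh -data /data -out /out -soft ANTS_robustreg -species macaque -deriv -padback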
--------------------------------------------------------------------------------
/docs/derivatives.rst:
--------------------------------------------------------------------------------
1 | :orphan:
2 | 
3 | .. _derivatives:
4 | 
5 | ***********
6 | Derivatives
7 | ***********
8 | 
9 | Introduction
10 | ************
11 | 
12 | Depending on the options provided on the command line and in params.json, different files will be output
13 | 
14 | Derivatives will be output if the option ``-deriv`` is provided on the command line (see :ref:`Commands <command>`):
15 | 
16 | All files are by default in stereo space; if the option ``-padback`` is provided on the command line (see :ref:`Commands <command>`), files in native space will also be output.
17 | 
18 | Data Preparation
19 | ****************
20 | 
21 | |
22 | 
23 | Original files (possibly after reorientation and averaging):
24 | 
25 | *sub-Stevie_ses-01_space-native_T1w.nii.gz*
26 | 
27 | *sub-Stevie_ses-01_space-native_T2w.nii.gz*
28 | 
29 | |
30 | 
31 | If ``-padback`` is defined on the command line (see :ref:`Commands <command>`):
32 | 
33 | *sub-Stevie_ses-01_space-native_desc-denoised_T1w.nii.gz*
34 | 
35 | *sub-Stevie_ses-01_space-native_desc-denoised_T2w.nii.gz*
36 | 
37 | *sub-Stevie_ses-01_space-native_desc-debiased_T1w.nii.gz*
38 | 
39 | *sub-Stevie_ses-01_space-native_desc-debiased_T2w.nii.gz*
40 | 
41 | **NB:** Both denoise and debias are optional
42 | 
43 | |
44 | 
45 | Original files in stereo space:
46 | 
47 | *sub-Stevie_ses-01_space-stereo_T1w.nii.gz*
48 | 
49 | *sub-Stevie_ses-01_space-stereo_T2w.nii.gz*
50 | 
51 | |
52 | 
53 | After some preprocessing:
54 | 
55 | *sub-Stevie_ses-01_space-stereo_desc-denoised_T1w.nii.gz*
56 | 
57 | *sub-Stevie_ses-01_space-stereo_desc-denoised_T2w.nii.gz*
58 | 
59 | *sub-Stevie_ses-01_space-stereo_desc-debiased_T1w.nii.gz*
60 | 
61 | *sub-Stevie_ses-01_space-stereo_desc-debiased_T2w.nii.gz*
62 | 
63 | **NB:** Both denoise and debias are optional
64 | 
65 | |
66 | 
67 | Transformations:
68 | 
69 | *sub-Stevie_ses-01_space-native_target-stereo_affine.txt*
70 | 
71 | *sub-Stevie_ses-01_space-stereo_target-native_affine.txt*
72 | 
73 | |
74 | 
75 | Brain extraction
76 | ****************
77 | 
78 | |
79 | 
80 | Brain mask:
81 | 
82 | *sub-Stevie_ses-01_space-stereo_desc-brain_mask.nii.gz*
83 | 
84 | *sub-Stevie_ses-01_space-native_desc-brain_mask.nii.gz*
85 | 
86 | |
87 | 
88 | Brain segmentation
89 | ******************
90 | 
91 | |
92 | 
93 | Brainmasked files after T1*T2 bias correction:
94 | 
95 | *sub-Stevie_ses-01_space-stereo_desc-debiased_desc-brain_T1w.nii.gz*
96 | 
97 | *sub-Stevie_ses-01_space-stereo_desc-debiased_desc-brain_T2w.nii.gz*
98 | 
99 | *sub-Stevie_ses-01_space-native_desc-debiased_desc-brain_T2w.nii.gz*
100 | 
101 | *sub-Stevie_ses-01_space-native_desc-debiased_desc-brain_T1w.nii.gz*
102 | 
103 | |
104 | 
105 | Segmented files as tissue probability maps:
106 | 
107 | *sub-Stevie_ses-01_space-stereo_label-WM_probseg.nii.gz*
108 | 
109 | *sub-Stevie_ses-01_space-stereo_label-GM_probseg.nii.gz*
110 | 
111 | *sub-Stevie_ses-01_space-stereo_label-CSF_probseg.nii.gz*
112 | 
113 | *sub-Stevie_ses-01_space-native_label-WM_probseg.nii.gz*
114 | 
115 | *sub-Stevie_ses-01_space-native_label-GM_probseg.nii.gz*
116 | 
117 | *sub-Stevie_ses-01_space-native_label-CSF_probseg.nii.gz*
118 | 
119 | |
120 | 
121 | Segmented files as indexed tissues:
122 | 
123 | *sub-Stevie_ses-01_space-stereo_desc-brain_dseg.nii.gz*
124 | 
125 | *sub-Stevie_ses-01_space-native_desc-brain_dseg.nii.gz*
126 | 
127 | |
128 | 
129 | Optional Post brain segmentation
130 | ********************************
131 | 
132 | |
133 | 
134 | Segmented files in mrtrix format:
135 | 
136 | *sub-Stevie_ses-01_space-stereo_desc-5tt_dseg.nii.gz*
137 | 
138 | *sub-Stevie_ses-01_space-native_desc-5tt_dseg.nii.gz*
139 | 
140 | |
141 | 
142 | White matter + gray matter binary mask and corresponding mesh:
143 | 
144 | *sub-Stevie_ses-01_space-stereo_desc-wmgm_mask.nii.gz*
145 | 
146 | *sub-Stevie_ses-01_space-native_desc-wmgm_mask.nii.gz*
147 | 
148 | *sub-Stevie_ses-01_desc-wmgm_mask.stl*
149 | 
150 | 
--------------------------------------------------------------------------------
/docs/docker_install.rst:
--------------------------------------------------------------------------------
1 | :orphan:
2 | 
3 | .. _docker_install:
4 | 
5 | *****************
6 | Container install
7 | *****************
8 | 
9 | Docker allows providing all the necessary software on top of the macapype package. The Docker image we provide includes ANTS 2.3.1, FSL 5.0.10 and AFNI (latest version). See the bottom of this page for a docker image with SPM standalone.
10 | 
11 | **Note 1** : the image is quite big (~5GB) and requires some space on your "/" partition.
12 | 
13 | Dockerfile
14 | -----------
15 | 
16 | Downloading the Dockerfile and building an image:
17 | 
18 | .. code:: bash
19 | 
20 |     # Downloading Dockerfile
21 |     $ wget https://github.com/Macatools/macapype/blob/master/Dockerfile
22 | 
23 |     # Building your image from the Dockerfile
24 |     $ docker build -t macapype_docker .
25 | 
26 | Docker image
27 | ------------
28 | 
29 | A docker image can also be downloaded directly from the `DockerHub repo `_ :
30 | 
31 | .. code:: bash
32 | 
33 |     $ docker pull macatools/macapype:latest
34 | 
35 | Starting from the release v0.2.1 on github, the docker images are tagged accordingly on Dockerhub:
36 | 
37 | .. code:: bash
38 | 
39 |     $ docker pull macatools/macapype:v0.6
40 | 
41 | See :ref:`Quick test <quick_test>` for testing if your docker installation works properly on test datasets.
42 | 
43 | **NB** : for running ``-soft SPM`` and/or hd-bet (corresponding to ``-soft ANTS_quick``), a bigger version of the docker image is available:
44 | 
45 | .. code:: bash
46 | 
47 |     $ docker pull macatools/macapype:v0.6-spm-hdbet
48 | 
49 | Note on Singularity
50 | -------------------
51 | 
52 | It is possible (and recommended) to use the singularity version of the container on shared computers/clusters. The macapype docker version has been tested and is compatible with singularity versions higher than 3.0 ("sif" images)
53 | 
54 | Here is an example of a command line to pull and convert the docker image to a singularity image:
55 | 
56 | .. code:: bash
57 | 
58 |     $ singularity build /path/to/containers/macapype_v0.6.sif docker://macatools/macapype:v0.6
59 | 
60 | The container has been tested and can be installed without sudo privileges; in case you have trouble, or want the container to be accessible on a shared cluster, ask the admin of the cluster to perform this operation:
61 | 
62 | .. code:: bash
63 | 
64 |     $ export SINGULARITY_TMPDIR=/tmp/; export SINGULARITY_CACHEDIR=/tmp/; sudo -E /path/to/bin/singularity build /path/to/containers/macapype_v0.6.sif docker://macatools/macapype:v0.6
65 | 
66 | See :ref:`Quick test <quick_test>` for testing if your singularity installation works properly on test datasets.
67 | 
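As a quick sanity check of the converted image, the same import test used in the project Dockerfiles can be run through the container (an illustrative command; adapt the image path):

.. code:: bash

    $ singularity exec /path/to/containers/macapype_v0.6.sif python -c "import macapype; print(macapype.__version__)"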
--------------------------------------------------------------------------------
/docs/img/images/BIDS_orga.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Macatools/macapype/38d6356305cfc6d5668f6cf422e70a85843007bf/docs/img/images/BIDS_orga.jpg
--------------------------------------------------------------------------------
/docs/img/logo/logo_macapype_0.3.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Macatools/macapype/38d6356305cfc6d5668f6cf422e70a85843007bf/docs/img/logo/logo_macapype_0.3.jpg
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. _macapype:
2 | 
3 | ********
4 | Macapype
5 | ********
6 | ..
7 | ..    .. _short_logo:
8 | ..    .. |logo1| image:: ./img/logo/logo_macapype_0.3.jpg
9 | ..       :scale: 100%
10 | ..
11 | ..    .. table::
12 | ..       :align: center
13 | ..
14 | ..       +---------+
15 | ..       | |logo1| |
16 | ..       +---------+
17 | ..
18 | 
19 | Macapype is an open-source multi-modal brain data analysis kit which provides **Python-based pipelines** for advanced multi-thread processing of anatomical MRI data of NHP brain images. Macapype is based on `Nipype `_, a tool developed in the fMRI field, which facilitates data analysis by wrapping many commonly-used neuro-imaging software packages into a common python framework.
20 | 
21 | .. image:: ./img/logo/logo_macapype_0.3.jpg
22 |     :width: 600
23 |     :align: center
24 | 
25 | Installation
26 | ************
27 | 
28 | See :ref:`Quick Installation <quick_install>` for installation on your local system if you have the required software (i.e. FSL, AFNI, ANTS, SPM) running on your machine/cluster
29 | 
30 | See :ref:`Container installation <docker_install>` for a fully self-contained installation (no MRI software installed, or a Windows / MacOS operating system)
31 | 
32 | Once installed, see :ref:`Quick test <quick_test>` for testing if your installation is working properly
33 | 
34 | Command line parameters
35 | ***********************
36 | 
37 | macapype is fairly flexible, but requires specifying multiple parameters on the command line
38 | 
39 | See :ref:`Commands <command>` for a description of the available command parameters
40 | 
41 | If ``-deriv`` is provided, see :ref:`Derivatives <derivatives>` for a description of the outputs
42 | 
43 | Table of contents
44 | ******************
45 | 
46 | .. toctree::
47 |     :maxdepth: 2
48 | 
49 |     quick_install
50 |     docker_install
51 |     quick_test
52 |     command
53 |     derivatives
54 |     params
55 |     indiv_params
56 | 
57 | 
58 | 
--------------------------------------------------------------------------------
/docs/indiv_params.rst:
--------------------------------------------------------------------------------
1 | .. _indiv_params:
2 | 
3 | Individual Parameters
4 | _____________________
5 | 
6 | Adding -indiv
7 | *************
8 | 
9 | You can include "-indiv indiv_params.json" in the python command, for specifying subject-specific parameters:
10 | 
11 | 
12 | .. code:: bash
13 | 
14 |     $ python workflows/segment_pnh.py -data ~/Data_maca -out ./local_test -soft SPM -params params.json -indiv indiv_params.json
15 | 
16 | 
17 | Advanced parameters settings
18 | ****************************
19 | 
20 | 
21 | Here is a json file with all the possible nodes that can be tuned in indiv_params. In particular, the sub- and ses- levels are mandatory before the specification of the nodes; also note that the nodes are specified directly, without specifying the sub-pipelines they belong to.
22 | 
23 | The nodes belonging to the "short_data_preparation" pipeline (see `Params`_) are common to both -soft ANTS and SPM :
24 | 
25 | .. include:: ../examples_doc/indiv_params_preparation.json
26 |     :literal:
27 | 
28 | 
29 | For -soft SPM, here is the set of nodes that can be individually tuned:
30 | 
31 | .. include:: ../examples_doc/indiv_params_segment_spm.json
32 |     :literal:
33 | 
34 | For -soft ANTS, here is the set of nodes that can be individually tuned:
35 | 
36 | .. include:: ../examples_doc/indiv_params_segment_ants.json
37 |     :literal:
38 | 
39 | 
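To make the required nesting explicit, here is a minimal sketch of an indiv_params.json overriding a single node for one subject/session (the subject name and the ``f`` value are purely illustrative):

.. code:: json

    {
        "sub-Apache":
        {
            "ses-01":
            {
                "atlas_brex":
                {
                    "f": 0.6
                }
            }
        }
    }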
--------------------------------------------------------------------------------
/docs/params.rst:
--------------------------------------------------------------------------------
1 | .. _params:
2 | 
3 | Parameters
4 | __________
5 | 
6 | Adding -params
7 | **************
8 | 
9 | The sequence of nodes in the processing pipelines, as well as the general parameters, are specified by a json file. When specifying -species and -soft (see :ref:`Commands <command>`), the corresponding default parameter file will be used; these files are located in the *workflows* directory of the package.
10 | 
11 | The original parameter file can be altered by some options in -soft (e.g. _robustreg, _prep, etc.; see :ref:`Commands <command>`).
12 | 
13 | Here is an example of the params.json with the parameters for -soft ANTS and -species macaque:
14 | 
15 | .. include:: ../workflows/params_segment_macaque_ants.json
16 |     :literal:
17 | 
18 | It is also possible to alter the values of some nodes for individual sessions/subjects. See the :ref:`individual parameters section <indiv_params>`.
19 | 
20 | **Note**: Individual parameters will not modify the pipeline sequence itself; a value specified in indiv_params for a node that does not exist in params will be ignored
21 | 
22 | Advanced parameters settings
23 | ****************************
24 | 
25 | For advanced users, it is however possible to pass the full pipeline sequence in a params.json file. In this case, no further alteration will be applied.
26 | 
27 | Here is a json file with all the possible nodes that can be tuned; some nodes are optional, and some nodes are exclusive with each other (XOR). All parameter values given here are examples and may vary:
28 | 
29 | The beginning of a params file includes the keyword "general", and the definition of "short_data_preparation" is also required, for both -soft ANTS and SPM:
30 | 
31 | .. include:: ../examples_doc/params_general_preparation.json
32 |     :literal:
33 | 
34 | For -soft SPM, here is the overall structure of the pipeline:
35 | 
36 | .. include:: ../examples_doc/params_segment_spm.json
37 |     :literal:
38 | 
39 | 
40 | For -soft ANTS, here is the overall structure of the pipeline:
41 | 
42 | .. include:: ../examples_doc/params_segment_ants.json
43 |     :literal:
44 | 
45 | 
46 | 
47 | 
48 | 
49 | 
50 | 
51 | 
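For orientation, a deliberately minimal params.json skeleton could combine the "general" section with a reduced preparation pipeline, following the example files above; the values are only illustrative and do not constitute a recommended analysis:

.. code:: json

    {
        "general":
        {
            "template_name": "bma_padded"
        },
        "short_preparation_pipe":
        {
            "crop_aladin_pipe":
            {
                "reg_T1_on_template":
                {
                    "params": "RegAladin (NiftyReg)"
                }
            }
        }
    }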
--------------------------------------------------------------------------------
/docs/quick_install.rst:
--------------------------------------------------------------------------------
1 | :orphan:
2 | 
3 | .. _quick_install:
4 | 
5 | ************
6 | Installation
7 | ************
8 | 
9 | Dependencies
10 | ############
11 | 
12 | External software dependencies
13 | ------------------------------
14 | 
15 | Macapype relies heavily on other neuroimaging software, predominantly:
16 | 
17 | * `FSL `_
18 | * `ANTS `_
19 | * `AFNI `_
20 | * `SPM `_
21 | * `NiftyReg `_
22 | 
23 | Python packages dependencies
24 | ----------------------------
25 | 
26 | Macapype relies on python packages. Here we provide an installation using Anaconda
27 | 
28 | Creating environment with all packages
29 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
30 | 
31 | In case you have access to conda, here is the procedure to initialize your own environment (called "macapype_env" here, but any name can be used):
32 | 
33 | .. code-block:: bash
34 | 
35 |     $ conda init bash
36 |     $ conda create -n macapype_env python=3.10
37 |     $ conda activate macapype_env
38 | 
39 | Install macapype package
40 | ########################
41 | 
42 | from github
43 | -----------
44 | 
45 | .. _git_install:
46 | 
47 | Using git
48 | ~~~~~~~~~
49 | 
50 | .. code:: bash
51 | 
52 |     $ git clone https://github.com/Macatools/macapype.git
53 |     $ cd macapype
54 |     $ python setup.py develop --user
55 | 
56 | Using pip
57 | ~~~~~~~~~
58 | 
59 | .. code:: bash
60 | 
61 |     $ pip install git+https://github.com/Macatools/macapype
62 | 
63 | .. _pip_install:
64 | 
65 | from pypi
66 | ---------
67 | 
68 | Macapype is available on `pypi.org `_:
69 | 
70 | If the 'pip' package is installed on your system, you can install the latest stable version with:
71 | 
72 | .. code:: bash
73 | 
74 |     $ pip install macapype
75 | 
76 | 
77 | From conda
78 | -----------
79 | 
80 | Macapype is also available on `Anaconda cloud `_:
81 | 
82 | If 'conda' (Anaconda, or miniconda) is installed on your system, you can type:
83 | 
84 | .. code:: bash
85 | 
86 |     $ conda install -c macatools macapype
87 | 
88 | **Warning**: the latest versions of macapype (> 0.2.1) are not available on conda, due to the inclusion of packages that are not yet packaged there;
89 | use "pip install macapype" or "git clone https://github.com/Macatools/macapype.git" until further notice
90 | 
91 | Testing the install
92 | ###################
93 | 
94 | 
95 | .. code:: bash
96 | 
97 |     $ ipython
98 | 
99 | .. code:: ipython
100 | 
101 |     In [1]: import macapype; print (macapype.__version__)
102 | 
103 | 
104 | See :ref:`Quick test <quick_test>` for testing if your installation works properly on test datasets.
105 | 
--------------------------------------------------------------------------------
/examples_doc/README.txt:
--------------------------------------------------------------------------------
1 | .. _general_examples:
2 | 
3 | Examples Gallery
4 | ================
5 | 
6 | .. contents:: Contents
7 |     :local:
8 |     :depth: 3
9 | 
10 | 
--------------------------------------------------------------------------------
/examples_doc/indiv_params_preparation.json:
--------------------------------------------------------------------------------
1 | {
2 |     "sub-test":
3 |     {
4 |         "ses-01":
5 |         {
6 |             "reorient":
7 |             {
8 |                 "comment": "OPTIONAL node in params",
9 |                 "params": "SwapDimensions (FSL)",
10 |                 "new_dims": "x z -y"
11 |             },
12 | 
13 |             "crop_T1":
14 |             {
15 |                 "comment": "OPTIONAL node in params",
16 |                 "params": "fslroi (FSL)",
17 |                 "args": 0.8
18 |             },
19 | 
20 |             "reg_T1_on_template":
21 |             {
22 |                 "params": "RegAladin (NiftyReg)"
23 | 
24 |             },
25 |             "reg_T1_on_template2":
26 |             {
27 |                 "comment": "OPTIONAL node in params",
28 |                 "comment": "typically used with -soft _robustreg ",
29 | 
30 |                 "params": "RegAladin (NiftyReg)"
31 | 
32 |             }
33 |         }
34 |     }
35 | }
36 | 
--------------------------------------------------------------------------------
/examples_doc/indiv_params_segment_ants.json:
--------------------------------------------------------------------------------
1 | {
2 |     "sub-test":
3 |     {
4 |         "ses-01":
5 |         {
6 |             "N4debias":
7 |             {
8 |                 "params": "N4BiasFieldCorrection (ANTS)",
9 |                 "comment": "OPTIONAL",
10 | 
11 |                 "dimension": 3,
12 |                 "bspline_fitting_distance": 200,
13 |                 "n_iterations": [50, 50, 40, 30],
14 |                 "convergence_threshold": 0.00000001,
15 |                 "shrink_factor": 2,
16 |                 "args": "-r 0 --verbose 1"
17 |             },
18 |             "comment": "OR",
19 |             "fast":
20 |             {
21 |                 "comment": "OPTIONAL",
22 |                 "params": "FAST (FSL)"
23 |             },
24 | 
25 |             "atlas_brex":
26 |             {
27 |                 "f": 0.8
28 |             },
29 | 
30 |             "debias":
31 |             {
32 |                 "comment": "OPTIONAL",
33 |                 "params": "T1xT2BiasFieldCorrection (local wrap of Regis Trapeau tools)",
34 |                 "comment": "# TODO is not used now... in code",
35 |                 "s": 2
36 |             },
37 | 
38 | 
39 | 
40 |             "norm_intensity":
41 |             {
42 |                 "comment": "OPTIONAL node in params, if register_NMT_pipe is defined",
43 |                 "params": "N4BiasFieldCorrection (ANTS)"
44 |             },
45 |             "comment": "OR",
46 |             "reg":
47 |             {
48 |                 "comment": "OPTIONAL node in params, if reg is defined",
49 |                 "params": "IterREGBET (local wrap of Regis Trapeau tools)",
50 | 
51 |                 "n": 2,
52 |                 "m": "ref",
53 |                 "dof": 12
54 |             }
55 |         }
56 |     }
57 | }
58 | 
in code" , 35 | "s": 2 36 | }, 37 | 38 | 39 | 40 | "norm_intensity": 41 | { 42 | "comment": "OPTIONAL node in params, if register_NMT_pipe is defined", 43 | "params": "N4BiasFieldCorrection (ANTS)" 44 | }, 45 | "comment": "OR", 46 | "reg": 47 | { 48 | "comment": "OPTIONAL node in params, if reg is defined", 49 | "params": "IterREGBET (local wrap of Regis Trapeau tools)", 50 | 51 | "n": 2, 52 | "m": "ref", 53 | "dof": 12 54 | } 55 | } 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /examples_doc/indiv_params_segment_spm.json: -------------------------------------------------------------------------------- 1 | { 2 | "sub-test": 3 | { 4 | "ses-01": 5 | { 6 | "debias": 7 | { 8 | "params": "T1xT2BiasFieldCorrection (local wrap of Regis Trapeau tools)", 9 | 10 | "s": 2 11 | }, 12 | 13 | "reg": 14 | { 15 | "params": "IterREGBET (local wrap of Regis Trapeau tools)", 16 | 17 | "n": 2, 18 | "m": "ref", 19 | "dof": 12 20 | }, 21 | 22 | 23 | "threshold_gm": 24 | { 25 | "params": "Threshold (FSL)", 26 | 27 | "thr": 0.5 28 | }, 29 | "threshold_wm": 30 | { 31 | "params": "Threshold (FSL)", 32 | 33 | "thr": 0.5 34 | }, 35 | "threshold_csf": 36 | { 37 | "params": "Threshold (FSL)", 38 | 39 | "thr": 0.5 40 | } 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /examples_doc/params_general_preparation.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "comment": "OPTIONAL", 5 | "comment": "specify a path on where to put the template after download", 6 | "comment": "By default set to the home directory of the user", 7 | 8 | "my_path" : "/path/to/template", 9 | 10 | 11 | "comment": "MANDOTORY, if not set as -template_path / template_files in command line", 12 | "comment": "Template used for priors segmentation", 13 | "comment": "see macapype/utils/templates.json for available names", 14 | 15 | "template_name": "bma_padded", 16 | 17 | 18 | "comment": "OPTIONAL", 19 | "comment": "if not set, template_name will be used by default", 20 | "comment": "Template used for crop_aladin / projection in stereo space", 21 | 22 | "template_stereo_name": "MBM_3.0", 23 | }, 24 | "short_preparation_pipe": 25 | { 26 | 27 | "use_T2": 28 | { 29 | 30 | "comment": "boolean marker, if present T2 and T1 are exchanged in the process, (T2 is used to register to template, and T1 is registered to T2)", 31 | 32 | "comment": "BE CAREFUL, the name of the internal nodes are still using the original names although the files are swapped", 33 | "comment": "Everything is swapped back as outputs of short_preparation_pipe", 34 | }, 35 | 36 | "avg_reorient_pipe": 37 | { 38 | "comment": "OPTIONAL", 39 | "reorient": 40 | { 41 | "params": "SwapDimensions (FSL)", 42 | "new_dims": "x z -y" 43 | } 44 | }, 45 | 46 | "aladin_T2_on_T1": 47 | { 48 | "comment": "boolean marker, if present RegAladin to coregister T2 on T1 and pad/resample, otherwise use FSL FLIRT (default)", 49 | }, 50 | 51 | "crop_T1": 52 | { 53 | "comment": "OPTIONAL", 54 | "comment": "if present, applied to both T1w and T2w", 55 | "params": "fslroi (FSL)", 56 | 57 | "args": "should be specified in indiv_params" 58 | }, 59 | 60 | "crop_aladin_pipe": 61 | { 62 | "remove_capsule_pipe":{}, 63 | "reg_T1_on_template": 64 | { 65 | "params": "RegAladin (NiftyReg)" 66 | 67 | }, 68 | "reg_T1_on_template2": 69 | { 70 | "comment": "OPTIONAL", 71 | "comment": "typically used with -soft _robustreg ", 72 | 73 | "params": "RegAladin 
(NiftyReg)" 74 | 75 | } 76 | }, 77 | 78 | "inv_transfo_aladin": 79 | { 80 | "comment": "OPTIONAL", 81 | "comment" : "not sure why it is parameterized", 82 | "params": "RegTransform (NiftyReg)" 83 | }, 84 | 85 | "denoise": 86 | { 87 | "comment": "OPTIONAL", 88 | "params": "DenoiseImage (ANTS)", 89 | 90 | "shrink_factor": 2 91 | }, 92 | 93 | "pad_template": 94 | { 95 | 96 | "comment": "OPTIONAL", 97 | "comment": "used for skullTo3d to keep an image bigger than brain, but is not used further in brain segmentation", 98 | "params": "ImageMath (ANTS)", 99 | 100 | "copy_header" : true, 101 | "operation" : "PadImage", 102 | "op2" : "70" 103 | }, 104 | 105 | 106 | "comment": "the following nodes are mutually exclusive, and are optional", 107 | 108 | "N4debias": 109 | { 110 | "comment": "PREFERED", 111 | "comment": "OPTIONAL", 112 | "params": "N4BiasFieldCorrection (ANTS)", 113 | "comment": "same parameters for T1 and T2", 114 | 115 | "dimension": 3, 116 | "bspline_fitting_distance": 200, 117 | "n_iterations": [50, 50, 40, 30], 118 | "convergence_threshold": 0.00000001, 119 | "shrink_factor": 2, 120 | "args": "-r 0 --verbose 1" 121 | }, 122 | "comment": "OR", 123 | "fast": 124 | { 125 | "comment": "OPTIONAL", 126 | "comment": "same parameters for T1 and T2", 127 | "params": "FAST (FSL) used for debias not segmentation", 128 | 129 | "args": "-l 3" 130 | }, 131 | "comment": "OR", 132 | "itk_debias": 133 | { 134 | "comment": "OPTIONAL", 135 | "comment": "same parameters for T1 and T2", 136 | "params": "N4BiasFieldCorrectionImageFilter (SimpleITK python package) used for debias", 137 | "comment": "TODO: not wrapped properly as a node", 138 | "comment": "not really used so far" 139 | } 140 | 141 | 142 | } 143 | } 144 | -------------------------------------------------------------------------------- /examples_doc/params_segment_ants.json: -------------------------------------------------------------------------------- 1 | { 2 | "comment": "MANDATORY if mask is not provided as input of command line", 3 | "extract_pipe": 4 | { 5 | "use_T2": 6 | { 7 | "comment": "boolean marker, if present T2 and T1 are exchanged in the process, (T2 is used to register to template)", 8 | 9 | "comment": "Be careful that in this case, the template head and brain have to match T2w images ", 10 | "comment": "better to specify your own template with template_path template_files in the command line" 11 | }, 12 | "smooth": 13 | { 14 | "comment": "OPTIONAL", 15 | "params": "Smooth (FSL)", 16 | }, 17 | 18 | "comment": "the following nodes are mutually exclusive, but are mandatory if extract_pipe is defined (ie mask not provided)", 19 | "atlas_brex": 20 | { 21 | "comment": "DEFAULT if extract_pipe is defined", 22 | "params": "AtlasBREX (local wrap in macapype of Johannes Lohmeier tool)", 23 | 24 | "f": 0.7, 25 | "reg": 1, 26 | "msk": "b,0.5,0,0", 27 | "wrp": "10,10,10", 28 | "dil": 1, 29 | "nrm": 1 30 | }, 31 | "comment": "OR", 32 | "hd-bet": 33 | { 34 | "comment": "OPTIONAL", 35 | "comment": "corresponds to _quick option in -soft", 36 | "comment": "WARNING: requires the hd-bet package to be installed, or container with -spm-hdbet or -full options", 37 | "params": "hd-bet (HD-BET python package Deep Learning) used for debias not segmentation", 38 | }, 39 | "comment": "OR", 40 | "bet4animal": 41 | { 42 | "comment": "OPTIONAL", 43 | "comment": "corresponds to _4animal option in -soft", 44 | "params": "bet4animal (FSL > 6.0.6)", 45 | 46 | "f": 0.65, 47 | "label": 2, 48 | "robust": true 49 | } 50 | }, 51 | 52 | "comment": "the two 
following nodes are mutually exclusive, and are optional", 53 | "masked_correct_bias_pipe": 54 | { 55 | "smooth": 56 | { 57 | 58 | "comment": "mandatory if masked_correct_bias_pipe is defined", 59 | "params": "MathsCommand (FSL)", 60 | 61 | "args": "-bin -s 2" 62 | }, 63 | "norm_smooth": 64 | { 65 | "comment": "mandatory if masked_correct_bias_pipe is defined", 66 | "params": "MultiImageMaths (FSL)", 67 | 68 | "op_string": "-s 2 -div %s" 69 | }, 70 | "smooth_bias": 71 | { 72 | "comment": "mandatory if masked_correct_bias_pipe is defined", 73 | "params": "Smooth (FSL)", 74 | 75 | "sigma": 2 76 | } 77 | 78 | }, 79 | "comment": "OR", 80 | "debias": 81 | { 82 | 83 | "comment": "PREFERED", 84 | "comment": "performs same operations as masked_correct_bias_pipe in bash function", 85 | "params": "T1xT2BiasFieldCorrection (local wrap of Regis Trapeau tools)", 86 | 87 | "s": 2 88 | }, 89 | 90 | "brain_segment_pipe": 91 | { 92 | "use_T2": 93 | { 94 | "comment": "boolean marker, if present T2 and T1 are exchanged in the process, (T2 is used to register to template)", 95 | 96 | "comment": "Be careful that in this case, the template priors have to match T2w images ", 97 | "comment": "better to specify your own template with template_path template_files in the command line" 98 | }, 99 | 100 | "comment": "If use_priors is defined in segment_atropos_pipe, one of register_NMT_pipe or reg must be defined", 101 | "register_NMT_pipe": 102 | { 103 | "norm_intensity": 104 | { 105 | "comment": "OPTIONAL", 106 | "params": "N4BiasFieldCorrection (ANTS)" 107 | }, 108 | 109 | "deoblique": 110 | { 111 | "comment": "OPTIONAL", 112 | "params": "boolean marker (no params) for adding a node with Refit (AFNI) deoblique=True" 113 | }, 114 | 115 | "NMTSubjectAlign": 116 | { 117 | "comment": "OPTIONAL" 118 | }, 119 | "comment": "OR", 120 | "NMT_subject_align": 121 | { 122 | "comment": "default" 123 | } 124 | }, 125 | "comment": "OR", 126 | "reg": 127 | { 128 | "params": "IterREGBET (local wrap of Regis Trapeau tools)", 129 | 130 | "n": 2, 131 | "m": "ref", 132 | "dof": 12 133 | }, 134 | 135 | "segment_atropos_pipe": 136 | { 137 | 138 | "comment": "if present ( a value have to be specied), priors will be projected to stereo space (possibly not the same as another template can be specified for priors)", 139 | "comment": "(this is typically the case for marmoset: use_T2 with bma for short_preparation_pipe, and MBM for priors)", 140 | "comment": "If no usepriors, the register_NMT_pipe or reg are not used", 141 | "use_priors": 0.0, 142 | 143 | "Atropos": 144 | { 145 | "comment": "mandatory if segment_atropos_pipe is defined", 146 | "params" : "AtroposN4 (ANTS)", 147 | "dimension": 3, 148 | 149 | 150 | "comment": "numberOfClasses is OPTIONAL (default is 3)", 151 | "numberOfClasses": 3 152 | }, 153 | "tissue_dict": 154 | { 155 | "comment": "OPTIONAL", 156 | "comment": "default are : gm: 1, wm: 2, csf: 3", 157 | "comment": "possible to merge indexes for one label, for example csf: [1, 5], gm: [2, 3], wm: 4", 158 | 159 | "gm": 2, 160 | "wm": 3, 161 | "csf": 1 162 | } 163 | }, 164 | 165 | "comment": "optional (if segmentation in 5tt mrtrix format is required", 166 | "export_5tt_pipe": 167 | { 168 | "commment": "no node are parametrized" 169 | } 170 | }, 171 | 172 | 173 | "comment": "following pipelines are used for surface mesh", 174 | "comment": "all are mutually exclusive and optional", 175 | 176 | "comment": "prefered method", 177 | "IsoSurface_brain_pipe": 178 | { 179 | 180 | "merge_brain_tissues": 181 | { 182 | "keep_indexes": 
[2,3] 183 | } 184 | }, 185 | "comment": "OR", 186 | "nii2mesh_brain_pipe": 187 | { 188 | }, 189 | "comment": "OR", 190 | "nii_to_mesh_pipe": 191 | { 192 | } 193 | } 194 | -------------------------------------------------------------------------------- /examples_doc/params_segment_spm.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "Haiko_v1.5" 5 | }, 6 | 7 | "comment": "mandatory, performing both debias and brain_extraction", 8 | "debias": 9 | { 10 | "params": "T1xT2BiasFieldCorrection (local wrap of Regis Trapeau tools)", 11 | 12 | "s": 2 13 | }, 14 | 15 | "comment": "mandatory", 16 | "reg": 17 | { 18 | "params": "IterREGBET (local wrap of Regis Trapeau tools)", 19 | "n": 2, 20 | "m": "ref", 21 | "dof": 12 22 | }, 23 | 24 | 25 | "old_segment_pipe": 26 | { 27 | "segment": 28 | { 29 | "comment": "mandatory if old_segment_pipe is defined", 30 | "params": "Segment (SPM12)", 31 | 32 | "gm_output_type": [false, false, true], 33 | "wm_output_type": [false, false, true], 34 | "csf_output_type": [false, false, true] 35 | }, 36 | 37 | "threshold_gm": 38 | { 39 | "comment": "mandatory if old_segment_pipe is defined", 40 | "params": "Threshold (FSL)", 41 | 42 | "thr": 0.5 43 | }, 44 | "threshold_wm": 45 | { 46 | "comment": "mandatory if old_segment_pipe is defined", 47 | "params": "Threshold (FSL)", 48 | 49 | "thr": 0.5 50 | }, 51 | "threshold_csf": 52 | { 53 | "comment": "mandatory if old_segment_pipe is defined", 54 | "params": "Threshold (FSL)", 55 | 56 | "thr": 0.5 57 | }, 58 | 59 | "comment": "optional (if segmentation in 5tt mrtrix format is required"), 60 | "export_5tt_pipe": 61 | { 62 | "commment": "no node are parametrized" 63 | } 64 | }, 65 | 66 | "comment": "OPTIONAL", 67 | "mask_from_seg_pipe": 68 | { 69 | "merge_indexed_mask": 70 | { 71 | "comment": "never used, not sure if modifying default parameters works", 72 | "comment": "default : index_csf=1, index_gm=2, index_wm=3", 73 | "params": "merge_masks (python)", 74 | 75 | } 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /examples_doc/plot_segment_sphinx_macaque_ants_based.py: -------------------------------------------------------------------------------- 1 | """ 2 | .. 
_plot_segment_sphinx_macaque_ants_based: 3 | 4 | ============================================================================== 5 | Plot the results of a segmentation with ANTS-based pipeline in sphinx position 6 | ============================================================================== 7 | """ 8 | 9 | # Authors: David Meunier, Bastien Cagna 10 | 11 | # License: BSD (3-clause) 12 | # sphinx_gallery_thumbnail_number = 2 13 | 14 | import os 15 | import os.path as op 16 | 17 | from macapype.utils.utils_tests import load_test_data 18 | 19 | ############################################################################## 20 | # Testing plot in local 21 | ############################################################################## 22 | 23 | #orig_data_path = load_test_data("data_test_sphinx_macaque") 24 | 25 | ############################################################################### 26 | ## Data preparation 27 | ############################################################################### 28 | 29 | 30 | ################################################################################ 31 | ### Reorient 32 | ###========================== 33 | 34 | 35 | #orig_T1_file = op.join(orig_data_path, "sub-ziggy_T1w.nii") 36 | 37 | ## displaying results 38 | #orig_T1 = os.path.join(orig_data_path, "orig_T1.png") 39 | #cmd = "fsleyes render --outfile {} --size 1800 600 {}".format(orig_T1, orig_T1_file) 40 | #os.system(cmd) 41 | 42 | #import matplotlib.pyplot as plt # noqa 43 | 44 | #fig, axs = plt.subplots(1, 1, figsize=(36, 24)) 45 | #axs.imshow(plt.imread(orig_T1)) 46 | #axs.axis('off') 47 | 48 | #plt.show() 49 | -------------------------------------------------------------------------------- /initial_scripts/Macaque Segmentation Steps.docx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Macatools/macapype/38d6356305cfc6d5668f6cf422e70a85843007bf/initial_scripts/Macaque Segmentation Steps.docx -------------------------------------------------------------------------------- /initial_scripts/extract_macapype_outputs.sh: -------------------------------------------------------------------------------- 1 | #input baboon name 2 | baboon_name=$1 3 | 4 | #Input name of subject 5 | name=_session_01_subject_${baboon_name} 6 | 7 | #directory with nmt template and scripts 8 | nmt_dir=/hpc/meca/users/loh.k/baboon_proc/haiko89_template/ 9 | 10 | #input subject directory -> your macapype output folder 11 | subjdir=/hpc/meca/users/loh.k/baboon_noT1/preproc/baboon_noT1_segmentation_test_indiv_params/ 12 | 13 | #Make a separate directory for the subject's Brainvisa input files 14 | mkdir /hpc/meca/users/loh.k/baboon_noT1/preproc/brainvisa_inputs/${name} 15 | cd /hpc/meca/users/loh.k/baboon_noT1/preproc/brainvisa_inputs/${name} 16 | 17 | #Copy T1 cropped + denoised file as t1_cropped -> serve as reference image for transformations later 18 | cp ${subjdir}/full_segment_pnh_noT1_subpipes_baboon/short_preparation_noT1_pipe/${name}/denoise_T1/*_noise_corrected.nii.gz ${name}_t1_cropped.nii.gz 19 | 20 | #get transformations from template to subject computed in macapype 21 | 22 | warp_nmt_to_subject=${subjdir}/full_segment_pnh_noT1_subpipes_baboon/brain_segment_from_mask_noT1_pipe/register_NMT_pipe/${name}/NMT_subject_align/*_WARPINV.nii.gz 23 | linear_nmt_to_subject=${subjdir}/full_segment_pnh_noT1_subpipes_baboon/brain_segment_from_mask_noT1_pipe/register_NMT_pipe/${name}/NMT_subject_align/*_composite_linear_to_NMT_inv.1D 24 | 
subject_shft_aff_ref=${subjdir}/full_segment_pnh_noT1_subpipes_baboon/brain_segment_from_mask_noT1_pipe/register_NMT_pipe/${name}/NMT_subject_align/*_shft_aff.nii.gz 25 | 26 | # Warp L and R template brainmask to subject space 27 | 28 | 3dNwarpApply -prefix ${name}_HAIKO_L_brainmask.nii.gz -source ${nmt_dir}NMT_registration/Haiko89_Asymmetric.Template_n89_brainmask_L.nii.gz -master ${subject_shft_aff_ref} -nwarp ${warp_nmt_to_subject} -ainterp NN -overwrite 29 | 3dAllineate -base ${name}_t1_cropped.nii.gz -source ${name}_HAIKO_L_brainmask.nii.gz -1Dmatrix_apply ${linear_nmt_to_subject} -final NN -prefix ${name}_HAIKO_L_brainmask.nii.gz -overwrite 30 | 31 | 3dNwarpApply -prefix ${name}_HAIKO_R_brainmask.nii.gz -source ${nmt_dir}NMT_registration/Haiko89_Asymmetric.Template_n89_brainmask_R.nii.gz -master ${subject_shft_aff_ref} -nwarp ${warp_nmt_to_subject} -ainterp NN -overwrite 32 | 3dAllineate -base ${name}_t1_cropped.nii.gz -source ${name}_HAIKO_R_brainmask.nii.gz -1Dmatrix_apply ${linear_nmt_to_subject} -final NN -prefix ${name}_HAIKO_R_brainmask.nii.gz -overwrite 33 | 34 | #binarize cerebellum mask from template and transform to subject space 35 | fslmaths ${nmt_dir}NMT_registration/Haiko89_Asymmetric.Template_n89_brainmask_cerebellum.nii.gz -bin HAIKO_cerebellum_brainmask.nii.gz #binarize template cerebellum 36 | 37 | 3dNwarpApply -prefix ${name}_HAIKO_cerebellum_brainmask.nii.gz -source HAIKO_cerebellum_brainmask.nii.gz -master ${subject_shft_aff_ref} -nwarp ${warp_nmt_to_subject} -ainterp NN -overwrite 38 | 3dAllineate -base ${name}_t1_cropped.nii.gz -source ${name}_HAIKO_cerebellum_brainmask.nii.gz -1Dmatrix_apply ${linear_nmt_to_subject} -final NN -prefix ${name}_HAIKO_cerebellum_brainmask.nii.gz -overwrite 39 | 40 | #Using LH and RH masks to obtain left and right hemisphere segmentation masks 41 | segmentationfile=${subjdir}full_segment_pnh_noT1_subpipes_baboon/brain_segment_from_mask_noT1_pipe/segment_atropos_pipe/${name}/seg_at/segment_Segmentation.nii.gz 42 | 43 | 3dcalc -a ${segmentationfile} -b ${name}_HAIKO_L_brainmask.nii.gz -expr 'a*b/b' -prefix ${name}_segmentation_LH.nii.gz 44 | 3dcalc -a ${segmentationfile} -b ${name}_HAIKO_R_brainmask.nii.gz -expr 'a*b/b' -prefix ${name}_segmentation_RH.nii.gz 45 | 46 | #remove cerebellum from left and right brain segmentations 47 | 3dcalc -a ${name}_segmentation_LH.nii.gz -b ${name}_HAIKO_cerebellum_brainmask.nii.gz -expr '(a*(not(b)))' -prefix ${name}_LH_seg_nocb.nii.gz -overwrite 48 | 3dcalc -a ${name}_segmentation_RH.nii.gz -b ${name}_HAIKO_cerebellum_brainmask.nii.gz -expr '(a*(not(b)))' -prefix ${name}_RH_seg_nocb.nii.gz -overwrite 49 | 50 | 51 | #create L/R GM and WM no-cerebellum binary masks from subject brain segmentation 52 | 53 | 3dcalc -a ${name}_LH_seg_nocb.nii.gz -expr 'iszero(a-2)' -prefix ${name}_L_GM_nocb_mask.nii.gz -overwrite 54 | 3dcalc -a ${name}_LH_seg_nocb.nii.gz -expr 'iszero(a-3)' -prefix ${name}_L_WM_nocb_mask.nii.gz -overwrite 55 | 56 | 3dcalc -a ${name}_RH_seg_nocb.nii.gz -expr 'iszero(a-2)' -prefix ${name}_R_GM_nocb_mask.nii.gz -overwrite 57 | 3dcalc -a ${name}_RH_seg_nocb.nii.gz -expr 'iszero(a-3)' -prefix ${name}_R_WM_nocb_mask.nii.gz -overwrite 58 | 59 | 60 | #Extract Cerebellum using template mask transformed to subject space 61 | dname=${name}_t1_cropped.nii.gz 62 | mask=${name}_HAIKO_cerebellum_brainmask.nii.gz 63 | 3dcalc -a $dname -b $mask -expr 'a*b/b' -prefix ${name}_cerebellum.nii.gz -overwrite 64 | 65 | 66 | #Extract L.GM using template mask transformed to subject space 67 | 
dname=${name}_t1_cropped.nii.gz 68 | mask=${name}_L_GM_nocb_mask.nii.gz 69 | 3dcalc -a $dname -b $mask -expr 'a*b/b' -prefix ${name}_LH_GM.nii.gz -overwrite 70 | 71 | #Extract L.WM using template mask transformed to subject space 72 | mask=${name}_L_WM_nocb_mask.nii.gz 73 | 3dcalc -a $dname -b $mask -expr 'a*b/b' -prefix ${name}_LH_WM.nii.gz -overwrite 74 | 75 | #Extract R.GM using template mask transformed to subject space 76 | dname=${name}_t1_cropped.nii.gz 77 | mask=${name}_R_GM_nocb_mask.nii.gz 78 | 3dcalc -a $dname -b $mask -expr 'a*b/b' -prefix ${name}_RH_GM.nii.gz -overwrite 79 | 80 | #Extract R.WM using template mask transformed to subject space 81 | mask=${name}_R_WM_nocb_mask.nii.gz 82 | 3dcalc -a $dname -b $mask -expr 'a*b/b' -prefix ${name}_RH_WM.nii.gz -overwrite 83 | 84 | -------------------------------------------------------------------------------- /initial_scripts/init_script_regis/ANTS_N4.m: -------------------------------------------------------------------------------- 1 | function [N4_out_file,N4ed,i] = ANTS_N4(N4_in_file,ANTS_path) 2 | 3 | if ~exist('ANTS_path','var') || isempty(ANTS_path) 4 | ANTS_path = '/hpc/soft/ANTS/antsbin/bin'; 5 | end 6 | 7 | ANTSbin = 'N4BiasFieldCorrection'; 8 | [N4_out_file_base,ext] = fileparts2(N4_in_file); 9 | N4_tmp_file = sprintf('%sN4tmp%s',N4_out_file_base,ext); 10 | N4_out_file = sprintf('%sN4%s',N4_out_file_base,ext); 11 | 12 | stop = 0; 13 | i = 0; 14 | N4ed = 1; 15 | while ~stop 16 | i = i + 1; 17 | fprintf('\nN4BiasFieldCorrection iteration %i...',i) 18 | copyfile(N4_in_file,N4_tmp_file); 19 | system(sprintf('%s -i %s -o %s',fullfile(ANTS_path,ANTSbin),N4_in_file,N4_out_file)); 20 | fprintf(' done.\n') 21 | 22 | if i == 1 23 | uiwait(warndlg('FSLView will open the original and the bias corrected volumes. Inspect the result of N4 by playing with the opacity.')) 24 | end 25 | fslview_command = 'fslview'; 26 | [s,~] = system(sprintf('%s %s -l Greyscale %s -l Greyscale -t 1',fslview_command,N4_tmp_file,N4_out_file)); 27 | if s == 127 28 | error('Could not find %s! 
\nChange the ''fslview_command'' variable (3 lines above this error), \nto match with the way fslview is called on your system.',fslview_command) 29 | end 30 | % Time for the user to choose 31 | which_N4 = questdlg('What do you want to do?','Time to decide','New N4 pass','Stop here','Keep previous one','Stop here'); 32 | 33 | if strcmp(which_N4,'New N4 pass') 34 | N4_in_file = N4_out_file; 35 | else 36 | stop = 1; 37 | if strcmp(which_N4,'Keep previous one') 38 | if i == 1 39 | delete(N4_out_file) 40 | N4_out_file = N4_in_file; 41 | N4ed = 0; 42 | else 43 | copyfile(N4_tmp_file,N4_out_file); 44 | end 45 | end 46 | delete(N4_tmp_file) 47 | end 48 | end 49 | 50 | 51 | -------------------------------------------------------------------------------- /initial_scripts/init_script_regis/create_seg_masks.m: -------------------------------------------------------------------------------- 1 | function create_seg_masks(T1_file,paths) 2 | % Create brain masks from segmentation files 3 | fprintf('Creating masks...') 4 | 5 | %% fileparts T1 6 | [pathstr,name,ext] = fileparts(T1_file); 7 | if exist('paths','var') 8 | if ~isempty(paths) 9 | pathstr = paths.segmentation; 10 | end 11 | end 12 | 13 | %% Load segmentation prob maps 14 | tissues = fieldnames(paths.anat_file_brain_segmentation.spm.tissues); 15 | for i = 1:numel(tissues) 16 | P.(tissues{i}) = spm_vol(paths.anat_file_brain_segmentation.spm.tissues.(tissues{i})); 17 | Y.(tissues{i}) = spm_read_vols(P.(tissues{i})); 18 | end 19 | 20 | %% Create masks 21 | prob = 0.05; 22 | brain_mask = Y.grey > prob | Y.white > prob | Y.csf > prob; % Union of all tissues 23 | 24 | %% Dilate & smooth mask 25 | dilated_brain_mask = erode_or_dilate(brain_mask,'dilate',18); 26 | smoothed_brain_mask = erode_or_dilate(dilated_brain_mask,'erode',18); 27 | 28 | %% Fill holes 29 | for i = 1:size(brain_mask,1) 30 | dilated_brain_mask(i,:,:) = imfill(dilated_brain_mask(i,:,:),'holes'); 31 | smoothed_brain_mask(i,:,:) = imfill(smoothed_brain_mask(i,:,:),'holes'); 32 | end 33 | for j = 1:size(brain_mask,2) 34 | dilated_brain_mask(:,j,:) = imfill(dilated_brain_mask(:,j,:),'holes'); 35 | smoothed_brain_mask(:,j,:) = imfill(smoothed_brain_mask(:,j,:),'holes'); 36 | end 37 | for k = 1:size(brain_mask,3) 38 | dilated_brain_mask(:,:,k) = imfill(dilated_brain_mask(:,:,k),'holes'); 39 | smoothed_brain_mask(:,:,k) = imfill(smoothed_brain_mask(:,:,k),'holes'); 40 | end 41 | 42 | %% Load T1 & extract brain using the newly created mask 43 | PT1 = spm_vol(T1_file); 44 | YT1 = spm_read_vols(PT1); 45 | YT1(~smoothed_brain_mask) = 0; 46 | 47 | %% Map of the 3 tissues 48 | Yall = zeros(size(Y.grey,1),size(Y.grey,2),size(Y.grey,3),numel(tissues)); 49 | for i = 1:numel(tissues) 50 | Yall(:,:,:,i) = Y.(tissues{i}); 51 | end 52 | 53 | Ymerged = zeros(size(Y.grey)); 54 | for i = 1:size(Y.grey,1) 55 | for j = 1:size(Y.grey,2) 56 | for k = 1:size(Y.grey,3) 57 | if sum(Yall(i,j,k,:)) 58 | [maxtissue,itissue] = max(Yall(i,j,k,:)); 59 | if maxtissue > prob 60 | Ymerged(i,j,k) = itissue; 61 | end 62 | end 63 | end 64 | end 65 | end 66 | 67 | %% save files 68 | P = PT1; 69 | 70 | P.fname = fullfile(pathstr,[name '_brain' ext]); 71 | spm_write_vol(P,YT1); 72 | 73 | P.fname = fullfile(pathstr,[name '_brain_mask' ext]); 74 | spm_write_vol(P,smoothed_brain_mask); 75 | 76 | P.fname = fullfile(pathstr,[name '_brain_mask_dil' ext]); 77 | spm_write_vol(P,dilated_brain_mask); 78 | 79 | P.fname = fullfile(pathstr,[name '_brain_segmented_SPM' ext]); 80 | spm_write_vol(P,Ymerged); 81 | 82 | fprintf(' done.\n') 83 | 84 
| 85 | 86 | 87 | 88 | % for i = 1:size(Y,3) 89 | % imagesc(squeeze(Y(:,:,i))) 90 | % drawnow 91 | % end 92 | -------------------------------------------------------------------------------- /initial_scripts/init_script_regis/erode_or_dilate.m: -------------------------------------------------------------------------------- 1 | function X = erode_or_dilate(X,type,conn,op_fact) 2 | % type: type of operation: 'erode' or 'dilate' 3 | % conn: connectivity, should be 6, 18 or 26 (default = 6) 4 | % op_fact: number of iterations of the chosen operation (default = 1) 5 | 6 | if strcmp(type,'erode') 7 | erosion = 1; 8 | elseif strcmp(type,'dilate') 9 | erosion = 0; 10 | else 11 | error('type must be ''erode'' or ''dilate''') 12 | end 13 | 14 | % set defaults for the optional arguments (each one checked independently) 15 | if nargin < 2 16 | error('Not enough input arguments') 17 | end 18 | if nargin < 3, conn = 6; end 19 | if nargin < 4, op_fact = 1; end 20 | 21 | 22 | if ~ismember(conn,[6 18 26]) 23 | error('Argument ''conn'' should be 6, 18 or 26') 24 | end 25 | 26 | if ~islogical(X) 27 | X(isnan(X(:))) = 0; 28 | X = logical(X); 29 | end 30 | 31 | % neighbors 32 | face_neighbors = [1 0 0;0 1 0; 0 0 1;-1 0 0;0 -1 0;0 0 -1]; 33 | 34 | if conn > 6 35 | edge_neighbors = [1 1 0;1 0 1;0 1 1;... 36 | -1 1 0;-1 0 1;0 -1 1;1 -1 0;1 0 -1;0 1 -1;... 37 | -1 -1 0;-1 0 -1; 0 -1 -1]; 38 | else 39 | edge_neighbor_coords = []; 40 | end 41 | if conn == 26 42 | vertex_neighbors = [1 1 1;1 -1 1;1 1 -1;-1 1 1;... 43 | -1 -1 1;-1 1 -1;1 -1 -1;-1 -1 -1]; 44 | else 45 | vertex_neighbor_coords = []; 46 | end 47 | if ndims(X) == 2 48 | sizeX = [size(X) 1]; 49 | elseif ndims(X) == 3 50 | sizeX = size(X); 51 | else 52 | error('Only works with 2D or 3D matrices') 53 | end 54 | 55 | if erosion 56 | X = ~X; 57 | end 58 | X = double(X); 59 | 60 | 61 | for it = 1:op_fact 62 | for i = 1:sizeX(1) 63 | for j = 1:sizeX(2) 64 | for k = 1:sizeX(3) 65 | if X(i,j,k) == 1 66 | voxel_coord = [i j k]; 67 | face_neighbor_coords = list_neighbor_coords(sizeX,voxel_coord,face_neighbors); 68 | if conn > 6 69 | edge_neighbor_coords = list_neighbor_coords(sizeX,voxel_coord,edge_neighbors); 70 | end 71 | if conn == 26 72 | vertex_neighbor_coords = list_neighbor_coords(sizeX,voxel_coord,vertex_neighbors); 73 | end 74 | n_coords = [face_neighbor_coords;edge_neighbor_coords;vertex_neighbor_coords]; 75 | for n = 1:size(n_coords,1) 76 | if ~X(n_coords(n,1),n_coords(n,2),n_coords(n,3)) 77 | X(n_coords(n,1),n_coords(n,2),n_coords(n,3)) = 2; 78 | end 79 | end 80 | end 81 | end 82 | end 83 | end 84 | X(X==2) = 1; 85 | end 86 | 87 | X = logical(X); 88 | if erosion 89 | X = ~X; 90 | end 91 | 92 | % Subfunction 93 | function neighbor_coords = list_neighbor_coords(sizeX,voxel_coord,neighbors_type) 94 | 95 | neighbor_coords = neighbors_type + repmat(voxel_coord,size(neighbors_type,1),1); 96 | neighbor_coords = neighbor_coords(~sum(neighbor_coords<1,2),:); 97 | neighbor_coords = neighbor_coords(~sum(neighbor_coords > repmat(sizeX,size(neighbor_coords,1),1),2),:); 98 | 99 | 100 | 101 | 102 | 103 | -------------------------------------------------------------------------------- /initial_scripts/init_script_regis/fsl_fast.m: -------------------------------------------------------------------------------- 1 | function fsl_fast(T1_brain_file,paths) 2 | 3 | fprintf('Performing FSL segmentation...') 4 | 5 | %% fileparts T1 6 | [pathstr,name,ext] = fileparts(T1_brain_file); 7 | if strcmp(ext,'.gz') 8 | [~,name] = fileparts(name); 9 | end 10 | if exist('paths','var') 11 | if ~isempty(paths) 12 | pathstr = paths.segmentation; 13 | end 14 | end 15 
| 16 | % path 17 | pathstr = fullfile(pathstr,'FSL'); 18 | if ~exist(pathstr,'dir');mkdir(pathstr);end % create FSL segmentation folder if non-existant 19 | 20 | %% Clear folder 21 | delete(fullfile(pathstr,'*')) 22 | 23 | %% Find fsl fast command 24 | FSL_prefix = 'fsl5.0-'; 25 | if exist('paths','var') 26 | if isfield(paths,'FSL_prefix') 27 | FSL_prefix = paths.FSL_prefix; 28 | end 29 | end 30 | [s,~] = system([FSL_prefix 'fast']); 31 | if s == 127 32 | [s,~] = system('fast'); 33 | if s == 127 34 | error('Cannot find FSL fast command \nIf the command needs a prefix (eg. fsl6.0-bet), put this prefix in a ''paths.FSL_prefix'' variable%s','') 35 | else 36 | FSL_prefix = ''; 37 | end 38 | end 39 | 40 | %% Execute FAST 41 | out_base = fullfile(pathstr,name); 42 | system(sprintf('%sfast -t 1 -o %s %s',FSL_prefix,out_base,T1_brain_file)); 43 | 44 | %% Apply the same tissue number as SPM 45 | seg_file = fullfile(pathstr,[name '_pveseg.nii.gz']); 46 | system(sprintf('gunzip %s',seg_file)); 47 | seg_file = fullfile(pathstr,[name '_pveseg.nii']); 48 | P = spm_vol(seg_file); 49 | Y = spm_read_vols(P); 50 | 51 | Y(Y==1) = 4; 52 | Y(Y==2) = 1; 53 | Y(Y==3) = 2; 54 | Y(Y==4) = 3; 55 | 56 | spm_write_vol(P,Y); 57 | system(sprintf('gzip %s',seg_file)); 58 | 59 | movefile(fullfile(pathstr,[name '_pve_0.nii.gz']),fullfile(pathstr,[name '_pve_3.nii.gz'])); 60 | 61 | fprintf(' Done.\n') 62 | -------------------------------------------------------------------------------- /initial_scripts/init_script_regis/spm_old_segment.m: -------------------------------------------------------------------------------- 1 | function spm_old_segment(T1_file,grey,white,csf,paths) 2 | % script to use with macaque subjects 3 | % Note that multi-spectral (when there are two or more 4 | % registered images of different contrasts) processing is 5 | % not yet implemented for this method. 
(SPM manual) 6 | 7 | fprintf('Starting SPM segmentation...\n') 8 | 9 | T1 = cellstr([T1_file ',1']); 10 | TPM = {grey;white;csf}; 11 | 12 | %% matlabbatch 13 | matlabbatch{1}.spm.tools.oldseg.data = T1; 14 | matlabbatch{1}.spm.tools.oldseg.output.GM = [0 0 1]; 15 | matlabbatch{1}.spm.tools.oldseg.output.WM = [0 0 1]; 16 | matlabbatch{1}.spm.tools.oldseg.output.CSF = [0 0 1]; 17 | matlabbatch{1}.spm.tools.oldseg.output.biascor = 0; 18 | matlabbatch{1}.spm.tools.oldseg.output.cleanup = 0; 19 | matlabbatch{1}.spm.tools.oldseg.opts.tpm = TPM; 20 | matlabbatch{1}.spm.tools.oldseg.opts.ngaus = [2 21 | 2 22 | 2 23 | 4]; 24 | matlabbatch{1}.spm.tools.oldseg.opts.regtype = ''; % 'subj' 25 | matlabbatch{1}.spm.tools.oldseg.opts.warpreg = 1; 26 | matlabbatch{1}.spm.tools.oldseg.opts.warpco = 25; 27 | matlabbatch{1}.spm.tools.oldseg.opts.biasreg = 0.0001; 28 | matlabbatch{1}.spm.tools.oldseg.opts.biasfwhm = 60; 29 | matlabbatch{1}.spm.tools.oldseg.opts.samp = 3; 30 | matlabbatch{1}.spm.tools.oldseg.opts.msk = {''}; 31 | 32 | %% Initialization 33 | spm('defaults', 'FMRI'); 34 | spm_jobman('initcfg'); % initialization 35 | spm_get_defaults('cmdline',true) 36 | 37 | %% Run jobs 38 | spm_jobman('run', matlabbatch) 39 | 40 | %% fileparts T1 41 | [pathstr,name,ext] = fileparts(T1_file); 42 | 43 | %% Move files if needed 44 | if exist('paths','var') 45 | if ~isempty(paths) 46 | if ~exist(paths.segmentation,'dir');mkdir(paths.segmentation);end % create segmentation folder if non-existant 47 | movefile(fullfile(pathstr,['c*' name ext]),paths.segmentation); 48 | movefile(fullfile(pathstr,[name '_seg_*.mat']),paths.segmentation); 49 | end 50 | end 51 | 52 | %% Create brain masks from segmentation 53 | create_seg_masks(T1_file,paths) 54 | 55 | 56 | 57 | -------------------------------------------------------------------------------- /initial_scripts/init_script_regis/spm_sanlm.m: -------------------------------------------------------------------------------- 1 | function [out_file,denoised] = spm_sanlm(in_file) 2 | 3 | fprintf('\nStarting denoising...\n\n') 4 | 5 | [in_path,in_name,ext] = fileparts(in_file); 6 | 7 | if strcmp(ext,'.gz') 8 | in_file = fullfile(in_path,in_name); 9 | if exist(in_file,'file') == 2; delete(in_file);end 10 | system(sprintf('gunzip %s',in_file)); 11 | end 12 | 13 | spm('defaults', 'FMRI'); 14 | spm_jobman('initcfg'); % initialization 15 | spm_get_defaults('cmdline',true) 16 | 17 | spm_prefix = 'sanlm_'; 18 | tmp_file = fullfile(in_path,[in_name 'Dtmp' ext]); 19 | out_file = fullfile(in_path,[in_name 'Denoised' ext]); 20 | 21 | matlabbatch{1}.spm.tools.cat.tools.sanlm.prefix = spm_prefix; 22 | matlabbatch{1}.spm.tools.cat.tools.sanlm.NCstr = Inf; 23 | matlabbatch{1}.spm.tools.cat.tools.sanlm.rician = 0; 24 | 25 | 26 | stop = 0; 27 | i = 0; 28 | denoised = 1; 29 | while ~stop 30 | i = i + 1; 31 | fprintf('\nDenoising iteration %i...',i) 32 | 33 | [in_path,in_name,ext] = fileparts(in_file); 34 | spm_out_file = fullfile(in_path,[spm_prefix in_name ext]); 35 | 36 | copyfile(in_file,tmp_file); 37 | matlabbatch{1}.spm.tools.cat.tools.sanlm.data = {in_file}; 38 | spm_jobman('run', matlabbatch); 39 | movefile(spm_out_file,out_file); 40 | 41 | if i == 1 42 | uiwait(warndlg('FSLView will open the original and the denoised volumes. Inspect the result of denoising by playing with the opacity.')) 43 | end 44 | 45 | fslview_command = 'fslview'; 46 | [s,~] = system(sprintf('%s %s -l Greyscale %s -l Greyscale -t 1',fslview_command,in_file,out_file)); 47 | if s == 127 48 | error('Could not find %s! 
\nChange the ''fslview_command'' variable (3 lines above this error), \nto match with the way fslview is called on your system.',fslview_command) 49 | end 50 | 51 | % Time for the user to choose 52 | answer = questdlg('What do you want to do?','Time to decide','New denoising pass','Stop here','Keep previous one','Stop here'); 53 | 54 | if strcmp(answer,'New denoising pass') 55 | in_file = out_file; 56 | else 57 | stop = 1; 58 | if strcmp(answer,'Keep previous one') 59 | if i == 1 60 | delete(out_file) 61 | out_file = in_file; 62 | denoised = 0; 63 | else 64 | copyfile(tmp_file,out_file); 65 | end 66 | end 67 | delete(tmp_file) 68 | end 69 | end 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | -------------------------------------------------------------------------------- /initial_scripts/seg_pipe.sh: -------------------------------------------------------------------------------- 1 | ## Start by denoising the T1 and the T2 with sanlm 2 | ## David knows how to do that. 3 | 4 | 5 | ## T1xT2BET. The crop is wide (20) because the final crop will be done once the anat ends up in the template. 6 | ## This crop is only there to save computation time in the following steps. 7 | ## The -f option depends on the monkey; here I put the value that works for Maga 8 | ## It is the only critical option that has to be tuned by trial and error. The good news is that it belongs to an early step. 9 | T1xT2BET.sh -t1 sub-Maga_ses-01_T1w0p6mmDenoised.nii -t2 sub-Maga_ses-01_T2w0p6mmDenoised.nii.gz -aT2 -n 3 -f 0.45 -c 20 10 | 11 | 12 | 13 | 14 | ## IterREGBET. I do it with the inia19 because I find that it works better for the non-linear registration that takes place afterwards 15 | template_brain=/hpc/banco/Primavoice_Data_and_Analysis/templates/inia19/inia19-t1-brain.nii.gz 16 | IterREGBET.sh -inw sub-Maga_ses-01_T1w0p6mmDenoised_cropped.nii.gz -inb sub-Maga_ses-01_T1w0p6mmDenoised_BET_cropped.nii.gz -refb $template_brain 17 | 18 | 19 | 20 | 21 | ## T1xT2BiasFieldCorrection, giving it the brain extracted above. We should check with Julien how to automatically determine a sigma value (-s option) from the image resolution. 
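## (illustrative sketch, not part of the original script: one way to derive -s
## automatically would be to read the voxel size with fslval and keep sigma
## proportional to it; the 1.8 mm target below is an assumption, chosen only so
## that 0.6 mm data gives the -s 3 used here)
# pixdim=$(fslval sub-Maga_ses-01_T1w0p6mmDenoised_cropped.nii.gz pixdim1)   # voxel size in mm
# sigma=$(echo "scale=1; 1.8 / $pixdim" | bc)                                # 1.8 mm -> 3 voxels at 0.6 mm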
22 | T1xT2BiasFieldCorrection.sh -t1 sub-Maga_ses-01_T1w0p6mmDenoised_cropped.nii.gz -t2 sub-Maga_ses-01_T2w0p6mmDenoised-in-T1w_cropped.nii.gz -s 3 -b sub-Maga_ses-01_T1w0p6mmDenoised_cropped_IRbrain_mask.nii.gz 23 | 24 | 25 | 26 | 27 | ## Rigid registration to the template 28 | T1_brain=sub-Maga_ses-01_T1w0p6mmDenoised_cropped_debiased_brain.nii.gz 29 | T1_brain_in_temp=sub-Maga_ses-01_T1w0p6mmDenoised_cropped_debiased_brain-rigid-in-template.nii.gz 30 | anat2temp_xfm=T1_rigid_to_template.xfm 31 | flirt -in $T1_brain -ref $template_brain -dof 6 -cost normmi -out $T1_brain_in_temp -omat $anat2temp_xfm 32 | 33 | T1=sub-Maga_ses-01_T1w0p6mmDenoised_cropped_debiased.nii.gz 34 | T1_in_temp=sub-Maga_ses-01_T1w0p6mmDenoised_cropped_debiased-rigid-in-template.nii.gz 35 | flirt -in $T1 -ref $template_brain -out $T1_in_temp -applyxfm -init $anat2temp_xfm 36 | 37 | 38 | 39 | 40 | ## Non-linear registration with ANTS 41 | template_head=/hpc/banco/Primavoice_Data_and_Analysis/templates/inia19/inia19-t1.nii.gz 42 | ANTs_out_base=SyN_template_to_anat 43 | /hpc/soft/ANTS/ANTs/Scripts/antsRegistrationSyNQuick.sh -d 3 -f $T1_brain_in_temp -f $T1_in_temp -m $template_brain -m $template_head -o $ANTs_out_base -j 1 44 | 45 | 46 | 47 | 48 | ## The rest I leave to you in Matlab: basically you have to apply the transformations found above to the tissue probability maps of the template 49 | to_move = {'paths.template_tissue{1}';'paths.template_tissue{2}';'paths.template_tissue{3}';'paths.template_brain_mask'}; 50 | reg_prob_maps = cell(length(to_move),1); 51 | affine_xfm = [ANTs_out_base '0GenericAffine.mat']; 52 | warp_file = [ANTs_out_base '1Warp.nii.gz']; 53 | for i = 1:length(to_move) 54 | in_file = eval(to_move{i}); 55 | [~,in_name,ext] = fileparts(in_file); 56 | if strcmp(ext,'.gz'); [~,in_name] = fileparts(in_name); end 57 | reg_prob_maps{i} = fullfile(paths.segmentation,[in_name '-in-anat.nii.gz']); 58 | system(sprintf('%s -i %s -r %s -o %s -t %s -t %s -n NearestNeighbor',fullfile('$ANTSPATH','antsApplyTransforms'),in_file,T1_in_temp,reg_prob_maps{i},warp_file,affine_xfm)); 59 | end 60 | 61 | 62 | 63 | ## And then, finally: segmentation 64 | [tissue_files,tissues] = spm_old_segment(T1_in_temp,reg_prob_maps{1},reg_prob_maps{2},reg_prob_maps{3}); 65 | 66 | 67 | 68 | 69 | 70 | ## I also leave you some Matlab that creates binary volumes of the different tissues as a function of probabilities 71 | ## Then there is the concatenation of the three tissues to make the brain mask 72 | %% Create 01 & 99 percents masks for each tissue 73 | probs = {'01';'50';'99'}; 74 | for i = 1:numel(tissues) 75 | for p = 1:numel(probs) 76 | % Create a mask of voxels with prob >= probs-percent 77 | maths_out_file = fullfile(paths.segmentation,sprintf('%s_%s_%sp.nii.gz',T1_base_name,tissues{i},probs{p})); 78 | system(sprintf('%sfslmaths %s -thr 0.%s %s',paths.FSL_prefix,tissue_files{i},probs{p},maths_out_file)); 79 | system(sprintf('%sfslmaths %s -bin %s -odt short',paths.FSL_prefix,maths_out_file,maths_out_file)); 80 | 81 | % Keep the largest cluster 82 | system(sprintf('%s 3 %s GetLargestComponent %s',fullfile('$ANTSPATH','ImageMath'),maths_out_file,maths_out_file)); 83 | system(sprintf('%sfslmaths %s -add 0 %s -odt short',paths.FSL_prefix,maths_out_file,maths_out_file)); % convert to short (for brainvisa) 84 | end 85 | end 86 | 87 | %% Create brain mask from concatenation of 3 tissues at 1% prob 88 | T1_brainmask_in_temp = fullfile(paths.segmentation,[T1_base_name '-brainmask-in-template.nii.gz']); 89 | system(sprintf('%sfslmaths %s -add %s -add %s -bin %s 
-odt short',paths.FSL_prefix,fullfile(paths.segmentation,[T1_base_name '_white_01p.nii.gz']),fullfile(paths.segmentation,[T1_base_name '_grey_01p.nii.gz']),fullfile(paths.segmentation,[T1_base_name '_csf_01p.nii.gz']),T1_brainmask_in_temp)); 90 | system(sprintf('%s 3 %s FillHoles %s',fullfile('$ANTSPATH','ImageMath'),T1_brainmask_in_temp,T1_brainmask_in_temp)); 91 | system(sprintf('%sfslmaths %s -add 0 %s -odt short',paths.FSL_prefix,T1_brainmask_in_temp,T1_brainmask_in_temp)); % convert to short (for brainvisa) 92 | 93 | % mask brain 94 | system(sprintf('%sfslmaths %s -mas %s %s -odt short',paths.FSL_prefix,T1_in_temp,T1_brainmask_in_temp,T1_brain_in_temp)); -------------------------------------------------------------------------------- /macapype-conda/macapype/meta.yaml: -------------------------------------------------------------------------------- 1 | {% set name = "macapype" %} 2 | {% set version = "0.2" %} 3 | 4 | package: 5 | name: "{{ name|lower }}" 6 | version: "{{ version }}" 7 | 8 | source: 9 | url: "https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz" 10 | sha256: 9b62c1d502cea07e628c2667914f8bc75614aad253263a4a99bd38b8cca9b46f 11 | 12 | build: 13 | number: 0 14 | script: "{{ PYTHON }} -m pip install . -vv" 15 | 16 | requirements: 17 | host: 18 | - brain-slam 19 | - networkx >=2.0 20 | - nibabel 21 | - nilearn 22 | - nipype 23 | - numpy 24 | - pip 25 | - pybids 26 | - python 27 | - scikit-image 28 | run: 29 | - brain-slam 30 | - networkx >=2.0 31 | - nibabel 32 | - nilearn 33 | - nipype 34 | - numpy 35 | - pybids 36 | - python 37 | - scikit-image 38 | 39 | test: 40 | imports: 41 | - examples 42 | - macapype 43 | - macapype.nodes 44 | - macapype.pipelines 45 | - macapype.utils 46 | 47 | about: 48 | home: The package home page 49 | license: BSD 3 50 | license_family: BSD 51 | license_file: 52 | summary: "Pipeline for anatomic processing for macaque" 53 | doc_url: 54 | dev_url: 55 | 56 | extra: 57 | recipe-maintainers: 58 | - macatools 59 | -------------------------------------------------------------------------------- /macapype/__init__.py: -------------------------------------------------------------------------------- 1 | from . import pipelines # noqa 2 | from . import nodes # noqa 3 | from . import utils # noqa 4 | 5 | __version__ = "unknown" 6 | try: 7 | from ._version import __version__ # noqa 8 | except ImportError: 9 | # We're running in a tree that doesn't have a _version.py 10 | pass 11 | -------------------------------------------------------------------------------- /macapype/_version.py: -------------------------------------------------------------------------------- 1 | __version__ = '0.6' 2 | -------------------------------------------------------------------------------- /macapype/bash/CropVolume.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Crop Volume based on a brain extraction 3 | 4 | 5 | 6 | HELP() { 7 | cat <<HELP 8 | 9 | Usage: 10 | ${0##*/} -i <volume> [-i <volume2> ...] -b <brain> [options] 11 | 12 | 13 | Compulsory arguments: 14 | -i Volume to crop (you can specify as many -in as you want) 15 | -b Brain image or brain mask, in the same space as the in-file(s) 16 | 17 | Optional arguments: 18 | -o Prefix for the cropped image(s) (Must provide as many prefixes as input images with -o, default is the base name of each input image). 19 | -s Suffix for the cropped image(s) (default is "_cropped") 20 | -c 'c' is the space between the brain and the limits of the crop box expressed in percentage of 21 | the brain size (eg. 
if the brain size is 200 voxels in one dimension and c=10: the sides of 22 | the brain in this dimension will be 20 voxels away from the borders of the resulting 23 | crop box in this dimension). Default: c=10 24 | -p <prefix> Prefix for running FSL functions (can be a path or just a prefix) 25 | 26 | HELP 27 | } 28 | 
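# Example call (file names are hypothetical):
#   CropVolume.sh -i sub-01_T1w.nii.gz -i sub-01_T2w.nii.gz -b sub-01_T1w_BET_mask.nii.gz -c 20
# would crop both volumes to the brain bounding box with a 20% margin, writing
# sub-01_T1w_cropped.nii.gz and sub-01_T2w_cropped.nii.gz in the current directory.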
29 | #/********************/ 30 | #/********************/ 31 | 32 | 33 | if [[ "$1" == "-h" || "$1" == "-help" || "$1" == "-H" || $# -eq 0 ]]; then 34 | HELP >&2 35 | exit 1 36 | fi 37 | 38 | 39 | # Defaults 40 | IN_FILES=() 41 | OUT_PREFIXES=() 42 | OUT_SUFFIX="_cropped" 43 | FSLPREFIX="" 44 | CROP_P=10; 45 | NO_DISP_PAR="" 46 | 47 | 48 | # reading command line arguments 49 | while getopts "i:I:b:B:o:O:s:S:c:C:p:P:d" OPT 50 | do 51 | case $OPT in 52 | i |I) # input images 53 | IN_FILES[${#IN_FILES[@]}]=$OPTARG 54 | ;; 55 | b |B) # brain file 56 | IN_BRAIN=$OPTARG 57 | ;; 58 | o |O) #output name prefix 59 | OUT_PREFIXES[${#OUT_PREFIXES[@]}]=$OPTARG 60 | ;; 61 | s |S) #output name prefix 62 | OUT_SUFFIX=$OPTARG 63 | ;; 64 | c |C) # crop percentage 65 | CROP_P=$OPTARG 66 | ;; 67 | p |P) # fsl prefix 68 | FSLPREFIX=$OPTARG 69 | ;; 70 | d) # do not display parameters 71 | NO_DISP_PAR=1 72 | ;; 73 | :) # getopts issues an error message 74 | echo "${0##*/} >> Bad usage. $OPTARG requires an argument" 1>&2 75 | exit 1 76 | ;; 77 | \?) # getopts issues an error message 78 | echo "${0##*/} >> See -help." 1>&2 79 | exit 1 80 | ;; 81 | esac 82 | done 83 | 84 | 85 | 86 | if [[ -z $IN_FILES || -z $IN_BRAIN ]]; then 87 | echo "${0##*/} >> Missing arguments" 88 | echo ">> See -help." 1>&2 89 | exit 1 90 | fi 91 | 92 | 93 | 94 | # ************************** 95 | # Functions 96 | 97 | extract_base_name() { 98 | IN=$1 99 | INext=${IN##*.} 100 | local INbase="" 101 | if [[ $INext == "gz" ]]; then 102 | INngz=${IN%.gz} 103 | INext=".${INngz##*.}.gz" 104 | INbase=${IN%$INext} 105 | else 106 | INext=".${INext}" 107 | INbase=${IN%$INext} 108 | fi 109 | INbase=${INbase##*/} 110 | echo $INbase 111 | } 112 | 113 | extract_path() { 114 | IN=$1 115 | BASE=${IN##*/} # base 116 | local DIR=${IN%$BASE} # dirpath 117 | echo $DIR 118 | } 119 | 120 | # ************************* 121 | 122 | 123 | 124 | 125 | # ************************* 126 | # Files 127 | 128 | if [[ -n $OUT_PREFIXES ]]; then # check if the number of prefixes is equal to the number of input images 129 | if [[ ${#IN_FILES[@]} -ne ${#OUT_PREFIXES[@]} ]]; then 130 | echo "Number of prefixes (-o) is not equal to the number of input images (-i)." 131 | exit 1 132 | fi 133 | else # if no prefix provided, create them 134 | for(( i=0; i<${#IN_FILES[@]}; i++ )); do 135 | INname=`extract_base_name ${IN_FILES[$i]}` 136 | INpath=`extract_path ${IN_FILES[$i]}` 137 | OUT_PREFIXES[$i]="$PWD/${INname}${OUT_SUFFIX}" 138 | done 139 | fi 140 | 141 | # ************************** 142 | 143 | 144 | 145 | 146 | if [[ -z $NO_DISP_PAR ]]; then 147 | 148 | cat < -------------------------------------------------------------------------------- /macapype/nodes/denoise.py: -------------------------------------------------------------------------------- 84 | >>> import copy 85 | >>> from nipype.interfaces.ants import DenoiseImage 86 | >>> denoise = DenoiseImage() 87 | >>> denoise.inputs.dimension = 3 88 | >>> denoise.inputs.input_image = 'im1.nii' 89 | >>> denoise.cmdline 90 | 'DenoiseImage -d 3 -i im1.nii -n Gaussian -o im1_noise_corrected.nii -s 1' 91 | 92 | 
apply_node, "ref_file") 65 | 66 | # output 67 | seg_pipe.connect( 68 | apply_node, "out_file", 69 | outputnode, outputnodefile) 70 | 71 | return apply_node 72 | -------------------------------------------------------------------------------- /macapype/nodes/tests/test_segment.py: -------------------------------------------------------------------------------- 1 | # from .binary_fill_holes import BinaryFillHoles 2 | 3 | 4 | def test_BinaryFillHoles(): 5 | 6 | """ 7 | TODO 8 | """ 9 | # val = BinaryFillHoles() 10 | assert True 11 | pass 12 | 13 | 14 | def test_split_indexed_mask(): 15 | 16 | from macapype.utils.utils_tests import load_test_data, format_template 17 | from macapype.nodes.segment import split_indexed_mask 18 | 19 | template_name = "NMT_v2.0_asym" 20 | nmt_dir = load_test_data(name=template_name) 21 | params_template = format_template(nmt_dir, template_name) 22 | 23 | list_split_files = split_indexed_mask(params_template["template_seg"]) 24 | 25 | assert len(list_split_files) != 0 26 | 27 | 28 | def test_copy_header(): 29 | assert True 30 | pass 31 | 32 | 33 | def test_fill_list_vol(): 34 | 35 | from macapype.utils.utils_tests import load_test_data, format_template 36 | from macapype.nodes.segment import fill_list_vol 37 | 38 | import nibabel as nib 39 | 40 | template_name = "MBM_v3.0.1" 41 | nmt_dir = load_test_data(name=template_name) 42 | params_template = format_template(nmt_dir, template_name) 43 | 44 | nb_classes = 5 45 | vol_3c = [params_template["template_gm"], 46 | params_template["template_wm"], 47 | params_template["template_csf"]] 48 | 49 | vol_5c = fill_list_vol(vol_3c, nb_classes) 50 | 51 | first_vol_shape = nib.load(vol_5c[0]).get_fdata().shape 52 | last_vol_shape = nib.load(vol_5c[-1]).get_fdata().shape 53 | 54 | assert len(vol_5c) == nb_classes 55 | 56 | assert first_vol_shape == last_vol_shape 57 | 58 | 59 | if __name__ == '__main__': 60 | test_fill_list_vol() 61 | -------------------------------------------------------------------------------- /macapype/nodes/tests/test_surface.py: -------------------------------------------------------------------------------- 1 | pass 2 | 3 | """ 4 | import macapype.nodes.surface as surf 5 | import numpy as np 6 | import nibabel as nb 7 | import os 8 | 9 | 10 | def test_meshify(): 11 | # Create a temporary test image 12 | print("Generate fake Nifti image") 13 | dt = np.zeros((100, 60, 100)) 14 | dt[40:60, 20:40, 20:50] = 1 15 | affine = np.eye(4) 16 | 17 | tmp_file = "./test_meshify_input_image.nii" 18 | nb.save(nb.Nifti1Image(dt, affine), tmp_file) 19 | 20 | try: 21 | # Test the node 22 | print("Create Meshify node") 23 | meshify = surf.Meshify() 24 | meshify.inputs.image_file = tmp_file 25 | print("Run Meshify node") 26 | meshify.run() 27 | 28 | print("Assert output") 29 | assert os.path.exists(tmp_file[:-4] + ".gii") 30 | pass 31 | finally: 32 | print("Remove temporary data") 33 | os.remove(tmp_file) 34 | os.remove(tmp_file[:-4] + ".gii") 35 | 36 | """ 37 | -------------------------------------------------------------------------------- /macapype/pipelines/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Macatools/macapype/38d6356305cfc6d5668f6cf422e70a85843007bf/macapype/pipelines/__init__.py -------------------------------------------------------------------------------- /macapype/pipelines/extract_brain.py: -------------------------------------------------------------------------------- 1 | """ 2 | Pipelines for brain extraction 3 | 4 | """ 
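# create_extract_pipe below selects one brain-extraction tool depending on which
# key is present in params: "bet4animal" (FSL), "hdbet" (HD-BET), or, by default,
# "atlas_brex" (AtlasBREX); an optional "smooth" node can be prepended to any of them.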
5 | import nipype.interfaces.utility as niu 6 | import nipype.pipeline.engine as pe 7 | 8 | import nipype.interfaces.fsl as fsl 9 | 10 | from ..nodes.extract_brain import AtlasBREX, HDBET, Bet4Animal 11 | 12 | from ..utils.utils_nodes import NodeParams, parse_key 13 | 14 | 15 | def create_extract_pipe(params_template, params={}, 16 | name="extract_T1_pipe"): 17 | """ 18 | Description: Extract T1 brain using AtlasBREX 19 | 20 | Params: 21 | 22 | - norm_intensity (see `N4BiasFieldCorrection `_ for arguments) 25 | - atlas_brex (see :class:`AtlasBREX \ 26 | ` for arguments) - also available \ 27 | as :ref:`indiv_params ` 28 | 29 | Inputs: 30 | 31 | inputnode: 32 | 33 | restore_T1: 34 | preprocessed (debiased/denoised) T1 file name 35 | 36 | arguments: 37 | 38 | params_template: 39 | dictionary of info about template 40 | 41 | params: 42 | dictionary of node sub-parameters (from a json file) 43 | 44 | name: 45 | pipeline name (default = "extract_pipe") 46 | 47 | Outputs: 48 | 49 | smooth_mask.out_file: 50 | Computed mask (after some smoothing) 51 | 52 | """ 53 | 54 | # creating pipeline 55 | extract_pipe = pe.Workflow(name=name) 56 | 57 | # creating inputnode 58 | inputnode = pe.Node( 59 | niu.IdentityInterface(fields=['restore_T1', 60 | "indiv_params"]), 61 | name='inputnode') 62 | 63 | outputnode = pe.Node(niu.IdentityInterface(fields=['mask_file']), 64 | name="outputnode") 65 | 66 | # smooth before brex 67 | if "smooth" in params.keys(): 68 | 69 | smooth = NodeParams(fsl.utils.Smooth(), 70 | params=parse_key(params, "smooth"), 71 | name='smooth') 72 | 73 | extract_pipe.connect(inputnode, 'restore_T1', 74 | smooth, 'in_file') 75 | 76 | if "bet4animal" in params: 77 | bet4animal = NodeParams( 78 | Bet4Animal(), 79 | params=parse_key(params, "bet4animal"), 80 | name='bet4animal') 81 | 82 | if "smooth" in params.keys(): 83 | extract_pipe.connect( 84 | smooth, 'smoothed_file', 85 | bet4animal, 'in_file') 86 | else: 87 | 88 | extract_pipe.connect( 89 | inputnode, 'restore_T1', 90 | bet4animal, 'in_file') 91 | 92 | extract_pipe.connect( 93 | inputnode, ("indiv_params", parse_key, "bet4animal"), 94 | bet4animal, 'indiv_params') 95 | 96 | # outputnode 97 | extract_pipe.connect(bet4animal, 'mask_file', outputnode, 'mask_file') 98 | 99 | elif "hdbet" in params.keys(): 100 | hdbet = NodeParams(HDBET(), 101 | params=parse_key(params, "hdbet"), 102 | name='hdbet') 103 | 104 | if "smooth" in params.keys(): 105 | extract_pipe.connect( 106 | smooth, 'smoothed_file', 107 | hdbet, 'in_file') 108 | else: 109 | 110 | extract_pipe.connect( 111 | inputnode, 'restore_T1', 112 | hdbet, 'in_file') 113 | 114 | extract_pipe.connect( 115 | inputnode, ("indiv_params", parse_key, "hdbet"), 116 | hdbet, 'indiv_params') 117 | 118 | # outputnode 119 | extract_pipe.connect(hdbet, 'mask_file', outputnode, 'mask_file') 120 | else: 121 | 122 | # atlas_brex 123 | atlas_brex = NodeParams( 124 | AtlasBREX(), 125 | params=parse_key(params, "atlas_brex"), 126 | name='atlas_brex') 127 | 128 | if "smooth" in params.keys(): 129 | extract_pipe.connect(smooth, 'smoothed_file', 130 | atlas_brex, 't1_restored_file') 131 | else: 132 | 133 | extract_pipe.connect(inputnode, 'restore_T1', 134 | atlas_brex, 't1_restored_file') 135 | 136 | atlas_brex.inputs.NMT_file = params_template["template_head"] 137 | atlas_brex.inputs.NMT_SS_file = params_template["template_brain"] 138 | 139 | extract_pipe.connect( 140 | inputnode, ("indiv_params", parse_key, "atlas_brex"), 141 | atlas_brex, 'indiv_params') 142 | 143 | # mask_brex 144 | mask_brex = 
pe.Node(fsl.UnaryMaths(), name='mask_brex') 145 | mask_brex.inputs.operation = 'bin' 146 | 147 | extract_pipe.connect(atlas_brex, 'brain_file', mask_brex, 'in_file') 148 | 149 | # smooth_mask 150 | smooth_mask = pe.Node(fsl.UnaryMaths(), name='smooth_mask') 151 | smooth_mask.inputs.operation = "bin" 152 | smooth_mask.inputs.args = "-s 1 -thr 0.5 -bin" 153 | 154 | extract_pipe.connect(mask_brex, 'out_file', smooth_mask, 'in_file') 155 | 156 | # outputnode 157 | extract_pipe.connect(smooth_mask, 'out_file', outputnode, 'mask_file') 158 | 159 | return extract_pipe 160 | -------------------------------------------------------------------------------- /macapype/pipelines/tests/test_prepare.py: -------------------------------------------------------------------------------- 1 | 2 | import os.path as op 3 | 4 | from macapype.utils.utils_tests import make_tmp_dir 5 | from macapype.utils.misc import parse_key 6 | from macapype.pipelines.prepare import create_short_preparation_pipe 7 | 8 | data_path = make_tmp_dir() 9 | 10 | 11 | def test_create_crop_aladin_pipe_short_preparation_pipe(): 12 | 13 | params = { 14 | "short_preparation_pipe": 15 | { 16 | "crop_aladin_pipe": 17 | { 18 | } 19 | } 20 | } 21 | 22 | params_template = {"template_head": ""} 23 | 24 | # running workflow 25 | segment_pnh = create_short_preparation_pipe( 26 | params_template=params_template, 27 | params=parse_key(params, "short_preparation_pipe"), 28 | name="short_manual_preparation_pipe") 29 | 30 | segment_pnh.base_dir = data_path 31 | 32 | segment_pnh.write_graph(graph2use="colored") 33 | assert op.exists(op.join(data_path, 34 | "short_manual_preparation_pipe", 35 | "graph.png")) 36 | 37 | 38 | def test_create_crop_T1_short_preparation_pipe(): 39 | 40 | params = { 41 | "short_preparation_pipe": 42 | { 43 | "crop_T1": 44 | { 45 | "args": "" 46 | } 47 | } 48 | } 49 | 50 | params_template = {"template_head": ""} 51 | # running workflow 52 | segment_pnh = create_short_preparation_pipe( 53 | params_template=params_template, 54 | params=parse_key(params, "short_preparation_pipe"), 55 | name="short_manual_preparation_pipe") 56 | 57 | segment_pnh.base_dir = data_path 58 | 59 | segment_pnh.write_graph(graph2use="colored") 60 | assert op.exists(op.join(data_path, 61 | "short_manual_preparation_pipe", 62 | "graph.png")) 63 | -------------------------------------------------------------------------------- /macapype/pipelines/tests/test_surface_pipelines.py: -------------------------------------------------------------------------------- 1 | 2 | import os.path as op 3 | 4 | from macapype.utils.utils_tests import (make_tmp_dir, load_test_data, 5 | format_template) 6 | 7 | from macapype.utils.misc import parse_key 8 | from macapype.pipelines.surface import (create_nii_to_mesh_pipe) 9 | 10 | 11 | data_path = make_tmp_dir() 12 | 13 | 14 | def test_create_surface_pipe(): 15 | 16 | # params 17 | params = { 18 | "nii_to_mesh_pipe": 19 | { 20 | "split_hemi_pipe": 21 | { 22 | }, 23 | 24 | } 25 | } 26 | 27 | # params_template 28 | template_name = "haiko89_template" 29 | template_dir = load_test_data(template_name) 30 | 31 | params_template = format_template(template_dir, template_name) 32 | 33 | # running workflow 34 | segment_pnh = create_nii_to_mesh_pipe( 35 | params=parse_key(params, "nii_to_mesh_pipe"), 36 | params_template=params_template, 37 | name="nii_to_mesh_pipe") 38 | 39 | segment_pnh.base_dir = data_path 40 | 41 | segment_pnh.write_graph(graph2use="colored") 42 | assert op.exists(op.join(data_path, 43 | "nii_to_mesh_pipe", 44 | 
"graph.png")) 45 | -------------------------------------------------------------------------------- /macapype/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Macatools/macapype/38d6356305cfc6d5668f6cf422e70a85843007bf/macapype/utils/__init__.py -------------------------------------------------------------------------------- /macapype/utils/data_test_servers.json: -------------------------------------------------------------------------------- 1 | { 2 | "ZENODO": { 3 | "server": "https://zenodo.org", 4 | "data_dir": { 5 | "Juna_Chimp_T1_1mm_skull.nii.gz": "4683381", 6 | "Juna_Chimp": "4738619" 7 | }, 8 | "cloud_format_3": "{}/record/{}/files/{}.zip?download=1" 9 | }, 10 | "AMUBOX": { 11 | "server": "https://amubox.univ-amu.fr", 12 | "data_dir": { 13 | "data_test_macaque": "qzsfymDYjx692Q4", 14 | "inia19":"Jq5abnZDZ3smmBJ", 15 | "macapype_CI": "KJ2L5j6L6orPXxM", 16 | 17 | 18 | "NMT_v1.2": "5YnwNf3Jr7Qsc8H", 19 | "NMT_v1.3better": "6DBctypBT7LsexL", 20 | "NMT_v2.0_asym": "BYj6NmwB76Z4Bt2", 21 | "NMT_v2.0_asym_0p5": "Estr3kFHSXLSGsf", 22 | 23 | "pad40_NMT_v2.0_asym": "Rd7ymLxfHpa4dgK", 24 | 25 | "haiko89_template": "3Agb3zecnkQGCr5", 26 | "Haiko_v1.5": "QdQ2Sefb53KQCG2", 27 | 28 | "BaBa21-3": "H5PwgcfWPQLAYWM", 29 | "BaBa21-2": "qmibGGGcHLszPJR", 30 | "BaBa21-1": "zY9cPqAAoMznFpf", 31 | "BaBa21-0": "xfERHnjBWyYz37X", 32 | 33 | "BaBa21-3_0p6": "QAEScG44SEoBHbb", 34 | "BaBa21-2_0p6": "5zaGnnc7YMX8y9f", 35 | "BaBa21-1_0p6": "M9cF7FqFNBxf283", 36 | "BaBa21-0_0p6": "QCZzrG2dgN74fBQ", 37 | 38 | 39 | "MBM_v3.0.1": "RMxXifHeigNkB3g", 40 | "MBM_v3.0.1_6seg": "yop65ARqHKMdfrP", 41 | "MBM_v3.0.1_6seg_stereoINT": "PREL3NDPrrcytPT", 42 | "MBM_v3.0.1_6seg_stereoINT_largeFOV": "PREL3NDPrrcytPT", 43 | 44 | "bma-1": "29qRZrnHnBxtCGB", 45 | "bma-1_0p2mm": "YoLAFawKMTi7FqR", 46 | 47 | "MNI_Fortin_1mm": "24R6837rwH6PQSj", 48 | "MNI_Fortin_2mm": "fEAWFSyfGD37nH9" 49 | 50 | }, 51 | "cloud_format": "{}/public.php?service=files&t={}&download", 52 | "curl_cloud_format": "{}/public.php/dav/files/{}" 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /macapype/utils/misc.py: -------------------------------------------------------------------------------- 1 | # on the fly function for checking what is passed in "connect" 2 | # should end up in ~ nipype.utils.misc 3 | 4 | 5 | def show_files(files): 6 | print(files) 7 | return files 8 | 9 | 10 | def print_val(val): 11 | print(val) 12 | return val 13 | 14 | 15 | def print_nii_data(nii_file): 16 | import nibabel as nib 17 | 18 | data = nib.load(nii_file).get_data() 19 | print(nii_file, data) 20 | return nii_file 21 | 22 | 23 | def get_elem(list_elem, index_elem): 24 | assert isinstance(list_elem, list), 'Error, list_elem should be a list' 25 | assert 0 <= index_elem and index_elem < len(list_elem), \ 26 | ('error with index {}, does not match a list with {} elements'.format( 27 | index_elem, len(list_elem))) 28 | 29 | elem = list_elem[index_elem] 30 | print(elem) 31 | 32 | return elem 33 | 34 | 35 | def get_index(list_elem, index_elem): 36 | assert isinstance(list_elem, list), 'Error, list_elem should be a list' 37 | assert 1 <= index_elem and index_elem < len(list_elem)+1, \ 38 | ('error with index {}, shoud start at 1 and be smaller than {}'.format( 39 | index_elem, len(list_elem)+1)) 40 | 41 | return list_elem[index_elem-1] 42 | 43 | 44 | def get_pattern(list_elem, pattern): 45 | 46 | assert isinstance(list_elem, list), 'Error, list_elem 
should be a list'
47 | 
48 |     for elem in list_elem:
49 |         if pattern in elem:
50 |             print("Found {} in {}".format(pattern, elem))
51 |             return elem
52 | 
53 |     assert False, "Could not find {} in {}".format(pattern, list_elem)
54 | 
55 | 
56 | def get_list_length(list_elem):
57 |     assert isinstance(list_elem, list), 'Error, list_elem should be a list'
58 |     return len(list_elem)
59 | 
60 | 
61 | def get_first_elem(elem):
62 |     print(elem)
63 |     if isinstance(elem, list):
64 |         print("OK, is list")
65 |         assert len(elem) == 1, "Error, list should contain only one element"
66 |         return elem[0]
67 |     else:
68 |         print("not a list")
69 |         return elem
70 | 
71 | 
72 | def gzip(unzipped_file):
73 | 
74 |     import os
75 |     import shutil
76 |     import subprocess
77 | 
78 |     head, tail = os.path.split(unzipped_file)
79 | 
80 |     dest = os.path.abspath(tail)
81 | 
82 |     print("Copying {} to {}".format(unzipped_file, dest))
83 |     shutil.copy(unzipped_file, dest)
84 |     cmd_line = "gzip {}".format(dest)
85 | 
86 |     subprocess.check_output(cmd_line, shell=True)
87 | 
88 |     zipped_file = dest + ".gz"
89 | 
90 |     assert os.path.exists(zipped_file), \
91 |         "Error, {} should exist".format(zipped_file)
92 | 
93 |     return zipped_file
94 | 
95 | 
96 | def gunzip(zipped_file):
97 |     import subprocess
98 |     import shutil
99 |     import os
100 | 
101 |     head, tail = os.path.split(zipped_file)
102 | 
103 |     dest = os.path.abspath(tail)
104 | 
105 |     shutil.copy(zipped_file, dest)
106 | 
107 |     if zipped_file[-3:] == ".gz":
108 |         subprocess.check_output("gunzip " + dest, shell=True)
109 |     else:
110 |         raise ValueError("Non-GZip file given")
111 | 
112 |     unzipped_file = dest[:-3]
113 |     return unzipped_file
114 | 
115 | 
116 | def merge_3_elem_to_list(elem1, elem2, elem3):
117 |     return [elem1, elem2, elem3]
118 | 
119 | 
120 | def parse_key(params, key):
121 | 
122 |     from nipype.interfaces.base import isdefined
123 | 
124 |     def _parse_key(params, cur_key):
125 |         if cur_key in params.keys():
126 |             return params[cur_key]
127 |         else:
128 |             print("Error, key {} was not found in {}".format(cur_key, params.keys()))
129 |             return {}
130 | 
131 |     if isdefined(params):
132 |         if isinstance(key, tuple):
133 |             for cur_key in key:
134 |                 params = _parse_key(params, cur_key)
135 |         else:
136 |             params = _parse_key(params, key)
137 | 
138 |         return params
139 | 
140 |     else:
141 |         return {}
142 | 
143 | 
144 | def list_input_files(list_T1, list_T2):
145 | 
146 |     print("list_T1:", list_T1)
147 |     print("list_T2:", list_T2)
148 | 
149 |     return ""
150 | 
--------------------------------------------------------------------------------
/macapype/utils/regex_subs.json:
--------------------------------------------------------------------------------
1 | {
2 |     "acq-[0-9a-zA-Z]*_": "",
3 |     "run-[0-9]*_": "",
4 |     "rec-[0-9a-zA-Z]*_": "",
5 |     "_[0-9]_": "_",
6 |     "SegmentationPosteriors[0-9]*_": ""
7 | }
8 | 
--------------------------------------------------------------------------------
/macapype/utils/subs.json:
--------------------------------------------------------------------------------
1 | {
2 |     "T1w_roi_noise_corrected_debiased_BET_FLIRT-to_inia19-t1-brain": "space-inia19_desc-brain_T1w",
3 |     "T1w_roi_corrected_debiased_BET_FLIRT-to_Haiko89_Asymmetric.Template_n89_flirt_thresh_fillh_indexed_mask": "space-orig_desc-brain_dseg",
4 | 
5 |     "BET_mask_": "",
6 | 
7 |     "T1w_roi_restore_debiased_brain_SegmentationPosteriors02_thresh_5tt": "space-orig_desc-5tt_dseg",
8 |     "T1w_roi_corrected_restore_debiased_brain_SegmentationPosteriors02_thresh_5tt": "space-orig_desc-5tt_dseg",
9 | 
"T1w_flirt_res_restore_debiased_brain_Segmentation_1_merged_bin_5tt.nii.gz": "space-native_desc-5tt_dseg", 10 | 11 | "T1w_roi_restore_brain_bin_bin": "space-orig_desc-brain_mask", 12 | "T1w_roi_corrected_restore_brain_bin_bin": "space-orig_desc-brain_mask", 13 | 14 | "T1w_roi_restore_debiased_brain.nii.gz": "space-orig_desc-preproc_desc-brain_T1w.nii.gz", 15 | "T1w_roi_corrected_restore_debiased_brain.nii.gz": "space-orig_desc-preproc_desc-brain_T1w.nii.gz", 16 | 17 | "T1w_roi_restore.nii.gz": "space-orig_desc-preproc_T1w.nii.gz", 18 | "T1w_roi_corrected_restore.nii.gz": "space-orig_desc-preproc_T1w.nii.gz", 19 | 20 | "Segmentation_allineate": "space-template_desc-brain_dseg", 21 | "FLAIR_flirt_flirt": "space-inia19_FLAIR", 22 | 23 | "T1w_":"", 24 | "T2w_":"", 25 | "PDw_":"", 26 | "_aff_":"_", 27 | "_trans":"", 28 | "_roi":"", 29 | "_ROI":"", 30 | "_indexed": "", 31 | "_restore_debiased_brain":"", 32 | "_restore_debiased":"", 33 | "_restore_brain": "", 34 | "_restore":"", 35 | "_noise":"", 36 | "_brain":"", 37 | "_corrected":"", 38 | "_maths":"", 39 | "_masked":"", 40 | "_mask_":"_", 41 | "_merged":"", 42 | "_debiased_brain": "", 43 | "_debiased":"", 44 | "_debias":"", 45 | "_thresh":"", 46 | "_fillh":"", 47 | "_dil":"", 48 | "_bet":"", 49 | "_ero":"", 50 | "_reoriented":"", 51 | "_flirt":"", 52 | "_correct":"", 53 | "_bin": "", 54 | "_res": "", 55 | "_gcc": "", 56 | "_log": "", 57 | "_recip": "", 58 | "_pve": "", 59 | "_autothresh": "", 60 | "_lithresh": "", 61 | "_5tt": "", 62 | "c3": "", 63 | "avg_": "", 64 | "padded_": "", 65 | "BET_": "", 66 | "brain_Segmentation_": "", 67 | "Segmentation_": "" 68 | } 69 | -------------------------------------------------------------------------------- /macapype/utils/tests/test_misc.py: -------------------------------------------------------------------------------- 1 | from nipype.interfaces.base import traits 2 | 3 | from macapype.utils.misc import parse_key 4 | 5 | 6 | def test_parse_key_empty(): 7 | params = {} 8 | 9 | val = parse_key(params, "test") 10 | assert not val 11 | 12 | 13 | def test_parse_key_Undefined(): 14 | params = traits.Undefined 15 | 16 | val = parse_key(params, "test") 17 | assert not val 18 | -------------------------------------------------------------------------------- /macapype/utils/tests/test_utils_bids.py: -------------------------------------------------------------------------------- 1 | 2 | from macapype.utils.utils_bids import create_datasink 3 | 4 | 5 | def test_create_datasink_sub_ses(): 6 | iterables = [('subject', "mysub"), ('session', 'myses')] 7 | datasink = create_datasink(iterables) 8 | print(datasink) 9 | 10 | assert True 11 | 12 | 13 | def test_create_datasink_sub(): 14 | iterables = [('subject', "mysub")] 15 | datasink = create_datasink(iterables) 16 | print(datasink) 17 | 18 | assert True 19 | -------------------------------------------------------------------------------- /macapype/utils/tests/test_utils_nodes.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import os.path as op 3 | from macapype.nodes.extract_brain import T1xT2BET 4 | 5 | from macapype.utils.utils_tests import load_test_data 6 | from macapype.utils.misc import parse_key 7 | 8 | from macapype.utils.utils_nodes import NodeParams, MapNodeParams, ParseParams 9 | 10 | from nipype.interfaces.base import traits 11 | import nipype.interfaces.fsl as fsl 12 | 13 | data_path = load_test_data("data_test_macaque") 14 | 15 | T1_file = op.join(data_path, "sub-Apache_ses-01_T1w.nii") 16 | 
T2_file = op.join(data_path, "sub-Apache_ses-01_T2w.nii") 17 | 18 | 19 | def test_NodeParams_init(): 20 | params = {"t_file": T1_file, "t2_file": T2_file, "aT2": True} 21 | with pytest.raises(AssertionError): 22 | bet_crop = NodeParams(interface=T1xT2BET(), params=params, # noqa 23 | name="bet_crop") 24 | 25 | 26 | def test_NodeParams_load_inputs_from_dict(): 27 | params = {"t_file": T1_file, "t2_file": T2_file, "aT2": True} 28 | bet_crop = NodeParams(interface=T1xT2BET(), name="bet_crop") 29 | with pytest.raises(AssertionError): 30 | bet_crop.load_inputs_from_dict(params) 31 | 32 | 33 | def test_MapNodeParams(): 34 | params = {"crop": {"args": "88 144 14 180 27 103"}} 35 | 36 | crop_bb = MapNodeParams(fsl.ExtractROI(), name='crop_bb', 37 | params=parse_key(params, "crop"), 38 | iterfield=["in_file"]) 39 | 40 | crop_bb.inputs.in_file = [T1_file, T2_file] 41 | 42 | with pytest.raises(ValueError): 43 | crop_bb.run() 44 | 45 | 46 | def test_ParseParams(): 47 | params = { 48 | "sub-01": { 49 | "ses-01": { 50 | "node1": { 51 | "arg": 1}}}} 52 | 53 | # should return non empty dict 54 | key = ("sub-01", "ses-01") 55 | val = ParseParams(params=params, key=key).run().outputs.parsed_params 56 | assert len(val) == 1 57 | 58 | # should return non empty dict 59 | key = "sub-01" 60 | val = ParseParams(params=params, key=key).run().outputs.parsed_params 61 | assert len(list(val.keys())) == 1 62 | 63 | # should return empty dict 64 | key = "sub-02" 65 | val = ParseParams(params=params, key=key).run().outputs.parsed_params 66 | assert len(val) == 0 67 | 68 | # should return empty dict 69 | key = "sub-01" 70 | params = traits.Undefined 71 | val = ParseParams(params=params, key=key).run().outputs.parsed_params 72 | assert len(val) == 0 73 | -------------------------------------------------------------------------------- /macapype/utils/tests/test_utils_tests.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | 4 | import os.path as op 5 | 6 | import pytest 7 | 8 | from macapype.nodes.extract_brain import T1xT2BET 9 | 10 | from macapype.utils.utils_tests import (load_test_data, format_template, 11 | make_tmp_dir) 12 | from macapype.utils.utils_nodes import NodeParams 13 | 14 | 15 | def test_server_amubox(): 16 | tmp_path = make_tmp_dir() 17 | name = op.join(tmp_path, "data_test_macaque.zip") 18 | 19 | code = "BdpeSWACJbrNp8r" 20 | server = "https://amubox.univ-amu.fr" 21 | add = "{}/public.php?service=files&t={}&download".format(server, code) 22 | cmd = "wget --no-check-certificate \"{}\" -O {}".format(add, name) 23 | 24 | os.system(cmd) 25 | 26 | 27 | def test_load_test_data(): 28 | with pytest.raises(AssertionError): 29 | load_test_data("do_not_exists") 30 | 31 | 32 | def test_load_test_data_dataset(): 33 | 34 | template_name = "NMT_v2.0_asym" 35 | nmt_dir = load_test_data(name=template_name) 36 | params_template = format_template(nmt_dir, template_name) 37 | 38 | assert len(params_template) != 0 39 | 40 | 41 | def test_data_test_macaque(): 42 | 43 | data_path = load_test_data("data_test_macaque") 44 | 45 | T1_file = op.join(data_path, "sub-Apache_ses-01_T1w.nii") 46 | T2_file = op.join(data_path, "sub-Apache_ses-01_T2w.nii") 47 | 48 | params = {"t1_file": T1_file, "t2_file": T2_file, "aT2": True} 49 | 50 | bet_crop = NodeParams(interface=T1xT2BET(), params=params, # noqa 51 | name="bet_crop") 52 | 53 | 54 | def test_zenodo_server(): 55 | 56 | test_file = "Juna_Chimp_T1_1mm_skull.nii.gz" 57 | download_file = os.path.abspath(test_file) 58 | 
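    # (record 4683381 below is the same Zenodo entry that
    #  data_test_servers.json maps to "Juna_Chimp_T1_1mm_skull.nii.gz";
    #  load_test_data() builds equivalent URLs from its "cloud_format_3"
    #  template)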
59 | os.system("wget --no-check-certificate --content-disposition \ 60 | https://zenodo.org/record/4683381/files/{}?download=1 \ 61 | -O {}".format(test_file, download_file)) 62 | 63 | assert os.path.exists(download_file) 64 | 65 | os.remove(download_file) 66 | 67 | 68 | def test_load_test_data_zenodo(): 69 | 70 | tmp_path = make_tmp_dir() 71 | name = "Juna_Chimp" 72 | data_path = load_test_data(name=name, path_to=tmp_path) 73 | 74 | print(data_path) 75 | 76 | assert os.path.exists(data_path) 77 | assert len(os.listdir(data_path)) 78 | 79 | shutil.rmtree(data_path) 80 | 81 | 82 | if __name__ == '__main__': 83 | 84 | # test_load_test_data() 85 | # test_data_test_macaque() 86 | 87 | test_zenodo_server() 88 | test_load_test_data_zenodo() 89 | -------------------------------------------------------------------------------- /macapype/utils/utils_bids.py: -------------------------------------------------------------------------------- 1 | import os.path as op 2 | 3 | import json 4 | 5 | from bids.layout import BIDSLayout 6 | 7 | import nipype.interfaces.io as nio 8 | import nipype.pipeline.engine as pe 9 | 10 | from .utils_nodes import BIDSDataGrabberParams 11 | 12 | 13 | def create_datasource(output_query, data_dir, subjects=None, sessions=None, 14 | acquisitions=None, reconstructions=None): 15 | """ Create a datasource node that have iterables following BIDS format """ 16 | bids_datasource = pe.Node( 17 | interface=nio.BIDSDataGrabber(), 18 | name='bids_datasource' 19 | ) 20 | 21 | bids_datasource.inputs.base_dir = data_dir 22 | bids_datasource.inputs.output_query = output_query 23 | 24 | layout = BIDSLayout(data_dir) 25 | 26 | # Verbose 27 | print("BIDS layout:", layout) 28 | print("\t", layout.get_subjects()) 29 | print("\t", layout.get_sessions()) 30 | 31 | if subjects is None: 32 | subjects = layout.get_subjects() 33 | 34 | if sessions is None: 35 | sessions = layout.get_sessions() 36 | 37 | iterables = [] 38 | iterables.append(('subject', subjects)) 39 | 40 | if sessions != []: 41 | iterables.append(('session', sessions)) 42 | 43 | if acquisitions is not None: 44 | iterables.append(('acquisition', acquisitions)) 45 | 46 | if reconstructions is not None: 47 | iterables.append(('reconstruction', reconstructions)) 48 | 49 | bids_datasource.iterables = iterables 50 | 51 | return bids_datasource 52 | 53 | 54 | def create_datasource_indiv_params(output_query, data_dir, indiv_params, 55 | subjects=None, sessions=None, 56 | acquisitions=None, reconstructions=None): 57 | """ Create a datasource node that have iterables following BIDS format, 58 | including a indiv_params file""" 59 | 60 | bids_datasource = pe.Node( 61 | interface=BIDSDataGrabberParams(indiv_params), 62 | name='bids_datasource' 63 | ) 64 | 65 | bids_datasource.inputs.base_dir = data_dir 66 | bids_datasource.inputs.output_query = output_query 67 | 68 | layout = BIDSLayout(data_dir) 69 | 70 | # Verbose 71 | print("BIDS layout:", layout) 72 | print("\t", layout.get_subjects()) 73 | print("\t", layout.get_sessions()) 74 | 75 | if subjects is None: 76 | subjects = layout.get_subjects() 77 | 78 | if sessions is None: 79 | sessions = layout.get_sessions() 80 | 81 | iterables = [] 82 | iterables.append(('subject', subjects)) 83 | 84 | if sessions != []: 85 | iterables.append(('session', sessions)) 86 | 87 | if acquisitions is not None: 88 | iterables.append(('acquisition', acquisitions)) 89 | 90 | if reconstructions is not None: 91 | iterables.append(('reconstruction', reconstructions)) 92 | 93 | bids_datasource.iterables = iterables 94 | 
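    # A sketch of the indiv_params dict expected here, keyed the way
    # BIDSDataGrabberParams (utils_nodes.py) will look it up, i.e.
    # ("sub-<subject>", "ses-<session>"); the node and parameter names
    # shown are illustrative only:
    #
    #     indiv_params = {
    #         "sub-Apache": {
    #             "ses-01": {
    #                 "crop": {"args": "88 144 14 180 27 103"}
    #             }
    #         }
    #     }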
95 | return bids_datasource 96 | 97 | 98 | def create_datasink(iterables, name="output", params_subs={}, 99 | params_regex_subs={}): 100 | """ 101 | Description: reformating relevant outputs 102 | """ 103 | 104 | print("Datasink name: ", name) 105 | 106 | datasink = pe.Node(nio.DataSink(container=name), 107 | name='datasink') 108 | 109 | print(iterables) 110 | 111 | if len(iterables) == 1: 112 | if iterables[0][0] == 'subject': 113 | subjFolders = [ 114 | ('_subject_%s' % (sub), 115 | 'sub-%s/anat' % (sub)) for sub in iterables[0][1]] 116 | elif len(iterables) > 1: 117 | if iterables[0][0] == 'subject' and iterables[1][0] == 'session': 118 | subjFolders = [ 119 | ('_session_%s_subject_%s' % (ses, sub), 120 | 'sub-%s/ses-%s/anat' % (sub, ses)) for ses in iterables[1][1] 121 | for sub in iterables[0][1]] 122 | 123 | # subs 124 | json_subs = op.join(op.dirname(op.abspath(__file__)), 125 | "subs.json") 126 | 127 | dict_subs = json.load(open(json_subs)) 128 | 129 | dict_subs.update(params_subs) 130 | 131 | print(dict_subs) 132 | 133 | subs = [(key, value) for key, value in dict_subs.items()] 134 | 135 | subjFolders.extend(subs) 136 | 137 | print(subjFolders) 138 | 139 | datasink.inputs.substitutions = subjFolders 140 | 141 | # regex_subs 142 | json_regex_subs = op.join(op.dirname(op.abspath(__file__)), 143 | "regex_subs.json") 144 | 145 | dict_regex_subs = json.load(open(json_regex_subs)) 146 | 147 | dict_regex_subs.update(params_regex_subs) 148 | 149 | regex_subs = [(key, value) for key, value in dict_regex_subs.items()] 150 | 151 | datasink.inputs.regexp_substitutions = regex_subs 152 | 153 | return datasink 154 | -------------------------------------------------------------------------------- /macapype/utils/utils_nodes.py: -------------------------------------------------------------------------------- 1 | from nipype.pipeline.engine import Node, MapNode 2 | from nipype.interfaces.io import BIDSDataGrabber 3 | from .misc import parse_key 4 | 5 | from nipype.interfaces.base import (TraitedSpec, traits, BaseInterface, 6 | BaseInterfaceInputSpec, isdefined) 7 | 8 | 9 | def node_output_exists(node, output_name): 10 | return hasattr(node.outputs, output_name) 11 | 12 | 13 | class NodeParams(Node): 14 | 15 | """ 16 | Overloading of the class nodes for aloowing params reading directly from 17 | a dictionnary; ultimately should be added to nipype if required 18 | """ 19 | def __init__( 20 | self, 21 | interface, 22 | name, 23 | params={}): 24 | 25 | super(NodeParams, self).__init__(interface=interface, name=name) 26 | 27 | self.load_inputs_from_dict(params) 28 | 29 | def load_inputs_from_dict(self, params, overwrite=True): 30 | 31 | def_inputs = [] 32 | if not overwrite: 33 | def_inputs = list(self.inputs.get_traitsfree().keys()) 34 | 35 | new_inputs = list(set(list(params.keys())) - set(def_inputs)) 36 | 37 | for key in new_inputs: 38 | assert hasattr(self._interface.inputs, key), \ 39 | print("Warning, Could not find {} in inputs {} for node {}". 
40 | format(key, self._interface.inputs, self._name)) 41 | setattr(self._interface.inputs, key, params[key]) 42 | 43 | def _check_inputs(self, parameter): 44 | if parameter == "indiv_params": 45 | print("**** checking for indiv_params****") 46 | return True 47 | else: 48 | return super(NodeParams, self)._check_inputs(parameter=parameter) 49 | 50 | def set_input(self, parameter, val): 51 | if parameter == "indiv_params": 52 | if isdefined(val): 53 | print("\n* setting indiv_params for {} *".format(self.name)) 54 | print(val, "\n") 55 | self.load_inputs_from_dict(val) 56 | else: 57 | print("\n**** indiv_params is undefined ****") 58 | else: 59 | super(NodeParams, self).set_input(parameter=parameter, val=val) 60 | 61 | 62 | class MapNodeParams(MapNode): 63 | 64 | """ 65 | Overloading of the class nodes for aloowing params reading directly from 66 | a dictionnary; ultimately should be added to nipype if required 67 | """ 68 | def __init__( 69 | self, 70 | interface, 71 | name, 72 | iterfield, 73 | params={}): 74 | 75 | iterfield.extend(list(params.keys())) 76 | super(MapNodeParams, self).__init__(interface=interface, name=name, 77 | iterfield=iterfield) 78 | 79 | self.load_inputs_from_dict(params) 80 | 81 | def load_inputs_from_dict(self, params, overwrite=True): 82 | 83 | def_inputs = [] 84 | if not overwrite: 85 | def_inputs = list(self.inputs.get_traitsfree().keys()) 86 | 87 | new_inputs = list(set(list(params.keys())) - set(def_inputs)) 88 | 89 | for key in new_inputs: 90 | assert hasattr(self.inputs, key), \ 91 | print("Warning, Could not find {} in inputs {} for node {}". 92 | format(key, self._interface.inputs, self._name)) 93 | setattr(self.inputs, key, params[key]) 94 | 95 | def _check_inputs(self, parameter): 96 | if parameter == "indiv_params": 97 | print("**** checking for indiv_params****") 98 | return True 99 | else: 100 | return super(MapNodeParams, self)._check_inputs( 101 | parameter=parameter) 102 | 103 | def set_input(self, parameter, val): 104 | if parameter == "indiv_params": 105 | print("**** setting indiv_params****") 106 | print(val) 107 | self.load_inputs_from_dict(val) 108 | else: 109 | super(MapNodeParams, self).set_input(parameter=parameter, val=val) 110 | 111 | 112 | class BIDSDataGrabberParams(BIDSDataGrabber): 113 | def __init__(self, indiv_params={}, **kwargs): 114 | super(BIDSDataGrabberParams, self).__init__(**kwargs) 115 | self._indiv_params = indiv_params 116 | 117 | def _set_indiv_params(self, outputs): 118 | 119 | assert "subject" in self._infields, \ 120 | "Error, subject should be defined as iterables" 121 | 122 | print(getattr(self.inputs, "session")) 123 | if isdefined(getattr(self.inputs, "session")): 124 | print("session is defined, adding") 125 | keys = ("sub-" + getattr(self.inputs, "subject"), 126 | "ses-" + getattr(self.inputs, "session")) 127 | 128 | else: 129 | print("no session was defined, skipping for key") 130 | keys = ("sub-" + getattr(self.inputs, "subject")) 131 | 132 | print("In BIDSDataGrabberParams") 133 | print(self._indiv_params) 134 | 135 | outputs["indiv_params"] = parse_key(self._indiv_params, keys) 136 | print(outputs["indiv_params"]) 137 | 138 | return outputs 139 | 140 | def _list_outputs(self): 141 | outputs = super(BIDSDataGrabberParams, self)._list_outputs() 142 | outputs = self._set_indiv_params(outputs) 143 | 144 | return outputs 145 | 146 | 147 | ############################################################################### 148 | class ParseParamsInputSpec(BaseInterfaceInputSpec): 149 | 150 | params = traits.Dict( 151 | 
desc='Dictionnary to tap from') 152 | 153 | key = traits.Either( 154 | traits.String(), 155 | traits.Tuple(), 156 | desc='which key to tap from') 157 | 158 | 159 | class ParseParamsOutputSpec(TraitedSpec): 160 | 161 | parsed_params = traits.Dict( 162 | desc="Part of the dict with key" 163 | ) 164 | 165 | 166 | class ParseParams(BaseInterface): 167 | """from a dict, give a sub dict corresponding to key 168 | 169 | Inputs 170 | -------- 171 | params: 172 | Dict, 'Dictionnary to tap from') 173 | 174 | key: 175 | Tuple of String, 'which key to tap from') 176 | 177 | 178 | 179 | Outputs 180 | --------- 181 | parsed_params = traits.Dict( 182 | desc="Part of the dict with key" 183 | 184 | """ 185 | input_spec = ParseParamsInputSpec 186 | output_spec = ParseParamsOutputSpec 187 | 188 | def _run_interface(self, runtime): 189 | 190 | params = self.inputs.params 191 | key = self.inputs.key 192 | 193 | self.parsed_params = parse_key(params, key) 194 | return runtime 195 | 196 | def _list_outputs(self): 197 | outputs = self._outputs().get() 198 | outputs["parsed_params"] = self.parsed_params 199 | 200 | return outputs 201 | -------------------------------------------------------------------------------- /macapype/utils/utils_spm.py: -------------------------------------------------------------------------------- 1 | import os 2 | import nipype.interfaces.spm as spm 3 | from nipype.interfaces.matlab import get_matlab_command 4 | 5 | 6 | def set_spm(): 7 | 8 | spm.SPMCommand.set_mlab_paths(matlab_cmd='matlab -nodesktop -nosplash') 9 | 10 | if get_matlab_command() is None: 11 | print("could not find matlab, will try with mcr_spm version") 12 | 13 | try: 14 | print(os.environ) 15 | print(os.environ["SPM_DIR"]) 16 | print(os.environ["SPM_VERSION"]) 17 | print(os.environ["MCR_VERSION"]) 18 | 19 | spm_dir = os.environ["SPM_DIR"] 20 | spm_ver = os.environ["SPM_VERSION"] 21 | mcr_version = os.environ["MCR_VERSION"] 22 | 23 | print("OK, SPM {} MCR version {} was found".format( 24 | spm_ver, mcr_version)) 25 | 26 | spm_cmd = 'bash {}/run_spm{}.sh /opt/mcr/{} script'.format( 27 | spm_dir, spm_ver, mcr_version) 28 | print(spm_cmd) 29 | 30 | spm.SPMCommand.set_mlab_paths(matlab_cmd=spm_cmd, use_mcr=True) 31 | return True 32 | 33 | except KeyError: 34 | print("Error, could not find SPM or MCR environement") 35 | 36 | print("Going for octave; still testing") 37 | 38 | # assert os.path.exists('/opt/spm12') 39 | 40 | spm.SPMCommand.set_mlab_paths( 41 | matlab_cmd='octave --no-window-system --no-gui --braindead', 42 | use_mcr=True) 43 | 44 | return True 45 | 46 | else: 47 | print("OK, matlab was found") 48 | return True 49 | -------------------------------------------------------------------------------- /macapype/utils/utils_tests.py: -------------------------------------------------------------------------------- 1 | """ 2 | Support function for loading test datasets 3 | """ 4 | import os 5 | import os.path as op 6 | 7 | import shutil 8 | import json 9 | import subprocess 10 | 11 | 12 | def _download_data_zip(data_zip, name): 13 | 14 | json_data = op.join(op.dirname(op.abspath(__file__)), 15 | "data_test_servers.json") 16 | 17 | data_dict = json.load(open(json_data)) 18 | 19 | for key, cloud_elem in data_dict.items(): 20 | print(key) 21 | 22 | data_dir = cloud_elem["data_dir"] 23 | 24 | if name not in data_dir.keys(): 25 | print("{} not found in {}".format(name, key)) 26 | continue 27 | server = cloud_elem["server"] 28 | 29 | if "curl_cloud_format" in list(cloud_elem.keys()): 30 | oc_path = 
cloud_elem["curl_cloud_format"].format(server, 31 | data_dir[name]) 32 | 33 | cmd = 'curl {} --output {} '.format(oc_path, data_zip) 34 | 35 | else: 36 | if "cloud_format" in list(cloud_elem.keys()): 37 | oc_path = cloud_elem["cloud_format"].format(server, 38 | data_dir[name]) 39 | elif "cloud_format_3" in list(cloud_elem.keys()): 40 | oc_path = cloud_elem["cloud_format_3"].format( 41 | server, data_dir[name], name) 42 | 43 | cmd = 'wget --no-check-certificate \ 44 | --content-disposition {} -O {} '.format(oc_path, data_zip) 45 | 46 | val = subprocess.call(cmd.split()) 47 | 48 | if val: 49 | print("Error with {} for {}".format(cmd, key)) 50 | continue 51 | 52 | if op.exists(data_zip): 53 | print(os.listdir(op.split(data_zip)[0])) 54 | 55 | print("Ok for download {} with {}".format(data_zip, key)) 56 | print("Quitting download function") 57 | 58 | return True 59 | 60 | assert op.exists(data_zip), \ 61 | "Error, data_zip = {} not found ".format(data_zip) 62 | 63 | return False 64 | 65 | 66 | def load_test_data(name, path_to=""): 67 | """ Load test data, template and needed scripts """ 68 | 69 | if path_to == "": 70 | path_to = op.expanduser("~") 71 | 72 | assert op.exists(path_to), "Breaking, {} do not exist".format(path_to) 73 | 74 | data_dirpath = op.join(path_to, "data_macapype") 75 | 76 | try: 77 | os.makedirs(data_dirpath) 78 | except OSError: 79 | print("data_dirpath {} already exists".format(data_dirpath)) 80 | 81 | data_path = op.join(data_dirpath, name) 82 | 83 | if op.exists(data_path): 84 | print("{} Already exists, skipping download".format(data_path)) 85 | return data_path 86 | 87 | data_zip = op.join(data_dirpath, "{}.zip".format(name)) 88 | 89 | if not op.exists(data_zip): 90 | 91 | print("Download {}".format(data_zip)) 92 | 93 | val = _download_data_zip(data_zip, name) 94 | 95 | assert val, "Error, cannot download {}".format(data_zip) 96 | 97 | assert op.exists(data_zip), "Error, cannot find {}".format(data_zip) 98 | 99 | os.system("ls -l {}".format(data_dirpath)) 100 | print("Unzip {} to {}".format(data_zip, data_path)) 101 | os.system("unzip -o {} -d {}".format(data_zip, data_path)) 102 | os.remove(data_zip) 103 | 104 | assert op.exists(data_path), "Error, cannot find {}".format(data_path) 105 | 106 | return data_path 107 | 108 | 109 | def format_template(data_path, template_name): 110 | 111 | import json 112 | 113 | json_template = op.join(op.dirname(op.abspath(__file__)), 114 | "templates.json") 115 | 116 | template_path_dict = json.load(open(json_template)) 117 | 118 | assert template_name in template_path_dict.keys(), \ 119 | "Error, could not find template formating for {} in {}".format( 120 | template_name, template_path_dict.keys()) 121 | template_dict = template_path_dict[template_name] 122 | print("Found template formating for {}:".format(template_name)) 123 | print(template_dict) 124 | 125 | for key, value in template_dict.items(): 126 | template_file = op.join(data_path, value) 127 | assert op.exists(template_file), "Error, file {} is missing".format( 128 | template_file) 129 | 130 | template_dict[key] = template_file 131 | 132 | return template_dict 133 | 134 | 135 | def make_tmp_dir(): 136 | tmp_dir = "/tmp/test_macapype" 137 | if op.exists(tmp_dir): 138 | shutil.rmtree(tmp_dir) 139 | os.makedirs(tmp_dir) 140 | os.chdir(tmp_dir) 141 | 142 | return tmp_dir 143 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | 
requires = [
3 |     "setuptools", "wheel"
4 | ]
5 | build-backend = "setuptools.build_meta"
6 | 
7 | [project]
8 | dynamic = ['version']
9 | name = "macapype"
10 | maintainers = [{name = "Macapype developers"}]
11 | authors = [{name = "Macatools team"}]
12 | description = "Pipeline for anatomical processing for macaque"
13 | readme = {content-type = "text/markdown", file = "README.md"}
14 | requires-python = ">= 3.10"
15 | license = {text = "BSD-3-Clause"}
16 | 
17 | dependencies = [
18 |     "nipype",
19 |     "rdflib==6.3.1",
20 |     "pandas==2.2.3",
21 |     "matplotlib",
22 |     "nilearn",
23 |     "networkx",
24 |     "pybids",
25 |     "scikit-image",
26 |     "nibabel",
27 |     "numpy",
28 |     "SimpleITK"
29 | ]
30 | 
31 | [project.optional-dependencies]
32 | # Dependencies for building the documentation
33 | doc_deps = [
34 |     "sphinx",
35 |     "sphinx-gallery",
36 |     "sphinx_bootstrap_theme",
37 |     "numpydoc",
38 |     "sphinxcontrib-fulltoc"
39 | ]
40 | 
41 | # Dependencies for tests
42 | test_deps = [
43 |     "pytest",
44 |     "pytest-cov",
45 |     "codecov",
46 | ]
47 | flake_deps = [
48 |     "flake8"
49 | ]
50 | 
51 | # real calls
52 | test = ["macapype[test_deps, flake_deps]"]
53 | doc = ["macapype[flake_deps, test_deps, doc_deps]"]
54 | 
55 | [project.scripts]
56 | segment_pnh = "workflows.segment_pnh:main"
57 | 
58 | 
59 | 
60 | 
61 | [tool.setuptools.packages]
62 | find = {}  # Scanning implicit namespaces is active by default
63 | 
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
3 | # vi: set ft=python sts=4 ts=4 sw=4 et:
4 | """Macapype: setup script (the version is read from macapype/_version.py)
5 | 
6 | """
7 | import sys
8 | from setuptools import setup
9 | 
10 | import re
11 | 
12 | def _get_version():
13 | 
14 |     verstr = "unknown"
15 |     try:
16 |         verstrline = open('macapype/_version.py', "rt").read()
17 |     except EnvironmentError:
18 |         pass  # Okay, there is no version file.
19 | else: 20 | VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]" 21 | mo = re.search(VSRE, verstrline, re.M) 22 | if mo: 23 | verstr = mo.group(1) 24 | else: 25 | raise RuntimeError("unable to find version in yourpackage/_version.py") 26 | return verstr 27 | 28 | if __name__ == "__main__": 29 | setup( 30 | version=_get_version() 31 | ) 32 | -------------------------------------------------------------------------------- /workflows/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Macatools/macapype/38d6356305cfc6d5668f6cf422e70a85843007bf/workflows/__init__.py -------------------------------------------------------------------------------- /workflows/params_segment_baboon0_ants.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "BaBa21-0" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "use_T2":{}, 9 | "aladin_T2_on_T1":{}, 10 | "crop_aladin_pipe": 11 | { 12 | "reg_T1_on_template": 13 | { 14 | "rig_only_flag":true 15 | }, 16 | "reg_T1_on_template2": 17 | { 18 | "rig_only_flag":true 19 | } 20 | }, 21 | "denoise": 22 | { 23 | }, 24 | "N4debias": 25 | { 26 | "dimension": 3, 27 | "bspline_fitting_distance": 200, 28 | "n_iterations": [50, 50, 40, 30], 29 | "convergence_threshold": 0.00000001, 30 | "shrink_factor": 2, 31 | "args": "-r 0 --verbose 1" 32 | } 33 | }, 34 | "extract_pipe": 35 | { 36 | "use_T2":{}, 37 | "atlas_brex": 38 | { 39 | "f": 0.7, 40 | "reg": 1, 41 | "msk": "b,0.5,0,0", 42 | "wrp": "10,10,10", 43 | "dil": 1, 44 | "nrm": 1 45 | } 46 | }, 47 | "debias": 48 | { 49 | "s": 2 50 | }, 51 | "brain_segment_pipe": 52 | { 53 | "use_T2": {}, 54 | "reg": 55 | { 56 | "n": 2, 57 | "m": "ref", 58 | "dof": 12 59 | }, 60 | "segment_atropos_pipe": 61 | { 62 | "use_priors": 0.0, 63 | "Atropos": 64 | { 65 | "dimension": 3 66 | }, 67 | "tissue_dict": 68 | { 69 | "gm": 2, 70 | "wm": 3, 71 | "csf": 1 72 | } 73 | }, 74 | "export_5tt_pipe": 75 | { 76 | }, 77 | "IsoSurface_brain_pipe": 78 | { 79 | "merge_brain_tissues": 80 | { 81 | "keep_indexes": [1,2,3] 82 | } 83 | } 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /workflows/params_segment_baboon0_ants_4animal.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "BaBa21-0" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "use_T2":{}, 9 | "aladin_T2_on_T1":{}, 10 | "crop_aladin_pipe": 11 | { 12 | "reg_T1_on_template": 13 | { 14 | "rig_only_flag":true 15 | }, 16 | "reg_T1_on_template2": 17 | { 18 | "rig_only_flag":true 19 | } 20 | }, 21 | "denoise": 22 | { 23 | }, 24 | "N4debias": 25 | { 26 | "dimension": 3, 27 | "bspline_fitting_distance": 200, 28 | "n_iterations": [50, 50, 40, 30], 29 | "convergence_threshold": 0.00000001, 30 | "shrink_factor": 2, 31 | "args": "-r 0 --verbose 1" 32 | } 33 | }, 34 | "extract_pipe": 35 | { 36 | "use_T2":{}, 37 | 38 | "bet4animal": 39 | { 40 | "f": 0.65, 41 | "label": 2, 42 | "robust": true 43 | } 44 | }, 45 | "debias": 46 | { 47 | "s": 2 48 | }, 49 | "brain_segment_pipe": 50 | { 51 | "use_T2": {}, 52 | "reg": 53 | { 54 | "n": 2, 55 | "m": "ref", 56 | "dof": 12 57 | }, 58 | "segment_atropos_pipe": 59 | { 60 | "use_priors": 0.0, 61 | "Atropos": 62 | { 63 | "dimension": 3 64 | }, 65 | "tissue_dict": 66 | { 67 | "gm": 2, 68 | "wm": 3, 69 | "csf": 1 70 | } 71 | }, 72 | "export_5tt_pipe": 73 | { 74 | }, 75 | "IsoSurface_brain_pipe": 76 | { 77 
| "merge_brain_tissues": 78 | { 79 | "keep_indexes": [1,2,3] 80 | } 81 | } 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /workflows/params_segment_baboon1_0p6_ants.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "BaBa21-1_0p6" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "N4debias": 24 | { 25 | "dimension": 3, 26 | "bspline_fitting_distance": 200, 27 | "n_iterations": [50, 50, 40, 30], 28 | "convergence_threshold": 0.00000001, 29 | "shrink_factor": 2, 30 | "args": "-r 0 --verbose 1" 31 | } 32 | }, 33 | "extract_pipe": 34 | { 35 | "atlas_brex": 36 | { 37 | "f": 0.7, 38 | "reg": 1, 39 | "msk": "b,0.5,0,0", 40 | "wrp": "10,10,10", 41 | "dil": 1, 42 | "nrm": 1 43 | } 44 | }, 45 | "debias": 46 | { 47 | "s": 2 48 | }, 49 | "brain_segment_pipe": 50 | { 51 | "reg": 52 | { 53 | "n": 2, 54 | "m": "ref", 55 | "dof": 12 56 | }, 57 | "segment_atropos_pipe": 58 | { 59 | "use_priors": 0.0, 60 | "Atropos": 61 | { 62 | "dimension": 3 63 | }, 64 | "tissue_dict": 65 | { 66 | "gm": 2, 67 | "wm": 3, 68 | "csf": 1 69 | } 70 | }, 71 | "export_5tt_pipe": 72 | { 73 | }, 74 | "IsoSurface_brain_pipe": 75 | { 76 | "merge_brain_tissues": 77 | { 78 | "keep_indexes": [1,2,3] 79 | } 80 | } 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /workflows/params_segment_baboon1_0p6_ants_4animal.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "BaBa21-1_0p6" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "N4debias": 24 | { 25 | "dimension": 3, 26 | "bspline_fitting_distance": 200, 27 | "n_iterations": [50, 50, 40, 30], 28 | "convergence_threshold": 0.00000001, 29 | "shrink_factor": 2, 30 | "args": "-r 0 --verbose 1" 31 | } 32 | }, 33 | "extract_pipe": 34 | { 35 | "bet4animal": 36 | { 37 | "f": 0.5, 38 | "label": 2, 39 | "robust": true 40 | } 41 | }, 42 | "debias": 43 | { 44 | "s": 2 45 | }, 46 | "brain_segment_pipe": 47 | { 48 | "reg": 49 | { 50 | "n": 2, 51 | "m": "ref", 52 | "dof": 12 53 | }, 54 | "segment_atropos_pipe": 55 | { 56 | "use_priors": 0.0, 57 | "Atropos": 58 | { 59 | "dimension": 3 60 | }, 61 | "tissue_dict": 62 | { 63 | "gm": 2, 64 | "wm": 3, 65 | "csf": 1 66 | } 67 | }, 68 | "export_5tt_pipe": 69 | { 70 | }, 71 | "IsoSurface_brain_pipe": 72 | { 73 | "merge_brain_tissues": 74 | { 75 | "keep_indexes": [1,2,3] 76 | } 77 | } 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /workflows/params_segment_baboon1_ants.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "BaBa21-1" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | 
"N4debias": 24 | { 25 | "dimension": 3, 26 | "bspline_fitting_distance": 200, 27 | "n_iterations": [50, 50, 40, 30], 28 | "convergence_threshold": 0.00000001, 29 | "shrink_factor": 2, 30 | "args": "-r 0 --verbose 1" 31 | } 32 | }, 33 | "extract_pipe": 34 | { 35 | "atlas_brex": 36 | { 37 | "f": 0.7, 38 | "reg": 1, 39 | "msk": "b,0.5,0,0", 40 | "wrp": "10,10,10", 41 | "dil": 1, 42 | "nrm": 1 43 | } 44 | }, 45 | "debias": 46 | { 47 | "s": 2 48 | }, 49 | "brain_segment_pipe": 50 | { 51 | "reg": 52 | { 53 | "n": 2, 54 | "m": "ref", 55 | "dof": 12 56 | }, 57 | "segment_atropos_pipe": 58 | { 59 | "use_priors": 0.0, 60 | "Atropos": 61 | { 62 | "dimension": 3 63 | }, 64 | "tissue_dict": 65 | { 66 | "gm": 2, 67 | "wm": 3, 68 | "csf": 1 69 | } 70 | }, 71 | "export_5tt_pipe": 72 | { 73 | }, 74 | "IsoSurface_brain_pipe": 75 | { 76 | "merge_brain_tissues": 77 | { 78 | "keep_indexes": [1,2,3] 79 | } 80 | } 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /workflows/params_segment_baboon1_ants_4animal.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "BaBa21-1" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "N4debias": 24 | { 25 | "dimension": 3, 26 | "bspline_fitting_distance": 200, 27 | "n_iterations": [50, 50, 40, 30], 28 | "convergence_threshold": 0.00000001, 29 | "shrink_factor": 2, 30 | "args": "-r 0 --verbose 1" 31 | } 32 | }, 33 | "extract_pipe": 34 | { 35 | "bet4animal": 36 | { 37 | "f": 0.5, 38 | "label": 2, 39 | "robust": true 40 | } 41 | }, 42 | "debias": 43 | { 44 | "s": 2 45 | }, 46 | "brain_segment_pipe": 47 | { 48 | "reg": 49 | { 50 | "n": 2, 51 | "m": "ref", 52 | "dof": 12 53 | }, 54 | "segment_atropos_pipe": 55 | { 56 | "use_priors": 0.0, 57 | "Atropos": 58 | { 59 | "dimension": 3 60 | }, 61 | "tissue_dict": 62 | { 63 | "gm": 2, 64 | "wm": 3, 65 | "csf": 1 66 | } 67 | }, 68 | "export_5tt_pipe": 69 | { 70 | }, 71 | "IsoSurface_brain_pipe": 72 | { 73 | "merge_brain_tissues": 74 | { 75 | "keep_indexes": [1,2,3] 76 | } 77 | } 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /workflows/params_segment_baboon2_0p6_ants.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "BaBa21-2_0p6" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "N4debias": 24 | { 25 | "dimension": 3, 26 | "bspline_fitting_distance": 200, 27 | "n_iterations": [50, 50, 40, 30], 28 | "convergence_threshold": 0.00000001, 29 | "shrink_factor": 2, 30 | "args": "-r 0 --verbose 1" 31 | } 32 | }, 33 | "extract_pipe": 34 | { 35 | "atlas_brex": 36 | { 37 | "f": 0.7, 38 | "reg": 1, 39 | "msk": "b,0.5,0,0", 40 | "wrp": "10,10,10", 41 | "dil": 1, 42 | "nrm": 1 43 | } 44 | }, 45 | "debias": 46 | { 47 | "s": 2 48 | }, 49 | "brain_segment_pipe": 50 | { 51 | "reg": 52 | { 53 | "n": 2, 54 | "m": "ref", 55 | "dof": 12 56 | }, 57 | "segment_atropos_pipe": 58 | { 59 | "use_priors": 0.0, 60 | "Atropos": 61 | { 62 | 
"dimension": 3 63 | }, 64 | "tissue_dict": 65 | { 66 | "gm": 2, 67 | "wm": 3, 68 | "csf": 1 69 | } 70 | }, 71 | "export_5tt_pipe": 72 | { 73 | }, 74 | "IsoSurface_brain_pipe": 75 | { 76 | "merge_brain_tissues": 77 | { 78 | "keep_indexes": [1,2,3] 79 | } 80 | } 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /workflows/params_segment_baboon2_0p6_ants_4animal.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "BaBa21-2_0p6" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "N4debias": 24 | { 25 | "dimension": 3, 26 | "bspline_fitting_distance": 200, 27 | "n_iterations": [50, 50, 40, 30], 28 | "convergence_threshold": 0.00000001, 29 | "shrink_factor": 2, 30 | "args": "-r 0 --verbose 1" 31 | } 32 | }, 33 | "extract_pipe": 34 | { 35 | "bet4animal": 36 | { 37 | "f": 0.5, 38 | "label": 2, 39 | "robust": true 40 | } 41 | }, 42 | "debias": 43 | { 44 | "s": 2 45 | }, 46 | "brain_segment_pipe": 47 | { 48 | "reg": 49 | { 50 | "n": 2, 51 | "m": "ref", 52 | "dof": 12 53 | }, 54 | "segment_atropos_pipe": 55 | { 56 | "use_priors": 0.0, 57 | "Atropos": 58 | { 59 | "dimension": 3 60 | }, 61 | "tissue_dict": 62 | { 63 | "gm": 2, 64 | "wm": 3, 65 | "csf": 1 66 | } 67 | }, 68 | "export_5tt_pipe": 69 | { 70 | }, 71 | "IsoSurface_brain_pipe": 72 | { 73 | "merge_brain_tissues": 74 | { 75 | "keep_indexes": [1,2,3] 76 | } 77 | } 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /workflows/params_segment_baboon2_ants.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "BaBa21-2" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "N4debias": 24 | { 25 | "dimension": 3, 26 | "bspline_fitting_distance": 200, 27 | "n_iterations": [50, 50, 40, 30], 28 | "convergence_threshold": 0.00000001, 29 | "shrink_factor": 2, 30 | "args": "-r 0 --verbose 1" 31 | } 32 | }, 33 | "extract_pipe": 34 | { 35 | "atlas_brex": 36 | { 37 | "f": 0.7, 38 | "reg": 1, 39 | "msk": "b,0.5,0,0", 40 | "wrp": "10,10,10", 41 | "dil": 1, 42 | "nrm": 1 43 | } 44 | }, 45 | "debias": 46 | { 47 | "s": 2 48 | }, 49 | "brain_segment_pipe": 50 | { 51 | "reg": 52 | { 53 | "n": 2, 54 | "m": "ref", 55 | "dof": 12 56 | }, 57 | "segment_atropos_pipe": 58 | { 59 | "use_priors": 0.0, 60 | "Atropos": 61 | { 62 | "dimension": 3 63 | }, 64 | "tissue_dict": 65 | { 66 | "gm": 2, 67 | "wm": 3, 68 | "csf": 1 69 | } 70 | }, 71 | "export_5tt_pipe": 72 | { 73 | }, 74 | "IsoSurface_brain_pipe": 75 | { 76 | "merge_brain_tissues": 77 | { 78 | "keep_indexes": [1,2,3] 79 | } 80 | } 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /workflows/params_segment_baboon2_ants_4animal.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "BaBa21-2" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 
11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "N4debias": 24 | { 25 | "dimension": 3, 26 | "bspline_fitting_distance": 200, 27 | "n_iterations": [50, 50, 40, 30], 28 | "convergence_threshold": 0.00000001, 29 | "shrink_factor": 2, 30 | "args": "-r 0 --verbose 1" 31 | } 32 | }, 33 | "extract_pipe": 34 | { 35 | "bet4animal": 36 | { 37 | "f": 0.5, 38 | "label": 2, 39 | "robust": true 40 | } 41 | }, 42 | "debias": 43 | { 44 | "s": 2 45 | }, 46 | "brain_segment_pipe": 47 | { 48 | "reg": 49 | { 50 | "n": 2, 51 | "m": "ref", 52 | "dof": 12 53 | }, 54 | "segment_atropos_pipe": 55 | { 56 | "use_priors": 0.0, 57 | "Atropos": 58 | { 59 | "dimension": 3 60 | }, 61 | "tissue_dict": 62 | { 63 | "gm": 2, 64 | "wm": 3, 65 | "csf": 1 66 | } 67 | }, 68 | "export_5tt_pipe": 69 | { 70 | }, 71 | "IsoSurface_brain_pipe": 72 | { 73 | "merge_brain_tissues": 74 | { 75 | "keep_indexes": [1,2,3] 76 | } 77 | } 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /workflows/params_segment_baboon2_ants_quick.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "BaBa21-2" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "N4debias": 24 | { 25 | "dimension": 3, 26 | "bspline_fitting_distance": 200, 27 | "n_iterations": [50, 50, 40, 30], 28 | "convergence_threshold": 0.00000001, 29 | "shrink_factor": 2, 30 | "args": "-r 0 --verbose 1" 31 | } 32 | }, 33 | "extract_pipe": 34 | { 35 | "hdbet": 36 | { 37 | 38 | } 39 | }, 40 | "debias": 41 | { 42 | "s": 2 43 | }, 44 | "brain_segment_pipe": 45 | { 46 | "reg": 47 | { 48 | "n": 2, 49 | "m": "ref", 50 | "dof": 12 51 | }, 52 | "segment_atropos_pipe": 53 | { 54 | "use_priors": 0.0, 55 | "Atropos": 56 | { 57 | "dimension": 3 58 | }, 59 | "tissue_dict": 60 | { 61 | "gm": 2, 62 | "wm": 3, 63 | "csf": 1 64 | } 65 | }, 66 | "export_5tt_pipe": 67 | { 68 | }, 69 | "IsoSurface_brain_pipe": 70 | { 71 | "merge_brain_tissues": 72 | { 73 | "keep_indexes": [1,2,3] 74 | } 75 | } 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /workflows/params_segment_baboon3_0p6_ants.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "BaBa21-3_0p6" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "N4debias": 24 | { 25 | "dimension": 3, 26 | "bspline_fitting_distance": 200, 27 | "n_iterations": [50, 50, 40, 30], 28 | "convergence_threshold": 0.00000001, 29 | "shrink_factor": 2, 30 | "args": "-r 0 --verbose 1" 31 | } 32 | }, 33 | "extract_pipe": 34 | { 35 | "atlas_brex": 36 | { 37 | "f": 0.7, 38 | "reg": 1, 39 | "msk": "b,0.5,0,0", 40 | "wrp": "10,10,10", 41 | "dil": 1, 42 | "nrm": 1 43 | 44 | } 45 | }, 46 | "debias": 47 | { 48 | "s": 2 49 | }, 50 | "brain_segment_pipe": 51 | { 52 | "reg": 53 | { 54 | "n": 2, 55 | "m": "ref", 56 | "dof": 12 57 | }, 58 | 
"segment_atropos_pipe": 59 | { 60 | "use_priors": 0.0, 61 | "Atropos": 62 | { 63 | "dimension": 3 64 | }, 65 | "tissue_dict": 66 | { 67 | "gm": 2, 68 | "wm": 3, 69 | "csf": 1 70 | } 71 | }, 72 | "export_5tt_pipe": 73 | { 74 | }, 75 | "IsoSurface_brain_pipe": 76 | { 77 | "merge_brain_tissues": 78 | { 79 | "keep_indexes": [1,2,3] 80 | } 81 | } 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /workflows/params_segment_baboon3_0p6_ants_4animal.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "BaBa21-3_0p6" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "N4debias": 24 | { 25 | "dimension": 3, 26 | "bspline_fitting_distance": 200, 27 | "n_iterations": [50, 50, 40, 30], 28 | "convergence_threshold": 0.00000001, 29 | "shrink_factor": 2, 30 | "args": "-r 0 --verbose 1" 31 | } 32 | }, 33 | "extract_pipe": 34 | { 35 | "bet4animal": 36 | { 37 | "f": 0.5, 38 | "label": 2, 39 | "robust": true 40 | } 41 | }, 42 | "debias": 43 | { 44 | "s": 2 45 | }, 46 | "brain_segment_pipe": 47 | { 48 | "reg": 49 | { 50 | "n": 2, 51 | "m": "ref", 52 | "dof": 12 53 | }, 54 | "segment_atropos_pipe": 55 | { 56 | "use_priors": 0.0, 57 | "Atropos": 58 | { 59 | "dimension": 3 60 | }, 61 | "tissue_dict": 62 | { 63 | "gm": 2, 64 | "wm": 3, 65 | "csf": 1 66 | } 67 | }, 68 | "export_5tt_pipe": 69 | { 70 | }, 71 | "IsoSurface_brain_pipe": 72 | { 73 | "merge_brain_tissues": 74 | { 75 | "keep_indexes": [1,2,3] 76 | } 77 | } 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /workflows/params_segment_baboon3_ants.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "BaBa21-3" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "N4debias": 24 | { 25 | "dimension": 3, 26 | "bspline_fitting_distance": 200, 27 | "n_iterations": [50, 50, 40, 30], 28 | "convergence_threshold": 0.00000001, 29 | "shrink_factor": 2, 30 | "args": "-r 0 --verbose 1" 31 | } 32 | }, 33 | "extract_pipe": 34 | { 35 | "atlas_brex": 36 | { 37 | "f": 0.7, 38 | "reg": 1, 39 | "msk": "b,0.5,0,0", 40 | "wrp": "10,10,10", 41 | "dil": 1, 42 | "nrm": 1 43 | 44 | } 45 | }, 46 | "debias": 47 | { 48 | "s": 2 49 | }, 50 | "brain_segment_pipe": 51 | { 52 | "reg": 53 | { 54 | "n": 2, 55 | "m": "ref", 56 | "dof": 12 57 | }, 58 | "segment_atropos_pipe": 59 | { 60 | "use_priors": 0.0, 61 | "Atropos": 62 | { 63 | "dimension": 3 64 | }, 65 | "tissue_dict": 66 | { 67 | "gm": 2, 68 | "wm": 3, 69 | "csf": 1 70 | } 71 | }, 72 | "export_5tt_pipe": 73 | { 74 | }, 75 | "IsoSurface_brain_pipe": 76 | { 77 | "merge_brain_tissues": 78 | { 79 | "keep_indexes": [1,2,3] 80 | } 81 | } 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /workflows/params_segment_baboon3_ants_4animal.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "BaBa21-3" 5 | }, 6 | 
"short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "N4debias": 24 | { 25 | "dimension": 3, 26 | "bspline_fitting_distance": 200, 27 | "n_iterations": [50, 50, 40, 30], 28 | "convergence_threshold": 0.00000001, 29 | "shrink_factor": 2, 30 | "args": "-r 0 --verbose 1" 31 | } 32 | }, 33 | "extract_pipe": 34 | { 35 | "bet4animal": 36 | { 37 | "f": 0.5, 38 | "label": 2, 39 | "robust": true 40 | } 41 | }, 42 | "debias": 43 | { 44 | "s": 2 45 | }, 46 | "brain_segment_pipe": 47 | { 48 | "reg": 49 | { 50 | "n": 2, 51 | "m": "ref", 52 | "dof": 12 53 | }, 54 | "segment_atropos_pipe": 55 | { 56 | "use_priors": 0.0, 57 | "Atropos": 58 | { 59 | "dimension": 3 60 | }, 61 | "tissue_dict": 62 | { 63 | "gm": 2, 64 | "wm": 3, 65 | "csf": 1 66 | } 67 | }, 68 | "export_5tt_pipe": 69 | { 70 | }, 71 | "IsoSurface_brain_pipe": 72 | { 73 | "merge_brain_tissues": 74 | { 75 | "keep_indexes": [1,2,3] 76 | } 77 | } 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /workflows/params_segment_baboon3_ants_quick.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "BaBa21-3" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "N4debias": 24 | { 25 | "dimension": 3, 26 | "bspline_fitting_distance": 200, 27 | "n_iterations": [50, 50, 40, 30], 28 | "convergence_threshold": 0.00000001, 29 | "shrink_factor": 2, 30 | "args": "-r 0 --verbose 1" 31 | } 32 | }, 33 | "extract_pipe": 34 | { 35 | "hdbet": 36 | { 37 | 38 | } 39 | }, 40 | "debias": 41 | { 42 | "s": 2 43 | }, 44 | "brain_segment_pipe": 45 | { 46 | "reg": 47 | { 48 | "n": 2, 49 | "m": "ref", 50 | "dof": 12 51 | }, 52 | "segment_atropos_pipe": 53 | { 54 | "use_priors": 0.0, 55 | "Atropos": 56 | { 57 | "dimension": 3 58 | }, 59 | "tissue_dict": 60 | { 61 | "gm": 2, 62 | "wm": 3, 63 | "csf": 1 64 | } 65 | }, 66 | "export_5tt_pipe": 67 | { 68 | }, 69 | "IsoSurface_brain_pipe": 70 | { 71 | "merge_brain_tissues": 72 | { 73 | "keep_indexes": [1,2,3] 74 | } 75 | } 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /workflows/params_segment_baboon_ants.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "Haiko_v1.5" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "N4debias": 24 | { 25 | "dimension": 3, 26 | "bspline_fitting_distance": 200, 27 | "n_iterations": [50, 50, 40, 30], 28 | "convergence_threshold": 0.00000001, 29 | "shrink_factor": 2, 30 | "args": "-r 0 --verbose 1" 31 | } 32 | }, 33 | "extract_pipe": 34 | { 35 | "atlas_brex": 36 | { 37 | "f": 0.7, 38 | "reg": 1, 39 | "msk": "b,0.5,0,0", 40 | "wrp": "10,10,10", 41 | "dil": 1 42 | } 43 | }, 44 | "debias": 45 | { 46 | "s": 2 47 | }, 48 | "brain_segment_pipe": 49 | { 50 | "reg": 51 | { 52 | "n": 2, 
53 | "m": "ref", 54 | "dof": 12 55 | }, 56 | "segment_atropos_pipe": 57 | { 58 | "use_priors": 0.0, 59 | "Atropos": 60 | { 61 | "dimension": 3 62 | }, 63 | "tissue_dict": 64 | { 65 | "gm": 2, 66 | "wm": 3, 67 | "csf": 1 68 | } 69 | }, 70 | "export_5tt_pipe": 71 | { 72 | }, 73 | "IsoSurface_brain_pipe": 74 | { 75 | "merge_brain_tissues": 76 | { 77 | "keep_indexes": [1,2,3] 78 | } 79 | } 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /workflows/params_segment_baboon_ants_4animal.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "Haiko_v1.5" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "N4debias": 24 | { 25 | "dimension": 3, 26 | "bspline_fitting_distance": 200, 27 | "n_iterations": [50, 50, 40, 30], 28 | "convergence_threshold": 0.00000001, 29 | "shrink_factor": 2, 30 | "args": "-r 0 --verbose 1" 31 | } 32 | }, 33 | "extract_pipe": 34 | { 35 | "bet4animal": 36 | { 37 | "f": 0.5, 38 | "label": 2, 39 | "robust": true 40 | } 41 | }, 42 | "debias": 43 | { 44 | "s": 2 45 | }, 46 | "brain_segment_pipe": 47 | { 48 | "reg": 49 | { 50 | "n": 2, 51 | "m": "ref", 52 | "dof": 12 53 | }, 54 | "segment_atropos_pipe": 55 | { 56 | "use_priors": 0.0, 57 | "Atropos": 58 | { 59 | "dimension": 3 60 | }, 61 | "tissue_dict": 62 | { 63 | "gm": 2, 64 | "wm": 3, 65 | "csf": 1 66 | } 67 | }, 68 | "export_5tt_pipe": 69 | { 70 | }, 71 | "IsoSurface_brain_pipe": 72 | { 73 | "merge_brain_tissues": 74 | { 75 | "keep_indexes": [1,2,3] 76 | } 77 | } 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /workflows/params_segment_baboon_ants_quick.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "Haiko_v1.5" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "N4debias": 24 | { 25 | "dimension": 3, 26 | "bspline_fitting_distance": 200, 27 | "n_iterations": [50, 50, 40, 30], 28 | "convergence_threshold": 0.00000001, 29 | "shrink_factor": 2, 30 | "args": "-r 0 --verbose 1" 31 | } 32 | }, 33 | "extract_pipe": 34 | { 35 | "hdbet": 36 | { 37 | } 38 | }, 39 | "debias": 40 | { 41 | "s": 2 42 | }, 43 | "brain_segment_pipe": 44 | { 45 | "reg": 46 | { 47 | "n": 2, 48 | "m": "ref", 49 | "dof": 12 50 | }, 51 | "segment_atropos_pipe": 52 | { 53 | "use_priors": 0.0, 54 | "Atropos": 55 | { 56 | "dimension": 3 57 | }, 58 | "tissue_dict": 59 | { 60 | "gm": 2, 61 | "wm": 3, 62 | "csf": 1 63 | } 64 | }, 65 | "export_5tt_pipe": 66 | { 67 | }, 68 | "IsoSurface_brain_pipe": 69 | { 70 | "merge_brain_tissues": 71 | { 72 | "keep_indexes": [1,2,3] 73 | } 74 | } 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /workflows/params_segment_baboon_spm.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "Haiko_v1.5" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 
10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | } 20 | }, 21 | "debias": 22 | { 23 | "s": 2 24 | }, 25 | "reg": 26 | { 27 | "n": 2, 28 | "m": "ref", 29 | "dof": 12 30 | }, 31 | "old_segment_pipe": 32 | { 33 | "segment": 34 | { 35 | "gm_output_type": [false, false, true], 36 | "wm_output_type": [false, false, true], 37 | "csf_output_type": [false, false, true] 38 | }, 39 | "threshold_gm": 40 | { 41 | "thresh": 0.5 42 | }, 43 | "threshold_wm": 44 | { 45 | "thresh": 0.5 46 | }, 47 | "threshold_csf": 48 | { 49 | "thresh": 0.5 50 | } 51 | }, 52 | "mask_from_seg_pipe": 53 | { 54 | "dilate_mask": 55 | { 56 | "kernel_shape": "sphere", 57 | "kernel_size": 2 58 | }, 59 | "erode_mask": 60 | { 61 | "kernel_shape": "sphere", 62 | "kernel_size": 2 63 | }, 64 | "merge_indexed_mask": 65 | { 66 | "index_csf": 1, 67 | "index_gm": 2, 68 | "index_wm": 3 69 | } 70 | 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /workflows/params_segment_chimp_ants.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "Juna_Chimp" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "crop_T1": 9 | { 10 | "args": "should be defined in indiv" 11 | }, 12 | "norm_intensity": 13 | { 14 | "dimension": 3, 15 | "bspline_fitting_distance": 200, 16 | "n_iterations": [50, 50, 40, 30], 17 | "convergence_threshold": 0.00000001, 18 | "shrink_factor": 2, 19 | "args": "-r 0 --verbose 1" 20 | } 21 | }, 22 | "extract_pipe": 23 | { 24 | "atlas_brex": 25 | { 26 | "f": 0.5, 27 | "reg": 2, 28 | "wrp": "1", 29 | "msk": "a,0,3" 30 | } 31 | }, 32 | "brain_segment_pipe": 33 | { 34 | "register_NMT_pipe": 35 | { 36 | "norm_intensity": 37 | { 38 | "dimension": 3, 39 | "bspline_fitting_distance": 200, 40 | "n_iterations": [50, 50, 40, 30], 41 | "convergence_threshold": 0.00000001, 42 | "shrink_factor": 2, 43 | "args": "-r 0 --verbose 1" 44 | } 45 | }, 46 | "segment_atropos_pipe": 47 | { 48 | "use_priors": 0.0, 49 | "Atropos": 50 | { 51 | "dimension": 3, 52 | "numberOfClasses": 3 53 | }, 54 | "threshold_gm": 55 | { 56 | "thresh": 0.5 57 | }, 58 | "threshold_wm": 59 | { 60 | "thresh": 0.5 61 | }, 62 | "threshold_csf": 63 | { 64 | "thresh": 0.5 65 | } 66 | } 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /workflows/params_segment_human_1mm_ants_4animal.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "MNI_Fortin_1mm" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "N4debias": 24 | { 25 | "dimension": 3, 26 | "bspline_fitting_distance": 200, 27 | "n_iterations": [50, 50, 40, 30], 28 | "convergence_threshold": 0.00000001, 29 | "shrink_factor": 2, 30 | "args": "-r 0 --verbose 1" 31 | } 32 | }, 33 | "extract_pipe": 34 | { 35 | "bet4animal": 36 | { 37 | "robust": true, 38 | "label": 1, 39 | "f" : 0.3 40 | } 41 | }, 42 | "debias": 43 | { 44 | "s": 2 45 | }, 46 | "brain_segment_pipe": 47 | { 48 | "reg": 49 | { 50 | "n": 2, 51 | "m": "ref", 52 | "dof": 12 53 | }, 54 | "segment_atropos_pipe": 55 | { 56 | "use_priors": 0.0, 57 | "Atropos": 58 | { 59 | "dimension": 3 60 | }, 61 | 
"tissue_dict": 62 | { 63 | "gm": 2, 64 | "wm": 3, 65 | "csf": 1 66 | } 67 | }, 68 | "export_5tt_pipe": 69 | { 70 | }, 71 | "IsoSurface_brain_pipe": 72 | { 73 | "merge_brain_tissues": 74 | { 75 | "keep_indexes": [2,3] 76 | } 77 | } 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /workflows/params_segment_human_2mm_ants_4animal.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "MNI_Fortin_2mm" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "N4debias": 24 | { 25 | "dimension": 3, 26 | "bspline_fitting_distance": 200, 27 | "n_iterations": [50, 50, 40, 30], 28 | "convergence_threshold": 0.00000001, 29 | "shrink_factor": 2, 30 | "args": "-r 0 --verbose 1" 31 | } 32 | }, 33 | "extract_pipe": 34 | { 35 | "bet4animal": 36 | { 37 | "robust": true, 38 | "label": 1, 39 | "f" : 0.25 40 | } 41 | }, 42 | "debias": 43 | { 44 | "s": 2 45 | }, 46 | "brain_segment_pipe": 47 | { 48 | "reg": 49 | { 50 | "n": 2, 51 | "m": "ref", 52 | "dof": 12 53 | }, 54 | "segment_atropos_pipe": 55 | { 56 | "use_priors": 0.0, 57 | "Atropos": 58 | { 59 | "dimension": 3 60 | }, 61 | "tissue_dict": 62 | { 63 | "gm": 2, 64 | "wm": 3, 65 | "csf": 1 66 | } 67 | }, 68 | "export_5tt_pipe": 69 | { 70 | }, 71 | "IsoSurface_brain_pipe": 72 | { 73 | "merge_brain_tissues": 74 | { 75 | "keep_indexes": [2,3] 76 | } 77 | } 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /workflows/params_segment_macaque_0p5_ants.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "NMT_v2.0_asym_0p5" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | 21 | "denoise": 22 | { 23 | "shrink_factor": 2 24 | }, 25 | "N4debias": 26 | { 27 | "dimension": 3, 28 | "bspline_fitting_distance": 200, 29 | "n_iterations": [50, 50, 40, 30], 30 | "convergence_threshold": 0.00000001, 31 | "shrink_factor": 2, 32 | "args": "-r 0 --verbose 1" 33 | } 34 | }, 35 | "extract_pipe": 36 | { 37 | "atlas_brex": 38 | { 39 | "f": 0.5, 40 | "reg": 1, 41 | "wrp": "10,10,10", 42 | "msk": "a,0,0", 43 | "dil": 2, 44 | "nrm": 1 45 | } 46 | 47 | }, 48 | "debias": 49 | { 50 | "s": 2 51 | }, 52 | "brain_segment_pipe": 53 | { 54 | "reg": 55 | { 56 | "n": 2, 57 | "m": "ref", 58 | "dof": 12 59 | }, 60 | "segment_atropos_pipe": 61 | { 62 | "use_priors": 0.0, 63 | "Atropos": 64 | { 65 | "dimension": 3 66 | }, 67 | "tissue_dict": 68 | { 69 | "gm": [2, 3], 70 | "wm": 4, 71 | "csf": [1, 5] 72 | } 73 | }, 74 | "export_5tt_pipe": 75 | { 76 | }, 77 | "IsoSurface_brain_pipe": 78 | { 79 | "merge_brain_tissues": 80 | { 81 | "keep_indexes": [2,3,4] 82 | } 83 | } 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /workflows/params_segment_macaque_0p5_ants_4animal.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "NMT_v2.0_asym_0p5" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | 
"aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | 21 | "denoise": 22 | { 23 | "shrink_factor": 2 24 | }, 25 | "N4debias": 26 | { 27 | "dimension": 3, 28 | "bspline_fitting_distance": 200, 29 | "n_iterations": [50, 50, 40, 30], 30 | "convergence_threshold": 0.00000001, 31 | "shrink_factor": 2, 32 | "args": "-r 0 --verbose 1" 33 | } 34 | }, 35 | "extract_pipe": 36 | { 37 | "bet4animal": 38 | { 39 | "label": 2 40 | } 41 | 42 | }, 43 | "debias": 44 | { 45 | "s": 2 46 | }, 47 | "brain_segment_pipe": 48 | { 49 | "reg": 50 | { 51 | "n": 2, 52 | "m": "ref", 53 | "dof": 12 54 | }, 55 | "segment_atropos_pipe": 56 | { 57 | "use_priors": 0.0, 58 | "Atropos": 59 | { 60 | "dimension": 3 61 | }, 62 | "tissue_dict": 63 | { 64 | "gm": [2, 3], 65 | "wm": 4, 66 | "csf": [1, 5] 67 | } 68 | }, 69 | "export_5tt_pipe": 70 | { 71 | }, 72 | "IsoSurface_brain_pipe": 73 | { 74 | "merge_brain_tissues": 75 | { 76 | "keep_indexes": [2,3,4] 77 | } 78 | } 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /workflows/params_segment_macaque_0p5_spm.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "inia19", 5 | "template_stereo_name": "NMT_v2.0_asym_0p5" 6 | }, 7 | "short_preparation_pipe": 8 | { 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | "shrink_factor": 2 23 | } 24 | }, 25 | "debias": 26 | { 27 | "s": 2 28 | }, 29 | "reg": 30 | { 31 | "n": 2, 32 | "m": "ref", 33 | "dof": 12 34 | }, 35 | "old_segment_pipe": 36 | { 37 | "segment": 38 | { 39 | "gm_output_type": [false, false, true], 40 | "wm_output_type": [false, false, true], 41 | "csf_output_type": [false, false, true] 42 | }, 43 | "threshold_gm": 44 | { 45 | "thresh": 0.5 46 | }, 47 | "threshold_wm": 48 | { 49 | "thresh": 0.5 50 | }, 51 | "threshold_csf": 52 | { 53 | "thresh": 0.5 54 | } 55 | }, 56 | "mask_from_seg_pipe": 57 | { 58 | "dilate_mask": 59 | { 60 | "kernel_shape": "sphere", 61 | "kernel_size": 2 62 | }, 63 | "erode_mask": 64 | { 65 | "kernel_shape": "sphere", 66 | "kernel_size": 2 67 | } 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /workflows/params_segment_macaque_ants.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "NMT_v2.0_asym" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | "shrink_factor": 2 23 | }, 24 | "N4debias": 25 | { 26 | "dimension": 3, 27 | "bspline_fitting_distance": 200, 28 | "n_iterations": [50, 50, 40, 30], 29 | "convergence_threshold": 0.00000001, 30 | "shrink_factor": 2, 31 | "args": "-r 0 --verbose 1" 32 | } 33 | }, 34 | "extract_pipe": 35 | { 36 | "atlas_brex": 37 | { 38 | "f": 0.5, 39 | "reg": 1, 40 | "wrp": "10,10,10", 41 | "msk": "a,0,0", 42 | "dil": 2, 43 | "nrm": 1 44 | } 45 | 46 | }, 47 | "debias": 48 | { 49 | "s": 2 50 | }, 51 | "brain_segment_pipe": 52 | { 53 | "reg": 54 | { 55 | "n": 2, 56 | "m": "ref", 57 | "dof": 12 58 | }, 59 | 
"segment_atropos_pipe": 60 | { 61 | "use_priors": 0.0, 62 | "Atropos": 63 | { 64 | "dimension": 3 65 | }, 66 | "tissue_dict": 67 | { 68 | "gm": [2, 3], 69 | "wm": 4, 70 | "csf": [1, 5] 71 | } 72 | }, 73 | "export_5tt_pipe": 74 | { 75 | }, 76 | "IsoSurface_brain_pipe": 77 | { 78 | "merge_brain_tissues": 79 | { 80 | "keep_indexes": [2,3,4] 81 | } 82 | } 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /workflows/params_segment_macaque_ants_4animal.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "NMT_v2.0_asym" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | "shrink_factor": 2 23 | }, 24 | "N4debias": 25 | { 26 | "dimension": 3, 27 | "bspline_fitting_distance": 200, 28 | "n_iterations": [50, 50, 40, 30], 29 | "convergence_threshold": 0.00000001, 30 | "shrink_factor": 2, 31 | "args": "-r 0 --verbose 1" 32 | } 33 | }, 34 | "extract_pipe": 35 | { 36 | "bet4animal": 37 | { 38 | "label": 2 39 | } 40 | }, 41 | "debias": 42 | { 43 | "s": 2 44 | }, 45 | "brain_segment_pipe": 46 | { 47 | "reg": 48 | { 49 | "n": 2, 50 | "m": "ref", 51 | "dof": 12 52 | }, 53 | "segment_atropos_pipe": 54 | { 55 | "use_priors": 0.0, 56 | "Atropos": 57 | { 58 | "dimension": 3 59 | }, 60 | "tissue_dict": 61 | { 62 | "gm": [2, 3], 63 | "wm": 4, 64 | "csf": [1, 5] 65 | } 66 | }, 67 | "export_5tt_pipe": 68 | { 69 | }, 70 | "IsoSurface_brain_pipe": 71 | { 72 | "merge_brain_tissues": 73 | { 74 | "keep_indexes": [2,3,4] 75 | } 76 | } 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /workflows/params_segment_macaque_ants_quick.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "NMT_v2.0_asym" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1":{}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | "shrink_factor": 2 23 | }, 24 | "N4debias": 25 | { 26 | "dimension": 3, 27 | "bspline_fitting_distance": 200, 28 | "n_iterations": [50, 50, 40, 30], 29 | "convergence_threshold": 0.00000001, 30 | "shrink_factor": 2, 31 | "args": "-r 0 --verbose 1" 32 | } 33 | }, 34 | "extract_pipe": 35 | { 36 | "hdbet": 37 | { 38 | } 39 | }, 40 | "debias": 41 | { 42 | "s": 2 43 | }, 44 | "brain_segment_pipe": 45 | { 46 | "reg": 47 | { 48 | "n": 2, 49 | "m": "ref", 50 | "dof": 12 51 | }, 52 | "segment_atropos_pipe": 53 | { 54 | "use_priors": 0.0, 55 | "Atropos": 56 | { 57 | "dimension": 3 58 | }, 59 | "tissue_dict": 60 | { 61 | "gm": [2, 3], 62 | "wm": 4, 63 | "csf": [1, 5] 64 | } 65 | }, 66 | "export_5tt_pipe": 67 | { 68 | }, 69 | "IsoSurface_brain_pipe": 70 | { 71 | "merge_brain_tissues": 72 | { 73 | "keep_indexes": [2,3,4] 74 | } 75 | } 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /workflows/params_segment_macaque_spm.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "inia19", 5 | "template_stereo_name": "NMT_v2.0_asym" 6 | }, 7 | "short_preparation_pipe": 8 | { 9 | 
"crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | "shrink_factor": 2 23 | } 24 | }, 25 | "debias": 26 | { 27 | "s": 2 28 | }, 29 | "reg": 30 | { 31 | "n": 2, 32 | "m": "ref", 33 | "dof": 12 34 | }, 35 | "old_segment_pipe": 36 | { 37 | "segment": 38 | { 39 | "gm_output_type": [false, false, true], 40 | "wm_output_type": [false, false, true], 41 | "csf_output_type": [false, false, true] 42 | }, 43 | "threshold_gm": 44 | { 45 | "thresh": 0.5 46 | }, 47 | "threshold_wm": 48 | { 49 | "thresh": 0.5 50 | }, 51 | "threshold_csf": 52 | { 53 | "thresh": 0.5 54 | } 55 | }, 56 | "mask_from_seg_pipe": 57 | { 58 | "dilate_mask": 59 | { 60 | "kernel_shape": "sphere", 61 | "kernel_size": 2 62 | }, 63 | "erode_mask": 64 | { 65 | "kernel_shape": "sphere", 66 | "kernel_size": 2 67 | } 68 | }, 69 | "regex_subs": 70 | { 71 | "FLAIR_flirt": "preproc-coreg_FLAIR" 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /workflows/params_segment_marmo_ants.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "MBM_v3.0.1_6seg" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1": {}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "fast": 24 | { 25 | "args": "-l 3" 26 | } 27 | }, 28 | "extract_pipe": 29 | { 30 | "atlas_brex": 31 | { 32 | "f": 0.7, 33 | "reg": 1, 34 | "msk": "b,0.5,0,0", 35 | "wrp": "5,5,5", 36 | "dil": 4, 37 | "vox": 1 38 | } 39 | }, 40 | "debias": 41 | { 42 | "s": 4 43 | }, 44 | "brain_segment_pipe": 45 | { 46 | "reg": 47 | { 48 | "n": 2, 49 | "m": "ref", 50 | "dof": 12 51 | }, 52 | "segment_atropos_pipe": 53 | { 54 | "use_priors": 0.0, 55 | "Atropos": 56 | { 57 | "dimension": 3 58 | }, 59 | "tissue_dict": 60 | { 61 | "gm": [1,2], 62 | "wm": 3, 63 | "csf": 4 64 | } 65 | }, 66 | "export_5tt_pipe": 67 | { 68 | }, 69 | "IsoSurface_brain_pipe": 70 | { 71 | "merge_brain_tissues": 72 | { 73 | "keep_indexes": [1,2,3] 74 | } 75 | } 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /workflows/params_segment_marmo_ants_4animal.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "MBM_v3.0.1_6seg" 5 | }, 6 | "short_preparation_pipe": 7 | { 8 | "aladin_T2_on_T1": {}, 9 | "crop_aladin_pipe": 10 | { 11 | "reg_T1_on_template": 12 | { 13 | "rig_only_flag":true 14 | }, 15 | "reg_T1_on_template2": 16 | { 17 | "rig_only_flag":true 18 | } 19 | }, 20 | "denoise": 21 | { 22 | }, 23 | "fast": 24 | { 25 | "args": "-l 3" 26 | } 27 | }, 28 | "extract_pipe": 29 | { 30 | "bet4animal": 31 | { 32 | "label": 3, 33 | "f": 0.25, 34 | "robust": false 35 | } 36 | }, 37 | "debias": 38 | { 39 | "s": 4 40 | }, 41 | "brain_segment_pipe": 42 | { 43 | "reg": 44 | { 45 | "n": 2, 46 | "m": "ref", 47 | "dof": 12 48 | }, 49 | "segment_atropos_pipe": 50 | { 51 | "use_priors": 0.0, 52 | "Atropos": 53 | { 54 | "dimension": 3 55 | }, 56 | "tissue_dict": 57 | { 58 | "gm": [1,2], 59 | "wm": 3, 60 | "csf": 4 61 | } 62 | }, 63 | "export_5tt_pipe": 64 | { 65 | }, 66 | "IsoSurface_brain_pipe": 67 | { 68 | "merge_brain_tissues": 69 | { 70 | "keep_indexes": [1,2,3] 71 | } 72 | } 73 
| } 74 | } 75 | -------------------------------------------------------------------------------- /workflows/params_segment_marmo_spm.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "MBM_v3.0.1", 5 | "template_stereo_name": "bma-1_0p2mm" 6 | }, 7 | "short_preparation_pipe": 8 | { 9 | "use_T2": {}, 10 | "crop_aladin_pipe": 11 | { 12 | "reg_T1_on_template": 13 | { 14 | "rig_only_flag":true 15 | }, 16 | "reg_T1_on_template2": 17 | { 18 | "rig_only_flag":true 19 | } 20 | } 21 | }, 22 | "debias": 23 | { 24 | "s": 4 25 | }, 26 | "reg": 27 | { 28 | "n": 2, 29 | "m": "ref", 30 | "dof": 12 31 | }, 32 | "old_segment_pipe": 33 | { 34 | "segment": 35 | { 36 | "gm_output_type": [false, false, true], 37 | "wm_output_type": [false, false, true], 38 | "csf_output_type": [false, false, true] 39 | }, 40 | "threshold_gm": 41 | { 42 | "thresh": 0.5 43 | }, 44 | "threshold_wm": 45 | { 46 | "thresh": 0.5 47 | }, 48 | "threshold_csf": 49 | { 50 | "thresh": 0.5 51 | }, 52 | "export_5tt_pipe": 53 | { 54 | }, 55 | "IsoSurface_brain_pipe": 56 | { 57 | "merge_brain_tissues": 58 | { 59 | "keep_indexes": [1,2,3] 60 | } 61 | } 62 | }, 63 | "mask_from_seg_pipe": 64 | { 65 | "dilate_mask": 66 | { 67 | "kernel_shape": "sphere", 68 | "kernel_size": 2 69 | }, 70 | "erode_mask": 71 | { 72 | "kernel_shape": "sphere", 73 | "kernel_size": 2 74 | } 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /workflows/params_segment_marmot2_ants.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": 3 | { 4 | "template_name": "MBM_v3.0.1_6seg", 5 | "template_stereo_name": "bma-1_0p2mm" 6 | }, 7 | "short_preparation_pipe": 8 | { 9 | "use_T2": {}, 10 | "crop_aladin_pipe": 11 | { 12 | "remove_capsule_pipe":{}, 13 | "reg_T1_on_template": 14 | { 15 | "rig_only_flag":true 16 | }, 17 | "reg_T1_on_template2": 18 | { 19 | "rig_only_flag":true 20 | } 21 | }, 22 | "denoise": 23 | { 24 | }, 25 | "fast": 26 | { 27 | "args": "-l 3" 28 | } 29 | }, 30 | "extract_pipe": 31 | { 32 | "atlas_brex": 33 | { 34 | "f": 0.6, 35 | "reg": 1, 36 | "msk": "b,0.5,0,0", 37 | "wrp": "5,5,5", 38 | "dil": 4, 39 | "vox": 1 40 | } 41 | }, 42 | "debias": 43 | { 44 | "s": 4 45 | }, 46 | "brain_segment_pipe": 47 | { 48 | "reg": 49 | { 50 | "n": 2, 51 | "m": "ref", 52 | "dof": 12 53 | }, 54 | "segment_atropos_pipe": 55 | { 56 | "use_priors": 0.0, 57 | "Atropos": 58 | { 59 | "dimension": 3 60 | }, 61 | "tissue_dict": 62 | { 63 | "gm": [1,2], 64 | "wm": 3, 65 | "csf": 4 66 | } 67 | }, 68 | "export_5tt_pipe": 69 | { 70 | }, 71 | "IsoSurface_brain_pipe": 72 | { 73 | "merge_brain_tissues": 74 | { 75 | "keep_indexes": [1,2,3] 76 | } 77 | } 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /workflows/which_spm.py: -------------------------------------------------------------------------------- 1 | from macapype.utils.utils_spm import set_spm 2 | 3 | # Sanity check: set_spm() returns a truthy value only when a usable SPM installation is found, so running this script fails fast if SPM is missing. 4 | assert set_spm(), "Error, SPM was not found" 5 | --------------------------------------------------------------------------------
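
Usage note: every params_segment_*.json above shares the same two-level layout. Top-level keys name sub-pipelines ("general" holds the template name; "short_preparation_pipe", "extract_pipe", "debias", and "brain_segment_pipe" or the SPM-based "old_segment_pipe" configure processing stages), and each nested object carries the options for one node (e.g. "N4debias" holds what look like ANTs N4BiasFieldCorrection inputs, "bet4animal" FSL bet4animal options). The sketch below is a minimal illustration using only the Python standard library: it loads one of these files and prints that structure. The path is an assumption (relative to a checkout of the repository), and the hand-off of such a file to the pipeline via a -params argument of workflows/segment_pnh.py is inferred from the file naming here, not a verified CLI contract.

import json
from pathlib import Path

# Assumed path: one of the parameter sets listed above,
# relative to the repository root.
params_file = Path("workflows/params_segment_macaque_ants.json")

with params_file.open() as f:
    params = json.load(f)

# Top level: "general" metadata plus one entry per sub-pipeline.
for pipe_name, section in params.items():
    print(pipe_name)
    if isinstance(section, dict):
        for node_name, kwargs in section.items():
            # Each nested dict is the option set for the node or
            # nested sub-pipeline of the same name.
            print("    {} -> {}".format(node_name, kwargs))

Reading the three macaque ANTs files this way makes the *_ants / *_ants_4animal / *_ants_quick naming pattern visible: the variants differ only in the "extract_pipe" entry (atlas_brex vs. bet4animal vs. hdbet), while the preparation and Atropos segmentation settings are identical. The same pattern holds for the baboon parameter sets.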