├── .circleci └── config.yml ├── .hadolint.yaml ├── .pre-commit-config.yaml ├── .travis.yml ├── Dockerfile ├── README.md ├── entry_init.sh ├── run.py └── version /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: 2.1 3 | 4 | jobs: 5 | build: 6 | machine: 7 | # https://circleci.com/developer/machine/image/ubuntu-2204 8 | image: ubuntu-2204:2022.10.2 9 | steps: 10 | - checkout 11 | - restore_cache: 12 | keys: 13 | - my_cache 14 | - run: 15 | name: Get test data 16 | command: | 17 | wget https://raw.githubusercontent.com/bids-apps/maintenance-tools/main/utils/get_data_from_osf.sh 18 | bash get_data_from_osf.sh hcp_example_bids_v3 19 | - run: 20 | name: Build Docker image 21 | command: | 22 | wget https://raw.githubusercontent.com/bids-apps/maintenance-tools/main/circleci/build_docker.sh 23 | bash build_docker.sh 24 | - save_cache: 25 | key: my_cache 26 | paths: 27 | - ~/docker 28 | - ~/data 30 | - persist_to_workspace: 31 | root: /home/circleci 32 | paths: 33 | - data/hcp_example_bids_v3 34 | - docker/image.tar 35 | 36 | test: 37 | machine: 38 | image: ubuntu-2204:2022.10.2 39 | steps: 40 | - attach_workspace: 41 | at: /tmp/workspace 42 | - run: docker load -i /tmp/workspace/docker/image.tar 43 | - run: mkdir -p ${HOME}/outputs 44 | - run: 45 | name: print version 46 | command: | 47 | docker run -ti --rm --read-only \ 48 | -v /tmp/workspace/data/hcp_example_bids_v3:/bids_dataset \ 49 | bids/${CIRCLE_PROJECT_REPONAME,,} --version 50 | - run: 51 | name: participant level tests for single session dataset 52 | command: | 53 | docker run -ti --rm --read-only \ 54 | -v /tmp/workspace/data/hcp_example_bids_v3:/bids_dataset \ 55 | bids/${CIRCLE_PROJECT_REPONAME,,} \ 56 | /bids_dataset \ 57 | /outputs \ 58 | participant --participant_label 100307 \ 59 | --stages PreFreeSurfer \ 60 | --license_key="*CxjskRdd7" \ 61 | --n_cpus 2 62 | no_output_timeout: 6h 63 | 64 | deploy: 65 | machine: 66 | image: ubuntu-2204:2022.10.2 67 | steps: 68 | - attach_workspace: 69 | at: /tmp/workspace 70 | - run: docker load -i /tmp/workspace/docker/image.tar 71 | - run: 72 | name: push to dockerhub 73 | command: | 74 | wget https://raw.githubusercontent.com/bids-apps/maintenance-tools/main/circleci/push_docker.sh 75 | bash push_docker.sh 76 | 77 | workflows: 78 | build-test-deploy: 79 | jobs: 80 | - build 81 | - test: 82 | requires: 83 | - build 84 | - deploy: 85 | context: 86 | - dockerhub 87 | requires: 88 | - test 89 | filters: 90 | tags: 91 | only: /.*/ 92 | 93 | # VS Code Extension Version: 1.5.1 94 | -------------------------------------------------------------------------------- /.hadolint.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | ignored: 3 | - DL3003 4 | - DL3008 5 | - DL3015 6 | - DL4001 7 | - DL4003 8 | - DL4006 9 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | # See https://pre-commit.com for more information 3 | # See https://pre-commit.com/hooks.html for more hooks 4 | 5 | repos: 6 | - repo: https://github.com/pre-commit/pre-commit-hooks 7 | rev: v5.0.0 8 | hooks: 9 | - id: trailing-whitespace 10 | - id: end-of-file-fixer 11 | - id: check-yaml 12 | - id: check-json 13 | - id: check-added-large-files 14 | - id: check-case-conflict 15 | - id: check-merge-conflict 16 | 17 | 18 | - repo: 
https://github.com/jumanjihouse/pre-commit-hook-yamlfmt 19 | rev: 0.2.3 20 | hooks: 21 | - id: yamlfmt 22 | args: [--mapping, '2', --sequence, '2', --offset, '0'] 23 | 24 | 25 | - repo: https://github.com/hadolint/hadolint 26 | rev: v2.13.1-beta 27 | hooks: 28 | - id: hadolint-docker 29 | name: Lint Dockerfiles 30 | description: Runs hadolint Docker image to lint Dockerfiles 31 | language: docker_image 32 | types: [dockerfile] 33 | entry: ghcr.io/hadolint/hadolint hadolint 34 | 35 | ci: 36 | skip: [hadolint-docker] 37 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | --- 2 | sudo: required 3 | 4 | cache: 5 | directories: 6 | - ${HOME}/data 7 | 8 | services: 9 | - docker 10 | 11 | before_install: 12 | - export REPO_NAME=$( echo $TRAVIS_REPO_SLUG | cut -d'/' -f2 ) 13 | - if [[ ! -d ~/data/hcp_example_bids ]]; then travis_retry wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 -t 10 -c -P ${HOME}/downloads/ 14 | "https://googledrive.com/host/0B2JWN60ZLkgkMGlUY3B4MXZIZW8/hcp_example_bids.tar" && mkdir -p ${HOME}/data && tar xf ${HOME}/downloads/hcp_example_bids.tar 15 | -C ${HOME}/data; fi 16 | - git describe --tags > version 17 | - docker build -t bids/${REPO_NAME,,} . 18 | - mkdir -p ${HOME}/outputs 19 | 20 | script: 21 | # print version 22 | - docker run -ti --rm --read-only -v /tmp:/tmp -v /var/tmp:/var/tmp -v ${HOME}/data/hcp_example_bids:/bids_dataset bids/${REPO_NAME,,} --version 23 | # participant level tests for single session dataset 24 | - travis_wait docker run -ti --rm --read-only -v /tmp:/tmp -v /var/tmp:/var/tmp -v ${HOME}/data/hcp_example_bids:/bids_dataset -v ${HOME}/outputs1:/outputs 25 | bids/${REPO_NAME,,} /bids_dataset /outputs participant --participant_label 100307 --stages PreFreeSurfer --license_key="*CxjskRdd7" 26 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:xenial-20190515 2 | ARG DEBIAN_FRONTEND=noninteractive 3 | 4 | ENV LANG="C.UTF-8" \ 5 | LC_ALL="C.UTF-8" 6 | 7 | # Download FreeSurfer 8 | RUN apt-get -qq update && \ 9 | apt-get install -yq --no-install-recommends \ 10 | bc \ 11 | bzip2 \ 12 | ca-certificates \ 13 | curl \ 14 | libgomp1 \ 15 | perl-modules \ 16 | tar \ 17 | tcsh \ 18 | wget \ 19 | libxmu6 && \ 20 | apt-get clean && \ 21 | rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* && \ 22 | wget -qO- https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/6.0.1/freesurfer-Linux-centos6_x86_64-stable-pub-v6.0.1.tar.gz \ 23 | | tar zxv -C /opt \ 24 | --exclude='freesurfer/trctrain' \ 25 | --exclude='freesurfer/subjects/fsaverage_sym' \ 26 | --exclude='freesurfer/subjects/fsaverage3' \ 27 | --exclude='freesurfer/subjects/fsaverage4' \ 28 | --exclude='freesurfer/subjects/fsaverage5' \ 29 | --exclude='freesurfer/subjects/fsaverage6' \ 30 | --exclude='freesurfer/subjects/cvs_avg35' \ 31 | --exclude='freesurfer/subjects/cvs_avg35_inMNI152' \ 32 | --exclude='freesurfer/subjects/bert' \ 33 | --exclude='freesurfer/subjects/V1_average' \ 34 | --exclude='freesurfer/average/mult-comp-cor' \ 35 | --exclude='freesurfer/lib/cuda' \ 36 | --exclude='freesurfer/lib/qt' && \ 37 | echo "cHJpbnRmICJrcnp5c3p0b2YuZ29yZ29sZXdza2lAZ21haWwuY29tXG41MTcyXG4gKkN2dW12RVYzelRmZ1xuRlM1Si8yYzFhZ2c0RVxuIiA+IC9vcHQvZnJlZXN1cmZlci9saWNlbnNlLnR4dAo=" | base64 -d | sh 38 | 39 | # Set up the environment 40 
| ENV OS=Linux \ 41 | FS_OVERRIDE=0 \ 42 | FIX_VERTEX_AREA= \ 43 | SUBJECTS_DIR=/opt/freesurfer/subjects \ 44 | FSF_OUTPUT_FORMAT=nii.gz \ 45 | MNI_DIR=/opt/freesurfer/mni \ 46 | LOCAL_DIR=/opt/freesurfer/local \ 47 | FREESURFER_HOME=/opt/freesurfer \ 48 | FSFAST_HOME=/opt/freesurfer/fsfast \ 49 | MINC_BIN_DIR=/opt/freesurfer/mni/bin \ 50 | MINC_LIB_DIR=/opt/freesurfer/mni/lib \ 51 | MNI_DATAPATH=/opt/freesurfer/mni/data \ 52 | FMRI_ANALYSIS_DIR=/opt/freesurfer/fsfast \ 53 | PERL5LIB=/opt/freesurfer/mni/lib/perl5/5.8.5 \ 54 | MNI_PERL5LIB=/opt/freesurfer/mni/lib/perl5/5.8.5 \ 55 | PATH=/opt/freesurfer/bin:/opt/freesurfer/fsfast/bin:/opt/freesurfer/tktools:/opt/freesurfer/mni/bin:$PATH 56 | 57 | 58 | RUN apt-get update -qq \ 59 | && apt-get install -y -q --no-install-recommends \ 60 | libxext6 \ 61 | libxpm-dev \ 62 | libxt6 \ 63 | unzip \ 64 | && apt-get clean \ 65 | && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \ 66 | && echo "Downloading MATLAB Compiler Runtime ..." \ 67 | && curl -fsSL --retry 5 -o /tmp/mcr.zip https://ssd.mathworks.com/supportfiles/downloads/R2017b/deployment_files/R2017b/installers/glnxa64/MCR_R2017b_glnxa64_installer.zip \ 68 | && unzip -q /tmp/mcr.zip -d /tmp/mcrtmp \ 69 | && /tmp/mcrtmp/install -destinationFolder /opt/matlabmcr-2017b -mode silent -agreeToLicense yes \ 70 | && rm -rf /tmp/* 71 | 72 | # Install Miniconda (Python 3.7) 73 | # gradunwarp v1.2.0, installed below, is Python 3 compatible 74 | ENV PATH="/usr/local/miniconda/bin:$PATH" 75 | RUN curl -fsSL -o miniconda.sh https://repo.anaconda.com/miniconda/Miniconda3-py37_4.8.2-Linux-x86_64.sh && \ 76 | bash miniconda.sh -b -p /usr/local/miniconda && \ 77 | rm miniconda.sh && \ 78 | conda config --add channels conda-forge && \ 79 | conda install -y mkl=2020.0 mkl-service=2.3.0 numpy=1.18.1 nibabel=3.0.2 pandas=1.0.3 && sync && \ 80 | conda clean -tip && sync && \ 81 | /usr/local/miniconda/bin/pip install --no-cache-dir pybids==0.10.2 82 | 83 | # Install connectome-workbench 84 | WORKDIR /opt 85 | RUN apt-get -qq update && \ 86 | apt-get install -yq libfreetype6 libglib2.0 && \ 87 | wget -q https://www.humanconnectome.org/storage/app/media/workbench/workbench-linux64-v1.5.0.zip -O wb.zip \ 88 | && unzip wb.zip \ 89 | && rm wb.zip && \ 90 | apt-get clean && \ 91 | rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* 92 | ENV CARET7DIR="/opt/workbench/bin_linux64" 93 | ENV PATH="${CARET7DIR}:${PATH}" 94 | 95 | # Install HCP Pipelines and MSM binaries 96 | RUN apt-get -qq update && \ 97 | apt-get install -yq --no-install-recommends gcc g++ libglu1 && \ 98 | rm -rf /tmp/* && \ 99 | wget -qO- https://github.com/Washington-University/HCPpipelines/archive/v4.3.0.tar.gz | tar xz -C /tmp && \ 100 | mv /tmp/* /opt/HCP-Pipelines && \ 101 | mkdir /opt/HCP-Pipelines/MSMBinaries && \ 102 | wget -q https://github.com/ecr05/MSM_HOCR/releases/download/v3.0FSL/msm_ubuntu_v3 -O /opt/HCP-Pipelines/MSMBinaries/msm && \ 103 | chmod 755 /opt/HCP-Pipelines/MSMBinaries/msm && \ 104 | apt-get clean && \ 105 | rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* 106 | 107 | WORKDIR / 108 | 109 | ENV HCPPIPEDIR=/opt/HCP-Pipelines 110 | ENV HCPPIPEDIR_Templates=${HCPPIPEDIR}/global/templates \ 111 | HCPPIPEDIR_Bin=${HCPPIPEDIR}/global/binaries \ 112 | HCPPIPEDIR_Config=${HCPPIPEDIR}/global/config \ 113 | HCPPIPEDIR_PreFS=${HCPPIPEDIR}/PreFreeSurfer/scripts \ 114 | HCPPIPEDIR_FS=${HCPPIPEDIR}/FreeSurfer/scripts \ 115 | HCPPIPEDIR_PostFS=${HCPPIPEDIR}/PostFreeSurfer/scripts \ 116 | HCPPIPEDIR_fMRISurf=${HCPPIPEDIR}/fMRISurface/scripts \ 117 |
HCPPIPEDIR_fMRIVol=${HCPPIPEDIR}/fMRIVolume/scripts \ 118 | HCPPIPEDIR_tfMRI=${HCPPIPEDIR}/tfMRI/scripts \ 119 | HCPPIPEDIR_dMRI=${HCPPIPEDIR}/DiffusionPreprocessing/scripts \ 120 | HCPPIPEDIR_dMRITract=${HCPPIPEDIR}/DiffusionTractography/scripts \ 121 | HCPPIPEDIR_Global=${HCPPIPEDIR}/global/scripts \ 122 | HCPPIPEDIR_tfMRIAnalysis=${HCPPIPEDIR}/TaskfMRIAnalysis/scripts \ 123 | MSMBINDIR=${HCPPIPEDIR}/MSMBinaries \ 124 | MSMCONFIGDIR=${HCPPIPEDIR}/MSMConfig 125 | 126 | ## Install the validator 127 | RUN wget -qO- https://deb.nodesource.com/setup_10.x | bash - && \ 128 | apt-get update && \ 129 | apt-get install -y --no-install-recommends nodejs && \ 130 | npm install -g bids-validator@1.7.2 && \ 131 | apt-get clean && \ 132 | rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* 133 | 134 | # Install FSL 135 | RUN curl https://fsl.fmrib.ox.ac.uk/fsldownloads/fsl-6.0.2-centos6_64.tar.gz \ 136 | | tar -xz -C /usr/local && \ 137 | /usr/local/fsl/etc/fslconf/fslpython_install.sh -f /usr/local/fsl 138 | 139 | 140 | # Configure environment 141 | ENV FSLDIR=/usr/local/fsl 142 | ENV FSL_DIR="${FSLDIR}" \ 143 | FSLOUTPUTTYPE=NIFTI_GZ \ 144 | PATH=${FSLDIR}/bin:$PATH \ 145 | FSLMULTIFILEQUIT=TRUE \ 146 | POSSUMDIR=${FSLDIR} \ 147 | LD_LIBRARY_PATH=${FSLDIR}/lib:$LD_LIBRARY_PATH \ 148 | FSLTCLSH=/usr/bin/tclsh \ 149 | FSLWISH=/usr/bin/wish 151 | 152 | # install gradient_unwarp.py (v1.2.0 with python 3 compatibility) 153 | WORKDIR /tmp 154 | RUN wget -q https://github.com/Washington-University/gradunwarp/archive/v1.2.0.zip && \ 155 | unzip v1.2.0.zip && \ 156 | cd gradunwarp-1.2.0 && \ 157 | python setup.py install && \ 158 | rm -rf gradunwarp-1.2.0 v1.2.0.zip 159 | 160 | # Install MCR 2017b 161 | ENV MATLABCMD="/opt/matlabmcr-2017b/v93/toolbox/matlab" \ 162 | MATLAB_COMPILER_RUNTIME="/opt/matlabmcr-2017b/v93" \ 163 | LD_LIBRARY_PATH="/opt/matlabmcr-2017b/v93/runtime/glnxa64:/opt/matlabmcr-2017b/v93/bin/glnxa64:/opt/matlabmcr-2017b/v93/sys/os/glnxa64:$LD_LIBRARY_PATH" 164 | 165 | # overwrite matlab mcr shared object 166 | RUN rm /opt/matlabmcr-2017b/v93/sys/os/glnxa64/libstdc++.so.6 && \ 167 | ln -s /usr/lib/x86_64-linux-gnu/libstdc++.so.6 /opt/matlabmcr-2017b/v93/sys/os/glnxa64/libstdc++.so.6 168 | 169 | 170 | 171 | COPY run.py version / 172 | RUN chmod +x /run.py 173 | 174 | 175 | ENTRYPOINT ["/run.py"] 176 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## HCP Pipelines BIDS App 2 | 3 | This is a [BIDS App](https://bids-apps.neuroimaging.io) wrapper for [HCP Pipelines](https://github.com/Washington-University/Pipelines) [v4.3.0](https://github.com/Washington-University/HCPpipelines/releases/tag/v4.3.0). 4 | Like every BIDS App it consists of a container that includes all of the dependencies and a run script that parses a [BIDS dataset](http://bids.neuroimaging.io). 5 | BIDS Apps run on Windows, Linux, and macOS, as well as on HPCs/clusters. 6 | 7 | To convert DICOMs from your HCP-Style (CMRR) acquisitions to BIDS try using [heudiconv](https://github.com/nipy/heudiconv) with this [heuristic file](https://github.com/nipy/heudiconv/blob/master/heudiconv/heuristics/cmrr_heuristic.py). 8 | 9 | ### Description 10 | 11 | The HCP Pipelines product is a set of tools (primarily, but not exclusively, 12 | shell scripts) for processing MRI images for the [Human Connectome Project][HCP].
13 | Among other things, these tools implement the Minimal Preprocessing Pipeline 14 | (MPP) described in [Glasser et al. 2013][GlasserEtAl]. 15 | 16 | **This BIDS App requires that each subject has at least one T1w scan, and in the default `hcp` processing mode also at least one T2w scan.** Lack of fMRI or dMRI scans is handled robustly. Note that while anatomicals (T1w, T2w scans) can be processed without a fieldmap, a fieldmap is mandatory for processing fMRI scans in the default `hcp` processing mode. The HCP-Pipelines 'legacy' processing mode is available through the `--processing_mode` option. 17 | 18 | 19 | 20 | ### Documentation 21 | 22 | [Release Notes, Installation, and Usage][release-install-use] 23 | 24 | ### How to report errors 25 | Discussion of HCP Pipeline usage and improvements can be posted to the 26 | hcp-users discussion list. Sign up for hcp-users at 27 | [http://humanconnectome.org/contact/#subscribe][hcp-users-subscribe]. 28 | 29 | Please open an issue if you encounter errors building this BIDS App or believe you have encountered an error specific to the BIDS App wrapper. 30 | 31 | ### Acknowledgements 32 | 33 | Please cite [Glasser et al. 2013][GlasserEtAl] and [Smith et al. 2013][SmithEtAl]. 34 | 35 | ### Usage 36 | 37 | This App has the following command line arguments: 38 | 39 | usage: run.py [-h] 40 | [--participant_label PARTICIPANT_LABEL [PARTICIPANT_LABEL ...]] 41 | [--session_label SESSION_LABEL [SESSION_LABEL ...]] 42 | [--n_cpus N_CPUS] 43 | [--stages {PreFreeSurfer,FreeSurfer,PostFreeSurfer,fMRIVolume,fMRISurface} [{PreFreeSurfer,FreeSurfer,PostFreeSurfer,fMRIVolume,fMRISurface} ...]] 44 | [--coreg {MSMSulc,FS}] [--gdcoeffs GDCOEFFS] --license_key 45 | LICENSE_KEY [-v] [--anat_unwarpdir {x,y,z,x-,y-,z-,NONE}] 46 | [--skip_bids_validation] [--processing_mode {hcp,legacy,auto}] [--doslicetime] 47 | bids_dir output_dir {participant} 48 | 49 | HCP Pipelines BIDS App (T1w, T2w, fMRI) 50 | 51 | positional arguments: 52 | bids_dir The directory with the input dataset formatted 53 | according to the BIDS standard. 54 | output_dir The directory where the output files should be stored. 55 | If you are running group level analysis this folder 56 | should be prepopulated with the results of the 57 | participant level analysis. 58 | {participant} Level of the analysis that will be performed. Multiple 59 | participant level analyses can be run independently 60 | (in parallel) using the same output_dir. 61 | 62 | optional arguments: 63 | -h, --help show this help message and exit 64 | --participant_label PARTICIPANT_LABEL [PARTICIPANT_LABEL ...] 65 | The label of the participant that should be analyzed. 66 | The label corresponds to sub- from 67 | the BIDS spec (so it does not include "sub-"). If this 68 | parameter is not provided all subjects should be 69 | analyzed. Multiple participants can be specified with 70 | a space separated list. 71 | --session_label SESSION_LABEL [SESSION_LABEL ...] 72 | The label of the session that should be analyzed. The 73 | label corresponds to ses- from the BIDS 74 | spec (so it does not include "ses-"). If this 75 | parameter is not provided, all sessions should be 76 | analyzed. Multiple sessions can be specified with a 77 | space separated list. 78 | --n_cpus N_CPUS Number of CPUs/cores available to use. 79 | --stages {PreFreeSurfer,FreeSurfer,PostFreeSurfer,fMRIVolume,fMRISurface} [{PreFreeSurfer,FreeSurfer,PostFreeSurfer,fMRIVolume,fMRISurface} ...] 80 | Which stages to run. Space separated list.
81 | --coreg {MSMSulc,FS} Coregistration method to use 82 | --gdcoeffs GDCOEFFS Path to gradients coefficients file 83 | --license_key LICENSE_KEY 84 | FreeSurfer license key - letters and numbers after "*" 85 | in the email you received after registration. To 86 | register (for free) visit 87 | https://surfer.nmr.mgh.harvard.edu/registration.html 88 | -v, --version show program's version number and exit 89 | --anat_unwarpdir {x,y,z,x-,y-,z-,NONE} 90 | Unwarp direction for 3D volumes 91 | --skip_bids_validation, --skip-bids-validation 92 | assume the input dataset is BIDS compliant and skip 93 | the validation 94 | --processing_mode {hcp,legacy,auto}, --processing-mode {hcp,legacy,auto} 95 | Control HCP-Pipeline mode: hcp (HCPStyleData): require 96 | T2w and fieldmap modalities; legacy (LegacyStyleData): 97 | always ignore T2w and fieldmaps; auto: use T2w and/or 98 | fieldmaps if available 99 | --doslicetime Apply slice timing correction as part of fMRIVolume. 100 | 101 | To run it in participant level mode (for one participant): 102 | 103 | docker run -i --rm \ 104 | -v /Users/filo/data/ds005:/bids_dataset:ro \ 105 | -v /Users/filo/outputs:/outputs \ 106 | bids/hcppipelines \ 107 | /bids_dataset /outputs participant --participant_label 01 --license_key "XXXXXX" 108 | 109 | ### Commercial use 110 | 111 | This BIDS App incorporates several **non-free** packages required for the HCP Pipeline, including: 112 | 113 | - [MSM](https://github.com/ecr05/MSM_HOCR) 114 | - [FreeSurfer](https://surfer.nmr.mgh.harvard.edu/) 115 | - [FSL](https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/Licence) 116 | - [MATLAB Runtime](https://www.mathworks.com/products/compiler/matlab-runtime.html) 117 | 118 | 119 | If you are considering commercial use of this App please consult the relevant licenses.
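### Running with Singularity

If Docker is not available (for example on many HPC clusters), the published Docker image can usually be run through Singularity/Apptainer as well. The following is an untested sketch, assuming the image is pulled from Docker Hub as `bids/hcppipelines` and that you substitute your own paths:

    # convert the Docker Hub image to a local Singularity image
    singularity build hcppipelines.simg docker://bids/hcppipelines
    # --cleanenv keeps host environment variables (e.g. FSLDIR) from leaking
    # into the container; -B binds host paths into the container
    singularity run --cleanenv \
      -B /Users/filo/data/ds005:/bids_dataset:ro \
      -B /Users/filo/outputs:/outputs \
      hcppipelines.simg \
      /bids_dataset /outputs participant --participant_label 01 --license_key "XXXXXX"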
120 | 121 | ### TODO 122 | 123 | - [ ] Add DiffusionProcessing stage 124 | - [ ] More testing for fMRI with different resolution 125 | - [ ] Run fMRI runs in parallel (when n_cpus present) 126 | - [ ] Add support for TOPUP and GE fieldmaps for structural scans (please get in touch if you can provide sample data) 127 | - [ ] Add support for GE fieldmaps for fMRI scans (please get in touch if you can provide sample data) 128 | - [ ] Avoid copying fsaverage folder for every participant 129 | - [ ] Add ICA FIX stage 130 | - [ ] Add group level analysis 131 | - [ ] Add task fMRI model fitting 132 | 133 | [HCP]: http://www.humanconnectome.org 134 | [GlasserEtAl]: http://www.ncbi.nlm.nih.gov/pubmed/23668970 135 | [SmithEtAl]: http://www.ncbi.nlm.nih.gov/pubmed/23702415 136 | [release-install-use]: https://github.com/Washington-University/HCPpipelines/wiki/Installation-and-Usage-Instructions 137 | [hcp-users-subscribe]: http://humanconnectome.org/contact/#subscribe 138 | -------------------------------------------------------------------------------- /entry_init.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # entrypoint pre-initialization 3 | source /environment 4 | 5 | # run the user command 6 | exec "$@" 7 | -------------------------------------------------------------------------------- /run.py: -------------------------------------------------------------------------------- 1 | #!/usr/local/miniconda/bin/python 2 | import argparse 3 | import os 4 | import shutil 5 | import nibabel 6 | from glob import glob 7 | from subprocess import Popen, PIPE 8 | from shutil import rmtree 9 | import subprocess 10 | from bids.layout import BIDSLayout 11 | from functools import partial 12 | from collections import OrderedDict 13 | from pathlib import Path 14 | import numpy as np 15 | 16 | def run(command, env={}, cwd=None): 17 | merged_env = os.environ.copy()  # copy so per-stage env vars do not leak into os.environ 18 | merged_env.update(env) 19 | merged_env.pop("DEBUG", None) 20 | print(command) 21 | process = Popen(command, stdout=PIPE, stderr=subprocess.STDOUT, 22 | shell=True, env=merged_env, cwd=cwd, 23 | universal_newlines=True) 24 | while True: 25 | line = process.stdout.readline() 26 | print(line.rstrip()) 27 | line = str(line)[:-1] 28 | if line == '' and process.poll() is not None: 29 | break 30 | if process.returncode != 0: 31 | raise Exception("Non zero return code: %d"%process.returncode) 32 | 33 | grayordinatesres = "2" # This is currently the only option for which there is an atlas 34 | lowresmesh = 32 35 | 36 | def run_pre_freesurfer(**args): 37 | args.update(os.environ) 38 | args["t1"] = "@".join(t1ws) 39 | if t2ws != "NONE": 40 | args["t2"] = "@".join(t2ws) 41 | else: 42 | args["t2"] = "NONE" 43 | args["t2_template_res"] = args["t1_template_res"] 44 | 45 | cmd = '{HCPPIPEDIR}/PreFreeSurfer/PreFreeSurferPipeline.sh ' + \ 46 | '--path="{path}" ' + \ 47 | '--subject="{subject}" ' + \ 48 | '--t1="{t1}" ' + \ 49 | '--t2="{t2}" ' + \ 50 | '--t1template="{HCPPIPEDIR_Templates}/MNI152_T1_{t1_template_res}mm.nii.gz" ' + \ 51 | '--t1templatebrain="{HCPPIPEDIR_Templates}/MNI152_T1_{t1_template_res}mm_brain.nii.gz" ' + \ 52 | '--t1template2mm="{HCPPIPEDIR_Templates}/MNI152_T1_2mm.nii.gz" ' + \ 53 | '--t2template="{HCPPIPEDIR_Templates}/MNI152_T2_{t2_template_res}mm.nii.gz" ' + \ 54 | '--t2templatebrain="{HCPPIPEDIR_Templates}/MNI152_T2_{t2_template_res}mm_brain.nii.gz" ' + \ 55 | '--t2template2mm="{HCPPIPEDIR_Templates}/MNI152_T2_2mm.nii.gz" ' + \ 56 | '--t2samplespacing="{t2samplespacing}" ' + \ 57 |
'--templatemask="{HCPPIPEDIR_Templates}/MNI152_T1_{t1_template_res}mm_brain_mask.nii.gz" ' + \ 58 | '--template2mmmask="{HCPPIPEDIR_Templates}/MNI152_T1_2mm_brain_mask_dil.nii.gz" ' + \ 59 | '--brainsize="150" ' + \ 60 | '--fnirtconfig="{HCPPIPEDIR_Config}/T1_2_MNI152_2mm.cnf" ' + \ 61 | '--fmapmag="{fmapmag}" ' + \ 62 | '--fmapphase="{fmapphase}" ' + \ 63 | '--fmapgeneralelectric="NONE" ' + \ 64 | '--echodiff="{echodiff}" ' + \ 65 | '--SEPhaseNeg="{SEPhaseNeg}" ' + \ 66 | '--SEPhasePos="{SEPhasePos}" ' + \ 67 | '--seechospacing="{echospacing}" ' + \ 68 | '--seunwarpdir="{seunwarpdir}" ' + \ 69 | '--t1samplespacing="{t1samplespacing}" ' + \ 70 | '--unwarpdir="{unwarpdir}" ' + \ 71 | '--gdcoeffs={gdcoeffs} ' + \ 72 | '--avgrdcmethod={avgrdcmethod} ' + \ 73 | '--topupconfig="{HCPPIPEDIR_Config}/b02b0.cnf" ' + \ 74 | '--processing-mode="{processing_mode}" ' + \ 75 | '--printcom=""' 76 | cmd = cmd.format(**args) 77 | run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) 78 | 79 | def run_freesurfer(**args): 80 | args.update(os.environ) 81 | args["subjectDIR"] = os.path.join(args["path"], args["subject"], "T1w") 82 | cmd = '{HCPPIPEDIR}/FreeSurfer/FreeSurferPipeline.sh ' + \ 83 | '--subject="{subject}" ' + \ 84 | '--subjectDIR="{subjectDIR}" ' + \ 85 | '--t1="{path}/{subject}/T1w/T1w_acpc_dc_restore.nii.gz" ' + \ 86 | '--t1brain="{path}/{subject}/T1w/T1w_acpc_dc_restore_brain.nii.gz" ' + \ 87 | '--processing-mode="{processing_mode}" ' 88 | if args["processing_mode"] != "LegacyStyleData": 89 | cmd = cmd + '--t2="{path}/{subject}/T1w/T2w_acpc_dc_restore.nii.gz" ' 90 | cmd = cmd.format(**args) 91 | 92 | if not os.path.exists(os.path.join(args["subjectDIR"], "fsaverage")): 93 | shutil.copytree(os.path.join(os.environ["SUBJECTS_DIR"], "fsaverage"), 94 | os.path.join(args["subjectDIR"], "fsaverage")) 95 | if not os.path.exists(os.path.join(args["subjectDIR"], "lh.EC_average")): 96 | shutil.copytree(os.path.join(os.environ["SUBJECTS_DIR"], "lh.EC_average"), 97 | os.path.join(args["subjectDIR"], "lh.EC_average")) 98 | if not os.path.exists(os.path.join(args["subjectDIR"], "rh.EC_average")): 99 | shutil.copytree(os.path.join(os.environ["SUBJECTS_DIR"], "rh.EC_average"), 100 | os.path.join(args["subjectDIR"], "rh.EC_average")) 101 | 102 | run(cmd, cwd=args["path"], env={"NSLOTS": str(args["n_cpus"]), 103 | "OMP_NUM_THREADS": str(args["n_cpus"])}) 104 | 105 | def run_post_freesurfer(**args): 106 | args.update(os.environ) 107 | cmd = '{HCPPIPEDIR}/PostFreeSurfer/PostFreeSurferPipeline.sh ' + \ 108 | '--path="{path}" ' + \ 109 | '--subject="{subject}" ' + \ 110 | '--surfatlasdir="{HCPPIPEDIR_Templates}/standard_mesh_atlases" ' + \ 111 | '--grayordinatesdir="{HCPPIPEDIR_Templates}/91282_Greyordinates" ' + \ 112 | '--grayordinatesres="{grayordinatesres:s}" ' + \ 113 | '--hiresmesh="164" ' + \ 114 | '--lowresmesh="{lowresmesh:d}" ' + \ 115 | '--subcortgraylabels="{HCPPIPEDIR_Config}/FreeSurferSubcorticalLabelTableLut.txt" ' + \ 116 | '--freesurferlabels="{HCPPIPEDIR_Config}/FreeSurferAllLut.txt" ' + \ 117 | '--refmyelinmaps="{HCPPIPEDIR_Templates}/standard_mesh_atlases/Conte69.MyelinMap_BC.164k_fs_LR.dscalar.nii" ' + \ 118 | '--regname="{regname}" ' + \ 119 | '--processing-mode="{processing_mode}"' 120 | cmd = cmd.format(**args) 121 | run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) 122 | 123 | def run_generic_fMRI_volume_processsing(**args): 124 | args.update(os.environ) 125 | cmd = '{HCPPIPEDIR}/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh ' + \ 126 | 
'--path={path} ' + \ 127 | '--subject={subject} ' + \ 128 | '--fmriname={fmriname} ' + \ 129 | '--fmritcs={fmritcs} ' + \ 130 | '--fmriscout={fmriscout} ' + \ 131 | '--SEPhaseNeg={SEPhaseNeg} ' + \ 132 | '--SEPhasePos={SEPhasePos} ' + \ 133 | '--fmapmag="NONE" ' + \ 134 | '--fmapphase="NONE" ' + \ 135 | '--fmapgeneralelectric="NONE" ' + \ 136 | '--echospacing={echospacing} ' + \ 137 | '--echodiff="NONE" ' + \ 138 | '--unwarpdir={unwarpdir} ' + \ 139 | '--fmrires={fmrires:s} ' + \ 140 | '--dcmethod={dcmethod} ' + \ 141 | '--gdcoeffs={gdcoeffs} ' + \ 142 | '--topupconfig={HCPPIPEDIR_Config}/b02b0.cnf ' + \ 143 | '--printcom="" ' + \ 144 | '--biascorrection={biascorrection} ' + \ 145 | '--mctype="MCFLIRT" ' + \ 146 | '--processing-mode="{processing_mode}" ' + \ 147 | '--doslicetime="{doslicetime}" ' + \ 148 | '--slicetimerparams="{slicetimerparams}" ' 149 | 150 | cmd = cmd.format(**args) 151 | run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) 152 | 153 | def run_generic_fMRI_surface_processsing(**args): 154 | args.update(os.environ) 155 | cmd = '{HCPPIPEDIR}/fMRISurface/GenericfMRISurfaceProcessingPipeline.sh ' + \ 156 | '--path={path} ' + \ 157 | '--subject={subject} ' + \ 158 | '--fmriname={fmriname} ' + \ 159 | '--lowresmesh="{lowresmesh:d}" ' + \ 160 | '--fmrires={fmrires:s} ' + \ 161 | '--smoothingFWHM={fmrires:s} ' + \ 162 | '--grayordinatesres="{grayordinatesres:s}" ' + \ 163 | '--regname="{regname}" ' 164 | cmd = cmd.format(**args) 165 | run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) 166 | 167 | def run_diffusion_processsing(**args): 168 | args.update(os.environ) 169 | cmd = '{HCPPIPEDIR}/DiffusionPreprocessing/DiffPreprocPipeline.sh ' + \ 170 | '--posData="{posData}" ' +\ 171 | '--negData="{negData}" ' + \ 172 | '--path="{path}" ' + \ 173 | '--subject="{subject}" ' + \ 174 | '--echospacing="{echospacing}" '+ \ 175 | '--PEdir={PEdir} ' + \ 176 | '--gdcoeffs={gdcoeffs} ' + \ 177 | '--printcom=""' 178 | cmd = cmd.format(**args) 179 | run(cmd, cwd=args["path"], env={"OMP_NUM_THREADS": str(args["n_cpus"])}) 180 | 181 | __version__ = open('/version').read().strip() 182 | 183 | parser = argparse.ArgumentParser(description='HCP Pipelines BIDS App (T1w, T2w, fMRI)') 184 | parser.add_argument('bids_dir', help='The directory with the input dataset ' 185 | 'formatted according to the BIDS standard.') 186 | parser.add_argument('output_dir', help='The directory where the output files ' 187 | 'should be stored. If you are running group level analysis ' 188 | 'this folder should be prepopulated with the results of the ' 189 | 'participant level analysis.') 190 | parser.add_argument('analysis_level', help='Level of the analysis that will be performed. ' 191 | 'Multiple participant level analyses can be run independently ' 192 | '(in parallel) using the same output_dir.', 193 | choices=['participant']) 194 | parser.add_argument('--participant_label', help='The label of the participant that should be analyzed. The label ' 195 | 'corresponds to sub- from the BIDS spec ' 196 | '(so it does not include "sub-"). If this parameter is not ' 197 | 'provided all subjects should be analyzed. Multiple ' 198 | 'participants can be specified with a space separated list.', 199 | nargs="+") 200 | parser.add_argument('--session_label', help='The label of the session that should be analyzed. The label ' 201 | 'corresponds to ses- from the BIDS spec ' 202 | '(so it does not include "ses-"). If this parameter is not ' 203 | 'provided, all sessions should be analyzed. 
Multiple ' 204 | 'sessions can be specified with a space separated list.', 205 | nargs="+") 206 | parser.add_argument('--n_cpus', help='Number of CPUs/cores available to use.', 207 | default=1, type=int) 208 | parser.add_argument('--stages', help='Which stages to run. Space separated list.', 209 | nargs="+", choices=['PreFreeSurfer', 'FreeSurfer', 210 | 'PostFreeSurfer', 'fMRIVolume', 211 | 'fMRISurface'], 212 | default=['PreFreeSurfer', 'FreeSurfer', 'PostFreeSurfer', 213 | 'fMRIVolume', 'fMRISurface']) 214 | parser.add_argument('--coreg', help='Coregistration method to use', 215 | choices=['MSMSulc', 'FS'], default='MSMSulc') 216 | parser.add_argument('--gdcoeffs', help='Path to gradients coefficients file', 217 | default="NONE") 218 | parser.add_argument('--license_key', help='FreeSurfer license key - letters and numbers after "*" in the email you received after registration. To register (for free) visit https://surfer.nmr.mgh.harvard.edu/registration.html', 219 | required=True) 220 | parser.add_argument('-v', '--version', action='version', 221 | version='HCP Pipelines BIDS App version {}'.format(__version__)) 222 | parser.add_argument('--anat_unwarpdir', help='Unwarp direction for 3D volumes', 223 | choices=['x', 'y', 'z', 'x-', 'y-', 'z-', 'NONE'], default="z") 224 | parser.add_argument('--skip_bids_validation', '--skip-bids-validation', action='store_true', 225 | default=False, 226 | help='assume the input dataset is BIDS compliant and skip the validation') 227 | parser.add_argument('--processing_mode', '--processing-mode', 228 | choices=['hcp', 'legacy', 'auto'], default='hcp', 229 | help='Control HCP-Pipeline mode: ' 230 | 'hcp (HCPStyleData): require T2w and fieldmap modalities; ' 231 | 'legacy (LegacyStyleData): always ignore T2w and fieldmaps; ' 232 | 'auto: use T2w and/or fieldmaps if available') 233 | parser.add_argument('--doslicetime', help="Apply slice timing correction as part of fMRIVolume.", 234 | action='store_true', default=False) 235 | 236 | args = parser.parse_args() 237 | 238 | 239 | if (args.gdcoeffs != 'NONE') and ('PreFreeSurfer' in args.stages) and (args.anat_unwarpdir == "NONE"): 240 | raise AssertionError('--anat_unwarpdir must be specified to use PreFreeSurfer distortion correction') 241 | 242 | if not args.skip_bids_validation: 243 | run("bids-validator " + args.bids_dir) 244 | 245 | layout = BIDSLayout(args.bids_dir, derivatives=False, absolute_paths=True) 246 | subjects_to_analyze = [] 247 | # only for a subset of subjects 248 | if args.participant_label: 249 | subjects_to_analyze = args.participant_label 250 | # for all subjects 251 | else: 252 | subject_dirs = glob(os.path.join(args.bids_dir, "sub-*")) 253 | subjects_to_analyze = [subject_dir.split("-")[-1] for subject_dir in subject_dirs] 254 | # only use a subset of sessions 255 | if args.session_label: 256 | session_to_analyze = dict(session=args.session_label) 257 | else: 258 | session_to_analyze = dict() 259 | 260 | # running participant level 261 | if args.analysis_level == "participant": 262 | # find all T1s 263 | for subject_label in subjects_to_analyze: 264 | t1ws = [f.path for f in layout.get(subject=subject_label, 265 | suffix='T1w', 266 | extensions=["nii.gz", "nii"], 267 | **session_to_analyze)] 268 | assert (len(t1ws) > 0), "No T1w files found for subject %s!"%subject_label 269 | 270 | available_resolutions = ["0.7", "0.8", "1"] 271 | t1_zooms = nibabel.load(t1ws[0]).header.get_zooms() 272 | t1_res = float(min(t1_zooms[:3])) 273 | t1_template_res = min(available_resolutions,
key=lambda x:abs(float(x)-t1_res)) 274 | t1_spacing = layout.get_metadata(t1ws[0])["DwellTime"] 275 | 276 | t2ws = [f.path for f in layout.get(subject=subject_label, 277 | suffix='T2w', 278 | extensions=["nii.gz", "nii"], 279 | **session_to_analyze)] 280 | if (len(t2ws) > 0) and ( args.processing_mode != 'legacy'): 281 | t2_zooms = nibabel.load(t2ws[0]).header.get_zooms() 282 | t2_res = float(min(t2_zooms[:3])) 283 | t2_template_res = min(available_resolutions, key=lambda x: abs(float(x) - t2_res)) 284 | t2_spacing = layout.get_metadata(t2ws[0])["DwellTime"] 285 | anat_processing_mode = "HCPStyleData" 286 | 287 | else: 288 | assert (args.processing_mode != 'hcp'), \ 289 | f"No T2w files found for sub-{subject_label}. Consider --processing_mode [legacy | auto ]." 290 | 291 | t2ws = "NONE" 292 | t2_template_res = "NONE" 293 | t2_spacing = "NONE" 294 | anat_processing_mode = "LegacyStyleData" 295 | 296 | # parse fieldmaps for structural processing 297 | fieldmap_set = layout.get_fieldmap(t1ws[0], return_list=True) 298 | fmap_args = {"fmapmag": "NONE", 299 | "fmapphase": "NONE", 300 | "echodiff": "NONE", 301 | "t1samplespacing": "NONE", 302 | "t2samplespacing": "NONE", 303 | "unwarpdir": "NONE", 304 | "avgrdcmethod": "NONE", 305 | "SEPhaseNeg": "NONE", 306 | "SEPhasePos": "NONE", 307 | "echospacing": "NONE", 308 | "seunwarpdir": "NONE"} 309 | 310 | if fieldmap_set and ( args.processing_mode != 'legacy' ): 311 | 312 | # use an unwarpdir specified on the command line 313 | # this is different from the SE direction 314 | unwarpdir = args.anat_unwarpdir 315 | 316 | fmap_args.update({"t1samplespacing": "%.8f"%t1_spacing, 317 | "t2samplespacing": "%.8f"%t2_spacing, 318 | "unwarpdir": unwarpdir}) 319 | 320 | if fieldmap_set[0]["suffix"] == "phasediff": 321 | merged_file = "%s/tmp/%s/magfile.nii.gz"%(args.output_dir, subject_label) 322 | run("mkdir -p %s/tmp/%s/ && fslmerge -t %s %s %s"%(args.output_dir, 323 | subject_label, 324 | merged_file, 325 | fieldmap_set[0]["magnitude1"], 326 | fieldmap_set[0]["magnitude2"])) 327 | 328 | phasediff_metadata = layout.get_metadata(fieldmap_set[0]["phasediff"]) 329 | te_diff = phasediff_metadata["EchoTime2"] - phasediff_metadata["EchoTime1"] 330 | # HCP expects TE in milliseconds 331 | te_diff = te_diff*1000.0 332 | 333 | fmap_args.update({"fmapmag": merged_file, 334 | "fmapphase": fieldmap_set[0]["phasediff"], 335 | "echodiff": "%.6f"%te_diff, 336 | "avgrdcmethod": "SiemensFieldMap"}) 337 | elif fieldmap_set[0]["suffix"] == "epi": 338 | SEPhaseNeg = None 339 | SEPhasePos = None 340 | for fieldmap in fieldmap_set: 341 | enc_dir = layout.get_metadata(fieldmap['epi'])["PhaseEncodingDirection"] 342 | if "-" in enc_dir: 343 | SEPhaseNeg = fieldmap['epi'] 344 | else: 345 | SEPhasePos = fieldmap['epi'] 346 | 347 | seunwarpdir = layout.get_metadata(fieldmap_set[0]["epi"])["PhaseEncodingDirection"] 348 | seunwarpdir = seunwarpdir.replace("-", "").replace("i","x").replace("j", "y").replace("k", "z") 349 | 350 | #TODO check consistency of echo spacing instead of assuming it's all the same 351 | if "EffectiveEchoSpacing" in layout.get_metadata(fieldmap_set[0]["epi"]): 352 | echospacing = layout.get_metadata(fieldmap_set[0]["epi"])["EffectiveEchoSpacing"] 353 | elif "TotalReadoutTime" in layout.get_metadata(fieldmap_set[0]["epi"]): 354 | # HCP Pipelines do not allow users to specify total readout time directly 355 | # Hence we need to reverse the calculations to provide echo spacing that would 356 | # result in the right total readout time 357 | # see
https://github.com/Washington-University/Pipelines/blob/master/global/scripts/TopupPreprocessingAll.sh#L202 358 | print("BIDS App wrapper: Did not find EffectiveEchoSpacing, calculating it from TotalReadoutTime") 359 | # TotalReadoutTime = EffectiveEchoSpacing * (len(PhaseEncodingDirection) - 1) 360 | total_readout_time = layout.get_metadata(fieldmap_set[0]["epi"])["TotalReadoutTime"] 361 | phase_len = nibabel.load(fieldmap_set[0]["epi"]).shape[{"x": 0, "y": 1}[seunwarpdir]] 362 | echospacing = total_readout_time / float(phase_len - 1) 363 | else: 364 | raise RuntimeError("EffectiveEchoSpacing or TotalReadoutTime not defined for the fieldmap intended for T1w image. Please fix your BIDS dataset.") 365 | 366 | fmap_args.update({"SEPhaseNeg": SEPhaseNeg, 367 | "SEPhasePos": SEPhasePos, 368 | "echospacing": "%.6f"%echospacing, 369 | "seunwarpdir": seunwarpdir, 370 | "avgrdcmethod": "TOPUP"}) 371 | #TODO add support for GE fieldmaps 372 | 373 | struct_stages_dict = OrderedDict([("PreFreeSurfer", partial(run_pre_freesurfer, 374 | path=args.output_dir, 375 | subject="sub-%s"%subject_label, 376 | t1ws=t1ws, 377 | t2ws=t2ws, 378 | n_cpus=args.n_cpus, 379 | t1_template_res=t1_template_res, 380 | t2_template_res=t2_template_res, 381 | gdcoeffs=args.gdcoeffs, 382 | processing_mode=anat_processing_mode, 383 | **fmap_args)), 384 | ("FreeSurfer", partial(run_freesurfer, 385 | path=args.output_dir, 386 | subject="sub-%s"%subject_label, 387 | n_cpus=args.n_cpus, 388 | processing_mode=anat_processing_mode)), 389 | ("PostFreeSurfer", partial(run_post_freesurfer, 390 | path=args.output_dir, 391 | subject="sub-%s"%subject_label, 392 | grayordinatesres=grayordinatesres, 393 | lowresmesh=lowresmesh, 394 | n_cpus=args.n_cpus, 395 | regname=args.coreg, 396 | processing_mode=anat_processing_mode)) 397 | ]) 398 | for stage, stage_func in struct_stages_dict.items(): 399 | if stage in args.stages: 400 | print(f'{stage} in {anat_processing_mode} mode') 401 | stage_func() 402 | 403 | bolds = [f.path for f in layout.get(subject=subject_label, 404 | suffix='bold', 405 | extensions=["nii.gz", "nii"], 406 | **session_to_analyze)] 407 | for fmritcs in bolds: 408 | fmriname = "_".join(fmritcs.split("sub-")[-1].split("_")[1:]).split(".")[0] 409 | assert fmriname 410 | 411 | fmriscout = fmritcs.replace("_bold", "_sbref") 412 | if not os.path.exists(fmriscout): 413 | fmriscout = "NONE" 414 | 415 | fieldmap_set = layout.get_fieldmap(fmritcs, return_list=True) 416 | if fieldmap_set and len(fieldmap_set) == 2 and all(item["suffix"] == "epi" for item in fieldmap_set) and ( args.processing_mode != 'legacy' ): 417 | SEPhaseNeg = None 418 | SEPhasePos = None 419 | for fieldmap in fieldmap_set: 420 | enc_dir = layout.get_metadata(fieldmap["epi"])["PhaseEncodingDirection"] 421 | if "-" in enc_dir: 422 | SEPhaseNeg = fieldmap['epi'] 423 | else: 424 | SEPhasePos = fieldmap['epi'] 425 | echospacing = layout.get_metadata(fmritcs)["EffectiveEchoSpacing"] 426 | unwarpdir = layout.get_metadata(fmritcs)["PhaseEncodingDirection"] 427 | unwarpdir = unwarpdir.replace("i","x").replace("j", "y").replace("k", "z") 428 | if len(unwarpdir) == 2: 429 | unwarpdir = "-" + unwarpdir[0] 430 | dcmethod = "TOPUP" 431 | biascorrection = "SEBASED" 432 | func_processing_mode = "HCPStyleData" 433 | else: 434 | assert (args.processing_mode != 'hcp'), \ 435 | f"No fieldmaps found for BOLD {fmritcs}. Consider --processing_mode [legacy | auto ]."
436 | 437 | SEPhaseNeg = "NONE" 438 | SEPhasePos = "NONE" 439 | echospacing = "NONE" 440 | unwarpdir = "NONE" 441 | dcmethod = "NONE" 442 | biascorrection = "NONE" 443 | func_processing_mode = "LegacyStyleData" 444 | 445 | zooms = nibabel.load(fmritcs).header.get_zooms() 446 | fmrires = float(min(zooms[:3])) 447 | fmrires = "2" # https://github.com/Washington-University/Pipelines/blob/637b35f73697b77dcb1d529902fc55f431f03af7/fMRISurface/scripts/SubcorticalProcessing.sh#L43 448 | # While running '/usr/bin/wb_command -cifti-create-dense-timeseries /scratch/users/chrisgor/hcp_output2/sub-100307/MNINonLinear/Results/EMOTION/EMOTION_temp_subject.dtseries.nii -volume /scratch/users/chrisgor/hcp_output2/sub-100307/MNINonLinear/Results/EMOTION/EMOTION.nii.gz /scratch/users/chrisgor/hcp_output2/sub-100307/MNINonLinear/ROIs/ROIs.2.nii.gz': 449 | # ERROR: label volume has a different volume space than data volume 450 | 451 | # optional slice timing 452 | doslicetime = "FALSE" 453 | slicetimerparams = "" 454 | if args.doslicetime: 455 | doslicetime = "TRUE" 456 | func_processing_mode = "LegacyStyleData" 457 | try: 458 | slicetiming = layout.get_metadata(fmritcs)["SliceTiming"] 459 | tr = layout.get_metadata(fmritcs)["RepetitionTime"] 460 | except KeyError: 461 | raise RuntimeError(f"SliceTiming metadata is required for slice timing correction of {fmritcs}") 462 | 463 | try: 464 | slicedirection = layout.get_metadata(fmritcs)["SliceEncodingDirection"] 465 | if '-' in slicedirection: 466 | slicetiming.reverse() 467 | except KeyError: 468 | pass 469 | 470 | # shift timing to the median slice, assuming equally spaced slices 471 | slicedelta = np.diff(np.sort(slicetiming)) 472 | slicedelta = np.mean(slicedelta[slicedelta > 0]) 473 | slicetiming = np.asarray(slicetiming) / (np.max(slicetiming) + slicedelta) 474 | slicetiming = -(slicetiming - np.median(slicetiming)) 475 | 476 | tmpdir = f"{args.output_dir}/tmp/{subject_label}" 477 | Path(tmpdir).mkdir(parents=True, exist_ok=True) 478 | with open(f"{tmpdir}/{fmriname}_st.txt", "w") as fp: 479 | fp.writelines("%f\n" % t for t in slicetiming) 480 | 481 | slicetimerparams = f"--repeat={tr}@--tcustom={tmpdir}/{fmriname}_st.txt" 482 | 483 | func_stages_dict = OrderedDict([("fMRIVolume", partial(run_generic_fMRI_volume_processsing, 484 | path=args.output_dir, 485 | subject="sub-%s"%subject_label, 486 | fmriname=fmriname, 487 | fmritcs=fmritcs, 488 | fmriscout=fmriscout, 489 | SEPhaseNeg=SEPhaseNeg, 490 | SEPhasePos=SEPhasePos, 491 | echospacing=echospacing, 492 | unwarpdir=unwarpdir, 493 | fmrires=fmrires, 494 | dcmethod=dcmethod, 495 | biascorrection=biascorrection, 496 | n_cpus=args.n_cpus, 497 | gdcoeffs=args.gdcoeffs, 498 | doslicetime=doslicetime, 499 | slicetimerparams=slicetimerparams, 500 | processing_mode=func_processing_mode)), 501 | ("fMRISurface", partial(run_generic_fMRI_surface_processsing, 502 | path=args.output_dir, 503 | subject="sub-%s"%subject_label, 504 | fmriname=fmriname, 505 | fmrires=fmrires, 506 | n_cpus=args.n_cpus, 507 | grayordinatesres=grayordinatesres, 508 | lowresmesh=lowresmesh, 509 | regname=args.coreg)) 510 | ]) 511 | for stage, stage_func in func_stages_dict.items(): 512 | if stage in args.stages: 513 | print(f"Processing {fmritcs} in {func_processing_mode} mode.") 514 | stage_func() 515 | 516 | dwis = layout.get(subject=subject_label, suffix='dwi', 517 | extensions=["nii.gz", "nii"]) 518 | 519 | # print(dwis) 520 | # acqs = set(layout.get(target='acquisition', return_type='id', 521 | # subject=subject_label, type='dwi', 522 | # extensions=["nii.gz", 
"nii"])) 523 | # print(acqs) 524 | # posData = [] 525 | # negData = [] 526 | # for acq in acqs: 527 | # pos = "EMPTY" 528 | # neg = "EMPTY" 529 | # dwis = layout.get(subject=subject_label, 530 | # type='dwi', acquisition=acq, 531 | # extensions=["nii.gz", "nii"]) 532 | # assert len(dwis) <= 2 533 | # for dwi in dwis: 534 | # dwi = dwi.filename 535 | # if "-" in layout.get_metadata(dwi)["PhaseEncodingDirection"]: 536 | # neg = dwi 537 | # else: 538 | # pos = dwi 539 | # posData.append(pos) 540 | # negData.append(neg) 541 | # 542 | # print(negData) 543 | # print(posData) 544 | -------------------------------------------------------------------------------- /version: -------------------------------------------------------------------------------- 1 | dev 2 | --------------------------------------------------------------------------------