├── .circleci ├── bcp_anat_outputs.txt ├── bcp_anat_t2only_outputs.txt ├── bcp_full_outputs.txt └── config.yml ├── .codespellrc ├── .dockerignore ├── .git-blame-ignore-revs ├── .git_archival.txt ├── .gitattributes ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.yml │ ├── config.yml │ └── feature_request.yml └── workflows │ ├── pytest.yml │ └── style.yml ├── .gitignore ├── .mailmap ├── .maint ├── contributors.json ├── developers.json ├── former.json ├── update_changes.sh └── update_zenodo.py ├── .pep8speaks.yml ├── .pre-commit-config.yaml ├── .readthedocs.yml ├── .zenodo.json ├── CHANGES.md ├── Dockerfile ├── LICENSE ├── Makefile ├── README.md ├── docker └── files │ └── nipype.cfg ├── docs ├── Makefile ├── _static │ ├── nibabies_anat.png │ └── nibabies_func.png ├── community.md ├── conf.py ├── faqs.md ├── index.md ├── installation.md ├── make.bat ├── outputs.md ├── requirements.txt ├── sphinxext │ └── github_link.py └── usage.md ├── env.yml ├── long_description.md ├── nibabies ├── __init__.py ├── _types.py ├── cli │ ├── __init__.py │ ├── mcribs.py │ ├── parser.py │ ├── run.py │ ├── tests │ │ ├── __init__.py │ │ └── test_parser.py │ ├── version.py │ └── workflow.py ├── config.py ├── conftest.py ├── data │ ├── FreeSurferLabelRemappings.json │ ├── FreeSurferSubcorticalLabelTableLut.txt │ ├── MNIInfant_to_MNI1526NLinAsym.mat │ ├── __init__.py │ ├── antsBrainExtraction_precise.json │ ├── antsBrainExtraction_testing.json │ ├── boilerplate.bib │ ├── flirtsch │ │ └── bbr.sch │ ├── io_spec_anat.json │ ├── io_spec_func.json │ ├── itkIdentityTransform.txt │ ├── reports-spec.yml │ ├── t1-t2-coreg.json │ ├── tests │ │ └── config.toml │ ├── tpl-MNI152NLin6Asym_res-01_desc-avgwmparc_dseg.nii.gz │ └── xfm_manifest.json ├── interfaces │ ├── __init__.py │ ├── bids.py │ ├── confounds.py │ ├── conftest.py │ ├── freesurfer.py │ ├── gifti.py │ ├── maths.py │ ├── mcribs.py │ ├── metric.py │ ├── multiecho.py │ ├── nibabel.py │ ├── patches.py │ ├── reports.py │ ├── resampling.py │ ├── tests │ │ ├── __init__.py │ │ ├── test_bids.py │ │ ├── test_mcribs.py │ │ └── test_nibabel.py │ ├── utils.py │ └── workbench.py ├── reports │ ├── __init__.py │ └── core.py ├── tests │ ├── __init__.py │ ├── data │ │ └── labelfile.txt │ └── test_config.py ├── utils │ ├── __init__.py │ ├── bids.py │ ├── confounds.py │ ├── debug.py │ ├── derivatives.py │ ├── filtering.py │ ├── misc.py │ ├── telemetry.py │ ├── tests │ │ ├── __init__.py │ │ ├── full-derivatives.yml │ │ ├── test_bids.py │ │ └── test_derivatives.py │ ├── transforms.py │ └── viz.py └── workflows │ ├── __init__.py │ ├── anatomical │ ├── __init__.py │ ├── apply.py │ ├── brain_extraction.py │ ├── fit.py │ ├── outputs.py │ ├── preproc.py │ ├── registration.py │ ├── resampling.py │ ├── segmentation.py │ ├── surfaces.py │ └── tests │ │ ├── __init__.py │ │ └── test_preproc.py │ ├── base.py │ ├── bold │ ├── __init__.py │ ├── alignment.py │ ├── apply.py │ ├── base.py │ ├── boldref.py │ ├── confounds.py │ ├── fit.py │ ├── hmc.py │ ├── outputs.py │ ├── reference.py │ ├── registration.py │ ├── resampling.py │ ├── stc.py │ └── t2s.py │ └── tests │ ├── __init__.py │ └── test_base.py ├── pyproject.toml ├── requirements.txt ├── scripts ├── anatprep.py ├── bold_subcortical.py ├── check_outputs.py └── fetch_templates.py ├── tox.ini └── wrapper ├── LICENSE ├── README.rst ├── pyproject.toml └── src └── nibabies_wrapper ├── __init__.py └── __main__.py /.circleci/bcp_anat_outputs.txt: -------------------------------------------------------------------------------- 1 | .bidsignore 2 | 
dataset_description.json 3 | desc-aparcaseg_dseg.tsv 4 | desc-aseg_dseg.tsv 5 | logs 6 | logs/CITATION.bib 7 | logs/CITATION.html 8 | logs/CITATION.md 9 | logs/CITATION.tex 10 | sub-01 11 | sub-01/ses-1mo 12 | sub-01/ses-1mo/anat 13 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_desc-preproc_T1w.json 14 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_desc-preproc_T1w.nii.gz 15 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_desc-preproc_T2w.json 16 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_desc-preproc_T2w.nii.gz 17 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_from-MNIInfant+1_to-T1w_mode-image_xfm.h5 18 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_from-T1w_to-MNIInfant+1_mode-image_xfm.h5 19 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_from-T1w_to-T2w_mode-image_xfm.h5 20 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_from-T1w_to-fsnative_mode-image_xfm.txt 21 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_from-T2w_to-T1w_mode-image_xfm.h5 22 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_from-fsnative_to-T1w_mode-image_xfm.txt 23 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_curv.shape.gii 24 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_inflated.surf.gii 25 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_midthickness.surf.gii 26 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_pial.surf.gii 27 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_space-fsLR_desc-reg_sphere.surf.gii 28 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_space-fsaverage_desc-reg_sphere.surf.gii 29 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_sphere.surf.gii 30 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_sulc.shape.gii 31 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_thickness.shape.gii 32 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_white.surf.gii 33 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_curv.shape.gii 34 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_inflated.surf.gii 35 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_midthickness.surf.gii 36 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_pial.surf.gii 37 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_space-fsLR_desc-reg_sphere.surf.gii 38 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_space-fsaverage_desc-reg_sphere.surf.gii 39 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_sphere.surf.gii 40 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_sulc.shape.gii 41 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_thickness.shape.gii 42 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_white.surf.gii 43 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-MNIInfant_cohort-1_desc-brain_mask.json 44 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-MNIInfant_cohort-1_desc-brain_mask.nii.gz 45 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-MNIInfant_cohort-1_desc-preproc_T1w.json 46 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-MNIInfant_cohort-1_desc-preproc_T1w.nii.gz 47 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-MNIInfant_cohort-1_dseg.nii.gz 48 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-MNIInfant_cohort-1_label-CSF_probseg.nii.gz 49 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-MNIInfant_cohort-1_label-GM_probseg.nii.gz 50 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-MNIInfant_cohort-1_label-WM_probseg.nii.gz 51 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-T1w_desc-aparcaseg_dseg.nii.gz 52 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-T1w_desc-aseg_dseg.nii.gz 53 | 
sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-T1w_desc-ribbon_mask.json 54 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-T1w_desc-ribbon_mask.nii.gz 55 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-T1w_dseg.nii.gz 56 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-T1w_label-CSF_probseg.nii.gz 57 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-T1w_label-GM_probseg.nii.gz 58 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-T1w_label-WM_probseg.nii.gz 59 | sub-01_ses-1mo.html 60 | -------------------------------------------------------------------------------- /.circleci/bcp_anat_t2only_outputs.txt: -------------------------------------------------------------------------------- 1 | .bidsignore 2 | dataset_description.json 3 | desc-aparcaseg_dseg.tsv 4 | desc-aseg_dseg.tsv 5 | logs 6 | logs/CITATION.bib 7 | logs/CITATION.html 8 | logs/CITATION.md 9 | logs/CITATION.tex 10 | sub-01 11 | sub-01/ses-1mo 12 | sub-01/ses-1mo/anat 13 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_desc-preproc_T2w.json 14 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_desc-preproc_T2w.nii.gz 15 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_from-MNI152NLin6Asym_to-T2w_mode-image_xfm.h5 16 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_from-MNIInfant+1_to-T2w_mode-image_xfm.h5 17 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_from-T2w_to-MNI152NLin6Asym_mode-image_xfm.h5 18 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_from-T2w_to-MNIInfant+1_mode-image_xfm.h5 19 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_from-T2w_to-fsnative_mode-image_xfm.txt 20 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_from-fsnative_to-T2w_mode-image_xfm.txt 21 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_curv.shape.gii 22 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_inflated.surf.gii 23 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_midthickness.surf.gii 24 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_pial.surf.gii 25 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_space-fsLR_den-32k_midthickness.surf.gii 26 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_space-fsLR_den-32k_pial.surf.gii 27 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_space-fsLR_den-32k_white.surf.gii 28 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_space-fsLR_desc-reg_sphere.surf.gii 29 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_space-fsaverage_desc-reg_sphere.surf.gii 30 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_sphere.surf.gii 31 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_sulc.shape.gii 32 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_thickness.shape.gii 33 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-L_white.surf.gii 34 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_curv.shape.gii 35 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_inflated.surf.gii 36 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_midthickness.surf.gii 37 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_pial.surf.gii 38 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_space-fsLR_den-32k_midthickness.surf.gii 39 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_space-fsLR_den-32k_pial.surf.gii 40 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_space-fsLR_den-32k_white.surf.gii 41 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_space-fsLR_desc-reg_sphere.surf.gii 42 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_space-fsaverage_desc-reg_sphere.surf.gii 43 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_sphere.surf.gii 44 | 
sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_sulc.shape.gii 45 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_thickness.shape.gii 46 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_hemi-R_white.surf.gii 47 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-MNIInfant_cohort-1_desc-brain_mask.json 48 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-MNIInfant_cohort-1_desc-brain_mask.nii.gz 49 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-MNIInfant_cohort-1_desc-preproc_T2w.json 50 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-MNIInfant_cohort-1_desc-preproc_T2w.nii.gz 51 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-MNIInfant_cohort-1_dseg.nii.gz 52 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-MNIInfant_cohort-1_label-CSF_probseg.nii.gz 53 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-MNIInfant_cohort-1_label-GM_probseg.nii.gz 54 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-MNIInfant_cohort-1_label-WM_probseg.nii.gz 55 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-T2w_desc-aparcaseg_dseg.nii.gz 56 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-T2w_desc-aseg_dseg.nii.gz 57 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-T2w_desc-ribbon_mask.json 58 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-T2w_desc-ribbon_mask.nii.gz 59 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-T2w_dseg.nii.gz 60 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-T2w_label-CSF_probseg.nii.gz 61 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-T2w_label-GM_probseg.nii.gz 62 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-T2w_label-WM_probseg.nii.gz 63 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-fsLR_den-91k_curv.dscalar.nii 64 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-fsLR_den-91k_curv.json 65 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-fsLR_den-91k_sulc.dscalar.nii 66 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-fsLR_den-91k_sulc.json 67 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-fsLR_den-91k_thickness.dscalar.nii 68 | sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-fsLR_den-91k_thickness.json 69 | sub-01_ses-1mo.html 70 | -------------------------------------------------------------------------------- /.codespellrc: -------------------------------------------------------------------------------- 1 | [codespell] 2 | skip = .git,*.pdf,*.svg,*.html,dataset_description.json,*.bib 3 | # te - TE 4 | # Weill - name 5 | # reson - Reson. 
abbreviation in citation 6 | ignore-words-list = te,weill,reson 7 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | # python cache 2 | __pycache__/**/* 3 | __pycache__ 4 | *.pyc 5 | 6 | # python distribution 7 | build/**/* 8 | build 9 | dist/**/* 10 | dist 11 | nibabies.egg-info/**/* 12 | nibabies.egg-info 13 | .eggs/**/* 14 | .eggs 15 | 16 | # other 17 | testdata/ 18 | work/**/* 19 | work 20 | out/**/* 21 | out/ 22 | -------------------------------------------------------------------------------- /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # isort addition 2 | 6f809fa9950654cbb7f755922c45dffbc88dc45d 3 | # Ruff 0.9.2 4 | 03e848c6f7c102cc9e0907a38c311afe5c357e77 5 | -------------------------------------------------------------------------------- /.git_archival.txt: -------------------------------------------------------------------------------- 1 | node: 1e6afd415ec2776a4bf749628fa1a58646d536db 2 | node-date: 2025-05-19T14:00:57-04:00 3 | describe-name: 25.0.2-1-g1e6afd41 4 | ref-names: HEAD -> master 5 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | .git_archival.txt export-subst 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | name: Bug Report 2 | description: File a report to help us improve 3 | labels: ["bug", "triage"] 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: | 8 | Thanks for taking the time to fill out this bug report! 9 | - type: textarea 10 | id: what-happened 11 | attributes: 12 | label: What happened? 13 | description: Also tell us, what did you expect to happen? 14 | placeholder: Tell us what you see! 15 | validations: 16 | required: true 17 | - type: textarea 18 | id: command 19 | attributes: 20 | label: What command did you use? 21 | description: This helps us replicate the problem. This will be automatically formatted into code, so no need for backticks. 22 | render: shell 23 | validations: 24 | required: true 25 | - type: input 26 | id: version 27 | attributes: 28 | label: What version of NiBabies are you using? 29 | validations: 30 | required: true 31 | - type: textarea 32 | id: logs 33 | attributes: 34 | label: Relevant log output 35 | description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks. 36 | render: shell 37 | - type: textarea 38 | id: addinfo 39 | attributes: 40 | label: Add any additional information or context about the problem here. 41 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: true 2 | contact_links: 3 | - name: Questions 4 | url: https://github.com/nipreps/nibabies/discussions 5 | about: Please ask and answer questions related to NiBabies usage here. 
6 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | name: Feature Request 2 | description: Suggest an idea for this project 3 | labels: ["enhancement"] 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: | 8 | Ideas for improvements are always appreciated! 9 | - type: textarea 10 | id: summary 11 | attributes: 12 | label: A short summary of what you would like to see in *NiBabies*. 13 | validations: 14 | required: true 15 | - type: dropdown 16 | id: interest 17 | attributes: 18 | label: Do you have any interest in helping implement the feature? 19 | description: Help is always welcome :) 20 | options: 21 | - Yes! 22 | - Yes, but I would need guidance 23 | - No :( 24 | validations: 25 | required: true 26 | - type: textarea 27 | id: addinfo 28 | attributes: 29 | label: Add any additional information or context about the request here. 30 | -------------------------------------------------------------------------------- /.github/workflows/pytest.yml: -------------------------------------------------------------------------------- 1 | name: Pytest 2 | 3 | on: 4 | push: 5 | branches: ['*'] 6 | tags: ['*'] 7 | pull_request: 8 | branches: [master, 'maint/*'] 9 | 10 | defaults: 11 | run: 12 | shell: bash 13 | 14 | env: 15 | FORCE_COLOR: true 16 | 17 | concurrency: 18 | group: ${{ github.workflow }}-${{ github.ref }} 19 | cancel-in-progress: true 20 | 21 | permissions: 22 | contents: read 23 | 24 | jobs: 25 | test: 26 | runs-on: ${{ matrix.os }} 27 | strategy: 28 | matrix: 29 | os: ['ubuntu-latest'] 30 | python-version: ['3.10', '3.11', '3.12'] 31 | dependencies: ['latest', 'pre'] 32 | include: 33 | - os: ubuntu-latest 34 | python-version: '3.10' 35 | dependencies: 'min' 36 | 37 | env: 38 | DEPENDS: ${{ matrix.dependencies }} 39 | 40 | steps: 41 | - uses: actions/checkout@v4 42 | with: 43 | submodules: recursive 44 | fetch-depth: 0 45 | - uses: actions/cache@v4 46 | with: 47 | path: ~/.cache/templateflow 48 | key: templateflow-v1 49 | - name: Install dependencies 50 | run: | 51 | sudo apt update 52 | sudo apt install -y --no-install-recommends graphviz 53 | - name: Set up Python ${{ matrix.python-version }} 54 | uses: actions/setup-python@v5 55 | with: 56 | python-version: ${{ matrix.python-version }} 57 | - name: Display Python version 58 | run: python -c "import sys; print(sys.version)" 59 | - name: Install tox 60 | run: | 61 | python -m pip install --upgrade pip 62 | python -m pip install tox tox-gh-actions 63 | - name: Show tox config 64 | run: tox c 65 | - name: Run tox 66 | run: tox -v --exit-and-dump-after 1200 67 | - uses: codecov/codecov-action@v4 68 | with: 69 | file: coverage.xml 70 | token: ${{ secrets.CODECOV_TOKEN }} 71 | if: ${{ always() }} 72 | -------------------------------------------------------------------------------- /.github/workflows/style.yml: -------------------------------------------------------------------------------- 1 | name: Style + spelling check 2 | 3 | on: 4 | push: 5 | branches: [ '*' ] 6 | tags: [ '*' ] 7 | pull_request: 8 | branches: [ master, 'maint/*' ] 9 | 10 | defaults: 11 | run: 12 | shell: bash 13 | 14 | concurrency: 15 | group: ${{ github.workflow }}-${{ github.ref }} 16 | cancel-in-progress: true 17 | 18 | permissions: 19 | contents: read 20 | 21 | jobs: 22 | style: 23 | runs-on: ubuntu-latest 24 | steps: 25 | - uses: actions/checkout@v4 26 | - run: pipx run ruff check . 
27 | - run: pipx run ruff format --diff . 28 | 29 | codespell: 30 | name: Check for spelling errors 31 | runs-on: ubuntu-latest 32 | 33 | steps: 34 | - name: Checkout 35 | uses: actions/checkout@v4 36 | - name: Codespell 37 | uses: codespell-project/actions-codespell@v2 38 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | pip-wheel-metadata/ 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *,cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | local_settings.py 56 | 57 | # Flask stuff: 58 | instance/ 59 | .webassets-cache 60 | 61 | # Scrapy stuff: 62 | .scrapy 63 | 64 | # Sphinx documentation 65 | docs/_build/ 66 | docs/api/ 67 | 68 | # PyBuilder 69 | target/ 70 | 71 | # IPython Notebook 72 | .ipynb_checkpoints 73 | 74 | # pyenv 75 | .python-version 76 | 77 | # celery beat schedule file 78 | celerybeat-schedule 79 | 80 | # dotenv 81 | .env 82 | 83 | # virtualenv 84 | venv/ 85 | ENV/ 86 | 87 | # Spyder project settings 88 | .spyderproject 89 | 90 | # Rope project settings 91 | .ropeproject 92 | 93 | # vim 94 | *.swp 95 | 96 | .DS_Store 97 | # local testing 98 | work/ 99 | testdata/ 100 | .vscode/ 101 | 102 | _version.py 103 | -------------------------------------------------------------------------------- /.mailmap: -------------------------------------------------------------------------------- 1 | Christopher J. Markiewicz 2 | Christopher J. Markiewicz 3 | Christopher J. 
Markiewicz 4 | Mathias Goncalves 5 | Mathias Goncalves 6 | Mathias Goncalves 7 | Oscar Esteban 8 | Oscar Esteban 9 | -------------------------------------------------------------------------------- /.maint/contributors.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "affiliation": "Department of Pediatrics, University of Minnesota, MN, USA", 4 | "name": "Madison, Thomas", 5 | "orcid": "0000-0003-3030-6580" 6 | }, 7 | { 8 | "affiliation": "Montreal Neurological Institute, McGill University", 9 | "name": "Huberty, Scott", 10 | "orcid": "0000-0003-2637-031X" 11 | } 12 | ] 13 | -------------------------------------------------------------------------------- /.maint/developers.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "affiliation": "Department of Psychology, Stanford University", 4 | "name": "Goncalves, Mathias", 5 | "orcid": "0000-0002-7252-7771" 6 | }, 7 | { 8 | "affiliation": "Department of Psychology, Stanford University", 9 | "name": "Markiewicz, Christopher J.", 10 | "orcid": "0000-0002-6533-164X" 11 | }, 12 | { 13 | "affiliation": "Department of Psychology, Stanford University", 14 | "name": "Poldrack, Russell A.", 15 | "orcid": "0000-0001-6755-0259" 16 | }, 17 | { 18 | "affiliation": "Lausanne University Hospital and University of Lausanne, Lausanne, Switzerland", 19 | "name": "Esteban, Oscar", 20 | "orcid": "0000-0001-8435-6191" 21 | }, 22 | { 23 | "affiliation": "University of Minnesota", 24 | "name": "Feczko, Eric" 25 | }, 26 | { 27 | "affiliation": "University of Minnesota", 28 | "name": "Fair, Damien A." 29 | } 30 | ] 31 | -------------------------------------------------------------------------------- /.maint/former.json: -------------------------------------------------------------------------------- 1 | [] 2 | -------------------------------------------------------------------------------- /.maint/update_changes.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Collects the pull-requests since the latest release and 4 | # arranges them in the CHANGES.md file. 5 | # 6 | # This is a script to be run before releasing a new version. 7 | # 8 | # Usage: /bin/bash update_changes.sh 1.0.1 9 | # 10 | 11 | # Settings; see `help set` 12 | set -u # Treat unset variables as an error when substituting. 13 | set -x # Print command traces before executing command. 14 | 15 | # Check whether the Upcoming release header is present 16 | head -1 CHANGES.md | grep -q Upcoming 17 | UPCOMING=$?
18 | if [[ "$UPCOMING" == "0" ]]; then 19 | head -n3 CHANGES.md >> newchanges 20 | fi 21 | 22 | # Elaborate today's release header 23 | HEADER="$1 ($(date '+%B %d, %Y'))" 24 | echo $HEADER >> newchanges 25 | echo $( printf "%${#HEADER}s" | tr " " "=" ) >> newchanges 26 | echo "" >> newchanges 27 | 28 | # Search for PRs since previous release 29 | MERGE_COMMITS=$( git log --grep="Merge pull request\|(#.*)$" `git describe --tags --abbrev=0`..HEAD --pretty='format:%h' ) 30 | for COMMIT in ${MERGE_COMMITS//\n}; do 31 | SUB=$( git log -n 1 --pretty="format:%s" $COMMIT ) 32 | if ( echo $SUB | grep "^Merge pull request" ); then 33 | # Merge commit 34 | PR=$( echo $SUB | sed -e "s/Merge pull request \#\([0-9]*\).*/\1/" ) 35 | TITLE=$( git log -n 1 --pretty="format:%b" $COMMIT ) 36 | else 37 | # Squashed merge 38 | PR=$( echo $SUB | sed -e "s/.*(\#\([0-9]*\))$/\1/" ) 39 | TITLE=$( echo $SUB | sed -e "s/\(.*\) (\#[0-9]*)$/\1/" ) 40 | fi 41 | echo " * $TITLE (#$PR)" >> newchanges 42 | done 43 | echo >> newchanges 44 | echo >> newchanges 45 | 46 | # Add back the Upcoming header if it was present 47 | if [[ "$UPCOMING" == "0" ]]; then 48 | tail -n+4 CHANGES.md >> newchanges 49 | else 50 | cat CHANGES.md >> newchanges 51 | fi 52 | 53 | # Replace old CHANGES.md with new file 54 | mv newchanges CHANGES.md 55 | -------------------------------------------------------------------------------- /.maint/update_zenodo.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """Update and sort the creators list of the zenodo record.""" 3 | import sys 4 | from pathlib import Path 5 | import json 6 | from fuzzywuzzy import fuzz, process 7 | 8 | # These ORCIDs should go last 9 | CREATORS_LAST = ['Poldrack, Russell A.', 'Fair, Damien A.'] 10 | CONTRIBUTORS_LAST = [] 11 | 12 | 13 | def sort_contributors(entries, git_lines, exclude=None, last=None): 14 | """Return a list of author dictionaries, ordered by contribution.""" 15 | last = last or [] 16 | sorted_authors = sorted(entries, key=lambda i: i['name']) 17 | 18 | first_last = [' '.join(val['name'].split(',')[::-1]).strip() 19 | for val in sorted_authors] 20 | first_last_excl = [' '.join(val['name'].split(',')[::-1]).strip() 21 | for val in exclude or []] 22 | 23 | unmatched = [] 24 | author_matches = [] 25 | position = 1 26 | for ele in git_lines: 27 | matches = process.extract(ele, first_last, scorer=fuzz.token_sort_ratio, 28 | limit=2) 29 | if not matches: 30 | return [], [] 31 | # matches is a list [('First match', % Match), ('Second match', % Match)] 32 | if matches[0][1] > 80: 33 | val = sorted_authors[first_last.index(matches[0][0])] 34 | else: 35 | # skip unmatched names 36 | if ele not in first_last_excl: 37 | unmatched.append(ele) 38 | continue 39 | 40 | if val not in author_matches: 41 | val['position'] = position 42 | author_matches.append(val) 43 | position += 1 44 | 45 | names = {' '.join(val['name'].split(',')[::-1]).strip() for val in author_matches} 46 | for missing_name in first_last: 47 | if missing_name not in names: 48 | missing = sorted_authors[first_last.index(missing_name)] 49 | missing['position'] = position 50 | author_matches.append(missing) 51 | position += 1 52 | 53 | all_names = [val['name'] for val in author_matches] 54 | for last_author in last: 55 | author_matches[all_names.index(last_author)]['position'] = position 56 | position += 1 57 | 58 | author_matches = sorted(author_matches, key=lambda k: k['position']) 59 | 60 | return author_matches, unmatched 61 | 62 | 63 | def 
get_git_lines(fname='line-contributors.txt'): 64 | """Run git-line-summary.""" 65 | import shutil 66 | import subprocess as sp 67 | contrib_file = Path(fname) 68 | 69 | lines = [] 70 | if contrib_file.exists(): 71 | print('WARNING: Reusing existing line-contributors.txt file.', file=sys.stderr) 72 | lines = contrib_file.read_text().splitlines() 73 | 74 | cmd = [shutil.which('git-line-summary')] 75 | if cmd == [None]: 76 | cmd = [shutil.which('git-summary'), "--line"] 77 | if not lines and cmd[0]: 78 | print(f"Running {' '.join(cmd)!r} on repo") 79 | lines = sp.check_output(cmd).decode().splitlines() 80 | lines = [l for l in lines if "Not Committed Yet" not in l] 81 | contrib_file.write_text('\n'.join(lines)) 82 | 83 | if not lines: 84 | raise RuntimeError("""\ 85 | Could not find line-contributors from git repository.%s""" % """ \ 86 | git-(line-)summary not found, please install git-extras. """ * (cmd[0] is None)) 87 | return [' '.join(line.strip().split()[1:-1]) for line in lines if '%' in line] 88 | 89 | 90 | if __name__ == '__main__': 91 | data = get_git_lines() 92 | 93 | zenodo_file = Path('.zenodo.json') 94 | zenodo = json.loads(zenodo_file.read_text()) 95 | 96 | creators = json.loads(Path('.maint/developers.json').read_text()) 97 | zen_creators, miss_creators = sort_contributors( 98 | creators, data, 99 | exclude=json.loads(Path('.maint/former.json').read_text()), 100 | last=CREATORS_LAST) 101 | contributors = json.loads(Path('.maint/contributors.json').read_text()) 102 | zen_contributors, miss_contributors = sort_contributors( 103 | contributors, data, 104 | exclude=json.loads(Path('.maint/former.json').read_text()), 105 | last=CONTRIBUTORS_LAST) 106 | zenodo['creators'] = zen_creators 107 | zenodo['contributors'] = zen_contributors 108 | 109 | print("Some people made commits, but are missing in .maint/ " 110 | "files: %s." 
% ', '.join(set(miss_creators).intersection(miss_contributors)), 111 | file=sys.stderr) 112 | 113 | # Remove position 114 | for creator in zenodo['creators']: 115 | del creator['position'] 116 | if isinstance(creator['affiliation'], list): 117 | creator['affiliation'] = creator['affiliation'][0] 118 | 119 | for creator in zenodo['contributors']: 120 | creator['type'] = 'Researcher' 121 | del creator['position'] 122 | if isinstance(creator['affiliation'], list): 123 | creator['affiliation'] = creator['affiliation'][0] 124 | 125 | zenodo_file.write_text('%s\n' % json.dumps(zenodo, indent=2, ensure_ascii=False)) 126 | -------------------------------------------------------------------------------- /.pep8speaks.yml: -------------------------------------------------------------------------------- 1 | scanner: 2 | diff_only: True 3 | linter: pycodestyle 4 | 5 | pycodestyle: 6 | max-line-length: 100 7 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | exclude: ".*/data/.*" 2 | repos: 3 | - repo: https://github.com/pre-commit/pre-commit-hooks 4 | rev: v4.4.0 5 | hooks: 6 | - id: trailing-whitespace 7 | exclude: '.*\.svg' 8 | - id: end-of-file-fixer 9 | exclude: '.*\.svg' 10 | - id: check-yaml 11 | - id: check-json 12 | - id: check-toml 13 | - id: check-added-large-files 14 | - repo: https://github.com/astral-sh/ruff-pre-commit 15 | rev: v0.6.5 16 | hooks: 17 | - id: ruff 18 | args: [--fix] 19 | - id: ruff-format 20 | - repo: https://github.com/codespell-project/codespell 21 | rev: v2.3.0 22 | hooks: 23 | - id: codespell 24 | additional_dependencies: 25 | - tomli 26 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | sphinx: 4 | configuration: docs/conf.py 5 | build: 6 | os: ubuntu-22.04 7 | tools: 8 | python: "3.11" 9 | jobs: 10 | post_checkout: 11 | - git fetch --unshallow 12 | 13 | python: 14 | install: 15 | - requirements: docs/requirements.txt 16 | - method: pip 17 | path: . 18 | extra_requirements: 19 | - doc 20 | - method: pip 21 | path: wrapper/ 22 | -------------------------------------------------------------------------------- /.zenodo.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "NiBabies: a robust preprocessing pipeline for infant functional MRI", 3 | "description": "
NiBabies is a robust and easy-to-use pipeline for preprocessing of diverse infant and neonate fMRI data. The transparent workflow dispenses with manual intervention, thereby ensuring the reproducibility of the results.
", 4 | "contributors": [], 5 | "creators": [ 6 | { 7 | "affiliation": "Department of Psychology, Stanford University", 8 | "name": "Goncalves, Mathias", 9 | "orcid": "0000-0002-7252-7771" 10 | }, 11 | { 12 | "affiliation": "Department of Psychology, Stanford University", 13 | "name": "Markiewicz, Christopher J.", 14 | "orcid": "0000-0002-6533-164X" 15 | }, 16 | { 17 | "affiliation": "Lausanne University Hospital and University of Lausanne, Lausanne, Switzerland", 18 | "name": "Esteban, Oscar", 19 | "orcid": "0000-0001-8435-6191" 20 | }, 21 | { 22 | "affiliation": "University of Minnesota", 23 | "name": "Feczko, Eric" 24 | }, 25 | { 26 | "affiliation": "Department of Psychology, Stanford University", 27 | "name": "Poldrack, Russell A.", 28 | "orcid": "0000-0001-6755-0259" 29 | }, 30 | { 31 | "affiliation": "University of Minnesota", 32 | "name": "Fair, Damien A." 33 | } 34 | ], 35 | "keywords": [ 36 | "neuroimaging", 37 | "workflow", 38 | "pipeline", 39 | "preprocessing", 40 | "fMRI", 41 | "BIDS", 42 | "infant" 43 | ], 44 | "license": "Apache-2.0", 45 | "related_identifiers": [ 46 | { 47 | "identifier": "https://github.com/nipreps/nibabies", 48 | "relation": "documents", 49 | "scheme": "url" 50 | }, 51 | { 52 | "identifier": "10.5281/zenodo.6418986", 53 | "relation": "isPartOf", 54 | "scheme": "doi" 55 | } 56 | ], 57 | "upload_type": "software" 58 | } 59 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: help docker-build 2 | .DEFAULT: help 3 | 4 | tag="nibabies" 5 | 6 | help: 7 | @echo "Premade recipes" 8 | @echo 9 | @echo "make docker-build [tag=TAG]" 10 | @echo "\tBuilds a docker image from source. Defaults to 'nibabies' tag." 11 | 12 | 13 | docker-build: 14 | docker build --rm -t $(tag) \ 15 | --build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` \ 16 | --build-arg VCS_REF=`git rev-parse --short HEAD` \ 17 | --build-arg VERSION=`python -m hatch version` . 18 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # *NiBabies*: A robust preprocessing workflow tailored for neonate and infant MRI 2 | 3 | [![github](https://github.com/nipreps/nibabies/actions/workflows/pytest.yml/badge.svg)](https://github.com/nipreps/nibabies/actions) 4 | [![circle](https://circleci.com/gh/nipreps/nibabies/tree/master.svg?style=shield)](https://circleci.com/gh/nipreps/nibabies/tree/master) 5 | [![dockerhub](https://img.shields.io/badge/docker-nipreps/nibabies-brightgreen.svg?logo=docker&style=flat)](https://hub.docker.com/r/nipreps/nibabies/tags/) 6 | [![coverage](https://codecov.io/gh/nipreps/nibabies/branch/master/graph/badge.svg)](https://app.codecov.io/gh/nipreps/nibabies/branch/master) 7 | [![documentation](https://readthedocs.org/projects/nibabies/badge/?version=latest)](https://nibabies.readthedocs.io/en/latest/) 8 | [![DOI](https://zenodo.org/badge/264223087.svg)](https://zenodo.org/badge/latestdoi/264223087) 9 | 10 | Anatomical | Functional 11 | ---------- | ---------- 12 | ![nibabies-anat](https://raw.githubusercontent.com/nipreps/nibabies/master/docs/_static/nibabies_anat.png) | ![nibabies-func](https://raw.githubusercontent.com/nipreps/nibabies/master/docs/_static/nibabies_func.png) 13 | 14 | *NiBabies* is an open-source software pipeline designed to process anatomical and functional magnetic resonance imaging data. 
15 | A member of the [NeuroImaging PREProcessing toolS (NiPreps)](https://www.nipreps.org/) family, *NiBabies* is designed and optimized for human infants between 0-2 years old. 16 | 17 | --- 18 | 19 | ## Getting Started 20 | 21 | For comprehensive information on *NiBabies*, including installation and usage, visit [our documentation](https://nibabies.readthedocs.io/en/stable/). 22 | -------------------------------------------------------------------------------- /docker/files/nipype.cfg: -------------------------------------------------------------------------------- 1 | [execution] 2 | hash_method = content 3 | poll_sleep_duration = 0.01 4 | remove_unnecessary_outputs = true 5 | crashfile_format = txt 6 | profile_runtime = false 7 | use_relative_paths = false -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/_static/nibabies_anat.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipreps/nibabies/1e6afd415ec2776a4bf749628fa1a58646d536db/docs/_static/nibabies_anat.png -------------------------------------------------------------------------------- /docs/_static/nibabies_func.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipreps/nibabies/1e6afd415ec2776a4bf749628fa1a58646d536db/docs/_static/nibabies_func.png -------------------------------------------------------------------------------- /docs/community.md: -------------------------------------------------------------------------------- 1 | # NiPreps Community 2 | 3 | Check out the [official NiPreps community page](https://www.nipreps.org/community/), where topics such as contributing, code of conduct, and licensing are outlined. 4 | 5 | ## NiBabies Coding Style 6 | 7 | ### Pre-commit 8 | 9 | In addition to the [NiPreps coding style](https://www.nipreps.org/community/CONTRIBUTING/#nipreps-coding-style-guide), *NiBabies* leverages [`pre-commit`](https://pre-commit.com/#1-install-pre-commit) to ensure consistent code across contributors. 10 | To enable `pre-commit` checks, first [install](https://pre-commit.com/#1-install-pre-commit) the Python package and then run `pre-commit install` in the root of the *NiBabies* source tree. 11 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. 
For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | import os 8 | import sys 9 | from datetime import datetime, timezone 10 | 11 | from packaging.version import Version, parse 12 | from sphinx import __version__ as sphinxversion 13 | 14 | import nibabies 15 | 16 | # -- Path setup -------------------------------------------------------------- 17 | here = os.path.dirname(__file__) 18 | # If extensions (or modules to document with autodoc) are in another directory, 19 | # add these directories to sys.path here. If the directory is relative to the 20 | # documentation root, use os.path.abspath to make it absolute, like shown here. 21 | sys.path.append(os.path.join(here, 'sphinxext')) 22 | sys.path.insert(0, os.path.join(here, '..', 'wrapper')) 23 | 24 | # this is only available after sphinxext to PATH 25 | from github_link import make_linkcode_resolve # noqa: E402 26 | 27 | # -- General configuration --------------------------------------------------- 28 | 29 | # If your documentation needs a minimal Sphinx version, state it here. 30 | needs_sphinx = '1.5.3' 31 | 32 | # Add any Sphinx extension module names here, as strings. They can be 33 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 34 | # ones. 35 | extensions = [ 36 | 'sphinx.ext.autodoc', 37 | 'sphinx.ext.doctest', 38 | 'sphinx.ext.intersphinx', 39 | 'sphinx.ext.coverage', 40 | 'sphinx.ext.mathjax', 41 | 'sphinx.ext.linkcode', 42 | 'sphinx.ext.napoleon', 43 | 'sphinxcontrib.bibtex', 44 | 'sphinxarg.ext', # argparse extension 45 | 'nipype.sphinxext.plot_workflow', 46 | 'myst_parser', # allow markdown 47 | # 'sphinx-togglebutton', # collapse admonitions 48 | ] 49 | 50 | bibtex_bibfiles = ['../nibabies/data/boilerplate.bib'] 51 | 52 | autodoc_mock_imports = ['numpy', 'nibabel', 'nilearn'] 53 | if parse(sphinxversion) >= parse('1.7.0'): 54 | autodoc_mock_imports = [ 55 | 'pandas', 56 | 'nilearn', 57 | 'seaborn', 58 | ] 59 | 60 | # Add any paths that contain templates here, relative to this directory. 61 | templates_path = ['_templates'] 62 | 63 | # List of patterns, relative to source directory, that match files and 64 | # directories to ignore when looking for source files. 65 | # This pattern also affects html_static_path and html_extra_path. 66 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 67 | 68 | 69 | source_suffix = ['.rst', '.md'] 70 | 71 | # -- Options for HTML output ------------------------------------------------- 72 | 73 | # The theme to use for HTML and HTML Help pages. See the documentation for 74 | # a list of builtin themes. 75 | # 76 | html_theme = 'shibuya' 77 | 78 | # Options specific to theme 79 | html_theme_options = { 80 | 'color_mode': 'light', 81 | 'dark_code': True, 82 | 'github_url': 'https://github.com/nipreps/nibabies', 83 | 'nav_links': [ 84 | { 85 | 'title': 'NiPreps Homepage', 86 | 'url': 'https://nipreps.org', 87 | 'external': True, 88 | }, 89 | { 90 | 'title': 'Docker Hub', 91 | 'url': 'https://hub.docker.com/r/nipreps/nibabies', 92 | 'external': True, 93 | }, 94 | ], 95 | } 96 | 97 | # Add any paths that contain custom static files (such as style sheets) here, 98 | # relative to this directory. They are copied after the builtin static files, 99 | # so a file named "default.css" will overwrite the builtin "default.css". 
100 | html_static_path = ['_static'] 101 | 102 | # -- Napoleon parameters ----------------------------------------------------- 103 | # Accept custom section names to be parsed for numpy-style docstrings 104 | # of parameters. 105 | # Requires pinning sphinxcontrib-napoleon to a specific commit while 106 | # https://github.com/sphinx-contrib/napoleon/pull/10 is merged. 107 | napoleon_use_param = False 108 | napoleon_custom_sections = [ 109 | ('Inputs', 'Parameters'), 110 | ('Outputs', 'Parameters'), 111 | ] 112 | 113 | # -- MyST parameters --------------------------------------------------------- 114 | 115 | myst_heading_anchors = 3 116 | myst_enable_extensions = [ 117 | 'colon_fence', 118 | 'substitution', 119 | ] 120 | 121 | linkcode_resolve = make_linkcode_resolve( 122 | 'nibabies', 123 | 'https://github.com/nipreps/' 'nibabies/blob/{revision}/' '{package}/{path}#L{lineno}', 124 | ) 125 | 126 | project = 'NiBabies' 127 | author = 'The NiPreps developers' 128 | 129 | copyright = f'2021-{datetime.now(tz=timezone.utc).year}, {author}' 130 | 131 | nibabies_ver = Version(nibabies.__version__) 132 | release = 'version' if nibabies_ver.is_prerelease else nibabies_ver.public 133 | 134 | # to avoid Python highlighting in literal text 135 | highlight_language = 'none' 136 | -------------------------------------------------------------------------------- /docs/faqs.md: -------------------------------------------------------------------------------- 1 | # Tips and FAQs 2 | 3 | ## Leveraging precomputed results 4 | 5 | Whether manual intervention is required or you want to break up processing, *NiBabies* can reuse previously computed files (either from NiBabies directly or from a third-party application), injecting them directly into the workflow. 6 | 7 | :::{versionchanged} 24.0.0 8 | 9 | In addition to the brain mask and anatomical segmentation, support was added for additional precomputed derivatives. To see which derivatives are supported, view [](outputs.md#anatomical-derivatives). 10 | ::: 11 | 12 | To use pre-computed results, one or more [BIDS Derivatives](https://bids-specification.readthedocs.io/en/stable/05-derivatives/01-introduction.html#bids-derivatives) directories must be passed in to *NiBabies* using the `--derivatives` flag. 13 | Derivative directories must include a [`dataset_description.json` and the required fields](https://bids-specification.readthedocs.io/en/stable/03-modality-agnostic-files.html#derived-dataset-and-pipeline-description). 14 | Additionally, files must include the `space-T1w` or `space-T2w` key-value pair in the filenames, and a matching sidecar JSON file with the `SpatialReference` field defined. 15 | 16 | A sample layout of a derivatives directory can be found below: 17 | 18 | ```bash 19 | my_precomputed/ 20 | ├── dataset_description.json 21 | └── sub-01 22 | └── anat 23 | ├── sub-01_desc-preproc_T2w.nii.gz 24 | ├── sub-01_space-T2w_desc-aseg_dseg.json 25 | ├── sub-01_space-T2w_desc-aseg_dseg.nii.gz 26 | ├── sub-01_space-T2w_desc-brain_mask.json 27 | └── sub-01_space-T2w_desc-brain_mask.nii.gz 28 | ``` 29 | 30 | In this example, `sub-01_desc-preproc_T2w.nii.gz` will be used as the T2w reference. The other files (the brain mask and segmentation) will be in the same space. 31 | 32 | :::{warning} 33 | If no anatomical reference is provided, the outputs must be in the same space as the raw anatomical data. 34 | ::: 35 | 36 | :::{note} 37 | If an aseg is provided, it will be used for surface generation. 
38 | ::: 39 | 40 | ## Multi-atlas segmentation with joint label fusion 41 | 42 | By default, *NiBabies* will run [FSL FAST](https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FAST) for tissue segmentation, and Infant FreeSurfer for segmentation labels. 43 | 44 | Alternatively, ANTs {abbr}`JLF (Joint Label Fusion)` can be used by providing a directory with one or more template images composed of anatomicals and segmentations. To pass in this directory, use the `--segmentation-atlases-dir` flag. 45 | When using this approach, there are a few assumptions being made: 46 | 47 | 1. The anatomicals are brain masked. 48 | 1. The segmentation labels adhere to the [FreeSurfer LUT](https://surfer.nmr.mgh.harvard.edu/fswiki/FsTutorial/AnatomicalROI/FreeSurferColorLUT). 49 | 50 | Here is an example layout of what the `--segmentation-atlases-dir` flag expects: 51 | 52 | ```bash 53 | $ tree JLF-atlases 54 | 55 | JLF-atlases/ 56 | ├── dataset_description.json 57 | ├── participants.tsv 58 | ├── sub-01 59 | │ ├── sub-01_desc-aseg_dseg.nii.gz 60 | │ ├── [sub-01_T1w.json] * optional 61 | │ ├── sub-01_T1w.nii.gz 62 | │ ├── [sub-01_T2w.json] * optional 63 | │ └── sub-01_T2w.nii.gz 64 | ├── sub-02 65 | ... 66 | ``` 67 | 68 | ## More context on releases 69 | 70 | Like other *NiPreps*, *NiBabies* follows Calendar Versioning ([CalVer](https://calver.org/)), in the format `YY.MINOR.MICRO`. 71 | In short, here is a quick heuristic for interpreting new releases: 72 | 73 | 1. If the `YY` or `MINOR` has changed, it is a feature release, with substantial changes to the workflow. 74 | 1. If the `YY.MINOR` matches the version you used, but the `MICRO` has changed, it is a bug-fix release. 75 | Check the [release notes](https://github.com/nipreps/nibabies/releases) - if the fixes do not pertain to your data, there is no need to upgrade. 76 | 77 | For more in-depth information, refer to the [*NiPreps* release documentation](https://www.nipreps.org/devs/releases/#principles). 78 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | ```{include} ../README.md 2 | ``` 3 | 4 | # Table of Contents 5 | 6 | ```{toctree} 7 | :maxdepth: 3 8 | 9 | installation.md 10 | usage.md 11 | faqs.md 12 | outputs.md 13 | community.md 14 | ``` 15 | -------------------------------------------------------------------------------- /docs/installation.md: -------------------------------------------------------------------------------- 1 | # Installation 2 | 3 | There are two ways to install *NiBabies*: 4 | - using container technologies; or 5 | - within a manually prepared environment, also known as *bare-metal*. 6 | 7 | ## Container Installation 8 | 9 | Given its extensive dependencies, the easiest way to get up and running with *NiBabies* is by using a container service, such as [Docker](https://www.docker.com/get-started) or [Apptainer](https://apptainer.org/). 10 | 11 | ### Working with Docker 12 | 13 | Images are hosted on our [Docker Hub](https://hub.docker.com/r/nipreps/nibabies). 14 | To pull an image, the specific version tag must be specified. 15 | For example, to pull the first release in the 24.0.0 series, you can do: 16 | 17 | ```shell 18 | docker pull nipreps/nibabies:24.0.0 19 | ``` 20 | 21 | There are also a few keyword tags, `latest` and `unstable`, that serve as special pointers. 22 | `latest` points to the latest release (excluding any betas or release candidates). 
23 | `unstable` points to the most recent developmental change, and should only be used to test new features or fixes. 24 | 25 | :::{tip} 26 | `latest` will pull the most recent release, but beware that it will not be updated until calling the docker pull command again. For this reason, it is recommended to pull using the explicit version tag. 27 | ::: 28 | 29 | ### Working with Apptainer (formerly Singularity) 30 | 31 | Visit the [apptainer containers page](https://datasets.datalad.org/?dir=/repronim/containers/images/bids), courtesy of DataLad and ReproNim, to download already created images. 32 | 33 | :::{tip} 34 | Images are listed as `bids-nibabies--<version>.sing`, where `<version>` is the release tag. 35 | ::: 36 | 37 | Otherwise, you can create an Apptainer image from the [Docker](#working-with-docker) images hosted online. 38 | 39 | ```bash 40 | apptainer build nibabies-24.0.0.sif docker://nipreps/nibabies:24.0.0 41 | ``` 42 | 43 | ## Installing the nibabies-wrapper 44 | 45 | The `nibabies-wrapper` is a lightweight Python tool to facilitate running `nibabies` within a container service. 46 | To install or upgrade to the current release: 47 | 48 | ```bash 49 | pip install --upgrade nibabies-wrapper 50 | ``` 51 | 52 | For further details, see [](usage.md#using-the-nibabies-wrapper). 53 | 54 | ## Bare-metal Installation 55 | 56 | If you would prefer to install this tool natively, you can refer to the [Dockerfile](https://github.com/nipreps/nibabies/blob/master/Dockerfile) as a guide for all the dependencies. 57 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 
23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.https://www.sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx 2 | myst_nb 3 | sphinx-argparse 4 | # Relative to repository root 5 | ./wrapper/ 6 | -------------------------------------------------------------------------------- /docs/sphinxext/github_link.py: -------------------------------------------------------------------------------- 1 | """ 2 | This script comes from scikit-learn: 3 | https://github.com/scikit-learn/scikit-learn/blob/master/doc/sphinxext/github_link.py 4 | """ 5 | from operator import attrgetter 6 | import inspect 7 | import subprocess 8 | import os 9 | import sys 10 | from functools import partial 11 | 12 | REVISION_CMD = 'git rev-parse --short HEAD' 13 | 14 | 15 | def _get_git_revision(): 16 | try: 17 | revision = subprocess.check_output(REVISION_CMD.split()).strip() 18 | except (subprocess.CalledProcessError, OSError): 19 | print('Failed to execute git to get revision') 20 | return None 21 | return revision.decode('utf-8') 22 | 23 | 24 | def _linkcode_resolve(domain, info, package, url_fmt, revision): 25 | """Determine a link to online source for a class/method/function 26 | 27 | This is called by sphinx.ext.linkcode 28 | 29 | An example with a long-untouched module that everyone has 30 | >>> _linkcode_resolve('py', {'module': 'tty', 31 | ... 'fullname': 'setraw'}, 32 | ... package='tty', 33 | ... url_fmt='http://hg.python.org/cpython/file/' 34 | ... '{revision}/Lib/{package}/{path}#L{lineno}', 35 | ... 
revision='xxxx') 36 | 'http://hg.python.org/cpython/file/xxxx/Lib/tty/tty.py#L18' 37 | """ 38 | 39 | if revision is None: 40 | return 41 | if domain not in ('py', 'pyx'): 42 | return 43 | if not info.get('module') or not info.get('fullname'): 44 | return 45 | 46 | class_name = info['fullname'].split('.')[0] 47 | if type(class_name) != str: 48 | # Python 2 only 49 | class_name = class_name.encode('utf-8') 50 | module = __import__(info['module'], fromlist=[class_name]) 51 | obj = attrgetter(info['fullname'])(module) 52 | 53 | try: 54 | fn = inspect.getsourcefile(obj) 55 | except Exception: 56 | fn = None 57 | if not fn: 58 | try: 59 | fn = inspect.getsourcefile(sys.modules[obj.__module__]) 60 | except Exception: 61 | fn = None 62 | if not fn: 63 | return 64 | 65 | fn = os.path.relpath(fn, 66 | start=os.path.dirname(__import__(package).__file__)) 67 | try: 68 | lineno = inspect.getsourcelines(obj)[1] 69 | except Exception: 70 | lineno = '' 71 | return url_fmt.format(revision=revision, package=package, 72 | path=fn, lineno=lineno) 73 | 74 | 75 | def make_linkcode_resolve(package, url_fmt): 76 | """Returns a linkcode_resolve function for the given URL format 77 | 78 | revision is a git commit reference (hash or name) 79 | 80 | package is the name of the root module of the package 81 | 82 | url_fmt is along the lines of ('https://github.com/USER/PROJECT/' 83 | 'blob/{revision}/{package}/' 84 | '{path}#L{lineno}') 85 | """ 86 | revision = _get_git_revision() 87 | return partial(_linkcode_resolve, revision=revision, package=package, 88 | url_fmt=url_fmt) 89 | -------------------------------------------------------------------------------- /docs/usage.md: -------------------------------------------------------------------------------- 1 | # Usage 2 | 3 | ## The BIDS format 4 | 5 | The *NiBabies* workflow takes as principal input the path of the dataset 6 | that is to be processed. 7 | The input dataset is required to be in valid 8 | {abbr}`BIDS (The Brain Imaging Data Structure)` format, 9 | and it must include at least one T1-weighted and 10 | one T2-weighted structural image and 11 | a BOLD series (unless using the `--anat-only` flag). 12 | 13 | We highly recommend that you validate your dataset with the free, online 14 | [BIDS Validator](http://bids-standard.github.io/bids-validator/). 15 | 16 | ### Participant Ages 17 | *NiBabies* will attempt to automatically extract participant ages (in months) from the BIDS layout. 18 | Specifically, these two files will be checked: 19 | - [Sessions file](https://bids-specification.readthedocs.io/en/stable/03-modality-agnostic-files.html#sessions-file): `<bids_dir>/<subject>/<subject>_sessions.tsv` 20 | - [Participants file](https://bids-specification.readthedocs.io/en/stable/03-modality-agnostic-files.html#participants-file): `<bids_dir>/participants.tsv` 21 | 22 | Either file should include an `age` (or, if you wish to be more explicit, `age_months`) column, and it is 23 | recommended to have an accompanying JSON file that further describes these fields and explicitly states that the values are in months. 24 | 25 | ## The FreeSurfer license 26 | 27 | *NiBabies* uses FreeSurfer tools, which require a license to run. 28 | 29 | To obtain a FreeSurfer license, simply register for free at https://surfer.nmr.mgh.harvard.edu/registration.html. 30 | 31 | FreeSurfer will search for a license key file first using the `$FS_LICENSE` environment variable and then in the default path to the license key file (`$FREESURFER_HOME`/license.txt). 
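For example, the license location can be supplied by exporting `$FS_LICENSE` before invoking the wrapper (a minimal sketch; the license path below is illustrative):

```bash
# Illustrative path: point FS_LICENSE at your own FreeSurfer license file
export FS_LICENSE=$HOME/licenses/freesurfer/license.txt
nibabies-wrapper docker /path/to/data /path/to/output participant
```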
If `$FS_LICENSE` is set, the [`nibabies-wrapper`](#using-the-nibabies-wrapper) will automatically handle setting the license within the container. 32 | Otherwise, you will need to use the `--fs-license-file` flag to ensure the license is available. 33 | 34 | 35 | ## Example command 36 | 37 | The exact command to run *NiBabies* depends on the [Installation](./installation.md) method. 38 | The common parts of the command follow the 39 | [BIDS-Apps](https://github.com/BIDS-Apps) definition. 40 | Example: 41 | 42 | ```Shell 43 | $ nibabies data/bids_root/ out/ participant -w work/ --participant-label 01 44 | ``` 45 | 46 | Further information about BIDS and BIDS-Apps can be found at the 47 | [NiPreps portal](https://www.nipreps.org/apps/framework/). 48 | 49 | ## Command-Line Arguments 50 | ```{argparse} 51 | :ref: nibabies.cli.parser._build_parser 52 | :prog: nibabies 53 | :nodefaultconst: 54 | ``` 55 | 56 | ## More information on command-line arguments 57 | 58 | At minimum, the following *positional* arguments are required. 59 | 60 | - **`bids_dir`** - the root folder of a BIDS valid dataset. 61 | - **`output_dir`** - folder to store outputs and reports. 62 | - **`level`** - processing stage to be run, currently can only be `participant`. 63 | 64 | However, as infant brains can vastly differ depending on age, providing the following arguments is highly recommended: 65 | 66 | - **`--participant-label`** - participant ID 67 | 68 | - **`--session-id`** - session ID 69 | 70 | - **`--segmentation-atlases-dir`** - directory containing pre-labeled segmentations to use for Joint Label Fusion. 71 | 72 | :::{admonition} Tip 73 | :class: tip 74 | 75 | The segmentation directory layout should consist of one or more template directories containing: 76 | * A segmented and labeled NIfTI that includes `Segmentation` in the filename. 77 | * A brainmasked T1w NIfTI that includes `T1w` in the filename. 78 | 79 | ::: 80 | 81 | ## Using the nibabies wrapper 82 | 83 | The wrapper will generate a Docker or Singularity command line for you, print it out for reporting purposes, and then execute it without further action needed. 84 | For installation instructions, please see [](installation.md#installing-the-nibabies-wrapper) 85 | 86 | ### Sample Docker usage 87 | 88 | ``` 89 | $ nibabies-wrapper docker /path/to/data /path/to/output participant --fs-license-file /usr/freesurfer/license.txt 90 | 91 | RUNNING: docker run --rm -e DOCKER_VERSION_8395080871=20.10.6 -it -v /path/to/data:/data:ro \ 92 | -v /path/to/output:/out -v /usr/freesurfer/license.txt:/opt/freesurfer/license.txt:ro \ 93 | nipreps/nibabies:23.0.0 /data /out participant 94 | ... 95 | ``` 96 | 97 | :::{admonition} Docker usage warning 98 | :class: warning 99 | 100 | When using Docker, the wrapper will default to using the same version of `nibabies` as the wrapper. 101 | This can be overridden by using the `-i` flag to specify a particular Docker image. 102 | ::: 103 | 104 | ### Sample Singularity usage 105 | 106 | ``` 107 | $ nibabies-wrapper singularity /path/to/data /path/to/output participant -i nibabies-23.0.0.sif --fs-license-file /usr/freesurfer/license.txt 108 | 109 | RUNNING: singularity run --cleanenv -B /path/to/data:/data:ro \ 110 | -B /path/to/output:/out -B /usr/freesurfer/license.txt:/opt/freesurfer/license.txt:ro \ 111 | nibabies-23.0.0.sif /data /out participant 112 | ... 
113 | ```
114 | 
115 | :::{admonition} Singularity usage warning
116 | :class: warning
117 | 
118 | Note that the `-i` flag is required when using Singularity, and should be the path to the already-built Singularity image file.
119 | :::
120 | 
121 | ## The command-line interface of the nibabies wrapper
122 | 
123 | 
124 | ```{argparse}
125 | :ref: nibabies_wrapper.get_parser
126 | :prog: nibabies-wrapper
127 | :nodefault:
128 | :nodefaultconst:
129 | ```
130 | 
--------------------------------------------------------------------------------
/env.yml:
--------------------------------------------------------------------------------
1 | name: nibabies
2 | channels:
3 |   - https://fsl.fmrib.ox.ac.uk/fsldownloads/fslconda/public/
4 |   - conda-forge
5 | # Update this ~yearly; last updated April 2023
6 | dependencies:
7 |   - python =3.11
8 |   # Needed for svgo and bids-validator; consider moving to deno
9 |   - nodejs=20
10 |   # Intel Math Kernel Library for numpy
11 |   - mkl=2023.2.0
12 |   - mkl-service=2.4.0
13 |   # git-annex for templateflow users with DataLad superdatasets
14 |   - git-annex=*=alldep*
15 |   # Base scientific python stack; required by FSL, so pinned here
16 |   - numpy=1.26
17 |   - scipy=1.11
18 |   - matplotlib=3.8
19 |   - pandas=2.2
20 |   - h5py=3.10
21 |   # Dependencies compiled against numpy, best to stick with conda
22 |   - nitime=0.10
23 |   - scikit-image=0.22
24 |   - scikit-learn=1.4
25 |   # Utilities
26 |   - graphviz=9.0
27 |   - pandoc=3.1
28 |   # Workflow dependencies: ANTs
29 |   - ants=2.5
30 |   # Workflow dependencies: Convert3d
31 |   - convert3d=1.4
32 |   # Workflow dependencies: FSL (versions pinned in 6.0.7.7)
33 |   - fsl-bet2=2111.4
34 |   - fsl-flirt=2111.2
35 |   - fsl-fast4=2111.3
36 |   - fsl-fugue=2201.4
37 |   - fsl-mcflirt=2111.0
38 |   - fsl-miscmaths=2203.2
39 |   - fsl-topup=2203.2
40 |   - pip
41 |   - pip:
42 |     - -r requirements.txt
43 | variables:
44 |   FSLOUTPUTTYPE: NIFTI_GZ
45 | 
--------------------------------------------------------------------------------
/long_description.md:
--------------------------------------------------------------------------------
1 | Magnetic resonance imaging (MRI) requires a set of preprocessing steps before
2 | any statistical analysis. In an effort to standardize preprocessing,
3 | we developed [fMRIPrep](https://fmriprep.org/en/stable/) (a preprocessing tool
4 | for functional MRI, fMRI), and generalized its standardization approach to
5 | other neuroimaging modalities ([NiPreps](https://www.nipreps.org/)). NiPreps
6 | brings standardization and ease of use to the researcher, and effectively
7 | limits the methodological variability within preprocessing. fMRIPrep is designed
8 | to be used across wide ranges of populations; however, it was developed for (and
9 | evaluated with) human adult datasets. Infant MRI (i.e., 0-2 years) presents
10 | unique challenges due to head size (e.g., reduced SNR and increased partial
11 | voluming) and rapid shifting in tissue contrast due to myelination. These and
12 | other challenges require a more specialized workflow. *NiBabies*, an open-source
13 | pipeline extending from fMRIPrep for infant structural and functional MRI
14 | preprocessing, aims to address this need.
15 | 
16 | The workflow is built atop [Nipype](https://nipype.readthedocs.io) and encompasses a large
17 | set of tools from well-known neuroimaging packages, including
18 | [FSL](https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/),
19 | [ANTs](https://stnava.github.io/ANTs/),
20 | [FreeSurfer](https://surfer.nmr.mgh.harvard.edu/),
21 | [AFNI](https://afni.nimh.nih.gov/),
22 | [Connectome Workbench](https://humanconnectome.org/software/connectome-workbench),
23 | and [Nilearn](https://nilearn.github.io/).
24 | This pipeline was designed to provide the best software implementation for each stage of
25 | preprocessing, and will be updated as newer and better neuroimaging software becomes
26 | available.
27 | 
28 | *NiBabies* performs basic preprocessing steps (coregistration, normalization, unwarping,
29 | segmentation, skull-stripping, etc.) providing outputs that can be
30 | easily submitted to a variety of group-level analyses, including task-based or resting-state
31 | fMRI, graph theory measures, surface- or volume-based statistics, etc.
32 | *NiBabies* allows you to easily do the following:
33 | 
34 | * Take fMRI data from *unprocessed* (only reconstructed) to ready for analysis.
35 | * Implement tools from different software packages.
36 | * Achieve optimal data processing quality by using the best tools available.
37 | * Generate preprocessing-assessment reports, with which the user can easily identify problems.
38 | * Receive verbose output concerning the stage of preprocessing for each subject, including
39 |   meaningful errors.
40 | * Automate and parallelize processing steps, which provides a significant speed-up from
41 |   typical linear, manual processing.
42 | 
43 | [Repository](https://github.com/nipreps/nibabies)
44 | [Documentation](https://nibabies.readthedocs.io/en/stable/)
45 | 
--------------------------------------------------------------------------------
/nibabies/__init__.py:
--------------------------------------------------------------------------------
1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
2 | # vi: set ft=python sts=4 ts=4 sw=4 et:
3 | #
4 | # Copyright The NiPreps Developers <nipreps@gmail.com>
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | #     http://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | #
18 | # We support and encourage derived works from this project, please read
19 | # about our expectations at
20 | #
21 | #     https://www.nipreps.org/community/licensing/
22 | #
23 | """Top-module metadata."""
24 | 
25 | try:
26 |     from ._version import __version__
27 | except ImportError:
28 |     __version__ = '0+unknown'
29 | 
--------------------------------------------------------------------------------
/nibabies/_types.py:
--------------------------------------------------------------------------------
1 | import typing as ty
2 | 
3 | Anatomical = ty.Literal['T1w', 'T2w']
4 | AffineDOF = ty.Literal[6, 9, 12]
5 | RegistrationInit = ty.Literal['t1w', 't2w', 'header']
--------------------------------------------------------------------------------
/nibabies/cli/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nipreps/nibabies/1e6afd415ec2776a4bf749628fa1a58646d536db/nibabies/cli/__init__.py
--------------------------------------------------------------------------------
/nibabies/cli/mcribs.py:
--------------------------------------------------------------------------------
1 | import os
2 | from argparse import ArgumentParser
3 | 
4 | import nipype.pipeline.engine as pe
5 | from niworkflows.interfaces.nibabel import MapLabels
6 | 
7 | from nibabies.interfaces.mcribs import MCRIBReconAll
8 | 
9 | 
10 | def _parser():
11 |     parser = ArgumentParser(description='Test script for MCRIBS surfaces')
12 |     parser.add_argument('subject', help='Subject ID')
13 |     parser.add_argument('t2w', type=os.path.abspath, help='Input T2w (isotropic; reoriented to LAS if needed)')
14 |     parser.add_argument(
15 |         'segmentation', type=os.path.abspath, help='Input anatomical segmentation in T2w space'
16 |     )
17 |     parser.add_argument(
18 |         '--outdir', type=os.path.abspath, help='Output directory to persist MCRIBS output'
19 |     )
20 |     parser.add_argument('--nthreads', type=int, help='Number of threads to parallelize tasks')
21 |     return parser
22 | 
23 | 
24 | def main(argv: list = None):
25 |     pargs = _parser().parse_args(argv)
26 | 
27 |     t2w_file = _check_file(pargs.t2w)
28 |     seg_file = _check_file(pargs.segmentation)
29 | 
30 |     aseg2mcrib = {
31 |         2: 51,
32 |         3: 21,
33 |         4: 49,
34 |         5: 0,
35 |         7: 17,
36 |         8: 17,
37 |         10: 43,
38 |         11: 41,
39 |         12: 47,
40 |         13: 47,
41 |         14: 0,
42 |         15: 0,
43 |         16: 19,
44 |         17: 1,
45 |         18: 3,
46 |         26: 41,
47 |         28: 45,
48 |         31: 49,
49 |         41: 52,
50 |         42: 20,
51 |         43: 50,
52 |         44: 0,
53 |         46: 18,
54 |         47: 18,
55 |         49: 42,
56 |         50: 40,
57 |         51: 46,
58 |         52: 46,
59 |         53: 2,
60 |         54: 4,
61 |         58: 40,
62 |         60: 44,
63 |         63: 50,
64 |         253: 48,
65 |     }
66 |     map_labels = pe.Node(MapLabels(in_file=seg_file, mappings=aseg2mcrib), name='map_labels')
67 | 
68 |     recon = pe.Node(
69 |         MCRIBReconAll(subject_id=pargs.subject, t2w_file=t2w_file), name='mcribs_recon'
70 |     )
71 |     if pargs.outdir:
72 |         recon.inputs.outdir = pargs.outdir
73 |     if pargs.nthreads:
74 |         recon.inputs.nthreads = pargs.nthreads
75 | 
76 |     wf = pe.Workflow(f'MRA_{pargs.subject}')
77 |     wf.connect(map_labels, 'out_file', recon, 'segmentation_file')
78 |     wf.run()
79 | 
80 | 
81 | def _check_file(fl: str) -> str:
82 |     import nibabel as nb
83 |     import numpy as np
84 | 
85 |     img = nb.load(fl)
86 |     if len(img.shape) != 3:
87 |         raise ValueError(f'Image {fl} is not 3-dimensional.')
88 | 
89 |     voxdims = img.header['pixdim'][1:4]
90 |     if not np.allclose(voxdims, voxdims[1]):
91 |         raise ValueError(f'Image {fl} is not isotropic: {voxdims}.')
92 | 
93 |     ornt = nb.io_orientation(img.affine)
94 |     axcodes = 
nb.orientations.ornt2axcodes(ornt) 95 | if ''.join(axcodes) != 'LAS': 96 | las = nb.orientations.axcodes2ornt('LAS') 97 | transform = nb.orientations.ornt_transform(ornt, las) 98 | reornt = img.as_reoriented(transform) 99 | outfl = os.path.abspath(f'LASornt_{os.path.basename(fl)}') 100 | print(f'Creating reorientated image {outfl}') 101 | reornt.to_filename(outfl) 102 | return outfl 103 | return fl 104 | 105 | 106 | if __name__ == '__main__': 107 | main() 108 | -------------------------------------------------------------------------------- /nibabies/cli/run.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """NiBabies runner.""" 3 | 4 | from .. import config 5 | 6 | 7 | def main(): 8 | """Entry point.""" 9 | import gc 10 | import os 11 | import sys 12 | from pathlib import Path 13 | 14 | from ..utils.bids import write_bidsignore, write_derivative_description 15 | from .parser import parse_args 16 | from .workflow import build_boilerplate, build_workflow 17 | 18 | _cwd = os.getcwd() 19 | 20 | parse_args() 21 | 22 | if 'pdb' in config.execution.debug: 23 | from nibabies.utils.debug import setup_exceptionhook 24 | 25 | setup_exceptionhook() 26 | config.nipype.plugin = 'Linear' 27 | 28 | # collect and submit telemetry information 29 | # if `--notrack` is specified, nothing is done. 30 | if not config.execution.notrack and not config.execution.debug: 31 | from nibabies.utils.telemetry import setup_migas 32 | 33 | setup_migas() 34 | 35 | if 'participant' in config.workflow.analysis_level: 36 | _pool = None 37 | if config.nipype.plugin == 'MultiProc': 38 | import multiprocessing as mp 39 | from concurrent.futures import ProcessPoolExecutor 40 | from contextlib import suppress 41 | 42 | # should drastically reduce VMS 43 | # see https://github.com/nipreps/mriqc/pull/984 for more details 44 | os.environ['OMP_NUM_THREADS'] = '1' 45 | 46 | with suppress(RuntimeError): 47 | mp.set_start_method('fork') 48 | gc.collect() 49 | 50 | _pool = ProcessPoolExecutor( 51 | max_workers=config.nipype.nprocs, 52 | initializer=config._process_initializer, 53 | initargs=(_cwd, config.nipype.omp_nthreads), 54 | ) 55 | 56 | config_file = config.execution.work_dir / config.execution.run_uuid / 'config.toml' 57 | config_file.parent.mkdir(exist_ok=True, parents=True) 58 | config.to_filename(config_file) 59 | 60 | # build the workflow within the same process 61 | # it still needs to be saved / loaded to be properly initialized 62 | retval = build_workflow(config_file) 63 | exitcode = retval['return_code'] 64 | nibabies_wf = retval['workflow'] 65 | 66 | # exit conditions: 67 | # - no workflow (--reports-only) 68 | # - retcode is not 0 69 | # - boilerplate only 70 | 71 | if nibabies_wf is None and not config.execution.reports_only: 72 | sys.exit(exitcode) 73 | 74 | if config.execution.write_graph: 75 | nibabies_wf.write_graph(graph2use='colored', format='svg', simple_form=True) 76 | 77 | if exitcode != 0: 78 | sys.exit(exitcode) 79 | 80 | # generate boilerplate 81 | build_boilerplate(nibabies_wf) 82 | if config.execution.boilerplate_only: 83 | sys.exit(exitcode) 84 | 85 | gc.collect() 86 | 87 | config.loggers.workflow.log( 88 | 15, 89 | '\n'.join(['nibabies config:'] + [f'\t\t{s}' for s in config.dumps().splitlines()]), 90 | ) 91 | config.loggers.workflow.log(25, 'nibabies started!') 92 | 93 | # Hack MultiProc's pool to reduce VMS 94 | _plugin = config.nipype.get_plugin() 95 | if _pool: 96 | from nipype.pipeline.plugins.multiproc import MultiProcPlugin 97 | 98 
|             multiproc = MultiProcPlugin(plugin_args=config.nipype.plugin_args)
99 |             multiproc.pool = _pool
100 |             _plugin = {'plugin': multiproc}
101 | 
102 |         gc.collect()
103 |         try:
104 |             nibabies_wf.run(**_plugin)
105 |         except Exception as e:
106 |             config.loggers.workflow.critical('nibabies failed: %s', e)
107 |             raise
108 |         else:
109 |             config.loggers.workflow.log(25, 'nibabies finished successfully!')
110 |             # Bother users with the boilerplate only if the workflow went okay.
111 |             boiler_file = config.execution.nibabies_dir / 'logs' / 'CITATION.md'
112 |             if boiler_file.exists():
113 |                 if config.environment.exec_env in (
114 |                     'singularity',
115 |                     'docker',
116 |                     'nibabies-docker',
117 |                 ):
118 |                     boiler_file = Path('<OUTPUT_PATH>') / boiler_file.relative_to(
119 |                         config.execution.output_dir
120 |                     )
121 |                 config.loggers.workflow.log(
122 |                     25,
123 |                     'Works derived from this nibabies execution should include the '
124 |                     f'boilerplate text found in {boiler_file}.',
125 |                 )
126 | 
127 |             if config.workflow.run_reconall:
128 |                 from niworkflows.utils.misc import _copy_any
129 |                 from templateflow import api
130 | 
131 |                 dseg_tsv = str(
132 |                     api.get(
133 |                         'fsaverage',
134 |                         hemi=None,
135 |                         atlas=None,
136 |                         segmentation='aparc',
137 |                         suffix='dseg',
138 |                         extension=['.tsv'],
139 |                     )
140 |                 )
141 |                 _copy_any(dseg_tsv, str(config.execution.nibabies_dir / 'desc-aseg_dseg.tsv'))
142 |                 _copy_any(dseg_tsv, str(config.execution.nibabies_dir / 'desc-aparcaseg_dseg.tsv'))
143 |             # errno = 0
144 |     finally:
145 |         from ..reports.core import generate_reports
146 | 
147 |         # Generate reports phase
148 |         generate_reports(
149 |             config.execution.unique_labels,
150 |             config.execution.nibabies_dir,
151 |             config.execution.run_uuid,
152 |         )
153 |         write_derivative_description(
154 |             config.execution.bids_dir,
155 |             config.execution.nibabies_dir,
156 |             config.execution.dataset_links,
157 |         )
158 |         write_bidsignore(config.execution.nibabies_dir)
159 | 
160 | 
161 | if __name__ == '__main__':
162 |     raise RuntimeError(
163 |         'Please `pip install` this and run via the command-line interfaces, `nibabies <args>`'
164 |     )
165 | 
--------------------------------------------------------------------------------
/nibabies/cli/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nipreps/nibabies/1e6afd415ec2776a4bf749628fa1a58646d536db/nibabies/cli/tests/__init__.py
--------------------------------------------------------------------------------
/nibabies/cli/version.py:
--------------------------------------------------------------------------------
1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
2 | # vi: set ft=python sts=4 ts=4 sw=4 et:
3 | """Version CLI helpers."""
4 | 
5 | from contextlib import suppress
6 | from datetime import datetime, timezone
7 | from pathlib import Path
8 | 
9 | import requests
10 | from .. 
import __version__ 12 | 13 | RELEASE_EXPIRY_DAYS = 14 14 | DATE_FMT = '%Y%m%d' 15 | 16 | 17 | def check_latest(): 18 | """Determine whether this is the latest version.""" 19 | from packaging.version import InvalidVersion, Version 20 | 21 | latest = None 22 | date = None 23 | outdated = None 24 | now = datetime.now(tz=timezone.utc) 25 | cachefile = Path.home() / '.cache' / 'nibabies' / 'latest' 26 | try: 27 | cachefile.parent.mkdir(parents=True, exist_ok=True) 28 | except OSError: 29 | cachefile = None 30 | 31 | if cachefile and cachefile.exists(): 32 | try: 33 | latest, date = cachefile.read_text().split('|') 34 | except Exception: # noqa: S110, BLE001 35 | pass 36 | else: 37 | try: 38 | latest = Version(latest) 39 | date = datetime.strptime(date, DATE_FMT).astimezone(timezone.utc) 40 | except (InvalidVersion, ValueError): 41 | latest = None 42 | else: 43 | if abs((now - date).days) > RELEASE_EXPIRY_DAYS: 44 | outdated = True 45 | 46 | if latest is None or outdated is True: 47 | response = None 48 | with suppress(Exception): 49 | response = requests.get(url='https://pypi.org/pypi/nibabies/json', timeout=1.0) 50 | 51 | if response and response.status_code == 200: 52 | versions = [Version(rel) for rel in response.json()['releases'].keys()] 53 | versions = [rel for rel in versions if not rel.is_prerelease] 54 | if versions: 55 | latest = sorted(versions)[-1] 56 | else: 57 | latest = None 58 | 59 | if cachefile is not None and latest is not None: 60 | with suppress(OSError): 61 | cachefile.write_text(f'{latest}|{now.strftime(DATE_FMT)}') 62 | 63 | return latest 64 | 65 | 66 | def is_flagged(): 67 | """Check whether current version is flagged.""" 68 | # https://raw.githubusercontent.com/nipreps/fmriprep/master/.versions.json 69 | flagged = () 70 | response = None 71 | with suppress(Exception): 72 | response = requests.get( 73 | url="""\ 74 | https://raw.githubusercontent.com/nipreps/nibabies/master/.versions.json""", 75 | timeout=1.0, 76 | ) 77 | 78 | if response and response.status_code == 200: 79 | flagged = response.json().get('flagged', {}) or {} 80 | 81 | if __version__ in flagged: 82 | return True, flagged[__version__] 83 | 84 | return False, None 85 | -------------------------------------------------------------------------------- /nibabies/conftest.py: -------------------------------------------------------------------------------- 1 | """py.test configuration""" 2 | 3 | import json 4 | from pathlib import Path 5 | from shutil import copytree 6 | 7 | import nibabel as nb 8 | import numpy as np 9 | import pytest 10 | 11 | from nibabies.data import load as load_data 12 | 13 | try: 14 | from importlib.resources import files as ir_files 15 | except ImportError: # PY<3.9 16 | from importlib_resources import files as ir_files 17 | 18 | 19 | def copytree_or_skip(source, target): 20 | data_dir = ir_files('nibabies') / source 21 | if not data_dir.exists(): 22 | pytest.skip(f'Cannot chdir into {data_dir!r}. Probably in a zipped distribution.') 23 | 24 | try: 25 | copytree(data_dir, target / data_dir.name) 26 | except Exception: # noqa: BLE001 27 | pytest.skip(f'Cannot copy {data_dir!r} into {target / data_dir.name}. 
Probably in a zip.') 28 | 29 | 30 | @pytest.fixture(autouse=True) 31 | def _populate_namespace(doctest_namespace, tmp_path): 32 | doctest_namespace['copytree_or_skip'] = copytree_or_skip 33 | doctest_namespace['testdir'] = tmp_path 34 | doctest_namespace['datadir'] = load_data() 35 | 36 | 37 | @pytest.fixture 38 | def minimal_bids(tmp_path): 39 | bids = tmp_path / 'bids' 40 | bids.mkdir() 41 | Path.write_text( 42 | bids / 'dataset_description.json', json.dumps({'Name': 'Test DS', 'BIDSVersion': '1.8.0'}) 43 | ) 44 | T1w = bids / 'sub-01' / 'anat' / 'sub-01_T1w.nii.gz' 45 | T1w.parent.mkdir(parents=True) 46 | nb.Nifti1Image(np.zeros((5, 5, 5)), np.eye(4)).to_filename(T1w) 47 | return bids 48 | -------------------------------------------------------------------------------- /nibabies/data/FreeSurferLabelRemappings.json: -------------------------------------------------------------------------------- 1 | { 2 | "173": 16, 3 | "174": 16, 4 | "175": 16, 5 | 6 | "500": 53, 7 | "501": 53, 8 | "502": 53, 9 | "503": 53, 10 | "504": 53, 11 | "505": 53, 12 | "506": 53, 13 | "507": 53, 14 | "508": 53, 15 | 16 | "550": 17, 17 | "551": 17, 18 | "552": 17, 19 | "553": 17, 20 | "554": 17, 21 | "555": 17, 22 | "556": 17, 23 | "557": 17, 24 | "558": 17 25 | } 26 | -------------------------------------------------------------------------------- /nibabies/data/FreeSurferSubcorticalLabelTableLut.txt: -------------------------------------------------------------------------------- 1 | ACCUMBENS_LEFT 2 | 26 255 165 0 255 3 | ACCUMBENS_RIGHT 4 | 58 255 165 0 255 5 | AMYGDALA_LEFT 6 | 18 103 255 255 255 7 | AMYGDALA_RIGHT 8 | 54 103 255 255 255 9 | BRAIN_STEM 10 | 16 119 159 176 255 11 | CAUDATE_LEFT 12 | 11 122 186 220 255 13 | CAUDATE_RIGHT 14 | 50 122 186 220 255 15 | CEREBELLUM_LEFT 16 | 8 230 148 34 255 17 | CEREBELLUM_RIGHT 18 | 47 230 148 34 255 19 | DIENCEPHALON_VENTRAL_LEFT 20 | 28 165 42 42 255 21 | DIENCEPHALON_VENTRAL_RIGHT 22 | 60 165 42 42 255 23 | HIPPOCAMPUS_LEFT 24 | 17 220 216 20 255 25 | HIPPOCAMPUS_RIGHT 26 | 53 220 216 20 255 27 | PALLIDUM_LEFT 28 | 13 12 48 255 255 29 | PALLIDUM_RIGHT 30 | 52 13 48 255 255 31 | PUTAMEN_LEFT 32 | 12 236 13 176 255 33 | PUTAMEN_RIGHT 34 | 51 236 13 176 255 35 | THALAMUS_LEFT 36 | 10 0 118 14 255 37 | THALAMUS_RIGHT 38 | 49 0 118 14 255 39 | -------------------------------------------------------------------------------- /nibabies/data/MNIInfant_to_MNI1526NLinAsym.mat: -------------------------------------------------------------------------------- 1 | 1.330660649 -0.004427136451 0.01025602509 -41.20370054 2 | -0.003753457765 1.339140797 0.153721562 -68.37553187 3 | -0.01546533827 -0.05324436952 1.426601839 -22.76900701 4 | 0 0 0 1 5 | -------------------------------------------------------------------------------- /nibabies/data/__init__.py: -------------------------------------------------------------------------------- 1 | from acres import Loader 2 | 3 | load = Loader(__package__) 4 | -------------------------------------------------------------------------------- /nibabies/data/antsBrainExtraction_precise.json: -------------------------------------------------------------------------------- 1 | { 2 | "collapse_output_transforms": true, 3 | "convergence_threshold": [ 1E-6, 1E-8, 1E-8, 1E-9 ], 4 | "convergence_window_size": [10, 10, 10, 15], 5 | "dimension": 3, 6 | "interpolation": "LanczosWindowedSinc", 7 | "metric": [ 8 | "Mattes", 9 | "Mattes", 10 | "Mattes", 11 | ["CC", "CC"] 12 | ], 13 | "metric_weight": [ 14 | 1, 15 | 1, 16 | 1, 17 | [0.5, 0.5] 18 | ], 
19 | "number_of_iterations": [ 20 | [ 500, 0 ], 21 | [1000, 500, 250, 100], 22 | [1000, 500, 250, 100], 23 | [50, 10, 0] 24 | ], 25 | "output_transform_prefix": "anat2std", 26 | "radius_or_number_of_bins": [ 27 | 32, 28 | 32, 29 | 32, 30 | [4, 4] 31 | ], 32 | "sampling_percentage": [ 33 | 0.25, 34 | 0.25, 35 | 0.25, 36 | [1, 1] 37 | ], 38 | "sampling_strategy": [ 39 | "Regular", 40 | "Regular", 41 | "Regular", 42 | ["None", "None"] 43 | ], 44 | "shrink_factors": [ 45 | [ 4, 1 ], 46 | [8, 4, 2, 1], 47 | [8, 4, 2, 1], 48 | [4, 2, 1] 49 | ], 50 | "sigma_units": ["vox", "vox", "vox", "vox"], 51 | "smoothing_sigmas": [ 52 | [ 6.0, 0.0 ], 53 | [4, 2, 1, 0], 54 | [4, 2, 1, 0], 55 | [2, 1, 0] 56 | ], 57 | "transform_parameters": [ 58 | [ 1.0 ], 59 | [0.1], 60 | [0.1], 61 | [0.1, 3.0, 0.0] 62 | ], 63 | "transforms": ["Translation", "Rigid", "Affine", "SyN"], 64 | "use_histogram_matching": true, 65 | "verbose": true, 66 | "winsorize_lower_quantile": 0.0001, 67 | "winsorize_upper_quantile": 0.9999, 68 | "write_composite_transform": false 69 | } 70 | -------------------------------------------------------------------------------- /nibabies/data/antsBrainExtraction_testing.json: -------------------------------------------------------------------------------- 1 | { 2 | "collapse_output_transforms": true, 3 | "convergence_threshold": [1E-6, 1E-8, 1E-8, 1E-9], 4 | "convergence_window_size": [10, 10, 10, 15], 5 | "dimension": 3, 6 | "interpolation": "LanczosWindowedSinc", 7 | "metric": [ 8 | "Mattes", 9 | "Mattes", 10 | "Mattes", 11 | ["CC", "CC"] 12 | ], 13 | "metric_weight": [ 14 | 1, 15 | 1, 16 | 1, 17 | [0.5, 0.5] 18 | ], 19 | "number_of_iterations": [ 20 | [500, 0], 21 | [100, 100, 50, 10], 22 | [100, 100, 50, 10], 23 | [5, 0] 24 | ], 25 | "output_transform_prefix": "anat2std", 26 | "radius_or_number_of_bins": [ 27 | 32, 28 | 32, 29 | 32, 30 | [4, 4] 31 | ], 32 | "sampling_percentage": [ 33 | 0.25, 34 | 0.25, 35 | 0.25, 36 | [1, 1] 37 | ], 38 | "sampling_strategy": [ 39 | "Regular", 40 | "Regular", 41 | "Regular", 42 | ["None", "None"] 43 | ], 44 | "shrink_factors": [ 45 | [4, 1], 46 | [8, 4, 2, 1], 47 | [8, 4, 2, 1], 48 | [2, 1] 49 | ], 50 | "sigma_units": ["vox", "vox", "vox", "vox"], 51 | "smoothing_sigmas": [ 52 | [6, 0], 53 | [4, 2, 1, 0], 54 | [4, 2, 1, 0], 55 | [1, 0] 56 | ], 57 | "transform_parameters": [ 58 | [1.0], 59 | [0.1], 60 | [0.1], 61 | [0.1, 3.0, 0.0] 62 | ], 63 | "transforms": ["Translation", "Rigid", "Affine", "SyN"], 64 | "use_histogram_matching": true, 65 | "verbose": true, 66 | "winsorize_lower_quantile": 0.0001, 67 | "winsorize_upper_quantile": 0.9999, 68 | "write_composite_transform": false 69 | } -------------------------------------------------------------------------------- /nibabies/data/flirtsch/bbr.sch: -------------------------------------------------------------------------------- 1 | # 1mm scale 2 | setscale 1 force 3 | setoption costfunction bbr 4 | setoption optimisationtype brent 5 | setoption tolerance 0.0005 0.0005 0.0005 0.02 0.02 0.02 0.002 0.002 0.002 0.001 0.001 0.001 6 | #setoption tolerance 0.005 0.005 0.005 0.2 0.2 0.2 0.02 0.02 0.02 0.01 0.01 0.01 7 | setoption boundguess 1 8 | setoption bbrstep 200 9 | clear UA 10 | clear UU 11 | clear UV 12 | clear U 13 | setrowqsform UU 14 | setrow UU 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 15 | measurecost 6 UU:1-2 0.0 0.0 0.0 0.0 0.0 0.0 0.0 rel 8 16 | gridmeasurecost 6 UU:1-2 -0.07 0.07 0.07 -0.07 0.07 0.07 -0.07 0.07 0.07 -4.0 4.0 4.0 -4.0 4.0 4.0 -4.0 4.0 4.0 0.0 0.0 0.0 abs 8 17 | sort U 18 | copy U UA 19 | clear U 20 | 
optimise 6 UA:1 0.0 0.0 0.0 0.0 0.0 0.0 0.0 rel 8
21 | setoption optimisationtype powell
22 | optimise 6 U:1 0.0 0.0 0.0 0.0 0.0 0.0 0.0 rel 8
23 | setoption optimisationtype brent
24 | optimise 6 U:2 0.0 0.0 0.0 0.0 0.0 0.0 0.0 rel 4
25 | sort U
26 | setoption tolerance 0.0002 0.0002 0.0002 0.02 0.02 0.02 0.002 0.002 0.002 0.001 0.001 0.001
27 | setoption bbrstep 2
28 | clear UU
29 | copy U UU
30 | clear U
31 | gridmeasurecost 6 UU:1 -0.0017 0.0017 0.0017 -0.0017 0.0017 0.0017 -0.0017 0.0017 0.0017 -0.1 0.1 0.1 -0.1 0.1 0.1 -0.1 0.1 0.1 0.0 0.0 0.0 abs 8
32 | sort U
33 | clear UB
34 | copy U UB
35 | clear U
36 | setoption optimisationtype brent
37 | optimise 6 UB:1 0.0 0.0 0.0 0.0 0.0 0.0 0.0 rel 8
38 | setoption optimisationtype powell
39 | optimise 12 U:1 0.0 0.0 0.0 0.0 0.0 0.0 0.0 rel 8
40 | setoption optimisationtype brent
41 | optimise 12 U:2 0.0 0.0 0.0 0.0 0.0 0.0 0.0 rel 4
42 | sort U
43 | print U:1
44 | 
--------------------------------------------------------------------------------
/nibabies/data/io_spec_func.json:
--------------------------------------------------------------------------------
1 | {
2 |   "queries": {
3 |     "baseline": {
4 |       "hmc": {
5 |         "datatype": "func",
6 |         "space": null,
7 |         "desc": "hmc",
8 |         "suffix": "boldref",
9 |         "extension": [
10 |           ".nii.gz",
11 |           ".nii"
12 |         ]
13 |       },
14 |       "coreg": {
15 |         "datatype": "func",
16 |         "space": null,
17 |         "desc": "coreg",
18 |         "suffix": "boldref",
19 |         "extension": [
20 |           ".nii.gz",
21 |           ".nii"
22 |         ]
23 |       }
24 |     },
25 |     "transforms": {
26 |       "hmc": {
27 |         "datatype": "func",
28 |         "from": "orig",
29 |         "to": "boldref",
30 |         "mode": "image",
31 |         "suffix": "xfm",
32 |         "extension": ".txt"
33 |       },
34 |       "boldref2anat": {
35 |         "datatype": "func",
36 |         "from": "orig",
37 |         "to": "anat",
38 |         "mode": "image",
39 |         "suffix": "xfm",
40 |         "extension": ".txt"
41 |       },
42 |       "boldref2fmap": {
43 |         "datatype": "func",
44 |         "from": "orig",
45 |         "mode": "image",
46 |         "suffix": "xfm",
47 |         "extension": ".txt"
48 |       }
49 |     }
50 |   },
51 |   "patterns": [
52 |     "sub-{subject}[/ses-{session}]/{datatype|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_res-{res}][_label-{label}][_echo-{echo}][_space-{space}][_desc-{desc}]_{suffix}.{extension|nii.gz}",
53 |     "sub-{subject}[/ses-{session}]/{datatype|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}]_from-{from}_to-{to}_mode-{mode|image}_{suffix|xfm}.{extension}",
54 |     "sub-{subject}[/ses-{session}]/{datatype|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_part-{part}][_desc-{desc}]_{suffix}.{extension}",
55 |     "sub-{subject}[/ses-{session}]/{datatype|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_space-{space}][_res-{res}][_den-{den}][_hemi-{hemi}][_label-{label}][_desc-{desc}]_{suffix<bold|boldref|dseg|mask>}.{extension}"
56 |   ]
57 | }
--------------------------------------------------------------------------------
/nibabies/data/itkIdentityTransform.txt:
--------------------------------------------------------------------------------
1 | #Insight Transform File V1.0
2 | #Transform 0
3 | Transform: MatrixOffsetTransformBase_double_3_3
4 | Parameters: 1 0 0 0 1 0 0 0 1 0 0 0
5 | FixedParameters: 0 0 0
6 | 
--------------------------------------------------------------------------------
/nibabies/data/t1-t2-coreg.json:
-------------------------------------------------------------------------------- 1 | { 2 | "collapse_output_transforms": true, 3 | "convergence_threshold": [ 1E-6, 1E-6, 1E-9 ], 4 | "convergence_window_size": [ 10, 5, 2], 5 | "dimension": 3, 6 | "interpolation": "BSpline", 7 | "metric": [ "Mattes", "Mattes", "Mattes" ], 8 | "metric_weight": [ 1.0, 1.0, 1.0 ], 9 | "number_of_iterations": [ 10 | [ 500, 0 ], 11 | [ 1000, 500, 250, 100 ], 12 | [ 50, 10 ] 13 | ], 14 | "radius_or_number_of_bins": [ 32, 32, 32 ], 15 | "sampling_percentage": [ 0.25, 0.25, 0.8 ], 16 | "sampling_strategy": [ "Random", "Regular", "Random" ], 17 | "shrink_factors": [ 18 | [ 4, 1 ], 19 | [ 8, 4, 2, 1 ], 20 | [ 2, 1 ] 21 | ], 22 | "sigma_units": [ "vox", "vox", "vox" ], 23 | "smoothing_sigmas": [ 24 | [ 6.0, 0.0 ], 25 | [ 4.0, 2.0, 1.0, 0.0], 26 | [ 2.0, 0 ] 27 | ], 28 | "transform_parameters": [ 29 | [ 1.0 ], 30 | [ 0.1 ], 31 | [ 0.1, 3.0, 0.0 ] 32 | ], 33 | "transforms": [ "Translation", "Rigid", "SyN" ], 34 | "use_histogram_matching": false, 35 | "verbose": true, 36 | "winsorize_lower_quantile": 0.0001, 37 | "winsorize_upper_quantile": 0.9999, 38 | "write_composite_transform": true 39 | } 40 | -------------------------------------------------------------------------------- /nibabies/data/tests/config.toml: -------------------------------------------------------------------------------- 1 | [environment] 2 | cpu_count = 8 3 | exec_env = "posix" 4 | free_mem = 2.2 5 | overcommit_policy = "heuristic" 6 | overcommit_limit = "50%" 7 | nipype_version = "1.5.0" 8 | templateflow_version = "24.2.2" 9 | version = "24.1.0" 10 | 11 | [execution] 12 | bids_dir = "ds000005/" 13 | bids_description_hash = "5d42e27751bbc884eca87cb4e62b9a0cca0cd86f8e578747fe89b77e6c5b21e5" 14 | boilerplate_only = false 15 | fs_license_file = "/opt/freesurfer/license.txt" 16 | fs_subjects_dir = "/opt/freesurfer/subjects" 17 | log_dir = "/tmp/fmriprep/logs" 18 | log_level = 40 19 | low_mem = false 20 | md_only_boilerplate = false 21 | notrack = true 22 | output_dir = "/tmp" 23 | output_spaces = "MNIInfant:cohort-1" 24 | reports_only = false 25 | run_uuid = "20200306-105302_d365772b-fd60-4741-a722-372c2f558b50" 26 | participant_label = [ "01",] 27 | templateflow_home = "~/.cache/templateflow" 28 | work_dir = "work/" 29 | write_graph = false 30 | 31 | [workflow] 32 | anat_only = false 33 | bold2anat_dof = 6 34 | fd_radius = 45 35 | fmap_bspline = false 36 | force_syn = false 37 | hires = true 38 | ignore = [] 39 | longitudinal = false 40 | medial_surface_nan = false 41 | project_goodvoxels = false 42 | regressors_all_comps = false 43 | regressors_dvars_th = 1.5 44 | regressors_fd_th = 0.5 45 | skull_strip_fixed_seed = false 46 | skull_strip_template = "UNCInfant:cohort-1" 47 | surface_recon_method = "auto" 48 | 49 | [nipype] 50 | crashfile_format = "txt" 51 | get_linked_libs = false 52 | memory_gb = 32 53 | nprocs = 8 54 | omp_nthreads = 8 55 | plugin = "MultiProc" 56 | resource_monitor = false 57 | stop_on_first_crash = false 58 | 59 | [nipype.plugin_args] 60 | maxtasksperchild = 1 61 | raise_insufficient = false 62 | 63 | [execution.bids_filters.t1w] 64 | reconstruction = "" 65 | 66 | [execution.bids_filters.t2w] 67 | reconstruction = "" -------------------------------------------------------------------------------- /nibabies/data/tpl-MNI152NLin6Asym_res-01_desc-avgwmparc_dseg.nii.gz: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/nipreps/nibabies/1e6afd415ec2776a4bf749628fa1a58646d536db/nibabies/data/tpl-MNI152NLin6Asym_res-01_desc-avgwmparc_dseg.nii.gz -------------------------------------------------------------------------------- /nibabies/data/xfm_manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "from-MNI152NLin6Asym_to-MNIInfant+10_xfm.h5": { 3 | "url": "https://osf.io/download/jf6vz/", 4 | "hash": "md5:df6d40e5bbdca85f083866ad26507796" 5 | }, 6 | "from-MNI152NLin6Asym_to-MNIInfant+11_xfm.h5": { 7 | "url": "https://osf.io/download/zmjyn/", 8 | "hash": "md5:ff8435f06c0a44be88e050492b90ffea" 9 | }, 10 | "from-MNI152NLin6Asym_to-MNIInfant+1_xfm.h5": { 11 | "url": "https://osf.io/download/kx7ny/", 12 | "hash": "md5:c27d35dff75d59d605c8d786c985594e" 13 | }, 14 | "from-MNI152NLin6Asym_to-MNIInfant+2_xfm.h5": { 15 | "url": "https://osf.io/download/6758aa0c67a7782b00f73c77/", 16 | "hash": "md5:81fabdc70c200bf099893bd1377ef0f7" 17 | }, 18 | "from-MNI152NLin6Asym_to-MNIInfant+3_xfm.h5": { 19 | "url": "https://osf.io/download/6758aa1c76bfbc22cbf73b0a/", 20 | "hash": "md5:c8f5b79b95f9aa65add5524e88601cc6" 21 | }, 22 | "from-MNI152NLin6Asym_to-MNIInfant+4_xfm.h5": { 23 | "url": "https://osf.io/download/6758aa1bb96fd819c41e25a4/", 24 | "hash": "md5:1e4b927115a76b031c46e6180fc76a30" 25 | }, 26 | "from-MNI152NLin6Asym_to-MNIInfant+5_xfm.h5": { 27 | "url": "https://osf.io/download/6758aa146e0cd8ca5f563b2b/", 28 | "hash": "md5:25bfd0837a88db267762974c0a530535" 29 | }, 30 | "from-MNI152NLin6Asym_to-MNIInfant+6_xfm.h5": { 31 | "url": "https://osf.io/download/6758ab50b95a2e75b11e23f0/", 32 | "hash": "md5:7ed4732832ed6dd45dd2259d8b4454e7" 33 | }, 34 | "from-MNI152NLin6Asym_to-MNIInfant+7_xfm.h5": { 35 | "url": "https://osf.io/download/6758ab5667a7782b00f73cfa/", 36 | "hash": "md5:a1244c38b7b4825abefc5834d0398b08" 37 | }, 38 | "from-MNI152NLin6Asym_to-MNIInfant+8_xfm.h5": { 39 | "url": "https://osf.io/download/rq2an/", 40 | "hash": "md5:50d11fdac22c6589af8a7f61e4b3e41a" 41 | }, 42 | "from-MNI152NLin6Asym_to-MNIInfant+9_xfm.h5": { 43 | "url": "https://osf.io/download/6758ab67eacdd8b34803d991/", 44 | "hash": "md5:3184d91f8b3a386ca3ec913c365651d8" 45 | }, 46 | "from-MNIInfant+10_to-MNI152NLin6Asym_xfm.h5": { 47 | "url": "https://osf.io/download/4xh9q/", 48 | "hash": "md5:a1f3dd3c0ac8b05efbaf893cca6f9641" 49 | }, 50 | "from-MNIInfant+11_to-MNI152NLin6Asym_xfm.h5": { 51 | "url": "https://osf.io/download/6758a5026e0cd8ca5f56380d/", 52 | "hash": "md5:0d1aadef884574e54065d4e2cdb8e398" 53 | }, 54 | "from-MNIInfant+1_to-MNI152NLin6Asym_xfm.h5": { 55 | "url": "https://osf.io/download/7ge2b/", 56 | "hash": "md5:d5e4272140c6f582f64b7f39b31ca837" 57 | }, 58 | "from-MNIInfant+2_to-MNI152NLin6Asym_xfm.h5": { 59 | "url": "https://osf.io/download/6758a3c1a678894ad71e2422/", 60 | "hash": "md5:b03651dae4d378410c44f1c6c63dbea0" 61 | }, 62 | "from-MNIInfant+3_to-MNI152NLin6Asym_xfm.h5": { 63 | "url": "https://osf.io/download/6758a3c3bc61ce5912662055/", 64 | "hash": "md5:7cc099e26647e670c8e75ead2cfe39a6" 65 | }, 66 | "from-MNIInfant+4_to-MNI152NLin6Asym_xfm.h5": { 67 | "url": "https://osf.io/download/6758a3bc040c053b58f73b47/", 68 | "hash": "md5:e92e9150f2ad4d2730f005aa9750438d" 69 | }, 70 | "from-MNIInfant+5_to-MNI152NLin6Asym_xfm.h5": { 71 | "url": "https://osf.io/download/6758a3bdea7294dbdd66161a/", 72 | "hash": "md5:9cf6cf3fb500c229da15490c9080201a" 73 | }, 74 | "from-MNIInfant+6_to-MNI152NLin6Asym_xfm.h5": { 75 | "url": 
"https://osf.io/download/6758a3bf040c053b58f73b4b/", 76 | "hash": "md5:2212fdb57b85e8a0f7fa9feea5b0dd1b" 77 | }, 78 | "from-MNIInfant+7_to-MNI152NLin6Asym_xfm.h5": { 79 | "url": "https://osf.io/download/6758a4f78af26d0a97661ca9/", 80 | "hash": "md5:6913f8191201350311ff61525fae8a21" 81 | }, 82 | "from-MNIInfant+8_to-MNI152NLin6Asym_xfm.h5": { 83 | "url": "https://osf.io/download/6758a500f82c189df71e256f/", 84 | "hash": "md5:809455af8416cd61c1693b5c7eafbd13" 85 | }, 86 | "from-MNIInfant+9_to-MNI152NLin6Asym_xfm.h5": { 87 | "url": "https://osf.io/download/6758a4ff040c053b58f73bd1/", 88 | "hash": "md5:49317cbb038c399d4df7428f07d36983" 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /nibabies/interfaces/__init__.py: -------------------------------------------------------------------------------- 1 | from niworkflows.interfaces.bids import DerivativesDataSink as _DDS 2 | 3 | 4 | # TODO: Set default as default in niworkflows 5 | class DerivativesDataSink(_DDS): 6 | out_path_base = '' 7 | -------------------------------------------------------------------------------- /nibabies/interfaces/bids.py: -------------------------------------------------------------------------------- 1 | """BIDS-related interfaces.""" 2 | 3 | from pathlib import Path 4 | 5 | from bids.utils import listify 6 | from nipype.interfaces.base import ( 7 | DynamicTraitedSpec, 8 | SimpleInterface, 9 | TraitedSpec, 10 | isdefined, 11 | traits, 12 | ) 13 | from nipype.interfaces.io import add_traits 14 | from nipype.interfaces.utility.base import _ravel 15 | 16 | 17 | class _BIDSURIInputSpec(DynamicTraitedSpec): 18 | dataset_links = traits.Dict(mandatory=True, desc='Dataset links') 19 | out_dir = traits.Str(mandatory=True, desc='Output directory') 20 | 21 | 22 | class _BIDSURIOutputSpec(TraitedSpec): 23 | out = traits.List( 24 | traits.Str, 25 | desc='BIDS URI(s) for file', 26 | ) 27 | 28 | 29 | class BIDSURI(SimpleInterface): 30 | """Convert input filenames to BIDS URIs, based on links in the dataset. 31 | 32 | This interface can combine multiple lists of inputs. 33 | """ 34 | 35 | input_spec = _BIDSURIInputSpec 36 | output_spec = _BIDSURIOutputSpec 37 | 38 | def __init__(self, numinputs=0, **inputs): 39 | super().__init__(**inputs) 40 | self._numinputs = numinputs 41 | if numinputs >= 1: 42 | input_names = [f'in{i + 1}' for i in range(numinputs)] 43 | else: 44 | input_names = [] 45 | add_traits(self.inputs, input_names) 46 | 47 | def _run_interface(self, runtime): 48 | inputs = [getattr(self.inputs, f'in{i + 1}') for i in range(self._numinputs)] 49 | in_files = listify(inputs) 50 | in_files = _ravel(in_files) 51 | # Remove undefined inputs 52 | in_files = [f for f in in_files if isdefined(f)] 53 | # Convert the dataset links to BIDS URI prefixes 54 | updated_keys = {f'bids:{k}:': Path(v) for k, v in self.inputs.dataset_links.items()} 55 | updated_keys['bids::'] = Path(self.inputs.out_dir) 56 | # Convert the paths to BIDS URIs 57 | out = [_find_nearest_path(updated_keys, f) for f in in_files] 58 | self._results['out'] = out 59 | 60 | return runtime 61 | 62 | 63 | def _find_nearest_path(paths: dict[str, Path], input_path: str | Path): 64 | """Find the nearest relative path from an input path to a dictionary of paths. 65 | 66 | If ``input_path`` is not relative to any of the paths in ``path_dict``, 67 | the absolute path string is returned. 68 | 69 | If ``input_path`` is already a BIDS-URI, then it will be returned unmodified. 
70 | 
71 |     Parameters
72 |     ----------
73 |     paths : dict of (str, Path)
74 |         A dictionary of paths, mapping BIDS-URI prefixes to dataset root directories.
75 |     input_path : Path
76 |         The input path to match.
77 | 
78 |     Returns
79 |     -------
80 |     matching_path : str
81 |         The nearest relative path from the input path to a path in the dictionary.
82 |         This is either the concatenation of the associated key from ``paths``
83 |         and the relative path from the associated value from ``paths`` to ``input_path``,
84 |         or the absolute path to ``input_path`` if no matching path is found in ``paths``.
85 | 
86 |     Examples
87 |     --------
88 |     >>> from pathlib import Path
89 |     >>> path_dict = {
90 |     ...     'bids::': Path('/data/derivatives/fmriprep'),
91 |     ...     'bids:raw:': Path('/data'),
92 |     ...     'bids:deriv-0:': Path('/data/derivatives/source-1'),
93 |     ... }
94 |     >>> input_path = Path('/data/derivatives/source-1/sub-01/func/sub-01_task-rest_bold.nii.gz')
95 |     >>> _find_nearest_path(path_dict, input_path)  # match to 'bids:deriv-0:'
96 |     'bids:deriv-0:sub-01/func/sub-01_task-rest_bold.nii.gz'
97 |     >>> input_path = Path('/out/sub-01/func/sub-01_task-rest_bold.nii.gz')
98 |     >>> _find_nearest_path(path_dict, input_path)  # no match - absolute path
99 |     '/out/sub-01/func/sub-01_task-rest_bold.nii.gz'
100 |     >>> input_path = Path('/data/sub-01/func/sub-01_task-rest_bold.nii.gz')
101 |     >>> _find_nearest_path(path_dict, input_path)  # match to 'bids:raw:'
102 |     'bids:raw:sub-01/func/sub-01_task-rest_bold.nii.gz'
103 |     >>> input_path = 'bids::sub-01/func/sub-01_task-rest_bold.nii.gz'
104 |     >>> _find_nearest_path(path_dict, input_path)  # already a BIDS-URI
105 |     'bids::sub-01/func/sub-01_task-rest_bold.nii.gz'
106 |     """
107 |     # Don't modify BIDS-URIs
108 |     if isinstance(input_path, str) and input_path.startswith('bids:'):
109 |         return input_path
110 | 
111 |     input_path = Path(input_path)
112 |     matching_path = None
113 |     for key, path in paths.items():
114 |         if input_path.is_relative_to(path):
115 |             relative_path = input_path.relative_to(path)
116 |             if (matching_path is None) or (len(relative_path.parts) < len(matching_path.parts)):
117 |                 matching_key = key
118 |                 matching_path = relative_path
119 | 
120 |     if matching_path is None:
121 |         matching_path = str(input_path.absolute())
122 |     else:
123 |         matching_path = f'{matching_key}{matching_path}'
124 | 
125 |     return matching_path
126 | 
--------------------------------------------------------------------------------
/nibabies/interfaces/conftest.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | from shutil import copytree
3 | from tempfile import TemporaryDirectory
4 | 
5 | import pytest
6 | 
7 | try:
8 |     from contextlib import chdir as _chdir
9 | except ImportError:  # PY310
10 |     import os
11 |     from contextlib import contextmanager
12 | 
13 |     @contextmanager  # type: ignore
14 |     def _chdir(path):
15 |         cwd = os.getcwd()
16 |         os.chdir(path)
17 |         try:
18 |             yield
19 |         finally:
20 |             os.chdir(cwd)
21 | 
22 | 
23 | DATA_FILES = (
24 |     'functional.nii',
25 |     'anatomical.nii',
26 |     'func.dlabel.nii',
27 |     'func.dtseries.nii',
28 |     'epi.nii',
29 |     'T1w.nii',
30 |     'func_to_struct.mat',
31 |     'atlas.nii',
32 |     'label_list.txt',
33 |     'sub-01_run-01_echo-1_bold.nii.gz',
34 |     'sub-01_run-01_echo-2_bold.nii.gz',
35 |     'sub-01_run-01_echo-3_bold.nii.gz',
36 |     'xfm0.h5',
37 |     'xfm1.h5',
38 | )
39 | 
40 | 
41 | @pytest.fixture(scope='package')
42 | def data_dir():
43 |     with TemporaryDirectory() as tmpdir:
44 |         tmp_path = Path(tmpdir)
45 |         for fname in DATA_FILES:
46 |             Path.touch(tmp_path / fname)
47 |         yield tmp_path
48 | 
49 | 
50 | @pytest.fixture(autouse=True)
51 | def _docdir(data_dir, request, tmp_path):
52 |     # Trigger ONLY for the doctests.
53 |     doctest_plugin = request.config.pluginmanager.getplugin('doctest')
54 |     if isinstance(request.node, doctest_plugin.DoctestItem):
55 |         copytree(data_dir, tmp_path, dirs_exist_ok=True)
56 | 
57 |         # Chdir only for the duration of the test.
58 |         with _chdir(tmp_path):
59 |             yield
60 | 
61 |     else:
62 |         # For normal tests, we have to yield, since this is a yield-fixture.
63 |         yield
64 | 
--------------------------------------------------------------------------------
/nibabies/interfaces/freesurfer.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os
3 | from pathlib import Path
4 | 
5 | from nipype.interfaces.base import (
6 |     CommandLine,
7 |     CommandLineInputSpec,
8 |     Directory,
9 |     File,
10 |     TraitedSpec,
11 |     isdefined,
12 |     traits,
13 | )
14 | 
15 | from ..utils.misc import check_total_memory
16 | 
17 | 
18 | class InfantReconAllInputSpec(CommandLineInputSpec):
19 |     subjects_dir = Directory(
20 |         exists=True,
21 |         hash_files=False,
22 |         desc='path to subjects directory',
23 |     )
24 |     subject_id = traits.Str(
25 |         'recon_all',
26 |         argstr='--subject %s',
27 |         desc='subject name',
28 |         required=True,
29 |     )
30 |     t1_file = File(
31 |         exists=True,
32 |         desc='path to T1w file',
33 |     )
34 |     age = traits.Range(
35 |         low=0,
36 |         argstr='--age %d',
37 |         desc='Subject age in months',
38 |     )
39 |     outdir = Directory(
40 |         argstr='--outdir %s',
41 |         desc='Output directory where the recon-all results are written. '
42 |         'The default location is <subjects_dir>/<subject_id>.',
43 |     )
44 |     mask_file = traits.File(
45 |         argstr='--masked %s',
46 |         desc='Skull-stripped and INU-corrected T1 (skips skullstripping step)',
47 |     )
48 |     newborn = traits.Bool(
49 |         xor=['age'],
50 |         argstr='--newborn',
51 |         desc='Use newborns from set',
52 |     )
53 |     aseg_file = File(
54 |         argstr='--segfile %s',
55 |         desc='Pre-computed segmentation file',
56 |     )
57 | 
58 | 
59 | class InfantReconAllOutputSpec(TraitedSpec):
60 |     outdir = Directory(exists=True, desc='Output directory.')
61 |     subject_id = traits.Str(desc='Subject name for whom to retrieve data')
62 | 
63 | 
64 | class InfantReconAll(CommandLine):
65 |     """
66 |     Runs the infant recon all pipeline
67 |     """
68 | 
69 |     _cmd = 'infant_recon_all'
70 |     input_spec = InfantReconAllInputSpec
71 |     output_spec = InfantReconAllOutputSpec
72 |     _no_run = False
73 | 
74 |     @property
75 |     def cmdline(self):
76 |         cmd = super().cmdline
77 |         # check if previously run
78 |         if isdefined(self.inputs.outdir):
79 |             logdir = Path(self.inputs.outdir) / 'log'
80 |             if logdir.exists():
81 |                 try:
82 |                     log = sorted(logdir.glob('summary.*.log'))[0]
83 |                     self._no_run = 'Successfully finished infant_recon_all' in log.read_text()
84 |                 except IndexError:
85 |                     pass
86 |             if self._no_run:
87 |                 return 'echo infant_recon_all: nothing to do'
88 |         return cmd
89 | 
90 |     def _run_interface(self, runtime):
91 |         # make sure directory structure is intact
92 |         if not isdefined(self.inputs.subjects_dir):
93 |             self.inputs.subjects_dir = _set_subjects_dir()
94 |         subjdir = Path(self.inputs.subjects_dir) / self.inputs.subject_id
95 |         if not isdefined(self.inputs.outdir):
96 |             self.inputs.outdir = str(subjdir)
97 |         try:
98 |             subjdir.mkdir(parents=True, exist_ok=True)
99 |         except OSError as err:
100 |             raise OSError(
101 |                 f'Current SUBJECTS_DIR <{subjdir}> cannot be written to. To fix this, '
102 |                 'either define the input or unset the environment variable.'
103 |             ) from err
104 |         # T1 image is expected to be in a specific location if no mask is present
105 |         if not (subjdir / 'mprage.nii.gz').exists() and not (subjdir / 'mprage.mgz').exists():
106 |             if isdefined(self.inputs.t1_file):
107 |                 (subjdir / 'mprage.nii.gz').symlink_to(self.inputs.t1_file)  # link expected name to the provided T1w
108 |             elif not isdefined(self.inputs.mask_file):
109 |                 raise RuntimeError('Neither T1 nor mask present!')
110 |         # warn users that this might fail...
111 |         if not check_total_memory(recommended_gb=20):
112 |             logging.getLogger('nipype.interface').warning(
113 |                 f'For best results, run {self._cmd} with at least 20GB available RAM.'
114 |             )
115 |         return super()._run_interface(runtime)
116 | 
117 |     def _list_outputs(self):
118 |         outputs = self._outputs().get()
119 |         outputs['subject_id'] = self.inputs.subject_id
120 |         outputs['outdir'] = self.inputs.outdir
121 |         return outputs
122 | 
123 | 
124 | def _set_subjects_dir():
125 |     subjdir = os.getenv('SUBJECTS_DIR')
126 |     if not subjdir:
127 |         subjdir = os.getcwd()
128 |         os.environ['SUBJECTS_DIR'] = subjdir
129 |     return subjdir
130 | 
--------------------------------------------------------------------------------
/nibabies/interfaces/gifti.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | 
3 | import nibabel as nb
4 | import nibabel.gifti as ngi
5 | import numpy as np
6 | from nipype.interfaces.base import (
7 |     BaseInterfaceInputSpec,
8 |     File,
9 |     SimpleInterface,
10 |     TraitedSpec,
11 |     traits,
12 | )
13 | 
14 | from .. import __version__
15 | 
16 | 
17 | class _MaskGiftiInputSpec(BaseInterfaceInputSpec):
18 |     in_file = File(exists=True, mandatory=True, desc='Input GIFTI (n-darrays)')
19 |     mask_file = File(exists=True, mandatory=True, desc='Input mask (single binary darray)')
20 |     threshold = traits.Float(
21 |         desc='If mask is probabilistic, inclusion limit',
22 |     )
23 |     metadata = traits.Dict(
24 |         desc='Metadata to insert into GIFTI',
25 |     )
26 | 
27 | 
28 | class _MaskGiftiOutputSpec(TraitedSpec):
29 |     out_file = File(desc='Masked file')
30 | 
31 | 
32 | class MaskGifti(SimpleInterface):
33 |     """Mask file across GIFTI darrays"""
34 | 
35 |     input_spec = _MaskGiftiInputSpec
36 |     output_spec = _MaskGiftiOutputSpec
37 | 
38 |     def _run_interface(self, runtime):
39 |         self._results['out_file'] = _mask_gifti(
40 |             self.inputs.in_file,
41 |             self.inputs.mask_file,
42 |             threshold=self.inputs.threshold or None,
43 |             metadata=self.inputs.metadata,
44 |             newpath=runtime.cwd,
45 |         )
46 |         return runtime
47 | 
48 | 
49 | def _mask_gifti(in_file, mask_file, *, threshold=None, metadata=None, newpath=None):
50 |     """
51 |     Mask and create a GIFTI image.
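    If ``threshold`` is set, vertices whose mask value exceeds it are kept;
    otherwise any vertex with a nonzero mask value is kept.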
52 | """ 53 | metadata = metadata or {} 54 | 55 | img = nb.load(in_file) 56 | mask = nb.load(mask_file).agg_data() 57 | 58 | indices = np.nonzero(mask)[0] 59 | if threshold is not None: 60 | indices = np.where(mask > threshold)[0] 61 | 62 | data = img.agg_data() 63 | if isinstance(data, tuple): 64 | try: 65 | data = np.vstack(data) 66 | except Exception as err: # noqa: BLE001 67 | raise NotImplementedError(f'Tricky GIFTI: {in_file} not supported.') from err 68 | else: 69 | data = data.T 70 | masked = data[:, indices] 71 | 72 | # rather than creating new GiftiDataArrays, just modify the data directly 73 | # and preserve the existing attributes 74 | for i, darr in enumerate(img.darrays): 75 | darr.data = masked[i] 76 | darr.dims = list(masked[i].shape) 77 | 78 | # Finalize by adding additional metadata to file 79 | metad = { 80 | **{'CreatedBy': f'MaskGifti (NiBabies-{__version__})'}, 81 | **metadata, 82 | } 83 | if int(nb.__version__[0]) >= 4: # API will change in 4.0.0 84 | existing_meta = img.meta or {} 85 | img.meta = ngi.GiftiMetaData({**metad, **existing_meta}) 86 | else: 87 | meta = img.meta.data or [] 88 | for k, v in metad.items(): 89 | meta.append(ngi.GiftiNVPairs(k, v)) 90 | img.meta.data = meta 91 | 92 | if newpath is None: 93 | newpath = Path() 94 | out_file = str((Path(newpath) / f'masked_{Path(in_file).name}').absolute()) 95 | nb.save(img, out_file) 96 | return out_file 97 | -------------------------------------------------------------------------------- /nibabies/interfaces/maths.py: -------------------------------------------------------------------------------- 1 | """A module for interfaces""" 2 | 3 | import os 4 | 5 | import numpy as np 6 | from nipype.interfaces.base import File, SimpleInterface, TraitedSpec, traits 7 | from nipype.utils.filemanip import fname_presuffix 8 | 9 | 10 | class ClipInputSpec(TraitedSpec): 11 | in_file = File(exists=True, mandatory=True, desc='Input imaging file') 12 | out_file = File(desc='Output file name') 13 | minimum = traits.Float( 14 | -np.inf, usedefault=True, desc='Values under minimum are set to minimum' 15 | ) 16 | maximum = traits.Float(np.inf, usedefault=True, desc='Values over maximum are set to maximum') 17 | 18 | 19 | class ClipOutputSpec(TraitedSpec): 20 | out_file = File(desc='Output file name') 21 | 22 | 23 | class Clip(SimpleInterface): 24 | """Simple clipping interface that clips values to specified minimum/maximum 25 | If no values are outside the bounds, nothing is done and the in_file is passed 26 | as the out_file without copying. 
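    If an explicit ``out_file`` is requested, the data are written to that path
    even when no clipping is applied.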
27 |     """
28 | 
29 |     input_spec = ClipInputSpec
30 |     output_spec = ClipOutputSpec
31 | 
32 |     def _run_interface(self, runtime):
33 |         import nibabel as nb
34 | 
35 |         img = nb.load(self.inputs.in_file)
36 |         data = img.get_fdata()
37 | 
38 |         out_file = self.inputs.out_file
39 |         if out_file:
40 |             out_file = os.path.join(runtime.cwd, out_file)
41 | 
42 |         if np.any((data < self.inputs.minimum) | (data > self.inputs.maximum)):
43 |             if not out_file:
44 |                 out_file = fname_presuffix(
45 |                     self.inputs.in_file, suffix='_clipped', newpath=runtime.cwd
46 |                 )
47 |             np.clip(data, self.inputs.minimum, self.inputs.maximum, out=data)
48 |             img.__class__(data, img.affine, img.header).to_filename(out_file)
49 |         elif out_file:
50 |             # An output file was explicitly requested: write the data unchanged
51 |             # so the advertised path actually exists.
52 |             img.to_filename(out_file)
53 |         else:
54 |             out_file = self.inputs.in_file
55 | 
56 |         self._results['out_file'] = out_file
57 |         return runtime
58 | 
59 | 
60 | class Label2MaskInputSpec(TraitedSpec):
61 |     in_file = File(exists=True, mandatory=True, desc='Input label file')
62 |     label_val = traits.Int(mandatory=True, desc='Label value to create mask from')
63 | 
64 | 
65 | class Label2MaskOutputSpec(TraitedSpec):
66 |     out_file = File(desc='Output file name')
67 | 
68 | 
69 | class Label2Mask(SimpleInterface):
70 |     """Create mask file for a label from a multi-label segmentation"""
71 | 
72 |     input_spec = Label2MaskInputSpec
73 |     output_spec = Label2MaskOutputSpec
74 | 
75 |     def _run_interface(self, runtime):
76 |         import nibabel as nb
77 | 
78 |         img = nb.load(self.inputs.in_file)
79 | 
80 |         mask = np.uint16(img.dataobj) == self.inputs.label_val
81 |         out_img = img.__class__(mask, img.affine, img.header)
82 |         out_img.set_data_dtype(np.uint8)
83 | 
84 |         out_file = fname_presuffix(self.inputs.in_file, suffix='_mask', newpath=runtime.cwd)
85 | 
86 |         out_img.to_filename(out_file)
87 | 
88 |         self._results['out_file'] = out_file
89 |         return runtime
90 | 
--------------------------------------------------------------------------------
/nibabies/interfaces/multiecho.py:
--------------------------------------------------------------------------------
1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
2 | # vi: set ft=python sts=4 ts=4 sw=4 et:
3 | #
4 | # Copyright 2023 The NiPreps Developers <nipreps@gmail.com>
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | #     http://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | #
18 | # We support and encourage derived works from this project, please read
19 | # about our expectations at
20 | #
21 | #     https://www.nipreps.org/community/licensing/
22 | #
23 | """
24 | Multi-echo EPI
25 | ~~~~~~~~~~~~~~
26 | 
27 | For using multi-echo EPI data.
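The interfaces here wrap the ``t2smap`` workflow from the
`tedana <https://tedana.readthedocs.io>`__ package.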
28 | 29 | Change directory to provide relative paths for doctests 30 | >>> import os 31 | >>> filepath = os.path.dirname( os.path.realpath( __file__ ) ) 32 | >>> datadir = os.path.realpath(os.path.join(filepath, '../data/')) 33 | >>> os.chdir(datadir) 34 | 35 | """ 36 | 37 | import os 38 | 39 | from nipype import logging 40 | from nipype.interfaces.base import ( 41 | CommandLine, 42 | CommandLineInputSpec, 43 | File, 44 | TraitedSpec, 45 | traits, 46 | ) 47 | 48 | LOGGER = logging.getLogger('nipype.interface') 49 | 50 | 51 | class T2SMapInputSpec(CommandLineInputSpec): 52 | in_files = traits.List( 53 | File(exists=True), 54 | argstr='-d %s', 55 | position=1, 56 | mandatory=True, 57 | minlen=3, 58 | desc='multi-echo BOLD EPIs', 59 | ) 60 | echo_times = traits.List( 61 | traits.Float, 62 | argstr='-e %s', 63 | position=2, 64 | mandatory=True, 65 | minlen=3, 66 | desc='echo times', 67 | ) 68 | mask_file = File( 69 | argstr='--mask %s', 70 | position=3, 71 | desc='mask file', 72 | exists=True, 73 | ) 74 | fittype = traits.Enum( 75 | 'curvefit', 76 | 'loglin', 77 | argstr='--fittype %s', 78 | position=4, 79 | usedefault=True, 80 | desc='Desired fitting method: ' 81 | '"loglin" means that a linear model is fit ' 82 | 'to the log of the data. ' 83 | '"curvefit" means that a more computationally ' 84 | 'demanding monoexponential model is fit ' 85 | 'to the raw data.', 86 | ) 87 | 88 | 89 | class T2SMapOutputSpec(TraitedSpec): 90 | t2star_map = File(exists=True, desc='limited T2* map') 91 | s0_map = File(exists=True, desc='limited S0 map') 92 | optimal_comb = File(exists=True, desc='optimally combined ME-EPI time series') 93 | 94 | 95 | class T2SMap(CommandLine): 96 | """ 97 | Runs the tedana T2* workflow to generate an adaptive T2* map and create 98 | an optimally combined ME-EPI time series. 
99 | 100 | Example 101 | ======= 102 | >>> from nibabies.interfaces import multiecho 103 | >>> t2smap = multiecho.T2SMap() 104 | >>> t2smap.inputs.in_files = ['sub-01_run-01_echo-1_bold.nii.gz', \ 105 | 'sub-01_run-01_echo-2_bold.nii.gz', \ 106 | 'sub-01_run-01_echo-3_bold.nii.gz'] 107 | >>> t2smap.inputs.echo_times = [0.013, 0.027, 0.043] 108 | >>> t2smap.cmdline # doctest: +ELLIPSIS 109 | 't2smap -d sub-01_run-01_echo-1_bold.nii.gz sub-01_run-01_echo-2_bold.nii.gz \ 110 | sub-01_run-01_echo-3_bold.nii.gz -e 13.0 27.0 43.0 --fittype curvefit' 111 | 112 | """ 113 | 114 | _cmd = 't2smap' 115 | input_spec = T2SMapInputSpec 116 | output_spec = T2SMapOutputSpec 117 | 118 | def _format_arg(self, name, trait_spec, value): 119 | if name == 'echo_times': 120 | value = [te * 1000 for te in value] 121 | return super()._format_arg(name, trait_spec, value) 122 | 123 | def _list_outputs(self): 124 | outputs = self._outputs().get() 125 | out_dir = os.getcwd() 126 | outputs['t2star_map'] = os.path.join(out_dir, 'T2starmap.nii.gz') 127 | outputs['s0_map'] = os.path.join(out_dir, 'S0map.nii.gz') 128 | outputs['optimal_comb'] = os.path.join(out_dir, 'desc-optcom_bold.nii.gz') 129 | return outputs 130 | -------------------------------------------------------------------------------- /nibabies/interfaces/nibabel.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from nipype.interfaces.base import ( 4 | BaseInterfaceInputSpec, 5 | File, 6 | SimpleInterface, 7 | TraitedSpec, 8 | traits, 9 | ) 10 | 11 | 12 | class ReorientImageInputSpec(BaseInterfaceInputSpec): 13 | in_file = File(exists=True, mandatory=True, desc='Moving file') 14 | target_file = File( 15 | exists=True, xor=['target_orientation'], desc='Reference file to reorient to' 16 | ) 17 | target_orientation = traits.Str( 18 | xor=['target_file'], desc='Axis codes of coordinate system to reorient to' 19 | ) 20 | 21 | 22 | class ReorientImageOutputSpec(TraitedSpec): 23 | out_file = File(desc='Reoriented file') 24 | 25 | 26 | class ReorientImage(SimpleInterface): 27 | input_spec = ReorientImageInputSpec 28 | output_spec = ReorientImageOutputSpec 29 | 30 | def _run_interface(self, runtime): 31 | self._results['out_file'] = reorient_image( 32 | self.inputs.in_file, 33 | target_file=self.inputs.target_file, 34 | target_ornt=self.inputs.target_orientation, 35 | ) 36 | return runtime 37 | 38 | 39 | def reorient_image( 40 | in_file: str, *, target_file: str = None, target_ornt: str = None, newpath: str = None 41 | ) -> str: 42 | """ 43 | Reorient an image. 44 | 45 | New orientation targets can be either another image, or a string representation of the 46 | orientation axis. 
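[Editor's addition] ``reorient_image`` leans entirely on nibabel's orientation helpers; a self-contained sketch of that machinery (numpy and nibabel only, no other assumptions):

import nibabel as nb
import numpy as np

# An identity affine encodes RAS; derive the transform taking RAS -> LPS
in_ornt = nb.orientations.axcodes2ornt(nb.aff2axcodes(np.eye(4)))
out_ornt = nb.orientations.axcodes2ornt('LPS')
ornt_xfm = nb.orientations.ornt_transform(in_ornt, out_ornt)

img = nb.Nifti1Image(np.zeros((2, 3, 4), dtype='float32'), np.eye(4))
flipped = img.as_reoriented(ornt_xfm)
assert nb.aff2axcodes(flipped.affine) == ('L', 'P', 'S')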
47 | 48 | Parameters 49 | ---------- 50 | in_file : Image to be reoriented 51 | target_file : Reference image of desired orientation 52 | target_ornt : Orientation denoted by the first letter of each axis (e.g., "RAS", "LPI") 53 | """ 54 | import nibabel as nb 55 | 56 | img = nb.load(in_file) 57 | img_axcodes = nb.aff2axcodes(img.affine) 58 | in_ornt = nb.orientations.axcodes2ornt(img_axcodes) 59 | 60 | if target_file: 61 | target_img = nb.load(target_file) 62 | target_ornt = nb.aff2axcodes(target_img.affine) 63 | 64 | out_ornt = nb.orientations.axcodes2ornt(target_ornt) 65 | ornt_xfm = nb.orientations.ornt_transform(in_ornt, out_ornt) 66 | reoriented = img.as_reoriented(ornt_xfm) 67 | 68 | if newpath is None: 69 | newpath = Path() 70 | out_file = str((Path(newpath) / 'reoriented.nii.gz').absolute()) 71 | reoriented.to_filename(out_file) 72 | return out_file 73 | -------------------------------------------------------------------------------- /nibabies/interfaces/patches.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from nipype.interfaces import ( 4 | freesurfer as fs, 5 | ) 6 | from nipype.interfaces.ants.base import ANTSCommand, ANTSCommandInputSpec 7 | from nipype.interfaces.ants.registration import ( 8 | CompositeTransformUtil as _CompositeTransformUtil, 9 | ) 10 | from nipype.interfaces.ants.registration import ( 11 | CompositeTransformUtilOutputSpec as _CompositeTransformUtilOutputSpec, 12 | ) 13 | from nipype.interfaces.base import File, InputMultiObject, TraitedSpec, traits 14 | 15 | 16 | class _MRICoregInputSpec(fs.registration.MRICoregInputSpec): 17 | reference_file = File( 18 | argstr='--ref %s', 19 | desc='reference (target) file', 20 | copyfile=False, 21 | ) 22 | subject_id = traits.Str( 23 | argstr='--s %s', 24 | position=1, 25 | requires=['subjects_dir'], 26 | desc='freesurfer subject ID (implies ``reference_mask == ' 27 | 'aparc+aseg.mgz`` unless otherwise specified)', 28 | ) 29 | 30 | 31 | class MRICoreg(fs.MRICoreg): 32 | """ 33 | Patched MRICoreg interface that allows setting both a reference file and the subjects dir. 34 | """ 35 | 36 | input_spec = _MRICoregInputSpec 37 | 38 | 39 | class ConcatXFMInputSpec(ANTSCommandInputSpec): 40 | transforms = InputMultiObject( 41 | traits.Either(File(exists=True), 'identity'), 42 | argstr='%s', 43 | mandatory=True, 44 | desc='transform files: will be applied in reverse order. 
For ' 45 | 'example, the last specified transform will be applied first.', 46 | ) 47 | out_xfm = traits.File( 48 | 'concat_xfm.h5', 49 | usedefault=True, 50 | argstr='--output [ %s, 1 ]', 51 | desc='output file name', 52 | ) 53 | reference_image = File( 54 | argstr='--reference-image %s', 55 | mandatory=True, 56 | desc='reference image space that you wish to warp INTO', 57 | exists=True, 58 | ) 59 | invert_transform_flags = InputMultiObject(traits.Bool(), desc='whether to invert each transform') 60 | 61 | 62 | class ConcatXFMOutputSpec(TraitedSpec): 63 | out_xfm = File(desc='Combined transform') 64 | 65 | 66 | class ConcatXFM(ANTSCommand): 67 | """ 68 | Streamlined use of antsApplyTransforms to combine multiple xfms into a single file 69 | 70 | Examples 71 | -------- 72 | 73 | >>> from nibabies.interfaces.patches import ConcatXFM 74 | >>> cxfm = ConcatXFM() 75 | >>> cxfm.inputs.transforms = [testdir / 'xfm0.h5', testdir / 'xfm1.h5'] 76 | >>> cxfm.inputs.reference_image = testdir / 'anatomical.nii' 77 | >>> cxfm.cmdline # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE 78 | 'antsApplyTransforms --output [ concat_xfm.h5, 1 ] --reference-image .../anatomical.nii \ 79 | --transform .../xfm0.h5 --transform .../xfm1.h5' 80 | 81 | """ 82 | 83 | _cmd = 'antsApplyTransforms' 84 | input_spec = ConcatXFMInputSpec 85 | output_spec = ConcatXFMOutputSpec 86 | 87 | def _get_transform_filenames(self): 88 | retval = [] 89 | invert_flags = self.inputs.invert_transform_flags 90 | if not invert_flags: 91 | invert_flags = [False] * len(self.inputs.transforms) 92 | elif len(self.inputs.transforms) != len(invert_flags): 93 | raise ValueError( 94 | 'The invert_transform_flags list must have the same number ' 95 | 'of entries as the transforms list.' 96 | ) 97 | 98 | for transform, invert in zip(self.inputs.transforms, invert_flags, strict=False): 99 | if invert: 100 | retval.append(f'--transform [ {transform}, 1 ]') 101 | else: 102 | retval.append(f'--transform {transform}') 103 | return ' '.join(retval) 104 | 105 | def _format_arg(self, opt, spec, val): 106 | if opt == 'transforms': 107 | return self._get_transform_filenames() 108 | return super()._format_arg(opt, spec, val) 109 | 110 | def _list_outputs(self): 111 | outputs = self._outputs().get() 112 | outputs['out_xfm'] = Path(self.inputs.out_xfm).absolute() 113 | return outputs 114 | 115 | 116 | class CompositeTransformUtilOutputSpec(_CompositeTransformUtilOutputSpec): 117 | out_transforms = traits.List(desc='list of ordered transform components') 118 | 119 | 120 | class CompositeTransformUtil(_CompositeTransformUtil): 121 | """Outputs have changed in newer versions of ANTs.""" 122 | 123 | output_spec = CompositeTransformUtilOutputSpec 124 | 125 | def _list_outputs(self): 126 | outputs = self.output_spec().get() 127 | 128 | # Ordering may change depending on forward/inverse transform 129 | # Forward: _00_AffineTransform.mat, _01_DisplacementFieldTransform.nii.gz 130 | # Inverse: _01_AffineTransform.mat, _00_DisplacementFieldTransform.nii.gz 131 | if self.inputs.process == 'disassemble': 132 | transforms = [ 133 | str(Path(x).absolute()) 134 | for x in sorted(Path().glob(f'{self.inputs.output_prefix}_*')) 135 | ] 136 | outputs['out_transforms'] = transforms 137 | 138 | # Potentially could be more than one affine / displacement per composite transform...
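# [Editor's note] For a forward composite transform, the sorted glob above
# would yield, e.g., ['<prefix>_00_AffineTransform.mat',
# '<prefix>_01_DisplacementFieldTransform.nii.gz'] (hypothetical prefix), and
# the two selections below each keep the first match of their kind.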
139 | outputs['affine_transform'] = [ 140 | x for x in transforms if 'AffineTransform' in Path(x).name 141 | ][0] 142 | outputs['displacement_field'] = [ 143 | x for x in transforms if 'DisplacementFieldTransform' in Path(x).name 144 | ][0] 145 | elif self.inputs.process == 'assemble': 146 | outputs['out_file'] = Path(self.inputs.out_file).absolute() 147 | return outputs 148 | -------------------------------------------------------------------------------- /nibabies/interfaces/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipreps/nibabies/1e6afd415ec2776a4bf749628fa1a58646d536db/nibabies/interfaces/tests/__init__.py -------------------------------------------------------------------------------- /nibabies/interfaces/tests/test_bids.py: -------------------------------------------------------------------------------- 1 | from nibabies.interfaces.bids import BIDSURI 2 | 3 | 4 | def test_BIDSURI(): 5 | """Test the BIDSURI interface.""" 6 | 7 | dataset_links = { 8 | 'raw': '/data', 9 | 'deriv-0': '/data/derivatives/source-1', 10 | } 11 | out_dir = '/data/derivatives/nibabies' 12 | 13 | # A single element as a string 14 | interface = BIDSURI( 15 | numinputs=1, 16 | dataset_links=dataset_links, 17 | out_dir=out_dir, 18 | ) 19 | interface.inputs.in1 = '/data/sub-01/func/sub-01_task-rest_bold.nii.gz' 20 | results = interface.run() 21 | assert results.outputs.out == ['bids:raw:sub-01/func/sub-01_task-rest_bold.nii.gz'] 22 | 23 | # A single element as a list 24 | interface = BIDSURI( 25 | numinputs=1, 26 | dataset_links=dataset_links, 27 | out_dir=out_dir, 28 | ) 29 | interface.inputs.in1 = ['/data/sub-01/func/sub-01_task-rest_bold.nii.gz'] 30 | results = interface.run() 31 | assert results.outputs.out == ['bids:raw:sub-01/func/sub-01_task-rest_bold.nii.gz'] 32 | 33 | # Two inputs: a string and a list 34 | interface = BIDSURI( 35 | numinputs=2, 36 | dataset_links=dataset_links, 37 | out_dir=out_dir, 38 | ) 39 | interface.inputs.in1 = '/data/sub-01/func/sub-01_task-rest_bold.nii.gz' 40 | interface.inputs.in2 = [ 41 | '/data/derivatives/source-1/sub-01/func/sub-01_task-rest_bold.nii.gz', 42 | '/out/sub-01/func/sub-01_task-rest_bold.nii.gz', 43 | ] 44 | results = interface.run() 45 | assert results.outputs.out == [ 46 | 'bids:raw:sub-01/func/sub-01_task-rest_bold.nii.gz', 47 | 'bids:deriv-0:sub-01/func/sub-01_task-rest_bold.nii.gz', 48 | '/out/sub-01/func/sub-01_task-rest_bold.nii.gz', # No change 49 | ] 50 | 51 | # Two inputs as lists 52 | interface = BIDSURI( 53 | numinputs=2, 54 | dataset_links=dataset_links, 55 | out_dir=out_dir, 56 | ) 57 | interface.inputs.in1 = [ 58 | '/data/sub-01/func/sub-01_task-rest_bold.nii.gz', 59 | 'bids:raw:sub-01/func/sub-01_task-rest_boldref.nii.gz', 60 | ] 61 | interface.inputs.in2 = [ 62 | '/data/derivatives/source-1/sub-01/func/sub-01_task-rest_bold.nii.gz', 63 | '/out/sub-01/func/sub-01_task-rest_bold.nii.gz', 64 | ] 65 | results = interface.run() 66 | assert results.outputs.out == [ 67 | 'bids:raw:sub-01/func/sub-01_task-rest_bold.nii.gz', 68 | 'bids:raw:sub-01/func/sub-01_task-rest_boldref.nii.gz', # No change 69 | 'bids:deriv-0:sub-01/func/sub-01_task-rest_bold.nii.gz', 70 | '/out/sub-01/func/sub-01_task-rest_bold.nii.gz', # No change 71 | ] 72 | -------------------------------------------------------------------------------- /nibabies/interfaces/tests/test_mcribs.py: -------------------------------------------------------------------------------- 1 | import shutil 2 | from 
pathlib import Path 3 | 4 | import pytest 5 | 6 | from nibabies.interfaces.mcribs import MCRIBReconAll 7 | 8 | SUBJECT_ID = 'X' 9 | 10 | 11 | @pytest.fixture 12 | def mcribs_directory(tmp_path): 13 | def make_tree(path, tree): 14 | for d, fls in tree.items(): 15 | (path / d).mkdir(exist_ok=True) 16 | for f in fls: 17 | (path / d / f).touch() 18 | 19 | root = tmp_path / 'mcribs' 20 | surfrecon = root / SUBJECT_ID / 'SurfReconDeformable' / SUBJECT_ID 21 | surfrecon.mkdir(parents=True, exist_ok=True) 22 | make_tree(surfrecon, MCRIBReconAll._expected_files['surfrecon']) 23 | autorecon = root / SUBJECT_ID / 'freesurfer' / SUBJECT_ID 24 | autorecon.mkdir(parents=True, exist_ok=True) 25 | make_tree(autorecon, MCRIBReconAll._expected_files['autorecon']) 26 | 27 | yield root 28 | 29 | shutil.rmtree(root) 30 | 31 | 32 | def test_MCRIBReconAll(mcribs_directory): 33 | t2w = Path('T2w.nii.gz') 34 | t2w.touch() 35 | 36 | surfrecon = MCRIBReconAll( 37 | subject_id=SUBJECT_ID, 38 | surfrecon=True, 39 | surfrecon_method='Deformable', 40 | join_thresh=1.0, 41 | fast_collision=True, 42 | ) 43 | 44 | # Requires T2w input 45 | with pytest.raises(AttributeError): 46 | surfrecon.cmdline # noqa 47 | 48 | surfrecon.inputs.t2w_file = t2w 49 | # Since no existing directory is found, will run fresh 50 | assert 'MCRIBReconAll --deformablefastcollision --deformablejointhresh' in surfrecon.cmdline 51 | 52 | # But should not need to run again 53 | surfrecon.inputs.outdir = mcribs_directory 54 | assert surfrecon.cmdline == 'echo MCRIBReconAll: nothing to do' 55 | 56 | t2w.unlink() 57 | 58 | autorecon = MCRIBReconAll( 59 | subject_id=SUBJECT_ID, 60 | autorecon_after_surf=True, 61 | ) 62 | # No need for T2w here 63 | assert autorecon.cmdline == 'MCRIBReconAll --autoreconaftersurf X' 64 | autorecon.inputs.outdir = mcribs_directory 65 | assert autorecon.cmdline == 'echo MCRIBReconAll: nothing to do' 66 | -------------------------------------------------------------------------------- /nibabies/interfaces/tests/test_nibabel.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from uuid import uuid4 3 | 4 | import nibabel as nb 5 | import numpy as np 6 | import pytest 7 | 8 | from ..nibabel import ReorientImage 9 | 10 | 11 | def create_save_img(ornt: str): 12 | data = np.random.rand(2, 2, 2) 13 | img = nb.Nifti1Image(data, affine=np.eye(4)) 14 | # img will always be in RAS at the start 15 | ras = nb.orientations.axcodes2ornt('RAS') 16 | if ornt != 'RAS': 17 | new = nb.orientations.axcodes2ornt(ornt) 18 | xfm = nb.orientations.ornt_transform(ras, new) 19 | img = img.as_reoriented(xfm) 20 | out_file = f'{uuid4()}.nii.gz' 21 | img.to_filename(out_file) 22 | return out_file 23 | 24 | 25 | @pytest.mark.parametrize( 26 | ('in_ornt', 'out_ornt'), 27 | [ 28 | ('RAS', 'RAS'), 29 | ('RAS', 'LAS'), 30 | ('LAS', 'RAS'), 31 | ('RAS', 'RPI'), 32 | ('LPI', 'RAS'), 33 | ], 34 | ) 35 | def test_reorient_image(tmpdir, in_ornt, out_ornt): 36 | tmpdir.chdir() 37 | 38 | in_file = create_save_img(ornt=in_ornt) 39 | in_img = nb.load(in_file) 40 | assert ''.join(nb.aff2axcodes(in_img.affine)) == in_ornt 41 | 42 | # test string representation 43 | res = ReorientImage(in_file=in_file, target_orientation=out_ornt).run() 44 | out_file = res.outputs.out_file 45 | out_img = nb.load(out_file) 46 | assert ''.join(nb.aff2axcodes(out_img.affine)) == out_ornt 47 | Path(out_file).unlink() 48 | 49 | # test with target file 50 | target_file = create_save_img(ornt=out_ornt) 51 | target_img = 
nb.load(target_file) 52 | assert ''.join(nb.aff2axcodes(target_img.affine)) == out_ornt 53 | res = ReorientImage(in_file=in_file, target_file=target_file).run() 54 | out_file = res.outputs.out_file 55 | out_img = nb.load(out_file) 56 | assert ''.join(nb.aff2axcodes(out_img.affine)) == out_ornt 57 | 58 | # cleanup 59 | for f in (in_file, target_file, out_file): 60 | Path(f).unlink() 61 | -------------------------------------------------------------------------------- /nibabies/interfaces/utils.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | 4 | from nipype.interfaces.base import ( 5 | BaseInterfaceInputSpec, 6 | File, 7 | InputMultiObject, 8 | OutputMultiObject, 9 | SimpleInterface, 10 | TraitedSpec, 11 | traits, 12 | ) 13 | 14 | 15 | class CiftiSelectInputSpec(BaseInterfaceInputSpec): 16 | hemi = traits.Enum('L', 'R', desc='Hemisphere') 17 | surfaces = InputMultiObject(File(exists=True), desc='Surfaces') 18 | morphometrics = InputMultiObject(File(exists=True), desc='Surface morphometrics') 19 | spherical_registrations = InputMultiObject( 20 | File(exists=True), desc='Spherical registration to fsLR' 21 | ) 22 | template_spheres = InputMultiObject(File(exists=True), desc='fsLR sphere') 23 | template_surfaces = InputMultiObject(File(exists=True), desc='fsLR midthickness') 24 | template_rois = InputMultiObject(File(exists=True), desc='fsLR ROIs') 25 | 26 | 27 | class CiftiSelectOutputSpec(TraitedSpec): 28 | white = OutputMultiObject(File, desc='white surface') 29 | pial = OutputMultiObject(File, desc='pial surface') 30 | midthickness = OutputMultiObject(File, desc='midthickness surface') 31 | thickness = OutputMultiObject(File, desc='thickness surface') 32 | sphere_reg = OutputMultiObject(File, desc='fsLR spherical registration') 33 | template_sphere = OutputMultiObject(File, desc='fsLR sphere') 34 | template_surface = OutputMultiObject(File, desc='fsLR surface (midthickness)') 35 | template_roi = OutputMultiObject(File, desc='fsLR ROIs') 36 | 37 | 38 | class CiftiSelect(SimpleInterface): 39 | input_spec = CiftiSelectInputSpec 40 | output_spec = CiftiSelectOutputSpec 41 | 42 | def _run_interface(self, runtime): 43 | idx = 0 if self.inputs.hemi == 'L' else 1 44 | all_surfaces = (self.inputs.surfaces or []) + (self.inputs.morphometrics or []) 45 | container = { 46 | 'white': [], 47 | 'pial': [], 48 | 'midthickness': [], 49 | 'thickness': [], 50 | 'sphere_reg': self.inputs.spherical_registrations or [], 51 | 'template_sphere': self.inputs.template_spheres or [], 52 | 'template_surface': self.inputs.template_surfaces or [], 53 | 'template_roi': self.inputs.template_rois or [], 54 | } 55 | find_name = re.compile(r'(?:^|[^d])(?P<name>white|pial|midthickness|thickness)') 56 | for surface in all_surfaces: 57 | match = find_name.search(os.path.basename(surface)) 58 | if match: 59 | container[match.group('name')].append(surface) 60 | 61 | for name, vals in container.items(): 62 | if vals: 63 | self._results[name] = sorted(vals, key=os.path.basename)[idx] 64 | return runtime 65 | -------------------------------------------------------------------------------- /nibabies/reports/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipreps/nibabies/1e6afd415ec2776a4bf749628fa1a58646d536db/nibabies/reports/__init__.py -------------------------------------------------------------------------------- /nibabies/reports/core.py:
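[Editor's addition] The ``[^d]`` guard in the ``CiftiSelect`` pattern above is load-bearing: it keeps the bare ``thickness`` alternative from matching inside ``midthickness`` filenames. A quick self-contained check:

import re

find_name = re.compile(r'(?:^|[^d])(?P<name>white|pial|midthickness|thickness)')
assert find_name.search('sub-01_hemi-L_midthickness.surf.gii').group('name') == 'midthickness'
assert find_name.search('sub-01_hemi-L_thickness.shape.gii').group('name') == 'thickness'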
-------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from nireports.assembler.report import Report 4 | 5 | from nibabies.data import load as load_data 6 | 7 | 8 | def run_reports( 9 | out_dir, 10 | subject, 11 | run_uuid, 12 | session=None, 13 | out_filename=None, 14 | reportlets_dir=None, 15 | packagename=None, 16 | ): 17 | """ 18 | Run the reports. 19 | """ 20 | return Report( 21 | out_dir, 22 | run_uuid, 23 | subject=subject, 24 | session=session, 25 | bootstrap_file=load_data.readable('reports-spec.yml'), 26 | reportlets_dir=reportlets_dir, 27 | ).generate_report() 28 | 29 | 30 | def generate_reports( 31 | sub_ses_list, 32 | output_dir, 33 | run_uuid, 34 | work_dir=None, 35 | packagename=None, 36 | ): 37 | """Execute run_reports on a list of subjects.""" 38 | reportlets_dir = None 39 | if work_dir is not None: 40 | reportlets_dir = Path(work_dir) / 'reportlets' 41 | 42 | report_errors = [] 43 | for subject, session in sub_ses_list: 44 | report_errors.append( 45 | run_reports( 46 | output_dir, 47 | subject, 48 | run_uuid, 49 | session=session, 50 | packagename=packagename, 51 | reportlets_dir=reportlets_dir, 52 | ) 53 | ) 54 | 55 | errno = sum(report_errors) 56 | if errno: 57 | import logging 58 | 59 | logger = logging.getLogger('cli') 60 | error_list = ', '.join( 61 | f'{subid} ({err})' 62 | for subid, err in zip(sub_ses_list, report_errors, strict=False) 63 | if err 64 | ) 65 | logger.error( 66 | 'Preprocessing did not finish successfully. Errors occurred while processing ' 67 | 'data from participants: %s. Check the HTML reports for details.', 68 | error_list, 69 | ) 70 | return errno 71 | -------------------------------------------------------------------------------- /nibabies/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipreps/nibabies/1e6afd415ec2776a4bf749628fa1a58646d536db/nibabies/tests/__init__.py -------------------------------------------------------------------------------- /nibabies/tests/data/labelfile.txt: -------------------------------------------------------------------------------- 1 | CEREBELLUM_LEFT 2 | 8 230 148 34 255 3 | THALAMUS_LEFT 4 | 10 0 118 14 255 5 | CAUDATE_LEFT 6 | 11 122 186 220 255 7 | -------------------------------------------------------------------------------- /nibabies/tests/test_config.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | # 4 | # Copyright 2023 The NiPreps Developers 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 
17 | # 18 | # We support and encourage derived works from this project, please read 19 | # about our expectations at 20 | # 21 | # https://www.nipreps.org/community/licensing/ 22 | # 23 | """Check the configuration module and file.""" 24 | 25 | import os 26 | from unittest.mock import patch 27 | 28 | import pytest 29 | from niworkflows.utils.spaces import format_reference 30 | from toml import loads 31 | 32 | from nibabies import config 33 | from nibabies.data import load as load_data 34 | 35 | 36 | def _reset_config(): 37 | """ 38 | Forcibly reload the configuration module to restore defaults. 39 | .. caution:: 40 | `importlib.reload` creates new sets of objects, but will not remove 41 | previous references to those objects.""" 42 | import importlib 43 | 44 | importlib.reload(config) 45 | 46 | 47 | def test_reset_config(): 48 | execution = config.execution 49 | execution.bids_dir = 'TESTING' 50 | assert config.execution.bids_dir == 'TESTING' 51 | _reset_config() 52 | assert config.execution.bids_dir is None 53 | # Even though the config module was reset, 54 | # previous references to config classes 55 | # have not been touched. 56 | assert execution.bids_dir == 'TESTING' 57 | 58 | 59 | def test_config_spaces(): 60 | """Check that all necessary spaces are recorded in the config.""" 61 | settings = loads(load_data.readable('tests/config.toml').read_text()) 62 | for sectionname, configs in settings.items(): 63 | if sectionname != 'environment': 64 | section = getattr(config, sectionname) 65 | section.load(configs, init=False) 66 | config.nipype.init() 67 | config.loggers.init() 68 | age = 8 69 | spaces = _load_spaces(age) 70 | assert 'MNI152NLin6Asym:res-2' not in [str(s) for s in spaces.get_standard(full_spec=True)] 71 | 72 | assert 'MNI152NLin6Asym_res-2' not in [ 73 | format_reference((s.fullname, s.spec)) 74 | for s in spaces.references 75 | if s.standard and s.dim == 3 76 | ] 77 | # Only enabled if CIFTI is requested 78 | assert 'MNI152NLin6Asym:res-2' not in [str(s) for s in spaces.get_standard(full_spec=True)] 79 | assert 'MNI152NLin6Asym_res-2' not in [ 80 | format_reference((s.fullname, s.spec)) 81 | for s in spaces.references 82 | if s.standard and s.dim == 3 83 | ] 84 | 85 | config.execution.output_spaces = None 86 | 87 | with pytest.raises(RuntimeError): 88 | spaces = _load_spaces(None) 89 | 90 | config.execution.output_spaces = None 91 | config.workflow.cifti_output = '91k' 92 | spaces = _load_spaces(1) 93 | 94 | assert [str(s) for s in spaces.get_standard(full_spec=True)] == [ 95 | 'MNIInfant:cohort-1:res-native', # Default output space 96 | 'MNI152NLin6Asym:res-2', 97 | 'MNIInfant:cohort-1:res-2', # CIFTI: MNIInfant (2x2x2) -> MNI152NLin6Asym (2x2x2) 98 | ] 99 | 100 | assert [ 101 | format_reference((s.fullname, s.spec)) 102 | for s in spaces.references 103 | if s.standard and s.dim == 3 104 | ] == ['MNIInfant_cohort-1_res-native', 'MNI152NLin6Asym_res-2', 'MNIInfant_cohort-1_res-2'] 105 | _reset_config() 106 | 107 | config.execution.output_spaces = None 108 | config.workflow.cifti_output = '170k' 109 | spaces = _load_spaces(1) 110 | 111 | assert [str(s) for s in spaces.get_standard(full_spec=True)] == [ 112 | 'MNIInfant:cohort-1:res-native', # Default output space 113 | 'MNI152NLin6Asym:res-1', 114 | 'MNIInfant:cohort-1:res-1', 115 | ] 116 | 117 | assert [ 118 | format_reference((s.fullname, s.spec)) 119 | for s in spaces.references 120 | if s.standard and s.dim == 3 121 | ] == ['MNIInfant_cohort-1_res-native', 'MNI152NLin6Asym_res-1', 'MNIInfant_cohort-1_res-1'] 122 | 
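# [Editor's note] The pattern exercised above: requesting '91k' CIFTI output
# pins the volumetric standard to MNI152NLin6Asym:res-2 (the grid behind the
# 91,282-grayordinate fsLR space), '170k' pins res-1, and MNIInfant picks up
# the matching resolution so BOLD data can be projected through it.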
_reset_config() 123 | 124 | 125 | @pytest.mark.parametrize( 126 | ('master_seed', 'ants_seed', 'numpy_seed'), [(1, 17612, 8272), (100, 19094, 60232)] 127 | ) 128 | def test_prng_seed(master_seed, ants_seed, numpy_seed): 129 | """Ensure seeds are properly tracked""" 130 | seeds = config.seeds 131 | with patch.dict(os.environ, {}): 132 | seeds.load({'_random_seed': master_seed}, init=True) 133 | assert seeds.master == master_seed 134 | assert seeds.ants == ants_seed 135 | assert seeds.numpy == numpy_seed 136 | assert os.getenv('ANTS_RANDOM_SEED') == str(ants_seed) 137 | 138 | _reset_config() 139 | for seed in ('_random_seed', 'master', 'ants', 'numpy'): 140 | assert getattr(config.seeds, seed) is None 141 | 142 | 143 | def _load_spaces(age): 144 | from nibabies.workflows.base import init_execution_spaces, init_workflow_spaces 145 | 146 | # Conditional based on workflow necessities 147 | spaces = init_workflow_spaces(init_execution_spaces(), age) 148 | return spaces 149 | -------------------------------------------------------------------------------- /nibabies/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipreps/nibabies/1e6afd415ec2776a4bf749628fa1a58646d536db/nibabies/utils/__init__.py -------------------------------------------------------------------------------- /nibabies/utils/confounds.py: -------------------------------------------------------------------------------- 1 | """Utilities for confounds manipulation.""" 2 | 3 | 4 | def mask2vf(in_file, zooms=None, out_file=None): 5 | """ 6 | Convert a binary mask into a volume fraction map. 7 | 8 | The algorithm simply applies a Gaussian filter with the kernel size scaled 9 | by the zooms given as argument. 10 | 11 | """ 12 | import nibabel as nb 13 | import numpy as np 14 | from scipy.ndimage import gaussian_filter 15 | 16 | img = nb.load(in_file) 17 | imgzooms = np.array(img.header.get_zooms()[:3], dtype=float) 18 | if zooms is None: 19 | zooms = imgzooms 20 | 21 | zooms = np.array(zooms, dtype=float) 22 | sigma = 0.5 * (zooms / imgzooms) 23 | 24 | data = gaussian_filter(img.get_fdata(dtype=np.float32), sigma=sigma) 25 | 26 | max_data = np.percentile(data[data > 0], 99) 27 | data = np.clip(data / max_data, a_min=0, a_max=1) 28 | 29 | if out_file is None: 30 | return data 31 | 32 | hdr = img.header.copy() 33 | hdr.set_data_dtype(np.float32) 34 | nb.Nifti1Image(data.astype(np.float32), img.affine, hdr).to_filename(out_file) 35 | return out_file 36 | 37 | 38 | def acompcor_masks(in_files, is_aseg=False, zooms=None): 39 | """ 40 | Generate aCompCor masks. 41 | 42 | This function selects the CSF partial volume map from the input, 43 | and generates the WM and combined CSF+WM masks for aCompCor. 44 | 45 | The implementation deviates from Behzadi et al. 46 | Their original implementation thresholded the CSF and the WM partial-volume 47 | masks at 0.99 (i.e., 99% of the voxel volume is filled with a particular tissue), 48 | and then binary-eroded those masks by 2 voxels: 49 | 50 | > Anatomical data were segmented into gray matter, white matter, 51 | > and CSF partial volume maps using the FAST algorithm available 52 | > in the FSL software package (Smith et al., 2004). Tissue partial 53 | > volume maps were linearly interpolated to the resolution of the 54 | > functional data series using AFNI (Cox, 1996).
In order to form 55 | > white matter ROIs, the white matter partial volume maps were 56 | > thresholded at a partial volume fraction of 0.99 and then eroded by 57 | > two voxels in each direction to further minimize partial voluming 58 | > with gray matter. CSF voxels were determined by first thresholding 59 | > the CSF partial volume maps at 0.99 and then applying a three-dimensional 60 | > nearest neighbor criteria to minimize multiple tissue 61 | > partial voluming. Since CSF regions are typically small compared 62 | > to white matter regions, mask erosion was not applied. 63 | 64 | This particular procedure is not generalizable to BOLD data with different voxel zooms 65 | as the mathematical morphology operations will be scaled by those. 66 | Also, from reading the excerpt above and the tCompCor description, I (@oesteban) 67 | believe that they always operated slice-wise given the large slice-thickness of 68 | their functional data. 69 | 70 | Instead, *NiBabies*'s implementation deviates from Behzadi's implementation in two 71 | aspects: 72 | 73 | * the masks are prepared in high-resolution, anatomical space and then 74 | projected into BOLD space; and, 75 | * instead of using binary erosion, a dilated GM map is generated -- thresholding 76 | the corresponding PV map at 0.05 (i.e., pixels containing at least 5% of GM tissue) 77 | and then subtracting that map from the CSF, WM and CSF+WM (combined) masks. 78 | This should be equivalent to eroding the masks, except that the erosion 79 | only happens at direct interfaces with GM. 80 | 81 | When the probseg maps come from FreeSurfer's ``recon-all`` (i.e., they are 82 | discrete), binary maps are *transformed* into some sort of partial volume maps 83 | by means of a Gaussian smoothing filter with sigma adjusted by the size of the 84 | BOLD data. 85 | 86 | """ 87 | from pathlib import Path 88 | 89 | import nibabel as nb 90 | import numpy as np 91 | from scipy.ndimage import binary_dilation 92 | from skimage.morphology import ball 93 | 94 | if len(in_files) != 3:
95 | raise ValueError(f'Expected GM, WM, and CSF files. Got {in_files}') 96 | 97 | csf_file = in_files[2] # BIDS labeling (CSF=2; last of list) 98 | # Load PV maps (fast) or segments (recon-all) 99 | gm_vf = nb.load(in_files[0]) 100 | wm_vf = nb.load(in_files[1]) 101 | csf_vf = nb.load(csf_file) 102 | 103 | # Prepare target zooms 104 | imgzooms = np.array(gm_vf.header.get_zooms()[:3], dtype=float) 105 | if zooms is None: 106 | zooms = imgzooms 107 | zooms = np.array(zooms, dtype=float) 108 | 109 | if not is_aseg: 110 | gm_data = gm_vf.get_fdata() > 0.05 111 | wm_data = wm_vf.get_fdata() 112 | csf_data = csf_vf.get_fdata() 113 | else: 114 | csf_file = mask2vf( 115 | csf_file, 116 | zooms=zooms, 117 | out_file=str(Path('acompcor_csf.nii.gz').absolute()), 118 | ) 119 | csf_data = nb.load(csf_file).get_fdata() 120 | wm_data = mask2vf(in_files[1], zooms=zooms) 121 | 122 | # We do not have partial volume maps (recon-all route) 123 | gm_data = np.asanyarray(gm_vf.dataobj, np.uint8) > 0 124 | 125 | # Dilate the GM mask 126 | gm_data = binary_dilation(gm_data, structure=ball(3)) 127 | 128 | # Output filenames 129 | wm_file = str(Path('acompcor_wm.nii.gz').absolute()) 130 | combined_file = str(Path('acompcor_wmcsf.nii.gz').absolute()) 131 | 132 | # Prepare WM mask 133 | wm_data[gm_data] = 0 # Make sure voxel does not contain GM 134 | nb.Nifti1Image(wm_data, gm_vf.affine, gm_vf.header).to_filename(wm_file) 135 | 136 | # Prepare combined CSF+WM mask 137 | comb_data = csf_data + wm_data 138 | comb_data[gm_data] = 0 # Make sure voxel does not contain GM 139 | nb.Nifti1Image(comb_data, gm_vf.affine, gm_vf.header).to_filename(combined_file) 140 | return [csf_file, wm_file, combined_file] 141 | -------------------------------------------------------------------------------- /nibabies/utils/debug.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | # 4 | # Copyright The NiPreps Developers 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | # We support and encourage derived works from this project, please read 19 | # about our expectations at 20 | # 21 | # https://www.nipreps.org/community/licensing/ 22 | # 23 | # STATEMENT OF CHANGES: This file is derived from sources licensed under the Apache-2.0 terms, 24 | # and uses the following portion of the original code: 25 | # https://github.com/dandi/dandi-cli/blob/da3b7a726c4a352dfb53a0c6bee59e660de827e6/dandi/utils.py#L49-L82 26 | # 27 | # 28 | # ORIGINAL WORK'S ATTRIBUTION NOTICE: 29 | # 30 | # Copyright DANDI Client Developers 31 | # 32 | # Licensed under the Apache License, Version 2.0 (the "License"); 33 | # you may not use this file except in compliance with the License.
34 | # You may obtain a copy of the License at 35 | # 36 | # http://www.apache.org/licenses/LICENSE-2.0 37 | # 38 | # Unless required by applicable law or agreed to in writing, software 39 | # distributed under the License is distributed on an "AS IS" BASIS, 40 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 41 | # See the License for the specific language governing permissions and 42 | # limitations under the License. 43 | # 44 | import sys 45 | 46 | 47 | def is_interactive(): 48 | """Return True if all in/outs are tty""" 49 | # TODO: check on windows if hasattr check would work correctly and add value: 50 | # 51 | return sys.stdin.isatty() and sys.stdout.isatty() and sys.stderr.isatty() 52 | 53 | 54 | def setup_exceptionhook(ipython=False): 55 | """Overloads default sys.excepthook with our exceptionhook handler. 56 | If interactive, our exceptionhook handler will invoke 57 | pdb.post_mortem; if not interactive, then invokes default handler. 58 | """ 59 | 60 | def _pdb_excepthook(type, value, tb): 61 | import traceback 62 | 63 | traceback.print_exception(type, value, tb) 64 | print() 65 | if is_interactive(): 66 | import pdb 67 | 68 | pdb.post_mortem(tb) 69 | 70 | if ipython: 71 | from IPython.core import ultratb 72 | 73 | sys.excepthook = ultratb.FormattedTB( 74 | mode='Verbose', 75 | # color_scheme='Linux', 76 | call_pdb=is_interactive(), 77 | ) 78 | else: 79 | sys.excepthook = _pdb_excepthook 80 | -------------------------------------------------------------------------------- /nibabies/utils/derivatives.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import json 4 | import shutil 5 | from collections import defaultdict 6 | from pathlib import Path 7 | 8 | from bids.layout import BIDSLayout 9 | from niworkflows.data import load as nwf_load 10 | 11 | from nibabies.data import load 12 | 13 | 14 | def collect_anatomical_derivatives( 15 | derivatives_dir: Path | str, 16 | subject_id: str, 17 | std_spaces: list, 18 | session_id: str | None, 19 | spec: dict | None = None, 20 | patterns: list | None = None, 21 | ): 22 | """ 23 | Collect outputs from across processing stages. 
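[Editor's addition] Wiring up the ``setup_exceptionhook`` helper shown above is a one-liner; a minimal sketch (``ipython=True`` swaps in IPython's ``ultratb`` formatter, assuming IPython is installed):

from nibabies.utils.debug import setup_exceptionhook

setup_exceptionhook()  # uncaught exceptions now drop into pdb when running on a TTY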
24 | 25 | Potential files: 26 | - T1w preproc 27 | - T2w preproc 28 | - T1w mask 29 | - T2w mask 30 | 31 | 32 | """ 33 | 34 | if spec is None or patterns is None: 35 | _spec, _patterns = tuple(json.loads(load('io_spec_anat.json').read_text()).values()) 36 | 37 | if spec is None: 38 | spec = _spec 39 | if patterns is None: 40 | patterns = _patterns 41 | 42 | deriv_config = nwf_load('nipreps.json') 43 | layout = BIDSLayout(derivatives_dir, config=deriv_config, validate=False) 44 | derivs_cache = {} 45 | 46 | base_qry = { 47 | 'subject': subject_id, 48 | } 49 | if session_id is not None: 50 | base_qry['session'] = session_id 51 | 52 | for key, qry in spec['baseline'].items(): 53 | qry.update(base_qry) 54 | item = layout.get(return_type='filename', **qry) 55 | if not item: 56 | continue 57 | 58 | derivs_cache[key] = item[0] if len(item) == 1 else item 59 | 60 | for key, qry in spec['coreg'].items(): # T1w->T2w, T2w->T1w 61 | qry.update(base_qry) 62 | item = layout.get(return_type='filename', **qry) 63 | if not item: 64 | continue 65 | derivs_cache[key] = item[0] if len(item) == 1 else item 66 | 67 | transforms = derivs_cache.setdefault('transforms', {}) 68 | for _space in std_spaces: 69 | space = _space.replace(':cohort-', '+') 70 | for key, qry in spec['transforms'].items(): 71 | qry = qry.copy() 72 | qry.update(base_qry) 73 | qry['from'] = qry['from'] or space 74 | qry['to'] = qry['to'] or space 75 | item = layout.get(return_type='filename', **qry) 76 | if not item: 77 | continue 78 | transforms.setdefault(_space, {})[key] = item[0] if len(item) == 1 else item 79 | 80 | for key, qry in spec['surfaces'].items(): 81 | qry.update(base_qry) 82 | item = layout.get(return_type='filename', **qry) 83 | if not item or len(item) != 2: 84 | continue 85 | 86 | derivs_cache[key] = sorted(item) 87 | 88 | return derivs_cache 89 | 90 | 91 | def collect_functional_derivatives( 92 | derivatives_dir: Path, 93 | entities: dict, 94 | fieldmap_id: str | None, 95 | spec: dict | None = None, 96 | patterns: list[str] | None = None, 97 | ): 98 | """Gather existing derivatives and compose a cache.""" 99 | if spec is None or patterns is None: 100 | _spec, _patterns = tuple( 101 | json.loads(load.readable('io_spec_func.json').read_text()).values() 102 | ) 103 | 104 | if spec is None: 105 | spec = _spec 106 | if patterns is None: 107 | patterns = _patterns 108 | 109 | derivs_cache = defaultdict(list, {}) 110 | deriv_config = nwf_load('nipreps.json') 111 | layout = BIDSLayout(derivatives_dir, config=deriv_config, validate=False) 112 | derivatives_dir = Path(derivatives_dir) 113 | 114 | # search for both boldrefs 115 | for key, qry in spec['baseline'].items(): 116 | query = {**qry, **entities} 117 | item = layout.get(return_type='filename', **query) 118 | if not item: 119 | continue 120 | derivs_cache[f'{key}_boldref'] = item[0] if len(item) == 1 else item 121 | 122 | for xfm, qry in spec['transforms'].items(): 123 | query = {**qry, **entities} 124 | if xfm == 'boldref2fmap': 125 | query['to'] = fieldmap_id 126 | item = layout.get(return_type='filename', **query) 127 | if not item: 128 | continue 129 | derivs_cache[xfm] = item[0] if len(item) == 1 else item 130 | return derivs_cache 131 | 132 | 133 | def copy_derivatives( 134 | derivs: dict, 135 | outdir: Path, 136 | modality: str, 137 | subject_id: str, 138 | session_id: str | None = None, 139 | ) -> None: 140 | """ 141 | Creates a copy of any found derivatives into output directory. 142 | 143 | Attempts to preserve file metadata to distinguish from generated files. 
144 | """ 145 | out_levels = [subject_id, modality] 146 | if session_id: 147 | out_levels.insert(1, session_id) 148 | 149 | outpath = outdir.joinpath(*out_levels) 150 | outpath.mkdir(parents=True, exist_ok=True) 151 | 152 | for deriv in derivs.values(): 153 | # Skip empty, lists 154 | if not isinstance(deriv, str): 155 | continue 156 | deriv = Path(deriv) 157 | 158 | shutil.copy2(deriv, outpath / deriv.name) 159 | json = deriv.parent / (deriv.name.split('.')[0] + '.json') 160 | if json.exists(): 161 | shutil.copy2(json, outpath / json.name) 162 | -------------------------------------------------------------------------------- /nibabies/utils/filtering.py: -------------------------------------------------------------------------------- 1 | """Signal processing filters.""" 2 | 3 | 4 | def truncation( 5 | in_file, 6 | clip_max=99.9, 7 | dtype='int16', 8 | out_file=None, 9 | out_max=1000, 10 | out_min=0, 11 | percentiles=(0.1, 95), 12 | ): 13 | """Truncate and clip the input image intensities.""" 14 | from pathlib import Path 15 | 16 | import nibabel as nb 17 | import numpy as np 18 | from nipype.utils.filemanip import fname_presuffix 19 | 20 | try: 21 | info = np.iinfo(dtype) 22 | except ValueError: 23 | info = np.finfo(dtype) 24 | 25 | img = nb.load(in_file) 26 | hdr = img.header.copy() 27 | hdr.set_data_dtype(dtype) 28 | 29 | data = img.get_fdata() 30 | 31 | out_min = max(out_min, info.min) 32 | out_max = min(out_max, info.max) 33 | 34 | a_min = np.percentile(data.reshape(-1), percentiles[0]) 35 | data -= a_min 36 | a_max = np.percentile(data.reshape(-1), percentiles[1]) 37 | data *= out_max / a_max 38 | data = np.clip(data, info.min, info.max) 39 | 40 | if clip_max is not None: 41 | data = np.clip(data, 0, np.percentile(data.reshape(-1), clip_max)) 42 | 43 | if out_file is None: 44 | out_file = fname_presuffix(Path(in_file).name, suffix='_trunc') 45 | 46 | out_file = str(Path(out_file).absolute()) 47 | img.__class__(data.astype(dtype), img.affine, hdr).to_filename(out_file) 48 | return out_file 49 | 50 | 51 | def gaussian_filter(in_file, sigma=None, out_file=None): 52 | """Filter input image by convolving with a Gaussian kernel.""" 53 | from pathlib import Path 54 | 55 | import nibabel as nb 56 | import numpy as np 57 | from nipype.utils.filemanip import fname_presuffix 58 | from scipy.ndimage import gaussian_filter 59 | 60 | if out_file is None: 61 | out_file = fname_presuffix(Path(in_file).name, suffix='_gauss') 62 | out_file = str(Path(out_file).absolute()) 63 | 64 | img = nb.load(in_file) 65 | if sigma is None: 66 | sigma = tuple(np.array(img.header.get_zooms()[:3]) * 2.0) 67 | img.__class__(gaussian_filter(img.dataobj, sigma), img.affine, img.header).to_filename( 68 | out_file 69 | ) 70 | return out_file 71 | -------------------------------------------------------------------------------- /nibabies/utils/misc.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | """Miscellaneous utilities.""" 4 | 5 | from __future__ import annotations 6 | 7 | from functools import cache 8 | from pathlib import Path 9 | 10 | 11 | def fix_multi_source_name(in_files): 12 | """ 13 | Make up a generic source name when there are multiple 14 | >>> fix_multi_source_name([ 15 | ... '/path/to/sub-045_ses-test_T1w.nii.gz', 16 | ... 
'/path/to/sub-045_ses-retest_T1w.nii.gz']) 17 | '/path/to/sub-045_T1w.nii.gz' 18 | """ 19 | import re 20 | from pathlib import Path 21 | 22 | from nipype.utils.filemanip import filename_to_list 23 | 24 | if not isinstance(in_files, tuple | list): 25 | return in_files 26 | elif len(in_files) == 1: 27 | return in_files[0] 28 | 29 | p = Path(filename_to_list(in_files)[0]) 30 | # subject_label = p.name.split("_", 1)[0].split("-")[1] 31 | try: 32 | subj = re.search(r'(?<=^sub-)[a-zA-Z0-9]*', p.name).group() 33 | suffix = re.search(r'(?<=_)\w+(?=\.)', p.name).group() 34 | except AttributeError as e: 35 | raise AttributeError('Could not extract BIDS information') from e 36 | return str(p.parent / f'sub-{subj}_{suffix}.nii.gz') 37 | 38 | 39 | def check_deps(workflow): 40 | """Make sure dependencies are present in this system.""" 41 | from nipype.utils.filemanip import which 42 | 43 | return sorted( 44 | (node.interface.__class__.__name__, node.interface._cmd) 45 | for node in workflow._get_all_nodes() 46 | if (hasattr(node.interface, '_cmd') and which(node.interface._cmd.split()[0]) is None) 47 | ) 48 | 49 | 50 | def cohort_by_months(template, months): 51 | """ 52 | Produce a recommended cohort based on the participant's age in months 53 | """ 54 | cohort_key = { 55 | 'MNIInfant': ( 56 | # upper bound of template | cohort 57 | 2, # 1 58 | 5, # 2 59 | 8, # 3 60 | 11, # 4 61 | 14, # 5 62 | 17, # 6 63 | 21, # 7 64 | 27, # 8 65 | 33, # 9 66 | 44, # 10 67 | 60, # 11 68 | ), 69 | 'UNCInfant': ( 70 | 8, # 1 71 | 12, # 2 72 | 24, # 3 73 | ), 74 | } 75 | ages = cohort_key.get(template) 76 | if ages is None: 77 | raise KeyError('Template cohort information does not exist.') 78 | 79 | for cohort, age in enumerate(ages, 1): 80 | if months <= age: 81 | return cohort 82 | raise KeyError('Age exceeds all cohorts!') 83 | 84 | 85 | def check_total_memory(recommended_gb): 86 | """ 87 | Check total memory allocated to the process, and compare with a recommended value. 88 | If available memory is equal to or greater than recommended, return ``True``. 89 | Otherwise, return ``False``. If ``psutil`` is unavailable, return ``None``. 90 | """ 91 | 92 | try: 93 | import psutil 94 | except ImportError: 95 | return None 96 | 97 | tot = int(psutil.virtual_memory().total / 1024**3) 98 | return tot >= recommended_gb 99 | 100 | 101 | def combine_meepi_source(in_files): 102 | """ 103 | Create a new source name when optimally 104 | combining multiple multi-echo EPIs 105 | >>> combine_meepi_source([ 106 | ... 'sub-01_run-01_echo-1_bold.nii.gz', 107 | ... 'sub-01_run-01_echo-2_bold.nii.gz', 108 | ... 'sub-01_run-01_echo-3_bold.nii.gz',]) 109 | 'sub-01_run-01_bold.nii.gz' 110 | """ 111 | import os 112 | 113 | from nipype.utils.filemanip import filename_to_list 114 | 115 | base, in_file = os.path.split(filename_to_list(in_files)[0]) 116 | entities = [ent for ent in in_file.split('_') if not ent.startswith('echo-')] 117 | basename = '_'.join(entities) 118 | return os.path.join(base, basename) 119 | 120 | 121 | def get_file(pkg: str, src_path: str | Path) -> str: 122 | """ 123 | Get or extract a source file. 124 | Assures the file will be available for the lifetime of the current Python process.
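[Editor's addition] A quick sanity check of the ``cohort_by_months`` lookup above — a 4-month-old lands in MNIInfant cohort 2, the first cohort whose upper bound (5 months) covers that age:

from nibabies.utils.misc import cohort_by_months

assert cohort_by_months('MNIInfant', 4) == 2
assert cohort_by_months('UNCInfant', 12) == 2  # boundary ages resolve to the enclosing cohort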
125 | """ 126 | import atexit 127 | from contextlib import ExitStack 128 | 129 | try: 130 | from importlib.resources import as_file, files 131 | except ImportError: 132 | from importlib_resources import as_file, files 133 | 134 | file_manager = ExitStack() 135 | atexit.register(file_manager.close) 136 | ref = files(pkg) / str(src_path) 137 | fl = file_manager.enter_context(as_file(ref)) 138 | return str(fl) 139 | 140 | 141 | @cache 142 | def estimate_bold_mem_usage(bold_fname: str) -> tuple[int, dict]: 143 | import nibabel as nb 144 | import numpy as np 145 | 146 | img = nb.load(bold_fname) 147 | nvox = int(np.prod(img.shape, dtype='u8')) 148 | # Assume tools will coerce to 8-byte floats to be safe 149 | bold_size_gb = 8 * nvox / (1024**3) 150 | bold_tlen = img.shape[-1] 151 | mem_gb = { 152 | 'filesize': bold_size_gb, 153 | 'resampled': bold_size_gb * 4, 154 | 'largemem': bold_size_gb * (max(bold_tlen / 100, 1.0) + 4), 155 | } 156 | return bold_tlen, mem_gb 157 | -------------------------------------------------------------------------------- /nibabies/utils/telemetry.py: -------------------------------------------------------------------------------- 1 | from nibabel.optpkg import optional_package 2 | 3 | from .. import __version__, config 4 | 5 | migas = optional_package('migas')[0] 6 | 7 | 8 | def setup_migas(init_ping: bool = True, exit_ping: bool = True) -> None: 9 | """ 10 | Prepare the migas python client to communicate with a migas server. 11 | If ``init`` is ``True``, send an initial breadcrumb. 12 | """ 13 | # generate session UUID from generated run UUID 14 | session_id = None 15 | if config.execution.run_uuid: 16 | session_id = config.execution.run_uuid.split('_', 1)[-1] 17 | 18 | migas.setup(session_id=session_id) 19 | if init_ping: 20 | # send initial status ping 21 | send_crumb(status='R', status_desc='workflow start') 22 | if exit_ping: 23 | from migas.error.nipype import node_execution_error 24 | 25 | migas.track_exit( 26 | 'nipreps/nibabies', 27 | __version__, 28 | {'NodeExecutionError': node_execution_error}, 29 | ) 30 | 31 | 32 | def send_crumb(**kwargs) -> dict: 33 | """ 34 | Communicate with the migas telemetry server. This requires `migas.setup()` to be called. 
35 | """ 36 | return migas.add_breadcrumb('nipreps/nibabies', __version__, **kwargs) 37 | -------------------------------------------------------------------------------- /nibabies/utils/tests/__init__.py: -------------------------------------------------------------------------------- 1 | from acres import Loader 2 | 3 | load_data = Loader(__package__) 4 | 5 | DERIV_SKELETON = load_data('full-derivatives.yml') 6 | -------------------------------------------------------------------------------- /nibabies/utils/tests/full-derivatives.yml: -------------------------------------------------------------------------------- 1 | dataset_description: 2 | Name: nibabies-outputs 3 | BIDSVersion: 1.9.0 4 | DatasetType: derivative 5 | '01': 6 | anat: 7 | - suffix: mask 8 | space: T2w 9 | desc: brain 10 | - suffix: T2w 11 | desc: preproc 12 | - suffix: dseg 13 | space: T2w 14 | - suffix: probseg 15 | space: T2w 16 | label: CSF 17 | - suffix: probseg 18 | space: T2w 19 | label: GM 20 | - suffix: probseg 21 | space: T2w 22 | label: WM 23 | - suffix: xfm 24 | from: MNIInfant+1 25 | to: T2w 26 | mode: image 27 | extension: .h5 28 | - suffix: xfm 29 | from: T2w 30 | to: MNIInfant+1 31 | mode: image 32 | extension: .h5 33 | - suffix: white 34 | hemi: L 35 | extension: .surf.gii 36 | - suffix: white 37 | hemi: R 38 | extension: .surf.gii 39 | - suffix: pial 40 | hemi: L 41 | extension: .surf.gii 42 | - suffix: pial 43 | hemi: R 44 | extension: .surf.gii 45 | - suffix: midthickness 46 | hemi: L 47 | extension: .surf.gii 48 | - suffix: midthickness 49 | hemi: R 50 | extension: .surf.gii 51 | - suffix: sphere 52 | hemi: L 53 | extension: .surf.gii 54 | - suffix: sphere 55 | hemi: R 56 | extension: .surf.gii 57 | - suffix: sphere 58 | hemi: L 59 | desc: reg 60 | extension: .surf.gii 61 | - suffix: sphere 62 | hemi: R 63 | desc: reg 64 | extension: .surf.gii 65 | - suffix: sphere 66 | hemi: L 67 | space: fsLR 68 | desc: reg 69 | extension: .surf.gii 70 | - suffix: sphere 71 | hemi: R 72 | space: fsLR 73 | desc: reg 74 | extension: .surf.gii 75 | - suffix: thickness 76 | hemi: L 77 | extension: .shape.gii 78 | - suffix: thickness 79 | hemi: R 80 | extension: .shape.gii 81 | - suffix: sulc 82 | hemi: L 83 | extension: .shape.gii 84 | - suffix: sulc 85 | hemi: R 86 | extension: .shape.gii 87 | 88 | func: 89 | - suffix: boldref 90 | desc: coreg 91 | - suffix: boldref 92 | desc: hmc 93 | - suffix: xfm 94 | extension: .txt 95 | from: boldref 96 | to: T2w 97 | mode: image 98 | - suffix: xfm 99 | extension: .txt 100 | from: orig 101 | to: boldref 102 | mode: image 103 | desc: hmc 104 | - suffix: boldref 105 | space: MNIInfant 106 | cohort: 1 107 | - suffix: mask 108 | space: MNIInfant 109 | cohort: 1 110 | desc: brain 111 | - suffix: bold 112 | space: MNIInfant 113 | cohort: 1 114 | desc: preproc 115 | - suffix: bold 116 | space: fsLR 117 | den: 91k 118 | extension: .dtseries.nii 119 | -------------------------------------------------------------------------------- /nibabies/utils/tests/test_bids.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest 4 | 5 | from nibabies.utils.bids import _get_age_from_tsv 6 | 7 | 8 | def create_tsv(data: dict, out_file: Path) -> None: 9 | import pandas as pd 10 | 11 | pd.DataFrame(data).to_csv(out_file, index=False, sep='\t') 12 | 13 | 14 | def create_sidecar(tsv_file: Path, units) -> None: 15 | import json 16 | 17 | out_file = tsv_file.with_suffix('.json') 18 | data = {'age': {'Units': units}} 19 | 
out_file.write_text(json.dumps(data)) 20 | 21 | 22 | age = {'age': [4, 4, 4]} 23 | age_weeks = {'age_weeks': [4, 8, 12]} 24 | age_months = {'age_months': [3, 6, 9]} 25 | age_years = {'age_years': [1, 1, 2]} 26 | 27 | participants = {'participant_id': ['sub-1', 'sub-2', 'sub-11']} 28 | sessions = {'session_id': ['ses-1', 'ses-2', 'ses-3']} 29 | scans = { 30 | 'filename': [ 31 | 'dwi/sub-01_dwi.nii.gz', 32 | 'anat/sub-01_T1w.nii.gz', 33 | 'func/sub-01_task-rest_bold.nii.gz', 34 | ] 35 | } 36 | 37 | 38 | @pytest.mark.parametrize( 39 | ('idx_col', 'idx_val', 'data', 'units', 'expected'), 40 | [ 41 | ('session_id', 'ses-1', age, 'months', 4), 42 | ('session_id', 'ses-1', age, 'weeks', 1), # Convert from 4 weeks -> 1 month 43 | ('session_id', 'ses-2', age_weeks, False, 2), 44 | ('participant_id', 'sub-1', age_months, False, 3), 45 | ('participant_id', 'sub-11', age_years, False, 24), 46 | ('session_id', 'ses-3', {**age_months, **age}, False, 9), 47 | ('filename', r'^anat.*', age_months, False, 6), 48 | ], 49 | ) 50 | def test_get_age_from_tsv(tmp_path, idx_col, idx_val, data, units, expected): 51 | tsv_file = tmp_path / 'test-age-parsing.tsv' 52 | 53 | if idx_col == 'participant_id': 54 | base = participants 55 | elif idx_col == 'session_id': 56 | base = sessions 57 | elif idx_col == 'filename': 58 | base = scans 59 | 60 | create_tsv({**base, **data}, tsv_file) 61 | if units: 62 | create_sidecar(tsv_file, units) 63 | 64 | res = _get_age_from_tsv(tsv_file, idx_col, idx_val) 65 | assert res == expected 66 | 67 | 68 | def test_get_age_from_tsv_error(tmp_path): 69 | tsv_file = tmp_path / 'participants.tsv' 70 | 71 | create_tsv({**participants, **age}, tsv_file) 72 | with pytest.raises(FileNotFoundError): 73 | _get_age_from_tsv(tsv_file, 'participant_id', 'sub-1') 74 | 75 | 76 | def test_get_age_from_tsv_warning(tmp_path): 77 | tsv_file = tmp_path / 'participants.tsv' 78 | dual_participants = {'participant_id': ['sub-1', 'sub-2', 'sub-2']} 79 | create_tsv({**dual_participants, **age_months}, tsv_file) 80 | 81 | with pytest.warns(UserWarning): 82 | _get_age_from_tsv(tsv_file, 'participant_id', 'sub-2') 83 | -------------------------------------------------------------------------------- /nibabies/utils/tests/test_derivatives.py: -------------------------------------------------------------------------------- 1 | from niworkflows.utils.testing import generate_bids_skeleton 2 | 3 | from nibabies.utils.derivatives import ( 4 | collect_anatomical_derivatives, 5 | collect_functional_derivatives, 6 | copy_derivatives, 7 | ) 8 | 9 | from . 
import DERIV_SKELETON 10 | 11 | 12 | def test_collect_derivatives(tmp_path): 13 | deriv_dir = tmp_path / 'derivatives' 14 | generate_bids_skeleton(deriv_dir, str(DERIV_SKELETON)) 15 | output_spaces = ['MNIInfant:cohort-1'] 16 | 17 | anat_cache = collect_anatomical_derivatives( 18 | derivatives_dir=deriv_dir, 19 | subject_id='01', 20 | session_id=None, 21 | std_spaces=output_spaces, 22 | ) 23 | for suffix in ('preproc', 'mask', 'dseg'): 24 | assert anat_cache[f't2w_{suffix}'] 25 | assert len(anat_cache['t2w_tpms']) == 3 26 | xfms = anat_cache['transforms'] 27 | for space in output_spaces: 28 | assert xfms[space]['reverse'] 29 | assert xfms[space]['forward'] 30 | for surface in ( 31 | 'white', 32 | 'pial', 33 | 'midthickness', 34 | 'sphere', 35 | 'thickness', 36 | 'sulc', 37 | 'sphere_reg', 38 | 'sphere_reg_fsLR', 39 | ): 40 | assert len(anat_cache[surface]) == 2 41 | 42 | func_cache = collect_functional_derivatives(deriv_dir, {'subject': '01'}, None) 43 | for val in ('hmc_boldref', 'coreg_boldref', 'hmc'): 44 | assert func_cache[val] 45 | 46 | 47 | def test_copy_derivatives(tmp_path): 48 | precomp = tmp_path / 'precomputed' 49 | precomp.mkdir() 50 | out = tmp_path / 'out' 51 | out.mkdir() 52 | 53 | mask = precomp / 'mask.nii.gz' 54 | mask.touch() 55 | aseg = precomp / 'aseg.nii.gz' 56 | aseg.touch() 57 | aseg_meta = precomp / 'aseg.json' 58 | aseg_meta.touch() 59 | 60 | derivs = { 61 | 't2w_mask': str(mask), 62 | 't2w_aseg': str(aseg), 63 | 'transforms': {}, 64 | } 65 | 66 | copy_derivatives(derivs, out, 'anat', 'sub-01') 67 | outpath = out / 'sub-01' / 'anat' 68 | 69 | for fl in (mask, aseg, aseg_meta): 70 | assert (outpath / fl.name).exists() 71 | 72 | copy_derivatives(derivs, out, 'anat', 'sub-01', 'ses-a') 73 | outpath = out / 'sub-01' / 'ses-a' / 'anat' 74 | 75 | for fl in (mask, aseg, aseg_meta): 76 | assert (outpath / fl.name).exists() 77 | -------------------------------------------------------------------------------- /nibabies/utils/transforms.py: -------------------------------------------------------------------------------- 1 | """Utilities for loading transforms for resampling""" 2 | 3 | from pathlib import Path 4 | 5 | import nitransforms as nt 6 | 7 | 8 | def load_transforms(xfm_paths: list[Path], inverse: list[bool]) -> nt.base.TransformBase: 9 | """Load a series of transforms as a nitransforms TransformChain 10 | 11 | An empty list will return an identity transform 12 | """ 13 | if len(inverse) == 1: 14 | inverse *= len(xfm_paths) 15 | elif len(inverse) != len(xfm_paths): 16 | raise ValueError('Mismatched number of transforms and inverses') 17 | 18 | chain = None 19 | for path, inv in zip(xfm_paths[::-1], inverse[::-1], strict=False): 20 | path = Path(path) 21 | if path.suffix == '.h5': 22 | # Load as a TransformChain 23 | xfm = nt.manip.load(path) 24 | else: 25 | xfm = nt.linear.load(path) 26 | if inv: 27 | xfm = ~xfm 28 | if chain is None: 29 | chain = xfm 30 | else: 31 | chain += xfm 32 | if chain is None: 33 | chain = nt.Affine() # Identity 34 | return chain 35 | -------------------------------------------------------------------------------- /nibabies/workflows/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipreps/nibabies/1e6afd415ec2776a4bf749628fa1a58646d536db/nibabies/workflows/__init__.py -------------------------------------------------------------------------------- /nibabies/workflows/anatomical/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipreps/nibabies/1e6afd415ec2776a4bf749628fa1a58646d536db/nibabies/workflows/anatomical/__init__.py -------------------------------------------------------------------------------- /nibabies/workflows/anatomical/preproc.py: -------------------------------------------------------------------------------- 1 | import nipype.interfaces.utility as niu 2 | import nipype.pipeline.engine as pe 3 | from niworkflows.engine import Workflow, tag 4 | 5 | 6 | @tag('anat.preproc') 7 | def init_anat_preproc_wf( 8 | *, 9 | bspline_fitting_distance: int = 200, 10 | name: str = 'anat_preproc_wf', 11 | ) -> Workflow: 12 | """Polish up raw anatomical data. 13 | 14 | This workflow accepts T1w/T2w images as inputs (either raw or a merged template) and performs: 15 | - Intensity clipping 16 | - N4 Bias Field Correction 17 | 18 | The outputs of this workflow will be a structural reference used for subsequent processing. 19 | 20 | Inputs 21 | ------ 22 | in_anat : :obj:`str` 23 | A single volume T1w/T2w image 24 | 25 | Outputs 26 | ------- 27 | anat_preproc: :obj:`str` 28 | Preprocessed anatomical image (intensity clipping and INU correction) 29 | """ 30 | from nipype.interfaces.ants import N4BiasFieldCorrection 31 | from niworkflows.interfaces.header import ValidateImage 32 | from niworkflows.interfaces.nibabel import IntensityClip 33 | 34 | wf = Workflow(name=name) 35 | inputnode = pe.Node( 36 | niu.IdentityInterface(fields=['in_anat']), 37 | name='inputnode', 38 | ) 39 | outputnode = pe.Node( 40 | niu.IdentityInterface(fields=['anat_preproc']), 41 | name='outputnode', 42 | ) 43 | 44 | # validate image 45 | validate = pe.Node(ValidateImage(), name='anat_validate', run_without_submitting=True) 46 | clip = pe.Node(IntensityClip(p_min=10.0, p_max=99.5), name='clip') 47 | n4_correct = pe.Node( 48 | N4BiasFieldCorrection( 49 | dimension=3, 50 | bspline_fitting_distance=bspline_fitting_distance, 51 | save_bias=True, 52 | copy_header=True, 53 | n_iterations=[50] * 5, 54 | convergence_threshold=1e-7, 55 | rescale_intensities=True, 56 | shrink_factor=4, 57 | ), 58 | name='n4_correct', 59 | ) 60 | final_clip = pe.Node(IntensityClip(p_min=5.0, p_max=99.5), name='final_clip') 61 | 62 | wf.connect([ 63 | (inputnode, validate, [('in_anat', 'in_file')]), 64 | (validate, clip, [('out_file', 'in_file')]), 65 | (clip, n4_correct, [('out_file', 'input_image')]), 66 | (n4_correct, final_clip, [('output_image', 'in_file')]), 67 | (final_clip, outputnode, [('out_file', 'anat_preproc')]), 68 | ]) # fmt:skip 69 | return wf 70 | 71 | 72 | @tag('anat.csf_norm') 73 | def init_csf_norm_wf(name: str = 'csf_norm_wf') -> Workflow: 74 | """Replace low intensity voxels within the CSF mask with the median value.""" 75 | 76 | workflow = Workflow(name=name) 77 | workflow.__desc__ = ( 78 | 'The CSF mask was used to normalize the anatomical template by the median of voxels ' 79 | 'within the mask.'
80 | ) 81 | inputnode = pe.Node( 82 | niu.IdentityInterface(fields=['anat_preproc', 'anat_tpms']), 83 | name='inputnode', 84 | ) 85 | outputnode = pe.Node(niu.IdentityInterface(fields=['anat_preproc']), name='outputnode') 86 | 87 | # select CSF from BIDS-ordered list (GM, WM, CSF) 88 | select_csf = pe.Node(niu.Select(index=2), name='select_csf') 89 | norm_csf = pe.Node(niu.Function(function=_normalize_roi), name='norm_csf') 90 | 91 | workflow.connect([ 92 | (inputnode, select_csf, [('anat_tpms', 'inlist')]), 93 | (select_csf, norm_csf, [('out', 'mask_file')]), 94 | (inputnode, norm_csf, [('anat_preproc', 'in_file')]), 95 | (norm_csf, outputnode, [('out', 'anat_preproc')]), 96 | ]) # fmt:skip 97 | 98 | return workflow 99 | 100 | 101 | def _normalize_roi(in_file, mask_file, threshold=0.2, out_file=None): 102 | """Normalize low intensity voxels that fall within a given mask.""" 103 | import nibabel as nb 104 | import numpy as np 105 | 106 | img = nb.load(in_file) 107 | img_data = np.asanyarray(img.dataobj) 108 | mask_img = nb.load(mask_file) 109 | # binary mask 110 | bin_mask = np.asanyarray(mask_img.dataobj) > threshold 111 | mask_data = bin_mask * img_data 112 | masked_data = mask_data[mask_data > 0] 113 | 114 | median = np.median(masked_data).astype(masked_data.dtype) 115 | normed_data = np.maximum(img_data, bin_mask * median) 116 | 117 | oimg = img.__class__(normed_data, img.affine, img.header) 118 | if not out_file: 119 | from nipype.utils.filemanip import fname_presuffix 120 | 121 | out_file = fname_presuffix(in_file, suffix='normed') 122 | oimg.to_filename(out_file) 123 | return out_file 124 | -------------------------------------------------------------------------------- /nibabies/workflows/anatomical/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipreps/nibabies/1e6afd415ec2776a4bf749628fa1a58646d536db/nibabies/workflows/anatomical/tests/__init__.py -------------------------------------------------------------------------------- /nibabies/workflows/anatomical/tests/test_preproc.py: -------------------------------------------------------------------------------- 1 | import typing as ty 2 | from pathlib import Path 3 | 4 | import nibabel as nb 5 | import numpy as np 6 | import pytest 7 | 8 | from nibabies.workflows.anatomical.preproc import _normalize_roi, init_csf_norm_wf 9 | 10 | EXPECTED_CSF_NORM = np.array([[[10, 73], [73, 29]], [[77, 80], [6, 16]]], dtype='uint8') 11 | 12 | 13 | @pytest.fixture 14 | def csf_norm_data(tmp_path) -> ty.Generator[tuple[Path, list[Path]], None, None]: 15 | np.random.seed(10) 16 | 17 | in_file = tmp_path / 'input.nii.gz' 18 | data = np.random.randint(1, 101, size=(2, 2, 2), dtype='uint8') 19 | img = nb.Nifti1Image(data, np.eye(4)) 20 | img.to_filename(in_file) 21 | 22 | masks = [] 23 | for tpm in ('gm', 'wm', 'csf'): 24 | name = tmp_path / f'{tpm}.nii.gz' 25 | binmask = data > np.random.randint(10, 90) 26 | masked = (binmask * 1).astype('uint8') 27 | mask = nb.Nifti1Image(masked, img.affine) 28 | mask.to_filename(name) 29 | masks.append(name) 30 | 31 | yield in_file, masks 32 | 33 | in_file.unlink() 34 | for m in masks: 35 | m.unlink() 36 | 37 | 38 | def test_csf_norm_wf(tmp_path, csf_norm_data): 39 | anat, tpms = csf_norm_data 40 | wf = init_csf_norm_wf() 41 | wf.base_dir = tmp_path 42 | 43 | wf.inputs.inputnode.anat_preproc = anat 44 | wf.inputs.inputnode.anat_tpms = tpms 45 | 46 | # verify workflow runs 47 | wf.run() 48 | 49 | # verify function works as expected 
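    # _normalize_roi (called directly below) binarizes the CSF probseg at
    # `threshold` (default 0.2) and raises every voxel inside that mask to at
    # least the median of the masked nonzero intensities, i.e.
    # normed = np.maximum(data, bin_mask * median); voxels outside the mask
    # are left untouched. EXPECTED_CSF_NORM above follows from applying that
    # rule to the seeded random data.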
50 | outfile = _normalize_roi(anat, tpms[2]) 51 | assert np.array_equal( 52 | np.asanyarray(nb.load(outfile).dataobj), 53 | EXPECTED_CSF_NORM, 54 | ) 55 | Path(outfile).unlink() 56 | -------------------------------------------------------------------------------- /nibabies/workflows/bold/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipreps/nibabies/1e6afd415ec2776a4bf749628fa1a58646d536db/nibabies/workflows/bold/__init__.py -------------------------------------------------------------------------------- /nibabies/workflows/bold/boldref.py: -------------------------------------------------------------------------------- 1 | import nipype.interfaces.utility as niu 2 | import nipype.pipeline.engine as pe 3 | 4 | 5 | def init_infant_epi_reference_wf( 6 | omp_nthreads: int, 7 | is_sbref: bool = False, 8 | start_frame: int = 17, 9 | name: str = 'infant_epi_reference_wf', 10 | ) -> pe.Workflow: 11 | """ 12 | Workflow to generate a reference map from one or more infant EPI images. 13 | 14 | If any single-band references are provided, the reference map will be calculated from those. 15 | 16 | If no single-band references are provided, the BOLD files are used. 17 | To account for increased motion at the start of image acquisition, this 18 | workflow discards a larger number of initial frames. 19 | 20 | Parameters 21 | ---------- 22 | omp_nthreads 23 | Maximum number of threads an individual process may use 24 | is_sbref 25 | Whether a single-band reference is provided. 26 | start_frame 27 | BOLD frame to start creating the reference map from. Any earlier frames are discarded. 28 | 29 | Inputs 30 | ------ 31 | epi_file 32 | BOLD or single-band reference EPI file 33 | 34 | 35 | 36 | Outputs 37 | ------- 38 | boldref_file 39 | The generated reference map 40 | boldref_mask 41 | Binary brain mask of the ``boldref_file`` 42 | 43 | 44 | 45 | """ 46 | from niworkflows.workflows.epi.refmap import init_epi_reference_wf 47 | from sdcflows.interfaces.brainmask import BrainExtraction 48 | 49 | wf = pe.Workflow(name=name) 50 | 51 | inputnode = pe.Node( 52 | niu.IdentityInterface(fields=['epi_file']), 53 | name='inputnode', 54 | ) 55 | outputnode = pe.Node( 56 | niu.IdentityInterface(fields=['boldref_file', 'boldref_mask']), 57 | name='outputnode', 58 | ) 59 | 60 | epi_reference_wf = init_epi_reference_wf(omp_nthreads, auto_bold_nss=False) 61 | 62 | boldref_mask = pe.Node(BrainExtraction(), name='boldref_mask') 63 | 64 | # fmt:off 65 | wf.connect([ 66 | (inputnode, epi_reference_wf, [('epi_file', 'inputnode.in_files')]), 67 | (epi_reference_wf, boldref_mask, [('outputnode.epi_ref_file', 'in_file')]), 68 | (epi_reference_wf, outputnode, [('outputnode.epi_ref_file', 'boldref_file')]), 69 | (boldref_mask, outputnode, [('out_mask', 'boldref_mask')]), 70 | ]) 71 | # fmt:on 72 | if not is_sbref: 73 | select_frames = pe.Node( 74 | niu.Function(function=_select_frames, output_names=['t_masks']), 75 | name='select_frames', 76 | ) 77 | select_frames.inputs.start_frame = start_frame 78 | # fmt:off 79 | wf.connect([ 80 | (inputnode, select_frames, [('epi_file', 'in_file')]), 81 | (select_frames, epi_reference_wf, [('t_masks', 'inputnode.t_masks')]), 82 | ]) 83 | # fmt:on 84 | else: 85 | # Won't be used but needed to placate iternode 86 | # To consider: Add a check to ensure this is a 3D file 87 | epi_reference_wf.inputs.inputnode.t_masks = [True] 88 | return wf 89 | 90 | 91 | def 
_select_frames(in_file: str, start_frame: int) -> list: 92 | import nibabel as nb 93 | import numpy as np 94 | 95 | img = nb.load(in_file) 96 | img_len = img.shape[3] 97 | if start_frame >= img_len: 98 | start_frame = img_len - 1  # short run: fall back to keeping only the final frame 99 | t_mask = np.array([False] * img_len, dtype=bool) 100 | t_mask[start_frame:] = True 101 | return list(t_mask) 102 | -------------------------------------------------------------------------------- /nibabies/workflows/bold/hmc.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | # 4 | # Copyright The NiPreps Developers 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | # We support and encourage derived works from this project, please read 19 | # about our expectations at 20 | # 21 | # https://www.nipreps.org/community/licensing/ 22 | # 23 | """ 24 | Head-Motion Estimation and Correction (HMC) of BOLD images 25 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 26 | 27 | .. autofunction:: init_bold_hmc_wf 28 | 29 | """ 30 | 31 | from nipype.interfaces import fsl 32 | from nipype.interfaces import utility as niu 33 | from nipype.pipeline import engine as pe 34 | 35 | from ...config import DEFAULT_MEMORY_MIN_GB 36 | 37 | 38 | def init_bold_hmc_wf(mem_gb: float, omp_nthreads: int, name: str = 'bold_hmc_wf'): 39 | """ 40 | Build a workflow to estimate head-motion parameters. 41 | 42 | This workflow estimates the motion parameters to perform 43 | :abbr:`HMC (head motion correction)` over the input 44 | :abbr:`BOLD (blood-oxygen-level dependent)` image. 45 | 46 | Workflow Graph 47 | .. workflow:: 48 | :graph2use: orig 49 | :simple_form: yes 50 | 51 | from nibabies.workflows.bold.hmc import init_bold_hmc_wf 52 | wf = init_bold_hmc_wf( 53 | mem_gb=3, 54 | omp_nthreads=1) 55 | 56 | Parameters 57 | ---------- 58 | mem_gb : :obj:`float` 59 | Size of BOLD file in GB 60 | omp_nthreads : :obj:`int` 61 | Maximum number of threads an individual process may use 62 | name : :obj:`str` 63 | Name of workflow (default: ``bold_hmc_wf``) 64 | 65 | Inputs 66 | ------ 67 | bold_file 68 | BOLD series NIfTI file 69 | raw_ref_image 70 | Reference image to which BOLD series is motion corrected 71 | 72 | Outputs 73 | ------- 74 | xforms 75 | ITKTransform file aligning each volume to ``ref_image`` 76 | movpar_file 77 | MCFLIRT motion parameters, normalized to SPM format (X, Y, Z, Rx, Ry, Rz) 78 | rmsd_file 79 | Root mean squared deviation as measured by ``fsl_motion_outliers`` [Jenkinson2002]_. 
80 | 81 | """ 82 | from niworkflows.engine.workflows import LiterateWorkflow as Workflow 83 | from niworkflows.interfaces.confounds import NormalizeMotionParams 84 | from niworkflows.interfaces.itk import MCFLIRT2ITK 85 | 86 | workflow = Workflow(name=name) 87 | workflow.__desc__ = """\ 88 | Head-motion parameters with respect to the BOLD reference 89 | (transformation matrices, and six corresponding rotation and translation 90 | parameters) are estimated before any spatiotemporal filtering using 91 | `mcflirt` [FSL {fsl_ver}, @mcflirt]. 92 | """.format(fsl_ver=fsl.Info().version() or '') 93 | 94 | inputnode = pe.Node( 95 | niu.IdentityInterface(fields=['bold_file', 'raw_ref_image']), name='inputnode' 96 | ) 97 | outputnode = pe.Node( 98 | niu.IdentityInterface(fields=['xforms', 'movpar_file', 'rmsd_file']), name='outputnode' 99 | ) 100 | 101 | # Head motion correction (hmc) 102 | mcflirt = pe.Node( 103 | fsl.MCFLIRT(save_mats=True, save_plots=True, save_rms=True), 104 | name='mcflirt', 105 | mem_gb=mem_gb * 3, 106 | ) 107 | 108 | fsl2itk = pe.Node(MCFLIRT2ITK(), name='fsl2itk', mem_gb=0.05, n_procs=omp_nthreads) 109 | 110 | normalize_motion = pe.Node( 111 | NormalizeMotionParams(format='FSL'), name='normalize_motion', mem_gb=DEFAULT_MEMORY_MIN_GB 112 | ) 113 | 114 | def _pick_rel(rms_files): 115 | return rms_files[-1] 116 | 117 | workflow.connect([ 118 | (inputnode, mcflirt, [('raw_ref_image', 'ref_file'), 119 | ('bold_file', 'in_file')]), 120 | (inputnode, fsl2itk, [('raw_ref_image', 'in_source'), 121 | ('raw_ref_image', 'in_reference')]), 122 | (mcflirt, fsl2itk, [('mat_file', 'in_files')]), 123 | (mcflirt, normalize_motion, [('par_file', 'in_file')]), 124 | (mcflirt, outputnode, [(('rms_files', _pick_rel), 'rmsd_file')]), 125 | (fsl2itk, outputnode, [('out_file', 'xforms')]), 126 | (normalize_motion, outputnode, [('out_file', 'movpar_file')]), 127 | ]) # fmt:skip 128 | 129 | return workflow 130 | -------------------------------------------------------------------------------- /nibabies/workflows/bold/stc.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | # 4 | # Copyright 2023 The NiPreps Developers 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | # We support and encourage derived works from this project, please read 19 | # about our expectations at 20 | # 21 | # https://www.nipreps.org/community/licensing/ 22 | # 23 | """ 24 | Slice-Timing Correction (STC) of BOLD images 25 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 26 | 27 | .. autofunction:: init_bold_stc_wf 28 | 29 | """ 30 | 31 | import nibabel as nb 32 | import numpy as np 33 | from nipype.interfaces import afni 34 | from nipype.interfaces import utility as niu 35 | from nipype.interfaces.base import isdefined 36 | from nipype.pipeline import engine as pe 37 | 38 | from ... 
import config 39 | 40 | LOGGER = config.loggers.workflow 41 | 42 | 43 | class TShift(afni.TShift): 44 | """Patched version of TShift implementing the "TooShort" behavior.""" 45 | 46 | def _pre_run_hook(self, runtime): 47 | ignore = self.inputs.ignore if isdefined(self.inputs.ignore) else 0 48 | ntsteps = nb.load(self.inputs.in_file).shape[3] 49 | if ntsteps - ignore < 5: 50 | raise RuntimeError( 51 | f'Insufficient length of BOLD data ({ntsteps} time points) after ' 52 | f"discarding {ignore} nonsteady-state (or 'dummy') time points." 53 | ) 54 | return runtime 55 | 56 | 57 | def init_bold_stc_wf(metadata, name='bold_stc_wf'): 58 | """ 59 | Create a workflow for :abbr:`STC (slice-timing correction)`. 60 | 61 | This workflow performs :abbr:`STC (slice-timing correction)` over the input 62 | :abbr:`BOLD (blood-oxygen-level dependent)` image. 63 | 64 | Workflow Graph 65 | .. workflow:: 66 | :graph2use: orig 67 | :simple_form: yes 68 | 69 | from nibabies.workflows.bold.stc import init_bold_stc_wf 70 | wf = init_bold_stc_wf( 71 | metadata={"RepetitionTime": 2.0, 72 | "SliceTiming": [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]}, 73 | ) 74 | 75 | Parameters 76 | ---------- 77 | metadata : :obj:`dict` 78 | BIDS metadata for BOLD file 79 | name : :obj:`str` 80 | Name of workflow (default: ``bold_stc_wf``) 81 | 82 | Inputs 83 | ------ 84 | bold_file 85 | BOLD series NIfTI file 86 | skip_vols 87 | Number of non-steady-state volumes detected at beginning of ``bold_file`` 88 | 89 | Outputs 90 | ------- 91 | stc_file 92 | Slice-timing corrected BOLD series NIfTI file 93 | 94 | """ 95 | from niworkflows.engine.workflows import LiterateWorkflow as Workflow 96 | from niworkflows.interfaces.header import CopyXForm 97 | 98 | slice_times = metadata['SliceTiming'] 99 | first, last = min(slice_times), max(slice_times) 100 | frac = config.workflow.slice_time_ref 101 | tzero = np.round(first + frac * (last - first), 3) 102 | 103 | afni_ver = ''.join(f'{v:02d}' for v in afni.Info().version() or []) 104 | workflow = Workflow(name=name) 105 | workflow.__desc__ = f"""\ 106 | BOLD runs were slice-time corrected to {tzero:0.3g}s ({frac:g} of slice acquisition range 107 | {first:.3g}s-{last:.3g}s) using `3dTshift` from AFNI {afni_ver} [@afni, RRID:SCR_005927].
108 | """ 109 | inputnode = pe.Node(niu.IdentityInterface(fields=['bold_file', 'skip_vols']), name='inputnode') 110 | outputnode = pe.Node(niu.IdentityInterface(fields=['stc_file']), name='outputnode') 111 | 112 | LOGGER.log(25, f'BOLD series will be slice-timing corrected to an offset of {tzero:.3g}s.') 113 | 114 | # It would be good to fingerprint memory use of afni.TShift 115 | slice_timing_correction = pe.Node( 116 | TShift( 117 | outputtype='NIFTI_GZ', 118 | tr=f'{metadata["RepetitionTime"]}s', 119 | slice_timing=metadata['SliceTiming'], 120 | slice_encoding_direction=metadata.get('SliceEncodingDirection', 'k'), 121 | tzero=tzero, 122 | ), 123 | name='slice_timing_correction', 124 | ) 125 | 126 | copy_xform = pe.Node(CopyXForm(), name='copy_xform', mem_gb=0.1) 127 | 128 | # fmt:off 129 | workflow.connect([ 130 | (inputnode, slice_timing_correction, [('bold_file', 'in_file'), 131 | ('skip_vols', 'ignore')]), 132 | (slice_timing_correction, copy_xform, [('out_file', 'in_file')]), 133 | (inputnode, copy_xform, [('bold_file', 'hdr_file')]), 134 | (copy_xform, outputnode, [('out_file', 'stc_file')]), 135 | ]) 136 | # fmt:on 137 | return workflow 138 | -------------------------------------------------------------------------------- /nibabies/workflows/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | # 4 | # Copyright The NiPreps Developers 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | # We support and encourage derived works from this project, please read 19 | # about our expectations at 20 | # 21 | # https://www.nipreps.org/community/licensing/ 22 | # 23 | """Utilities and mocks for testing and documentation building.""" 24 | 25 | import os 26 | import shutil 27 | from contextlib import contextmanager 28 | from pathlib import Path 29 | from tempfile import mkdtemp 30 | 31 | from toml import loads 32 | 33 | from nibabies import data 34 | from nibabies.workflows.base import init_execution_spaces 35 | 36 | 37 | @contextmanager 38 | def mock_config(bids_dir=None): 39 | """Create a mock config for documentation and testing purposes.""" 40 | from ... 
import config 41 | 42 | _old_fs = os.getenv('FREESURFER_HOME') 43 | if not _old_fs: 44 | os.environ['FREESURFER_HOME'] = mkdtemp() 45 | 46 | settings = loads(data.load.readable('tests/config.toml').read_text()) 47 | for sectionname, configs in settings.items(): 48 | if sectionname != 'environment': 49 | section = getattr(config, sectionname) 50 | section.load(configs, init=False) 51 | config.nipype.omp_nthreads = 1 52 | config.nipype.init() 53 | config.loggers.init() 54 | init_execution_spaces() 55 | 56 | bids_dir = bids_dir or data.load('tests/ds000005').absolute() 57 | 58 | config.execution.work_dir = Path(mkdtemp()) 59 | config.execution.bids_dir = bids_dir 60 | config.execution.nibabies_dir = Path(mkdtemp()) 61 | config.execution.bids_database_dir = None 62 | config.execution._layout = None 63 | config.execution.init() 64 | 65 | yield 66 | 67 | shutil.rmtree(config.execution.work_dir) 68 | shutil.rmtree(config.execution.nibabies_dir) 69 | 70 | if not _old_fs: 71 | del os.environ['FREESURFER_HOME'] 72 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling", "hatch-vcs", "nipreps-versions"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "nibabies" 7 | description = "Processing workflows for magnetic resonance images of the brain in infants" 8 | readme = "long_description.md" 9 | authors = [{name = "The NiPreps Developers", email = "nipreps@gmail.com"}] 10 | classifiers = [ 11 | "Development Status :: 4 - Beta", 12 | "Intended Audience :: Science/Research", 13 | "Topic :: Scientific/Engineering :: Image Recognition", 14 | "License :: OSI Approved :: Apache Software License", 15 | "Programming Language :: Python :: 3.10", 16 | "Programming Language :: Python :: 3.11", 17 | "Programming Language :: Python :: 3.12", 18 | ] 19 | license = {file = "LICENSE"} 20 | requires-python = ">=3.10" 21 | dependencies = [ 22 | "acres", 23 | "nibabel >= 5.0.0", 24 | "nipype >= 1.8.5", 25 | "nireports >= 23.2.0", 26 | "nitime", 27 | "nitransforms >= 24.1.1", 28 | "niworkflows >= 1.13.1", 29 | "numpy >= 1.21.0", 30 | "packaging", 31 | "pandas", 32 | "pooch", 33 | "psutil >= 5.4", 34 | "pybids >= 0.15.0", 35 | "requests", 36 | "SimpleITK", 37 | "sdcflows >= 2.13.0", 38 | "smriprep >= 0.17.0", 39 | "tedana >= 23.0.2", 40 | "templateflow >= 24.2.0", 41 | "toml", 42 | "typing_extensions; python_version<'3.11'", 43 | ] 44 | dynamic = ["version"] 45 | 46 | [project.urls] 47 | Documentation = "https://nibabies.readthedocs.io/en/latest/" 48 | "Source Code" = "https://github.com/nipreps/nibabies" 49 | "Bug Tracker" = "https://github.com/nipreps/nibabies/issues" 50 | "Docker Images" = "https://hub.docker.com/r/nipreps/nibabies" 51 | 52 | [project.optional-dependencies] 53 | container = [ 54 | "nibabies[telemetry]", 55 | "datalad", 56 | "datalad-osf", 57 | ] 58 | dev = [ 59 | "ruff", 60 | "pre-commit", 61 | ] 62 | doc = [ 63 | "pydot >= 1.2.3", 64 | "sphinx >= 1.8", 65 | "sphinx-argparse", 66 | "shibuya", 67 | "myst_parser", 68 | "sphinx-togglebutton", 69 | "sphinxcontrib.bibtex", 70 | ] 71 | duecredit = ["duecredit"] 72 | maint = [ 73 | "fuzzywuzzy", 74 | "python-Levenshtein", 75 | ] 76 | test = [ 77 | "coverage[toml]", 78 | "pytest", 79 | "pytest-cov", 80 | "pytest-env", 81 | "pytest-xdist", 82 | ] 83 | telemetry = ["migas >= 0.4.0"] 84 | # Aliases 85 | docs = ["nibabies[doc]"] 86 | tests = ["nibabies[test]"] 87 | all = 
["nibabies[dev,doc,maint,telemetry,test]"] 88 | 89 | [project.scripts] 90 | nibabies = "nibabies.cli.run:main" 91 | nibabies-mcribs = "nibabies.cli.mcribs:main" 92 | 93 | [tool.hatch.metadata] 94 | allow-direct-references = true 95 | 96 | [tool.hatch.build.targets.sdist] 97 | exclude = [".git_archival.txt"] # No longer needed in sdist 98 | 99 | [tool.hatch.build.targets.wheel] 100 | packages = ["nibabies"] 101 | 102 | [tool.hatch.version] 103 | source = "vcs" 104 | raw-options = { version_scheme = "nipreps-calver" } 105 | 106 | [tool.hatch.build.hooks.vcs] 107 | version-file = "nibabies/_version.py" 108 | 109 | # 110 | # Developer tool configurations 111 | # 112 | 113 | [tool.black] 114 | exclude = ".*" 115 | 116 | [tool.flake8] 117 | max-line-length = "99" 118 | doctests = "False" 119 | exclude = "*build/" 120 | ignore = ["W503", "E203"] 121 | per-file-ignores = [ 122 | "**/__init__.py : F401", 123 | "docs/conf.py : E265", 124 | ] 125 | 126 | [tool.pytest.ini_options] 127 | minversion = "6" 128 | testpaths = ["nibabies"] 129 | log_cli_level = "INFO" 130 | xfail_strict = true 131 | norecursedirs = [".git"] 132 | addopts = [ 133 | "-svx", 134 | "-ra", 135 | "--strict-config", 136 | "--strict-markers", 137 | "--doctest-modules", 138 | # Config pytest-cov 139 | "--cov=nibabies", 140 | "--cov-report=xml", 141 | "--cov-config=pyproject.toml", 142 | ] 143 | doctest_optionflags = "ALLOW_UNICODE NORMALIZE_WHITESPACE ELLIPSIS" 144 | env = "PYTHONHASHSEED=0" 145 | filterwarnings = ["ignore::DeprecationWarning"] 146 | junit_family = "xunit2" 147 | 148 | [tool.ruff] 149 | line-length = 99 150 | extend-exclude = [ 151 | "wrapper/**", 152 | ".maint/**", 153 | "scripts/**", 154 | "docs/**", 155 | ] 156 | 157 | [tool.ruff.lint] 158 | extend-select = [ 159 | "F", 160 | "E", 161 | "W", 162 | "I", 163 | "UP", 164 | "YTT", 165 | "S", 166 | "BLE", 167 | "B", 168 | "A", 169 | # "CPY", 170 | "C4", 171 | "DTZ", 172 | "T10", 173 | # "EM", 174 | "EXE", 175 | "FA", 176 | "ISC", 177 | "ICN", 178 | "PT", 179 | "Q", 180 | ] 181 | extend-ignore = [ 182 | "S311", # We are not using random for cryptographic purposes 183 | "ISC001", 184 | "S603", 185 | ] 186 | 187 | [tool.ruff.lint.flake8-quotes] 188 | inline-quotes = "single" 189 | 190 | [tool.ruff.lint.extend-per-file-ignores] 191 | "*/test_*.py" = ["S101"] 192 | "nibabies/utils/debug.py" = ["A002", "T100"] 193 | "docs/conf.py" = ["A001"] 194 | "docs/sphinxext/github_link.py" = ["BLE001"] 195 | 196 | [tool.ruff.format] 197 | quote-style = "single" 198 | 199 | [tool.coverage.run] 200 | branch = true 201 | omit = [ 202 | "*/_version.py" 203 | ] 204 | 205 | [tool.coverage.paths] 206 | source = [ 207 | "nibabies", 208 | "**/site-packages/nibabies" 209 | ] 210 | -------------------------------------------------------------------------------- /scripts/anatprep.py: -------------------------------------------------------------------------------- 1 | """Script for testing the initial preprocessing steps of T1w and T2w.""" 2 | 3 | 4 | def init_workflow(bids_path, output_path, participant_label, workdir=None): 5 | """Create the preprocessing workflow.""" 6 | from nipype.pipeline import engine as pe 7 | from nibabies.workflows.anatomical.preproc import init_anat_average_wf 8 | from nibabies.workflows.anatomical.registration import init_coregistration_wf 9 | from nibabies.workflows.anatomical.brain_extraction import ( 10 | init_infant_brain_extraction_wf, 11 | ) 12 | from nibabies.workflows.anatomical.outputs import init_coreg_report_wf 13 | 14 | wf = 
pe.Workflow(name="nibabies_anat") 15 | for subid in participant_label: 16 | sub_wf = pe.Workflow(name=f"nibabies_anat_{subid}") 17 | t1w_files = list( 18 | (bids_path / f"sub-{subid}" / "anat").glob(f"sub-{subid}*_T1w.nii.gz") 19 | ) 20 | 21 | t2w_files = list( 22 | (bids_path / f"sub-{subid}" / "anat").glob(f"sub-{subid}*_T2w.nii.gz") 23 | ) 24 | 25 | t1w_ref = init_anat_average_wf( 26 | num_maps=len(t1w_files), name="t1w_ref", omp_nthreads=8 27 | ) 28 | t2w_ref = init_anat_average_wf( 29 | num_maps=len(t2w_files), name="t2w_ref", omp_nthreads=8 30 | ) 31 | 32 | t1w_ref.inputs.inputnode.in_files = [str(f) for f in t1w_files] 33 | t2w_ref.inputs.inputnode.in_files = [str(f) for f in t2w_files] 34 | 35 | be = init_infant_brain_extraction_wf(omp_nthreads=8, age_months=2) 36 | cr = init_coregistration_wf(omp_nthreads=8, sloppy=True) 37 | 38 | rpt = init_coreg_report_wf(output_dir=str(output_path.absolute())) 39 | rpt.inputs.inputnode.source_file = [str(f) for f in t1w_files] 40 | 41 | # fmt:off 42 | sub_wf.connect([ 43 | (t2w_ref, be, [("outputnode.out_file", "inputnode.in_t2w")]), 44 | (t1w_ref, cr, [("outputnode.out_file", "inputnode.in_t1w")]), 45 | (be, cr, [ 46 | ("outputnode.t2w_preproc", "inputnode.in_t2w_preproc"), 47 | ("outputnode.out_mask", "inputnode.in_mask"), 48 | ("outputnode.out_probmap", "inputnode.in_probmap"), 49 | ]), 50 | (cr, rpt, [ 51 | ("outputnode.t1w_preproc", "inputnode.t1w_preproc"), 52 | ("outputnode.t2w_preproc", "inputnode.t2w_preproc"), 53 | ("outputnode.t1w_mask", "inputnode.in_mask"), 54 | ]), 55 | ]) 56 | # fmt:on 57 | wf.add_nodes([sub_wf]) 58 | 59 | if workdir: 60 | wf.base_dir = workdir 61 | return wf 62 | 63 | 64 | if __name__ == "__main__": 65 | from pathlib import Path 66 | import re 67 | from argparse import ArgumentParser 68 | from argparse import RawTextHelpFormatter 69 | 70 | parser = ArgumentParser( 71 | description="sMRIPrep-infants: Structural MRI PREProcessing workflows", 72 | formatter_class=RawTextHelpFormatter, 73 | ) 74 | parser.add_argument( 75 | "bids_dir", 76 | action="store", 77 | type=Path, 78 | help="the root folder of a BIDS valid dataset (sub-XXXXX folders should " 79 | "be found at the top level in this folder).", 80 | ) 81 | parser.add_argument( 82 | "output_dir", 83 | action="store", 84 | type=Path, 85 | help="the output path for the outcomes of preprocessing and visual " "reports", 86 | ) 87 | parser.add_argument( 88 | "--participant-label", 89 | "--participant_label", 90 | action="store", 91 | nargs="+", 92 | help="a space delimited list of participant identifiers or a single " 93 | "identifier (the sub- prefix can be removed)", 94 | ) 95 | 96 | opts = parser.parse_args() 97 | 98 | participant_label = [ 99 | re.sub(r"^sub-", "", p) for p in opts.participant_label 100 | ] 101 | init_workflow( 102 | opts.bids_dir, 103 | opts.output_dir, 104 | participant_label, 105 | workdir=Path.cwd().absolute() / "workdir" 106 | ).run() 107 | -------------------------------------------------------------------------------- /scripts/bold_subcortical.py: -------------------------------------------------------------------------------- 1 | """Script for testing the subcortical MNI alignment""" 2 | from pathlib import Path 3 | 4 | 5 | def init_workflow(bold_file, bold_roi, bold_atlas_roi, vol_sigma): 6 | from nibabies.workflows.bold.alignment import init_subcortical_mni_alignment_wf 7 | 8 | wf = init_subcortical_mni_alignment_wf(vol_sigma=vol_sigma) 9 | wf.inputs.inputnode.bold_file = bold_file 10 | wf.inputs.inputnode.bold_roi = bold_roi 11 | 
wf.inputs.inputnode.atlas_roi = bold_atlas_roi 12 | 13 | wf.base_dir = Path('workdir').absolute() 14 | return wf 15 | 16 | 17 | if __name__ == "__main__": 18 | from argparse import ArgumentParser, RawTextHelpFormatter 19 | 20 | parser = ArgumentParser( 21 | description="DCAN subcortical MNI alignment", 22 | formatter_class=RawTextHelpFormatter, 23 | ) 24 | parser.add_argument( 25 | "bold_file", 26 | type=Path, 27 | help="the input BOLD file", 28 | ) 29 | parser.add_argument( 30 | "bold_roi", 31 | type=Path, 32 | help="segmentations in BOLD space", 33 | ) 34 | parser.add_argument( 35 | "bold_atlas_roi", 36 | type=Path, 37 | help="segmentations in ROI space, unrefined", 38 | ) 39 | parser.add_argument( 40 | "--vol-sigma", 41 | type=float, 42 | default=0.8, 43 | help="The sigma for the gaussian volume smoothing kernel, in mm", 44 | ) 45 | parser.add_argument( 46 | "--nipype-plugin", 47 | default="MultiProc", 48 | help="Nipype plugin to run workflow with", 49 | ) 50 | opts = parser.parse_args() 51 | wf = init_workflow( 52 | opts.bold_file.absolute(), 53 | opts.bold_roi.absolute(), 54 | opts.bold_atlas_roi.absolute(), 55 | vol_sigma=opts.vol_sigma, 56 | ) 57 | 58 | wf.config['execution']['crashfile_format'] = 'txt' 59 | wf.run(plugin=opts.nipype_plugin) 60 | -------------------------------------------------------------------------------- /scripts/fetch_templates.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | 'Preemptive caching of commonly used TemplateFlow templates' 4 | import templateflow.api as tf 5 | 6 | 7 | def fetch_MNI6(): 8 | """ 9 | Expected templates: 10 | 11 | tpl-MNI152NLin6Asym/tpl-MNI152NLin6Asym_res-01_T1w.nii.gz 12 | tpl-MNI152NLin6Asym/tpl-MNI152NLin6Asym_res-02_T1w.nii.gz 13 | tpl-MNI152NLin6Asym/tpl-MNI152NLin6Asym_res-01_desc-brain_mask.nii.gz 14 | tpl-MNI152NLin6Asym/tpl-MNI152NLin6Asym_res-02_desc-brain_mask.nii.gz 15 | tpl-MNI152NLin6Asym/tpl-MNI152NLin6Asym_res-02_atlas-HCP_dseg.nii.gz 16 | """ 17 | template = 'MNI152NLin6Asym' 18 | 19 | tf.get(template, resolution=(1, 2), desc=None, suffix='T1w') 20 | tf.get(template, resolution=(1, 2), desc='brain', suffix='mask') 21 | # CIFTI 22 | tf.get(template, resolution=2, atlas='HCP', suffix='dseg') 23 | 24 | 25 | def fetch_UNCInfant(): 26 | """ 27 | Expected templates: 28 | 29 | tpl-UNCInfant/cohort-1/tpl-UNCInfant_cohort-1_T1w.nii.gz 30 | tpl-UNCInfant/cohort-1/tpl-UNCInfant_cohort-1_label-brain_probseg.nii.gz 31 | tpl-UNCInfant/cohort-1/tpl-UNCInfant_cohort-1_label-brain_mask.nii.gz 32 | tpl-UNCInfant/cohort-1/tpl-UNCInfant_cohort-1_label-BrainCerebellumExtraction_mask.nii.gz 33 | """ 34 | template = 'UNCInfant' 35 | 36 | tf.get(template, cohort=1, desc=None, suffix='T1w') 37 | tf.get(template, cohort=1, label='brain', suffix='probseg') 38 | tf.get(template, cohort=1, label='brain', suffix='mask') 39 | tf.get(template, cohort=1, label='BrainCerebellumExtraction', suffix='mask') 40 | 41 | 42 | def fetch_fsaverage(): 43 | """ 44 | Expected templates: 45 | 46 | tpl-fsaverage/tpl-fsaverage_hemi-L_den-164k_desc-std_sphere.surf.gii 47 | tpl-fsaverage/tpl-fsaverage_hemi-R_den-164k_desc-std_sphere.surf.gii 48 | tpl-fsaverage/tpl-fsaverage_hemi-L_den-164k_desc-vaavg_midthickness.shape.gii 49 | tpl-fsaverage/tpl-fsaverage_hemi-R_den-164k_desc-vaavg_midthickness.shape.gii 50 | tpl-fsaverage/tpl-fsaverage_hemi-L_den-41k_sphere.surf.gii 51 | tpl-fsaverage/tpl-fsaverage_hemi-R_den-41k_sphere.surf.gii 52 | """ 53 | template = 'fsaverage' 54 | 55 | tf.get(template, 
density='164k', desc='std', suffix='sphere') 56 | tf.get(template, density='164k', desc='vaavg', suffix='midthickness') 57 | tf.get(template, density='41k', desc=None, suffix='sphere', extension='.surf.gii') 58 | 59 | 60 | def fetch_fsLR(): 61 | """ 62 | Expected templates: 63 | 64 | tpl-fsLR/tpl-fsLR_hemi-L_den-32k_desc-nomedialwall_dparc.label.gii 65 | tpl-fsLR/tpl-fsLR_hemi-L_den-32k_desc-vaavg_midthickness.shape.gii 66 | tpl-fsLR/tpl-fsLR_hemi-L_den-32k_sphere.surf.gii 67 | tpl-fsLR/tpl-fsLR_hemi-R_den-32k_desc-nomedialwall_dparc.label.gii 68 | tpl-fsLR/tpl-fsLR_hemi-R_den-32k_desc-vaavg_midthickness.shape.gii 69 | tpl-fsLR/tpl-fsLR_hemi-R_den-32k_sphere.surf.gii 70 | tpl-fsLR/tpl-fsLR_space-fsaverage_hemi-L_den-32k_sphere.surf.gii 71 | tpl-fsLR/tpl-fsLR_space-fsaverage_hemi-R_den-32k_sphere.surf.gii 72 | """ 73 | tf.get('fsLR', density='32k') 74 | tf.get('fsLR', density='164k', desc='std', suffix='sphere') 75 | tf.get('fsLR', density='164k', suffix='midthickness') 76 | 77 | 78 | def fetch_MNIInfant(cohort=1): 79 | """ 80 | Expected templates: 81 | 82 | tpl-MNIInfant/cohort-1/tpl-MNIInfant_cohort-1_res-1_T1w.nii.gz 83 | tpl-MNIInfant/cohort-1/tpl-MNIInfant_cohort-1_res-1_T2w.nii.gz 84 | tpl-MNIInfant/cohort-1/tpl-MNIInfant_cohort-1_res-1_desc-brain_mask.nii.gz 85 | tpl-MNIInfant/cohort-1/tpl-MNIInfant_cohort-1_res-2_T1w.nii.gz 86 | tpl-MNIInfant/cohort-1/tpl-MNIInfant_cohort-1_res-2_T2w.nii.gz 87 | tpl-MNIInfant/cohort-1/tpl-MNIInfant_cohort-1_res-2_desc-brain_mask.nii.gz 88 | """ 89 | template = 'MNIInfant' 90 | 91 | tf.get(template, cohort=cohort, suffix='T1w') 92 | tf.get(template, cohort=cohort, suffix='T2w') 93 | tf.get(template, cohort=cohort, desc='brain', suffix='mask') 94 | 95 | 96 | def fetch_dhcpAsym(cohort=42): 97 | """ 98 | Expected templates: 99 | 100 | tpl-dhcpAsym_cohort-42_hemi-L_den-32k_sphere.surf.gii 101 | tpl-dhcpAsym_cohort-42_hemi-R_den-32k_sphere.surf.gii 102 | tpl-dhcpAsym_cohort-42_space-fsaverage_hemi-L_den-41k_desc-reg_sphere.surf.gii 103 | tpl-dhcpAsym_cohort-42_space-fsaverage_hemi-R_den-41k_desc-reg_sphere.surf.gii 104 | """ 105 | template = 'dhcpAsym' 106 | 107 | tf.get(template, cohort=cohort, density='32k', desc=None, suffix='sphere') 108 | tf.get(template, cohort=cohort, space='fsaverage', density='41k', desc='reg', suffix='sphere') 109 | 110 | 111 | def fetch_MNI2009(): 112 | template = 'MNI152NLin2009cAsym' 113 | 114 | tf.get(template, resolution=(1, 2), desc=None, suffix='T1w') 115 | tf.get(template, resolution=(1, 2), desc='brain', suffix='mask') 116 | tf.get(template, resolution=2, desc='fMRIPrep', suffix='boldref') 117 | tf.get(template, resolution=1, label='brain', suffix='probseg') 118 | 119 | 120 | def main(): 121 | fetch_MNI6() 122 | fetch_UNCInfant() 123 | fetch_fsaverage() 124 | fetch_fsLR() 125 | fetch_MNIInfant() 126 | fetch_dhcpAsym() 127 | fetch_MNI2009() 128 | 129 | 130 | if __name__ == '__main__': 131 | main() 132 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | requires = 3 | tox>=4 4 | envlist = 5 | py3{10,11,12}-latest 6 | py310-min 7 | py3{10,11,12}-pre 8 | skip_missing_interpreters = true 9 | 10 | # Configuration that allows us to split tests across GitHub runners effectively 11 | [gh-actions] 12 | python = 13 | 3.10: py310 14 | 3.11: py311 15 | 3.12: py312 16 | 17 | [gh-actions:env] 18 | DEPENDS = 19 | min: min 20 | latest: latest 21 | pre: pre 22 | 23 | [testenv] 24 | description = Pytest 
with coverage 25 | labels = test 26 | pip_pre = 27 | pre: true 28 | pass_env = 29 | # getpass.getuser() sources for Windows: 30 | LOGNAME 31 | USER 32 | LNAME 33 | USERNAME 34 | # Pass user color preferences through 35 | PY_COLORS 36 | FORCE_COLOR 37 | NO_COLOR 38 | CLICOLOR 39 | CLICOLOR_FORCE 40 | PYTHON_GIL 41 | extras = test 42 | setenv = 43 | pre: PIP_EXTRA_INDEX_URL=https://pypi.anaconda.org/scientific-python-nightly-wheels/simple 44 | deps = 45 | min: nibabel == 4.0.1 46 | min: nipype == 1.8.5 47 | min: nitransforms == 21.0.0 48 | min: numpy == 1.22 49 | min: psutil == 5.4 50 | min: pybids == 0.15.2 51 | min: tedana == 23.0.2 52 | min: templateflow == 24.1.0 53 | 54 | commands_pre = 55 | python scripts/fetch_templates.py 56 | commands = 57 | pytest --cov-report term-missing --durations=20 --durations-min=1.0 {posargs:-n auto} 58 | 59 | [testenv:style] 60 | description = Check our style guide 61 | labels = check 62 | deps = 63 | ruff 64 | skip_install = true 65 | commands = 66 | ruff check --diff 67 | ruff format --diff 68 | 69 | [testenv:style-fix] 70 | description = Auto-apply style guide to the extent possible 71 | labels = pre-release 72 | deps = 73 | ruff 74 | skip_install = true 75 | commands = 76 | ruff check --fix 77 | ruff format 78 | ruff check --select ISC001 79 | 80 | [testenv:spellcheck] 81 | description = Check spelling 82 | labels = check 83 | deps = 84 | codespell[toml] 85 | skip_install = true 86 | commands = 87 | codespell . {posargs} 88 | 89 | [testenv:build{,-strict}] 90 | labels = 91 | check 92 | pre-release 93 | deps = 94 | build 95 | twine 96 | skip_install = true 97 | set_env = 98 | # Ignore specific known warnings: 99 | # https://github.com/pypa/pip/issues/11684 100 | # https://github.com/pypa/pip/issues/12243 101 | strict: PYTHONWARNINGS=error,once:pkg_resources is deprecated as an API.:DeprecationWarning:pip._internal.metadata.importlib._envs,once:Unimplemented abstract methods {'locate_file'}:DeprecationWarning:pip._internal.metadata.importlib._dists 102 | commands = 103 | python -m build 104 | python -m twine check dist/* 105 | 106 | [testenv:publish] 107 | depends = build 108 | labels = release 109 | deps = 110 | twine 111 | skip_install = true 112 | commands = 113 | python -m twine upload dist/* 114 | -------------------------------------------------------------------------------- /wrapper/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2015-2023, the Nipreps developers. 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 9 | 10 | * Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 13 | 14 | * Neither the name of nibabies nor the names of its 15 | contributors may be used to endorse or promote products derived from 16 | this software without specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 21 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 22 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 23 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 24 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 25 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 26 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 27 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 28 | -------------------------------------------------------------------------------- /wrapper/README.rst: -------------------------------------------------------------------------------- 1 | The NiBabies Docker/Singularity wrapper 2 | --------------------------------------- 3 | 4 | NiBabies is a functional magnetic resonance image pre-processing pipeline 5 | optimized for infant and neonate MRI. It is designed to provide an easily 6 | accessible, state-of-the-art interface that is robust to differences in 7 | scan acquisition protocols and that requires minimal user input, while 8 | providing easily interpretable and comprehensive error and output reporting. 9 | 10 | This is a Python wrapper to run NiBabies. 11 | It generates the appropriate Docker or Singularity commands, providing an 12 | intuitive interface for running the NiBabies workflow in either environment. 13 | Docker or Singularity must be installed, and in the case of Docker, running. 14 | Installations can be checked by running :: 15 | 16 | docker info # Docker 17 | singularity version # Singularity 18 | 19 | Please report any feedback to our `GitHub repository 20 | <https://github.com/nipreps/nibabies>`_ and do not 21 | forget to `credit <https://nibabies.readthedocs.io/en/latest/>`_ all 22 | the authors of software that NiBabies uses.
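The wrapper itself is a small package with no Python dependencies (see
``wrapper/pyproject.toml`` below). Assuming it is published to PyPI under the
project name declared there, ``nibabies-wrapper``, it can be installed with ::

    python -m pip install nibabies-wrapper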
23 | 24 | 25 | Usage 26 | ----- 27 | 28 | Example Docker usage :: 29 | 30 | nibabies-wrapper docker <bids-directory> <output-directory> participant 31 | 32 | Example Singularity usage :: 33 | 34 | nibabies-wrapper singularity <bids-directory> <output-directory> participant -i <path-to-singularity-image> 35 | -------------------------------------------------------------------------------- /wrapper/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling", "hatch-vcs", "nipreps-versions"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "nibabies-wrapper" 7 | description = "A wrapper for generating Docker and Apptainer/Singularity commands for simpler NiBabies usage" 8 | readme = "README.rst" 9 | authors = [{name = "The NiPreps Developers", email = "nipreps@gmail.com"}] 10 | classifiers = [ 11 | "Development Status :: 4 - Beta", 12 | "Intended Audience :: Science/Research", 13 | "Topic :: Scientific/Engineering :: Image Recognition", 14 | "License :: OSI Approved :: Apache Software License", 15 | "License :: OSI Approved :: BSD License", 16 | "Programming Language :: Python :: 2.7", 17 | "Programming Language :: Python :: 3.5", 18 | "Programming Language :: Python :: 3.6", 19 | "Programming Language :: Python :: 3.7", 20 | "Programming Language :: Python :: 3.8", 21 | "Programming Language :: Python :: 3.9", 22 | "Programming Language :: Python :: 3.10", 23 | ] 24 | license = {file = "LICENSE"} 25 | requires-python = ">=2.7" 26 | dependencies = [] 27 | dynamic = ["version"] 28 | 29 | [project.urls] 30 | Documentation = "https://nibabies.readthedocs.io/en/latest/" 31 | "Source Code" = "https://github.com/nipreps/nibabies" 32 | "Bug Tracker" = "https://github.com/nipreps/nibabies/issues" 33 | "Docker Images" = "https://hub.docker.com/r/nipreps/nibabies" 34 | NiPreps = "https://www.nipreps.org/" 35 | 36 | [project.scripts] 37 | nibabies-wrapper = "nibabies_wrapper.__main__:main" 38 | 39 | # 40 | # Hatch configurations 41 | # 42 | 43 | [tool.hatch.build.targets.wheel] 44 | packages = ["src/nibabies_wrapper"] 45 | 46 | [tool.hatch.version] 47 | source = "vcs" 48 | raw-options = { version_scheme = "nipreps-calver", root = ".." } 49 | 50 | [tool.hatch.build.hooks.vcs] 51 | version-file = "src/nibabies_wrapper/_version.py" 52 | template = """\ 53 | # file generated by setuptools_scm 54 | # don't change, don't track in version control 55 | __version__ = version = {version!r} 56 | __version_tuple__ = version_tuple = {version_tuple!r} 57 | """ 58 | 59 | [tool.distutils.bdist_wheel] 60 | universal = true 61 | 62 | # 63 | # Developer tool configurations 64 | # 65 | 66 | [tool.black] 67 | line-length = 99 68 | target-version = ['py39'] 69 | skip-string-normalization = true 70 | 71 | [tool.isort] 72 | profile = 'black' 73 | -------------------------------------------------------------------------------- /wrapper/src/nibabies_wrapper/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipreps/nibabies/1e6afd415ec2776a4bf749628fa1a58646d536db/wrapper/src/nibabies_wrapper/__init__.py --------------------------------------------------------------------------------
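
For orientation, a minimal sketch of the pattern the wrapper's ``__main__.py`` (linked above) implements: translating a BIDS-Apps style invocation into a ``docker run`` command line. The image tag and the container mount points here are illustrative assumptions, not the wrapper's actual defaults:

# Illustrative sketch only -- not the actual nibabies_wrapper implementation.
def build_docker_cmd(bids_dir, output_dir, analysis_level='participant',
                     image='nipreps/nibabies:latest', extra_args=()):
    """Translate a BIDS-Apps style call into a ``docker run`` command."""
    return [
        'docker', 'run', '--rm',
        '-v', f'{bids_dir}:/data:ro',  # input dataset, mounted read-only
        '-v', f'{output_dir}:/out',    # derivatives directory, writable
        image,
        '/data', '/out', analysis_level,  # BIDS-Apps positional arguments
        *extra_args,
    ]

if __name__ == '__main__':
    # Print the command for inspection; hand it to subprocess.run() to execute.
    print(' '.join(build_docker_cmd('/path/to/bids', '/path/to/output')))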