├── LICENSE ├── NipypeBeginnersGuide.pdf ├── README.md ├── homepage ├── Makefile ├── _static │ ├── background.jpg │ ├── data.png │ ├── expert.png │ ├── favicon.ico │ ├── logo │ │ ├── logoAFNI.png │ │ ├── logoANTS.png │ │ ├── logoAnaconda.png │ │ ├── logoFSL.jpg │ │ ├── logoFreeSurfer.jpg │ │ ├── logoMatlab.png │ │ ├── logoNeurodebian.png │ │ ├── logoNipype.png │ │ ├── logoOpenfmri.png │ │ ├── logoPython.png │ │ └── logoSPM12.png │ ├── nipype-beginners-guide-html_logo.png │ ├── nipype-beginners-guide-pdf_logo.png │ ├── nipype.png │ ├── nipype_logo.svg │ ├── nipype_tutorial_logo.svg │ ├── setup.png │ ├── sphinxdoc.css │ ├── start.png │ └── support.png ├── _templates │ ├── layout.html │ └── layout_addition.html ├── conf.py ├── faq.rst ├── firstLevel.rst ├── firstSteps.rst ├── glossary.rst ├── help.rst ├── images │ ├── 2nd_level_colored.png │ ├── BOLDresponse.png │ ├── GLM.png │ ├── GM.gif │ ├── GM.png │ ├── WM.gif │ ├── WM.png │ ├── architecture.png │ ├── artifact_detection.png │ ├── brain.gif │ ├── brain.png │ ├── contrast_acoustic.png │ ├── contrasts.png │ ├── coregistration.png │ ├── datafolder_structure.png │ ├── datastructure.png │ ├── designmatrix.png │ ├── example_node_connection.png │ ├── firstlevel.png │ ├── flow.png │ ├── func2norm.png │ ├── functional_activation.png │ ├── graph_colored.png │ ├── graph_colored.svg │ ├── graph_exec_detailed.png │ ├── graph_exec_detailed.svg │ ├── graph_exec_simple.png │ ├── graph_exec_simple.svg │ ├── graph_flat_detailed.png │ ├── graph_flat_detailed.svg │ ├── graph_flat_simple.png │ ├── graph_flat_simple.svg │ ├── graph_hierarchical.png │ ├── graph_hierarchical.svg │ ├── graph_orig_detailed.png │ ├── graph_orig_detailed.svg │ ├── graph_orig_notsimple.png │ ├── graph_orig_notsimple.svg │ ├── graph_orig_simple.png │ ├── graph_orig_simple.svg │ ├── highpassfilter.png │ ├── kernel.png │ ├── metaflow.png │ ├── movement.gif │ ├── movement.png │ ├── neuroimaging.png │ ├── neuroimaging.pptx │ ├── neuroimaging1.png │ ├── 
neuroimaging2.png │ ├── neuroimaging_nipype.dot │ ├── neuroimaging_nipype.png │ ├── neuroimaging_nipype.svg │ ├── norm_ants_colored_complete.png │ ├── norm_ants_colored_partial.png │ ├── norm_spm_colored.png │ ├── normalization.png │ ├── normvsnorm.png │ ├── pvalues.png │ ├── realignment_bad.png │ ├── realignment_good.png │ ├── rotation_40.gif │ ├── rotation_40.png │ ├── segmentation.gif │ ├── segmentation.png │ ├── slicetiming.png │ ├── slicetiming_small.gif │ ├── smoothed.png │ ├── stimuli.png │ ├── subj2norm.png │ ├── subjects2normtemp.png │ ├── time.png │ ├── tractography.png │ ├── tractography_small.gif │ └── voxel.png ├── index.rst ├── installation.rst ├── links.rst ├── neuroimaging.rst ├── nipype.rst ├── nipypeAndNeuroimaging.rst ├── normalize.rst ├── prepareData.rst ├── secondLevel.rst ├── tableofcontent.rst ├── updateHomepage.sh └── visualizePipeline.rst └── scripts ├── example_fMRI_1_first_level.py ├── example_fMRI_2_normalize_ANTS_complete.py ├── example_fMRI_2_normalize_ANTS_partial.py ├── example_fMRI_2_normalize_SPM.py ├── example_fMRI_3_second_level.py ├── tutorial_1_create_dataset.sh ├── tutorial_2_recon_python.py ├── tutorial_2_recon_shell.sh └── tutorial_3_first_steps.py /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2012, Michael Notter and the nipype-beginner-s-guide developers 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 
15 | 16 | * Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
30 | -------------------------------------------------------------------------------- /NipypeBeginnersGuide.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/NipypeBeginnersGuide.pdf -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Nipype Beginner's Guide 2 | 3 | [![GitHub issues](https://img.shields.io/github/issues/miykael/nipype-beginner-s-guide.svg)](https://github.com/miykael/nipype-beginner-s-guide/issues/) 4 | [![GitHub pull-requests](https://img.shields.io/github/issues-pr/miykael/nipype-beginner-s-guide.svg)](https://github.com/miykael/nipype-beginner-s-guide/pulls/) 5 | [![GitHub contributors](https://img.shields.io/github/contributors/miykael/nipype-beginner-s-guide.svg)](https://GitHub.com/miykael/nipype-beginner-s-guide/graphs/contributors/) 6 | [![GitHub Commits](https://github-basic-badges.herokuapp.com/commits/miykael/nipype-beginner-s-guide.svg)](https://github.com/miykael/nipype-beginner-s-guide/commits/master) 7 | [![GitHub size](https://github-size-badge.herokuapp.com/miykael/nipype-beginner-s-guide.svg)](https://github.com/miykael/nipype-beginner-s-guide/archive/master.zip) 8 | [![GitHub HitCount](http://hits.dwyl.io/miykael/nipype-beginner-s-guide.svg)](http://hits.dwyl.io/miykael/nipype-beginner-s-guide) 9 | 10 | 11 | This homepage provides a beginner's guide for Nipype, a user-friendly software written in Python that provides a uniform interface to existing neuroimaging softwares like SPM, FSL, FreeSurfer, Camino, AFNI, Slicer, etc. 
12 | 13 | 14 | ## Website 15 | 16 | Nipype Beginner's Guide is located here: 17 | http://miykael.github.com/nipype-beginner-s-guide/ 18 | 19 | Nipype website is located here: 20 | http://nipy.sourceforge.net/nipype/ 21 | 22 | 23 | ## Documentation 24 | 25 | A PDF-Version of this beginner's guide can be found here: 26 | http://github.com/miykael/nipype-beginner-s-guide/blob/master/NipypeBeginnersGuide.pdf?raw=true 27 | 28 | 29 | ## Examples 30 | 31 | All examples and codes which are used in the guide can be found here: 32 | http://github.com/miykael/nipype-beginner-s-guide/blob/master/scripts 33 | 34 | 35 | ## Contact 36 | 37 | You can contact me on: miykaelnotter@gmail.com 38 | 39 | 40 | ## License 41 | 42 | This beginner's guide was mostly written by Michael Notter. But some of its ideas and content is from the original Nipype homepage (http://nipy.sourceforge.net/nipype/) and other various homepages (e.g. http://mindhive.mit.edu/imaging). 43 | 44 | Copyright (c) 2009-2014, NIPY Developers 45 | All rights reserved. 46 | 47 | Redistribution and use in source and binary forms, with or without 48 | modification, are permitted provided that the following conditions are 49 | met: 50 | 51 | * Redistributions of source code must retain the above copyright 52 | notice, this list of conditions and the following disclaimer. 53 | 54 | * Redistributions in binary form must reproduce the above 55 | copyright notice, this list of conditions and the following 56 | disclaimer in the documentation and/or other materials provided 57 | with the distribution. 58 | 59 | * Neither the name of the NIPY Developers nor the names of any 60 | contributors may be used to endorse or promote products derived 61 | from this software without specific prior written permission. 
62 | 63 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 64 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 65 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 66 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 67 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 68 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 69 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 70 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 71 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 72 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 73 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 74 | -------------------------------------------------------------------------------- /homepage/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 21 | 22 | .PHONY: help 23 | help: 24 | @echo "Please use \`make ' where is one of" 25 | @echo " html to make standalone HTML files" 26 | @echo " dirhtml to make HTML files named index.html in directories" 27 | @echo " singlehtml to make a single large HTML file" 28 | @echo " pickle to make pickle files" 29 | @echo " json to make JSON files" 30 | @echo " htmlhelp to make HTML files and a HTML help project" 31 | @echo " qthelp to make HTML files and a qthelp project" 32 | @echo " applehelp to make an Apple Help Book" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | @echo " coverage to run coverage check of the documentation (if enabled)" 49 | 50 | .PHONY: clean 51 | clean: 52 | rm -rf $(BUILDDIR)/* 53 | 54 | .PHONY: html 55 | html: 56 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 57 | @echo 58 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 
59 | 60 | .PHONY: dirhtml 61 | dirhtml: 62 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 63 | @echo 64 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 65 | 66 | .PHONY: singlehtml 67 | singlehtml: 68 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 69 | @echo 70 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 71 | 72 | .PHONY: pickle 73 | pickle: 74 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 75 | @echo 76 | @echo "Build finished; now you can process the pickle files." 77 | 78 | .PHONY: json 79 | json: 80 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 81 | @echo 82 | @echo "Build finished; now you can process the JSON files." 83 | 84 | .PHONY: htmlhelp 85 | htmlhelp: 86 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 87 | @echo 88 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 89 | ".hhp project file in $(BUILDDIR)/htmlhelp." 90 | 91 | .PHONY: qthelp 92 | qthelp: 93 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 94 | @echo 95 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 96 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 97 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/trhahathath.qhcp" 98 | @echo "To view the help file:" 99 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/trhahathath.qhc" 100 | 101 | .PHONY: applehelp 102 | applehelp: 103 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp 104 | @echo 105 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." 106 | @echo "N.B. You won't be able to view it unless you put it in" \ 107 | "~/Library/Documentation/Help or install it in your application" \ 108 | "bundle." 109 | 110 | .PHONY: devhelp 111 | devhelp: 112 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 113 | @echo 114 | @echo "Build finished." 
115 | @echo "To view the help file:" 116 | @echo "# mkdir -p $$HOME/.local/share/devhelp/trhahathath" 117 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/trhahathath" 118 | @echo "# devhelp" 119 | 120 | .PHONY: epub 121 | epub: 122 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 123 | @echo 124 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 125 | 126 | .PHONY: latex 127 | latex: 128 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 129 | @echo 130 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 131 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 132 | "(use \`make latexpdf' here to do that automatically)." 133 | 134 | .PHONY: latexpdf 135 | latexpdf: 136 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 137 | @echo "Running LaTeX files through pdflatex..." 138 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 139 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 140 | 141 | .PHONY: latexpdfja 142 | latexpdfja: 143 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 144 | @echo "Running LaTeX files through platex and dvipdfmx..." 145 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 146 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 147 | 148 | .PHONY: text 149 | text: 150 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 151 | @echo 152 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 153 | 154 | .PHONY: man 155 | man: 156 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 157 | @echo 158 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 159 | 160 | .PHONY: texinfo 161 | texinfo: 162 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 163 | @echo 164 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 165 | @echo "Run \`make' in that directory to run these through makeinfo" \ 166 | "(use \`make info' here to do that automatically)." 
167 | 168 | .PHONY: info 169 | info: 170 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 171 | @echo "Running Texinfo files through makeinfo..." 172 | make -C $(BUILDDIR)/texinfo info 173 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 174 | 175 | .PHONY: gettext 176 | gettext: 177 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 178 | @echo 179 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 180 | 181 | .PHONY: changes 182 | changes: 183 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 184 | @echo 185 | @echo "The overview file is in $(BUILDDIR)/changes." 186 | 187 | .PHONY: linkcheck 188 | linkcheck: 189 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 190 | @echo 191 | @echo "Link check complete; look for any errors in the above output " \ 192 | "or in $(BUILDDIR)/linkcheck/output.txt." 193 | 194 | .PHONY: doctest 195 | doctest: 196 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 197 | @echo "Testing of doctests in the sources finished, look at the " \ 198 | "results in $(BUILDDIR)/doctest/output.txt." 199 | 200 | .PHONY: coverage 201 | coverage: 202 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage 203 | @echo "Testing of coverage in the sources finished, look at the " \ 204 | "results in $(BUILDDIR)/coverage/python.txt." 205 | 206 | .PHONY: xml 207 | xml: 208 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 209 | @echo 210 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 211 | 212 | .PHONY: pseudoxml 213 | pseudoxml: 214 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 215 | @echo 216 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
217 | -------------------------------------------------------------------------------- /homepage/_static/background.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/background.jpg -------------------------------------------------------------------------------- /homepage/_static/data.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/data.png -------------------------------------------------------------------------------- /homepage/_static/expert.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/expert.png -------------------------------------------------------------------------------- /homepage/_static/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/favicon.ico -------------------------------------------------------------------------------- /homepage/_static/logo/logoAFNI.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/logo/logoAFNI.png -------------------------------------------------------------------------------- /homepage/_static/logo/logoANTS.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/logo/logoANTS.png -------------------------------------------------------------------------------- /homepage/_static/logo/logoAnaconda.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/logo/logoAnaconda.png -------------------------------------------------------------------------------- /homepage/_static/logo/logoFSL.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/logo/logoFSL.jpg -------------------------------------------------------------------------------- /homepage/_static/logo/logoFreeSurfer.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/logo/logoFreeSurfer.jpg -------------------------------------------------------------------------------- /homepage/_static/logo/logoMatlab.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/logo/logoMatlab.png -------------------------------------------------------------------------------- /homepage/_static/logo/logoNeurodebian.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/logo/logoNeurodebian.png -------------------------------------------------------------------------------- 
/homepage/_static/logo/logoNipype.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/logo/logoNipype.png -------------------------------------------------------------------------------- /homepage/_static/logo/logoOpenfmri.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/logo/logoOpenfmri.png -------------------------------------------------------------------------------- /homepage/_static/logo/logoPython.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/logo/logoPython.png -------------------------------------------------------------------------------- /homepage/_static/logo/logoSPM12.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/logo/logoSPM12.png -------------------------------------------------------------------------------- /homepage/_static/nipype-beginners-guide-html_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/nipype-beginners-guide-html_logo.png -------------------------------------------------------------------------------- /homepage/_static/nipype-beginners-guide-pdf_logo.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/nipype-beginners-guide-pdf_logo.png -------------------------------------------------------------------------------- /homepage/_static/nipype.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/nipype.png -------------------------------------------------------------------------------- /homepage/_static/setup.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/setup.png -------------------------------------------------------------------------------- /homepage/_static/sphinxdoc.css: -------------------------------------------------------------------------------- 1 | /* 2 | * sphinxdoc.css_t 3 | * ~~~~~~~~~~~~~~~ 4 | * 5 | * Sphinx stylesheet -- sphinxdoc theme. Originally created by 6 | * Armin Ronacher for Werkzeug. 7 | * 8 | * :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS. 9 | * :license: BSD, see LICENSE for details. 
10 | * 11 | */ 12 | 13 | @import url("basic.css"); 14 | 15 | /* -- page layout ----------------------------------------------------------- */ 16 | 17 | body { 18 | font-family: 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', 19 | 'Verdana', sans-serif; 20 | font-size: 14px; 21 | letter-spacing: -0.01em; 22 | line-height: 150%; 23 | text-align: center; 24 | color: black; 25 | padding: 0; 26 | margin: 2px auto; 27 | min-width: 1080px; 28 | max-width: 1240px; 29 | 30 | background-color: #404040; 31 | background: url(background.jpg) #404040; 32 | } 33 | 34 | body:before { 35 | content: ""; 36 | position: fixed; 37 | top: -10px; 38 | left: 0; 39 | width: 100%; 40 | height: 10px; 41 | -webkit-box-shadow: 0px 0px 10px black; 42 | -moz-box-shadow: 0px 0px 10px black; 43 | box-shadow: 0px 0px 10px black; 44 | z-index: 100; 45 | } 46 | 47 | div.document { 48 | background-color: white; 49 | text-align: justify; 50 | 51 | background-image: url(contents.png); 52 | background-repeat: repeat-x; 53 | box-shadow: inset -10px 10px 10px -8px #A2A2A2, 54 | inset 10px -10px 10px -8px #A2A2A2; 55 | } 56 | 57 | div.document img { 58 | -o-box-shadow: 4px 4px 8px #555; 59 | -icab-box-shadow: 4px 4px 8px #555; 60 | -moz-box-shadow: 4px 4px 8px #555; 61 | -webkit-box-shadow: 4px 4px 8px #555; 62 | box-shadow: 4px 4px 8px #555; 63 | } 64 | 65 | div.document img:hover { 66 | -o-box-shadow: 5px 5px 8px #444; 67 | -icab-box-shadow: 5px 5px 8px #444; 68 | -moz-box-shadow: 5px 5px 8px #444; 69 | -webkit-box-shadow: 5px 5px 8px #444; 70 | box-shadow: 5px 5px 8px #444; 71 | } 72 | 73 | div img.align-left, div img.align-right, div img.align-center { 74 | margin-top: 10px; 75 | -o-box-shadow: 5px 5px 8px white; 76 | -icab-box-shadow: 5px 5px 8px white; 77 | -moz-box-shadow: 5px 5px 8px white; 78 | -webkit-box-shadow: 5px 5px 8px white; 79 | box-shadow: 5px 5px 8px white; 80 | } 81 | 82 | div.bodywrapper { 83 | margin: 0 240px 0 0; 84 | border-right: 1px solid #404040; 85 | } 86 | 87 | div.body { 88 
| margin: 0; 89 | padding: 0.5em 20px 20px 20px; 90 | } 91 | 92 | div.related { 93 | font-size: 1em; 94 | color: #2400d9; 95 | } 96 | 97 | div.related ul { 98 | background-image: url(navigation.png); 99 | height: 2em; 100 | border-top: 1px solid #404040; 101 | border-bottom: 1px solid #404040; 102 | } 103 | 104 | div.related ul li { 105 | margin: 0; 106 | padding: 0; 107 | height: 2em; 108 | float: left; 109 | } 110 | 111 | div.related ul li.right { 112 | float: right; 113 | margin-right: 5px; 114 | } 115 | 116 | div.related ul li a { 117 | margin: 0; 118 | padding: 0 5px 0 5px; 119 | line-height: 2em; 120 | color: #EE4B00; 121 | } 122 | 123 | div.related ul li a:hover { 124 | color: #2400D9; 125 | text-shadow: 0.1em 0.1em 0.05em #999; 126 | transition: all 0.2s ease-in; 127 | } 128 | 129 | div.sphinxsidebarwrapper { 130 | padding: 0px; 131 | } 132 | 133 | div.sphinxsidebar { 134 | margin: 0; 135 | padding: 0.5em 15px 15px 0; 136 | width: 210px; 137 | float: right; 138 | font-size: 14px; 139 | text-align: left; 140 | } 141 | 142 | div.sphinxsidebar h3, div.sphinxsidebar h4 { 143 | margin: 1em 0 0.5em 0; 144 | font-size: 1em; 145 | padding: 0.1em 0 0.1em 0.5em; 146 | color: black; 147 | border: 1px solid #000000; 148 | background-color: #D2D2D2; 149 | -o-box-shadow: 1px 1px 2px #555; 150 | -icab-box-shadow: 1px 1px 2px #555; 151 | -moz-box-shadow: 1px 1px 2px #555; 152 | -webkit-box-shadow: 1px 1px 2px #555; 153 | box-shadow: 1px 1px 2px #555; 154 | } 155 | 156 | div.sphinxsidebar h3 a { 157 | color: black; 158 | } 159 | 160 | div.sphinxsidebar ul { 161 | padding-left: 1.5em; 162 | margin-top: 7px; 163 | padding: 0; 164 | line-height: 130%; 165 | } 166 | 167 | div.sphinxsidebar ul ul { 168 | margin-left: 20px; 169 | } 170 | 171 | div.sphinxsidebar input { 172 | border: 1px solid #2400D9; 173 | } 174 | 175 | div.footer { 176 | color: white; 177 | padding: 3px 8px 3px 0; 178 | clear: both; 179 | font-size: 0.8em; 180 | text-align: right; 181 | } 182 | 183 | div.footer 
a { 184 | color: #EE4B00; 185 | } 186 | 187 | div.footer a:hover { 188 | color: #2400D9; 189 | text-shadow: 0.1em 0.1em 0.1em white; 190 | transition: all 0.2s ease-in; 191 | } 192 | 193 | /* -- body styles ----------------------------------------------------------- */ 194 | 195 | p { 196 | margin: 0.8em 0 0.5em 0; 197 | -webkit-hyphens: auto; 198 | -moz-hyphens: auto; 199 | hyphens: auto; 200 | } 201 | 202 | a { 203 | color: #EE4B00; 204 | text-decoration: none; 205 | } 206 | 207 | a:hover { 208 | color: #2400D9; 209 | text-shadow: 0.1em 0.1em 0.05em #999; 210 | transition: all 0.2s ease-in; 211 | } 212 | 213 | 214 | div.body a { 215 | text-decoration: none; 216 | } 217 | 218 | h1 { 219 | margin: 0; 220 | padding: 0.8em 0 0.4em 0; 221 | font-size: 2em; 222 | color: black; 223 | text-shadow: 0.05em 0.05em 0.02em #EE4B00; 224 | } 225 | 226 | h2 { 227 | margin: 1em 0 0 0; 228 | font-size: 1.5em; 229 | padding: 0; 230 | color: #EE4B00; 231 | text-shadow: 0.04em 0.04em 0.02em black; 232 | } 233 | 234 | h3 { 235 | margin: 1em 0 0 0; 236 | font-size: 1.3em; 237 | color: #2400D9 238 | } 239 | 240 | h4 { 241 | margin: 1em 0 0 0; 242 | font-size: 1.1em; 243 | color: green; 244 | } 245 | 246 | h5 { 247 | margin: 1em 0 0 0; 248 | font-size: 1em; 249 | color: black; 250 | } 251 | 252 | div.body h1 a, div.body h2 a, div.body h3 a, div.body h4 a, div.body h5 a, div.body h6 a { 253 | color: black!important; 254 | } 255 | 256 | h1 a.anchor, h2 a.anchor, h3 a.anchor, h4 a.anchor, h5 a.anchor, h6 a.anchor { 257 | display: none; 258 | margin: 0 0 0 0.3em; 259 | padding: 0 0.2em 0 0.2em; 260 | color: #aaa!important; 261 | } 262 | 263 | h1:hover a.anchor, h2:hover a.anchor, h3:hover a.anchor, h4:hover a.anchor, 264 | h5:hover a.anchor, h6:hover a.anchor { 265 | display: inline; 266 | } 267 | 268 | h1 a.anchor:hover, h2 a.anchor:hover, h3 a.anchor:hover, h4 a.anchor:hover, 269 | h5 a.anchor:hover, h6 a.anchor:hover { 270 | color: #777; 271 | background-color: #eee; 272 | } 273 | 274 | 
a.headerlink { 275 | color: red!important; 276 | font-size: 1em; 277 | margin-left: 6px; 278 | padding: 0 4px 0 4px; 279 | text-decoration: none!important; 280 | } 281 | 282 | a.headerlink:hover { 283 | background-color: white; 284 | transition: all 0.2s ease-in-out; 285 | color: #EE4B00!important; 286 | } 287 | 288 | cite, code, tt { 289 | font-family: 'Consolas', 'Deja Vu Sans Mono', 290 | 'Bitstream Vera Sans Mono', monospace; 291 | font-size: 0.95em; 292 | letter-spacing: 0.01em; 293 | } 294 | 295 | tt { 296 | background-color: #F2F2F2; 297 | border: 1px solid #ddd; 298 | color: black; 299 | } 300 | 301 | tt.descname, tt.descclassname, tt.xref { 302 | border: 0; 303 | } 304 | 305 | tt.xref.download { 306 | color: #EE4B00; 307 | } 308 | 309 | tt.xref.download:hover { 310 | color: #2400D9; 311 | } 312 | 313 | hr { 314 | border: 1px solid #abc; 315 | margin: 2em; 316 | } 317 | 318 | a tt { 319 | border: 0; 320 | color: #EE4B00; 321 | } 322 | 323 | a tt:hover { 324 | color: #2491CF; 325 | } 326 | 327 | pre { 328 | font-family: 'Consolas', 'Deja Vu Sans Mono', 329 | 'Bitstream Vera Sans Mono', monospace; 330 | font-size: 0.95em; 331 | letter-spacing: 0.015em; 332 | line-height: 120%; 333 | padding: 0.5em; 334 | border: 1px solid #404040; 335 | background-color: #f8f8f8; 336 | } 337 | 338 | pre a { 339 | color: inherit; 340 | text-decoration: underline; 341 | } 342 | 343 | td.linenos pre { 344 | padding: 0.5em 0; 345 | } 346 | 347 | td.code pre { 348 | -o-box-shadow: 1px 1px 2px #555; 349 | -icab-box-shadow: 1px 1px 2px #555; 350 | -khtml-box-shadow: 1px 1px 2px #555; 351 | -moz-box-shadow: 1px 1px 2px #555; 352 | -webkit-box-shadow: 1px 1px 2px #555; 353 | box-shadow: 1px 1px 2px #555; 354 | } 355 | 356 | td.code pre:hover { 357 | -o-box-shadow: 2px 2px 3px #444; 358 | -icab-box-shadow: 2px 2px 3px #444; 359 | -khtml-box-shadow: 2px 2px 3px #444; 360 | -moz-box-shadow: 2px 2px 3px #444; 361 | -webkit-box-shadow: 2px 2px 3px #444; 362 | box-shadow: 2px 2px 3px #444; 
363 | } 364 | 365 | div.quotebar { 366 | background-color: #f8f8f8; 367 | max-width: 250px; 368 | float: right; 369 | padding: 2px 7px; 370 | border: 1px solid #ccc; 371 | } 372 | 373 | div.topic { 374 | background-color: #f8f8f8; 375 | } 376 | 377 | table { 378 | border-collapse: collapse; 379 | margin: 0 -0.5em 0 -0.5em; 380 | } 381 | 382 | table td, table th { 383 | padding: 0.2em 0.5em 0.2em 0.5em; 384 | } 385 | 386 | table.docutils td, table.docutils th { 387 | border-bottom: 10px solid transparent; 388 | border-collapse: collapse; 389 | } 390 | 391 | table.docutils img, table.docutils img:hover { 392 | margin-top: 20px; 393 | margin-left: 10px; 394 | -o-box-shadow: 0 0 0 transparent; 395 | -icab-box-shadow: 0 0 0 transparent; 396 | -moz-box-shadow: 0 0 0 transparent; 397 | -webkit-box-shadow: 0 0 0 transparent; 398 | box-shadow: 0 0 0 transparent; 399 | } 400 | 401 | div.admonition, div.warning { 402 | font-size: 0.9em; 403 | margin: 1em 0 1em 0; 404 | border: 1px solid #404040; 405 | background-color: #F8F8F8; 406 | padding: 0; 407 | -o-box-shadow: 2px 2px 4px #555; 408 | -icab-box-shadow: 2px 2px 4px #555; 409 | -khtml-box-shadow: 2px 2px 4px #555; 410 | -moz-box-shadow: 2px 2px 4px #555; 411 | -webkit-box-shadow: 2px 2px 4px #555; 412 | box-shadow: 2px 2px 4px #555; 413 | } 414 | 415 | div.admonition:hover, div.warning:hover { 416 | -o-box-shadow: 3px 3px 5px #444; 417 | -icab-box-shadow: 3px 3px 5px #444; 418 | -khtml-box-shadow: 3px 3px 5px #444; 419 | -moz-box-shadow: 3px 3px 5px #444; 420 | -webkit-box-shadow: 3px 3px 5px #444; 421 | box-shadow: 3px 3px 5px #444; 422 | } 423 | 424 | div.admonition p, div.warning p { 425 | margin: 0.5em 1em 0.5em 1em; 426 | padding: 0; 427 | } 428 | 429 | div.admonition pre, div.warning pre { 430 | margin: 0.4em 1em 0.4em 1em; 431 | } 432 | 433 | div.admonition p.admonition-title, 434 | div.warning p.admonition-title { 435 | margin: 0; 436 | padding: 0.1em 0 0.1em 0.5em; 437 | color: white; 438 | border-bottom: 1px 
solid #404040; 439 | font-weight: bold; 440 | background-color: #404040; 441 | } 442 | 443 | div.warning { 444 | border: 1px solid #940000; 445 | } 446 | 447 | div.warning p.admonition-title { 448 | background-color: #CF0000; 449 | border-bottom-color: #940000; 450 | } 451 | 452 | div.admonition ul, div.admonition ol, 453 | div.warning ul, div.warning ol { 454 | margin: 0.1em 0.5em 0.5em 3em; 455 | padding: 0; 456 | } 457 | 458 | div.important { 459 | border: 2px solid black; 460 | } 461 | 462 | div.important p.admonition-title { 463 | padding: 0.4em 0 0.1em 0.5em; 464 | background-color: #EE4B00; 465 | border-bottom-color: black; 466 | font-size: 2em; 467 | color: black; 468 | height: 30px; 469 | } 470 | 471 | div.important p.last { 472 | font-size: 1.25em; 473 | 474 | } 475 | 476 | div.hint { 477 | border: 1px solid black; 478 | } 479 | 480 | div.hint p.admonition-title { 481 | background-color: #2400D9; 482 | border-bottom-color: black; 483 | color: white; 484 | } 485 | 486 | div.versioninfo { 487 | margin: 1em 0 0 0; 488 | border: 1px solid #ccc; 489 | background-color: #DDEAF0; 490 | padding: 8px; 491 | line-height: 1.3em; 492 | font-size: 0.9em; 493 | } 494 | 495 | .viewcode-back { 496 | font-family: 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', 497 | 'Verdana', sans-serif; 498 | } 499 | 500 | div.viewcode-block:target { 501 | background-color: #f4debf; 502 | border-top: 1px solid #ac9; 503 | border-bottom: 1px solid #ac9; 504 | } 505 | 506 | div.highlight { 507 | background-color: #f8f8f8; 508 | } 509 | 510 | div.highlight-python pre{ 511 | -o-box-shadow: 1px 1px 2px #555; 512 | -icab-box-shadow: 1px 1px 2px #555; 513 | -moz-box-shadow: 1px 1px 2px #555; 514 | -webkit-box-shadow: 1px 1px 2px #555; 515 | box-shadow: 1px 1px 2px #555; 516 | } 517 | 518 | div.highlight-python pre:hover{ 519 | -o-box-shadow: 2px 2px 3px #444; 520 | -icab-box-shadow: 2px 2px 3px #444; 521 | -moz-box-shadow: 2px 2px 3px #444; 522 | -webkit-box-shadow: 2px 2px 3px #444; 523 | 
box-shadow: 2px 2px 3px #444; 524 | } 525 | 526 | 527 | div.highlight-none pre { 528 | -o-box-shadow: 1px 1px 2px #555; 529 | -icab-box-shadow: 1px 1px 2px #555; 530 | -moz-box-shadow: 1px 1px 2px #555; 531 | -webkit-box-shadow: 1px 1px 2px #555; 532 | box-shadow: 1px 1px 2px #555; 533 | } 534 | 535 | div.highlight-none pre:hover { 536 | -o-box-shadow: 2px 2px 3px #444; 537 | -icab-box-shadow: 2px 2px 3px #444; 538 | -moz-box-shadow: 2px 2px 3px #444; 539 | -webkit-box-shadow: 2px 2px 3px #444; 540 | box-shadow: 2px 2px 3px #444; 541 | } 542 | 543 | 544 | 545 | /* -- Header with Text ------------------- */ 546 | 547 | .headertext { 548 | width:1080px; 549 | height:100px; 550 | font-size: 70px; 551 | letter-spacing: 0.02em; 552 | font-weight: bold; 553 | line-height: 110%; 554 | text-align: left; 555 | 556 | } 557 | 558 | .headertext a { 559 | color: white; 560 | text-shadow: 1px 0px 1px #999, 561 | 1px 1px 1px #888, 562 | 2px 1px 1px #777, 563 | 2px 2px 1px #666, 564 | 3px 2px 1px #555, 565 | 3px 3px 1px #444, 566 | 4px 3px 1px #333, 567 | 4px 4px 1px #222, 568 | 5px 4px 1px #111, 569 | 5px 5px 1px #000, 570 | 6px 6px 3px #2400D9, 571 | 6px 6px 10px rgba(255,255,255,0.4), 572 | 6px 6px 20px rgba(255,255,255,0.3); 573 | } 574 | 575 | .headertext a:hover { 576 | transition: all 0.2s ease-in; 577 | text-shadow: 1px 0px 1px #999, 578 | 1px 1px 1px #888, 579 | 2px 1px 1px #777, 580 | 2px 2px 1px #666, 581 | 3px 2px 1px #555, 582 | 3px 3px 1px #444, 583 | 4px 3px 1px #333, 584 | 4px 4px 1px #222, 585 | 5px 4px 1px #111, 586 | 5px 5px 1px #000, 587 | 6px 6px 3px #EE4B00, 588 | 6px 6px 10px rgba(255,255,255,0.4), 589 | 6px 6px 20px rgba(255,255,255,0.3); 590 | } 591 | 592 | .graphviz img { 593 | width: 810px; 594 | } 595 | 596 | div.leftside { 597 | float: left; 598 | } 599 | 600 | div.rightside { 601 | margin-left: 110px; 602 | } 603 | 604 | 605 | /* -- Mouse Highlight Color ------------------- */ 606 | 607 | ::-moz-selection 608 | { 609 | background-color: black; 610 | 
color:#EE4B00; 611 | } 612 | 613 | ::-webkit-selection 614 | { 615 | background-color: black; 616 | color:#EE4B00; 617 | } 618 | 619 | ::selection 620 | { 621 | background-color: black; 622 | color:#EE4B00; 623 | } 624 | -------------------------------------------------------------------------------- /homepage/_static/start.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/start.png -------------------------------------------------------------------------------- /homepage/_static/support.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/_static/support.png -------------------------------------------------------------------------------- /homepage/_templates/layout.html: -------------------------------------------------------------------------------- 1 | {# 2 | basic/layout.html 3 | ~~~~~~~~~~~~~~~~~ 4 | 5 | Master layout template for Sphinx themes. 6 | 7 | :copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS. 8 | :license: BSD, see LICENSE for details. 9 | #} 10 | {%- block doctype -%} 11 | 13 | {%- endblock %} 14 | {%- set reldelim1 = reldelim1 is not defined and ' »' or reldelim1 %} 15 | {%- set reldelim2 = reldelim2 is not defined and ' |' or reldelim2 %} 16 | {%- set render_sidebar = (not embedded) and (not theme_nosidebar|tobool) and 17 | (sidebars != []) %} 18 | {%- set url_root = pathto('', 1) %} 19 | {# XXX necessary? 
#} 20 | {%- if url_root == '#' %}{% set url_root = '' %}{% endif %} 21 | {%- if not embedded and docstitle %} 22 | {%- set titlesuffix = " — "|safe + docstitle|e %} 23 | {%- else %} 24 | {%- set titlesuffix = "" %} 25 | {%- endif %} 26 | {% set title = 'Nipype Beginner\'s Guide' %} 27 | 28 | {%- macro relbar() %} 29 | 53 | {%- endmacro %} 54 | 55 | {%- macro sidebar() %} 56 | {%- if render_sidebar %} 57 |
58 |
59 | {%- block sidebarlogo %} 60 | {%- if logo %} 61 | 64 | {%- endif %} 65 | {%- endblock %} 66 | {%- if sidebars != None %} 67 | {#- new style sidebar: explicitly include/exclude templates #} 68 | {%- for sidebartemplate in sidebars %} 69 | {%- include sidebartemplate %} 70 | {%- endfor %} 71 | {%- else %} 72 | {#- old style sidebars: using blocks -- should be deprecated #} 73 | {%- block sidebartoc %} 74 | {%- include "localtoc.html" %} 75 | {%- endblock %} 76 | {%- block sidebarrel %} 77 | {%- include "relations.html" %} 78 | {%- endblock %} 79 | {%- block sidebarsourcelink %} 80 | {%- include "sourcelink.html" %} 81 | {%- endblock %} 82 | {%- if customsidebar %} 83 | {%- include customsidebar %} 84 | {%- endif %} 85 | {%- block sidebarsearch %} 86 | {%- include "searchbox.html" %} 87 | {%- endblock %} 88 | {%- endif %} 89 |
90 |
91 | {%- endif %} 92 | {%- endmacro %} 93 | 94 | {%- macro script() %} 95 | 104 | {%- for scriptfile in script_files %} 105 | 106 | {%- endfor %} 107 | {%- endmacro %} 108 | 109 | {%- macro css() %} 110 | 111 | 112 | {%- for cssfile in css_files %} 113 | 114 | {%- endfor %} 115 | {%- endmacro %} 116 | 117 | 118 | 119 | 120 | {{ metatags }} 121 | {%- block htmltitle %} 122 | {{ title|striptags|e }}{{ titlesuffix }} 123 | {%- endblock %} 124 | {{ css() }} 125 | {%- if not embedded %} 126 | {{ script() }} 127 | {%- if use_opensearch %} 128 | 131 | {%- endif %} 132 | {%- if favicon %} 133 | 134 | {%- endif %} 135 | {%- endif %} 136 | {%- block linktags %} 137 | {%- if hasdoc('about') %} 138 | 139 | {%- endif %} 140 | {%- if hasdoc('genindex') %} 141 | 142 | {%- endif %} 143 | {%- if hasdoc('search') %} 144 | 145 | {%- endif %} 146 | {%- if hasdoc('copyright') %} 147 | 148 | {%- endif %} 149 | 150 | {%- if parents %} 151 | 152 | {%- endif %} 153 | {%- if next %} 154 | 155 | {%- endif %} 156 | {%- if prev %} 157 | 158 | {%- endif %} 159 | {%- endblock %} 160 | {%- block extrahead %} 161 | 162 | 172 | {% endblock %} 173 | 174 | 175 | {% block header %} 176 | 180 | {% endblock %} 181 | 182 | {%- block relbar1 %}{{ relbar() }}{% endblock %} 183 | 184 | {%- block content %} 185 | {% block sidebar1 %}{{ sidebar() }}{% endblock %} 186 |
187 | {%- block document %} 188 |
189 | {%- if render_sidebar %} 190 |
191 | {%- endif %} 192 |
193 | 194 | {% block body %} 195 | {{ super() }} 196 | {% endblock %} 197 | 198 |
199 | {%- if render_sidebar %} 200 |
201 | {%- endif %} 202 |
203 | 204 | {%- endblock %} 205 | 206 | {%- block sidebar2 %}{% endblock %} 207 |
208 |
209 | {%- endblock %} 210 | 211 | {%- block relbar2 %}{{ relbar() }}{% endblock %} 212 | 213 | {%- block footer %} 214 | 234 | {%- endblock %} 235 | 236 | 237 | -------------------------------------------------------------------------------- /homepage/_templates/layout_addition.html: -------------------------------------------------------------------------------- 1 | {% extends "!layout.html" %} 2 | {% set title = 'Nipype Beginner\'s Guide' %} 3 | 4 | {% block extrahead %} 5 | {{ super() }} 6 | 7 | 17 | 18 | {% endblock %} 19 | 20 | {% block header %} 21 | 25 | {% endblock %} 26 | 27 | {% block footer %} 28 | {{ super() }} 29 | 31 | {% endblock %} 32 | 33 | 34 | {% block rootrellink %} 35 | {{ super() }} 36 |
  • github »
  • 37 | {% endblock %} 38 | 39 | {% block body %} 40 | {{ super() }} 41 | 52 | {% endblock %} 53 | 54 | -------------------------------------------------------------------------------- /homepage/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Beginner's Guide to Nipype documentation build configuration file, created by 4 | # sphinx-quickstart on Tue Feb 2 11:12:03 2016. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | import sys 16 | import os 17 | 18 | # If extensions (or modules to document with autodoc) are in another directory, 19 | # add these directories to sys.path here. If the directory is relative to the 20 | # documentation root, use os.path.abspath to make it absolute, like shown here. 21 | #sys.path.insert(0, os.path.abspath('.')) 22 | 23 | # -- General configuration ------------------------------------------------ 24 | 25 | # If your documentation needs a minimal Sphinx version, state it here. 26 | #needs_sphinx = '1.0' 27 | 28 | # Add any Sphinx extension module names here, as strings. They can be 29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 30 | # ones. 31 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode','sphinx.ext.graphviz'] 32 | 33 | # Add any paths that contain templates here, relative to this directory. 34 | templates_path = ['_templates'] 35 | 36 | # The suffix(es) of source filenames. 37 | # You can specify multiple suffix as a list of string: 38 | # source_suffix = ['.rst', '.md'] 39 | source_suffix = '.rst' 40 | 41 | # The encoding of source files. 
42 | #source_encoding = 'utf-8-sig' 43 | 44 | # The master toctree document. 45 | master_doc = 'index' 46 | 47 | # General information about the project. 48 | project = u'Beginner\'s Guide to Nipype' 49 | copyright = u'2016, Michael Notter' 50 | author = u'Michael Notter' 51 | 52 | # The version info for the project you're documenting, acts as replacement for 53 | # |version| and |release|, also used in various other places throughout the 54 | # built documents. 55 | # 56 | # The short X.Y version. 57 | version = u'1.0' 58 | # The full version, including alpha/beta/rc tags. 59 | release = u'1.0' 60 | 61 | # The language for content autogenerated by Sphinx. Refer to documentation 62 | # for a list of supported languages. 63 | # 64 | # This is also used if you do content translation via gettext catalogs. 65 | # Usually you set "language" from the command line for these cases. 66 | language = None 67 | 68 | # There are two options for replacing |today|: either, you set today to some 69 | # non-false value, then it is used: 70 | #today = '' 71 | # Else, today_fmt is used as the format for a strftime call. 72 | #today_fmt = '%B %d, %Y' 73 | 74 | # List of patterns, relative to source directory, that match files and 75 | # directories to ignore when looking for source files. 76 | exclude_patterns = ['_build'] 77 | 78 | # The reST default role (used for this markup: `text`) to use for all 79 | # documents. 80 | #default_role = None 81 | 82 | # If true, '()' will be appended to :func: etc. cross-reference text. 83 | #add_function_parentheses = True 84 | 85 | # If true, the current module name will be prepended to all description 86 | # unit titles (such as .. function::). 87 | #add_module_names = True 88 | 89 | # If true, sectionauthor and moduleauthor directives will be shown in the 90 | # output. They are ignored by default. 91 | #show_authors = False 92 | 93 | # The name of the Pygments (syntax highlighting) style to use. 
94 | pygments_style = 'sphinx' 95 | 96 | # A list of ignored prefixes for module index sorting. 97 | #modindex_common_prefix = [] 98 | 99 | # If true, keep warnings as "system message" paragraphs in the built documents. 100 | #keep_warnings = False 101 | 102 | # If true, `todo` and `todoList` produce output, else they produce nothing. 103 | todo_include_todos = False 104 | 105 | 106 | # -- Options for HTML output ---------------------------------------------- 107 | 108 | # The theme to use for HTML and HTML Help pages. See the documentation for 109 | # a list of builtin themes. 110 | html_theme = 'sphinxdoc' 111 | 112 | # Theme options are theme-specific and customize the look and feel of a theme 113 | # further. For a list of options available for each theme, see the 114 | # documentation. 115 | #html_theme_options = {} 116 | 117 | # Add any paths that contain custom themes here, relative to this directory. 118 | #html_theme_path = [] 119 | 120 | # The name for this set of Sphinx documents. If None, it defaults to 121 | # " v documentation". 122 | html_title = 'All you need to know to become an expert in Nipype' 123 | 124 | # A shorter title for the navigation bar. Default is the same as html_title. 125 | html_short_title = 'Home' 126 | 127 | # The name of an image file (relative to this directory) to place at the top 128 | # of the sidebar. 129 | html_logo = '_static/nipype-beginners-guide-html_logo.png' 130 | 131 | # The name of an image file (within the static path) to use as favicon of the 132 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 133 | # pixels large. 134 | html_favicon = '_static/favicon.ico' 135 | 136 | # Add any paths that contain custom static files (such as style sheets) here, 137 | # relative to this directory. They are copied after the builtin static files, 138 | # so a file named "default.css" will overwrite the builtin "default.css". 
139 | html_static_path = ['_static'] 140 | 141 | # Add any extra paths that contain custom files (such as robots.txt or 142 | # .htaccess) here, relative to this directory. These files are copied 143 | # directly to the root of the documentation. 144 | #html_extra_path = [] 145 | 146 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 147 | # using the given strftime format. 148 | html_last_updated_fmt = '%B %d, %Y' 149 | 150 | # If true, SmartyPants will be used to convert quotes and dashes to 151 | # typographically correct entities. 152 | #html_use_smartypants = True 153 | 154 | #Sphinx will add “permalinks” for each heading and description environment as paragraph signs that become visible when the mouse hovers over them. 155 | #This value determines the text for the permalink; it defaults to "¶". Set it to None or the empty string to disable permalinks. 156 | html_add_permalinks = u'¶' 157 | 158 | # Custom sidebar templates, maps document names to template names. 159 | html_sidebars = { 160 | '**': ['localtoc.html', 'relations.html', 'searchbox.html','sourcelink.html'], 161 | } 162 | 163 | # Additional templates that should be rendered to pages, maps page names to 164 | # template names. 165 | #html_additional_pages = {} 166 | 167 | # If false, no module index is generated. 168 | #html_domain_indices = True 169 | 170 | # If false, no index is generated. 171 | html_use_index = False 172 | 173 | # If true, the index is split into individual pages for each letter. 174 | #html_split_index = False 175 | 176 | # If true, links to the reST sources are added to the pages. 177 | #html_show_sourcelink = True 178 | 179 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 180 | #html_show_sphinx = True 181 | 182 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
183 | #html_show_copyright = True 184 | 185 | # If true, an OpenSearch description file will be output, and all pages will 186 | # contain a tag referring to it. The value of this option must be the 187 | # base URL from which the finished HTML is served. 188 | #html_use_opensearch = '' 189 | 190 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 191 | #html_file_suffix = None 192 | 193 | # Language to be used for generating the HTML full-text search index. 194 | # Sphinx supports the following languages: 195 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' 196 | # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' 197 | #html_search_language = 'en' 198 | 199 | # A dictionary with options for the search language support, empty by default. 200 | # Now only 'ja' uses this config value 201 | #html_search_options = {'type': 'default'} 202 | 203 | # The name of a javascript file (relative to the configuration directory) that 204 | # implements a search results scorer. If empty, the default will be used. 205 | #html_search_scorer = 'scorer.js' 206 | 207 | # Output file base name for HTML help builder. 208 | htmlhelp_basename = 'BeginnersGuideToNipype' 209 | 210 | # -- Options for LaTeX output --------------------------------------------- 211 | 212 | latex_elements = { 213 | # The paper size ('letterpaper' or 'a4paper'). 214 | 'papersize': 'letterpaper', 215 | 216 | # The font size ('10pt', '11pt' or '12pt'). 217 | #'pointsize': '10pt', 218 | 219 | # Additional stuff for the LaTeX preamble. 220 | 'preamble': '\setcounter{tocdepth}{2}', 221 | 222 | # Latex figure (float) alignment 223 | #'figure_align': 'htbp', 224 | 225 | } 226 | 227 | # Grouping the document tree into LaTeX files. List of tuples 228 | # (source start file, target name, title, 229 | # author, documentclass [howto, manual, or own class]). 
230 | latex_documents = [ 231 | (master_doc, 'NipypeBeginnersGuide.tex', u'Nipype Beginner\'s Guide', 232 | u'Michael Notter', 'manual'), 233 | ] 234 | 235 | # The name of an image file (relative to this directory) to place at the top of 236 | # the title page. 237 | latex_logo = '_static/nipype-beginners-guide-pdf_logo.png' 238 | 239 | # For "manual" documents, if this is true, then toplevel headings are parts, 240 | # not chapters. 241 | #latex_use_parts = False 242 | 243 | # If true, show page references after internal links. 244 | #latex_show_pagerefs = False 245 | 246 | # If true, show URL addresses after external links. 247 | latex_show_urls = 'False' 248 | 249 | # Documents to append as an appendix to all manuals. 250 | #latex_appendices = [] 251 | 252 | # If false, no module index is generated. 253 | #latex_domain_indices = True 254 | 255 | 256 | # -- Options for manual page output --------------------------------------- 257 | 258 | # One entry per manual page. List of tuples 259 | # (source start file, name, description, authors, manual section). 260 | man_pages = [ 261 | (master_doc, 'Beginner\'s Guide to Nipype', u'Beginner\'s Guide to Nipype Documentation', 262 | [author], 1) 263 | ] 264 | 265 | # If true, show URL addresses after external links. 266 | #man_show_urls = False 267 | 268 | 269 | # -- Options for Texinfo output ------------------------------------------- 270 | 271 | # Grouping the document tree into Texinfo files. List of tuples 272 | # (source start file, target name, title, author, 273 | # dir menu entry, description, category) 274 | texinfo_documents = [ 275 | (master_doc, 'Beginner\'s Guide to Nipype', u'Beginner\'s Guide to Nipype Documentation', 276 | author, 'Beginner\'s Guide to Nipype', 'One line description of project.', 277 | 'Miscellaneous'), 278 | ] 279 | 280 | # Documents to append as an appendix to all manuals. 281 | #texinfo_appendices = [] 282 | 283 | # If false, no module index is generated. 
284 | #texinfo_domain_indices = True 285 | 286 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 287 | #texinfo_show_urls = 'footnote' 288 | 289 | # If true, do not generate a @detailmenu in the "Top" node's menu. 290 | #texinfo_no_detailmenu = False 291 | -------------------------------------------------------------------------------- /homepage/help.rst: -------------------------------------------------------------------------------- 1 | .. important:: 2 | 3 | This guide hasn't been updated since January 2017 and is based on an older version of Nipype. The code in this guide is not tested against newer Nipype versions and might not work anymore. For a newer, more up to date and better introduction to Nipype, please check out the the `Nipype Tutorial `_. 4 | 5 | ============== 6 | Help & Support 7 | ============== 8 | 9 | How Nipype Can Help You 10 | ======================= 11 | 12 | Often times the first trouble with Nipype arise because of misunderstanding a node or its function. This can be because a mandatory input was forgotten, a input or output field is not what you thought it was or something similar. That's why the first step when running into a problem while building a pipeline should be to check out the description of the interface that causes the trouble. I've already described this `here `_ but to recap: 13 | 14 | Let's assume that you've imported FreeSurfer's BBRegister with the command ``from nipype.interfaces.freesurfer import BBRegister``. Now, if you want to know what this module generally does, use the `?` character, i.e. ``BBRegister?``. This gives you a short description as well as an implementation example of this module. If you want to know a more detailed description of BBRegister, with all mandatory and possible inputs and outputs, use the ``help()`` function, i.e. ``BBRegister.help()``. 
15 | 16 | Also, I highly recommend to check out Nipype's official `Documentation `_ section, where you can browse through all possible interfaces, function and description of them. 17 | 18 | 19 | How to Help Yourself 20 | ==================== 21 | 22 | If you have any **questions about or comments to this beginner's guide**, don't hesitate to leave a comment on the bottom of the corresponding homepage (you don't need an account to do so) or contact me directly by e-mail under: `miykaelnotter@gmail.com `_. 23 | 24 | If you have general **questions about what certain neuroimaging or nipype term** mean, check out the beginner's guide `Glossary `_ section. 25 | 26 | If you have any **questions about Nipype or neuroimaging** itself please go directly to `neurostars.org `_ or the beginner's guide `FAQ `_ section. `Neurostars.org `_ is a community driven Q&A platform that will help you to answer any nipype or neuroimaging related question that you possibly could have. 27 | 28 | 29 | How to Help Me 30 | ============== 31 | 32 | The list of interfaces Nipype supports grows everyday more and more and the best practice to analyze MRI data is changing all the time. It's impossible for one person to keep track of all of those softwares and to know the state of the art analysis. That's why I'm very much counting on the input and support of the community to help me to make this beginner's guide as detailed and complete as possible. 33 | 34 | So, if you found any mistakes, want to point out some alternative ways to do something or have any scripts or tutorials to share, your input is highly appreciated! 35 | 36 | The best way to help me is to fork my repo on github (`https://github.com/miykael/nipype-beginner-s-guide/tree/master/homepage `_) and send me a pull request. Alternatively you can also contact me with your ideas or feedback under `miykaelnotter@gmail.com `_. 
37 | 38 | 39 | How to Read Crash Files 40 | ======================= 41 | 42 | Everytime Nipype crashes, it creates a nice crash file containing all necessary information. For a specific example see `this section `_. In this example the name of the crash file is ``crash-20141018-140440-mnotter-art.b0.pklz``. 43 | 44 | The name of the file gives you already an information about when it crashed (``20141018-140440``) and which node crashed (``art.b0``). If you want to read the node you can use the terminal command ``nipype_display_crash``. In our example the command to read the crash file is: 45 | 46 | .. code-block:: sh 47 | 48 | nipype_display_crash ~/nipype_tutorial/crash-20141018-140857-mnotter-art.b0.pklz 49 | 50 | This leads to the following output: 51 | 52 | .. code-block:: sh 53 | :linenos: 54 | 55 | File: crash-20141018-140857-mnotter-art.b0.pklz 56 | Node: preproc.art.b0 57 | Working directory: ~/nipype_tutorial/workingdir_firstSteps/ 58 | preproc/_subject_id_sub001/art 59 | 60 | Node inputs: 61 | 62 | bound_by_brainmask = False 63 | global_threshold = 8.0 64 | ignore_exception = False 65 | intersect_mask = 66 | mask_file = 67 | mask_threshold = 68 | mask_type = spm_global 69 | norm_threshold = 0.5 70 | parameter_source = SPM 71 | plot_type = png 72 | realigned_files = 73 | realignment_parameters = ['~/nipype_tutorial/workingdir_firstSteps/preproc/ 74 | _subject_id_sub001/realign/rp_arun001.txt'] 75 | rotation_threshold = 76 | save_plot = True 77 | translation_threshold = 78 | use_differences = [True, False] 79 | use_norm = True 80 | zintensity_threshold = 3.0 81 | 82 | Traceback (most recent call last): 83 | File "~/anaconda/lib/python2.7/site-packages/nipype/pipeline/plugins/linear.py", 84 | line 38, in run node.run(updatehash=updatehash) 85 | File "~/anaconda/lib/python2.7/site-packages/nipype/pipeline/engine.py", 86 | line 1424, in run self._run_interface() 87 | File "~/anaconda/lib/python2.7/site-packages/nipype/pipeline/engine.py", 88 | line 1534, in 
_run_interface self._result = self._run_command(execute) 89 | File "~/anaconda/lib/python2.7/site-packages/nipype/pipeline/engine.py", 90 | line 1660, in _run_command result = self._interface.run() 91 | File "~/anaconda/lib/python2.7/site-packages/nipype/interfaces/base.py", 92 | line 965, in run self._check_mandatory_inputs() 93 | File "~/anaconda/lib/python2.7/site-packages/nipype/interfaces/base.py", 94 | line 903, in _check_mandatory_inputs raise ValueError(msg) 95 | ValueError: ArtifactDetect requires a value for input 'realigned_files'. 96 | For a list of required inputs, see ArtifactDetect.help() 97 | 98 | The first part of the crash report contains information about the node and the second part contains the error log. In this example, the last two lines tell us exactly, that the crash was caused because the input field `'realigned_files'` was not specified. This error is easy corrected, just add the required input and rerun the workflow. 99 | 100 | Under certain circumstances it is possible and desired to rerun the crashed node. This can be done with the additional command flags to ``nipype_display_crash``. The following flags are available: 101 | 102 | .. code-block:: sh 103 | :linenos: 104 | 105 | -h, --help show this help message and exit 106 | -r, --rerun rerun crashed node (default False) 107 | -d, --debug enable python debugger when re-executing (default False) 108 | -i, --ipydebug enable ipython debugger when re-executing (default False) 109 | --dir DIRECTORY Directory to run the node in (default None) 110 | 111 | 112 | .. note:: 113 | 114 | For more information about how to debug your code and handle crashes go to `this official Nipype section `_. 
115 | -------------------------------------------------------------------------------- /homepage/images/2nd_level_colored.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/2nd_level_colored.png -------------------------------------------------------------------------------- /homepage/images/BOLDresponse.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/BOLDresponse.png -------------------------------------------------------------------------------- /homepage/images/GLM.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/GLM.png -------------------------------------------------------------------------------- /homepage/images/GM.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/GM.gif -------------------------------------------------------------------------------- /homepage/images/GM.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/GM.png -------------------------------------------------------------------------------- /homepage/images/WM.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/WM.gif 
-------------------------------------------------------------------------------- /homepage/images/WM.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/WM.png -------------------------------------------------------------------------------- /homepage/images/architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/architecture.png -------------------------------------------------------------------------------- /homepage/images/artifact_detection.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/artifact_detection.png -------------------------------------------------------------------------------- /homepage/images/brain.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/brain.gif -------------------------------------------------------------------------------- /homepage/images/brain.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/brain.png -------------------------------------------------------------------------------- /homepage/images/contrast_acoustic.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/contrast_acoustic.png 
-------------------------------------------------------------------------------- /homepage/images/contrasts.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/contrasts.png -------------------------------------------------------------------------------- /homepage/images/coregistration.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/coregistration.png -------------------------------------------------------------------------------- /homepage/images/datafolder_structure.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/datafolder_structure.png -------------------------------------------------------------------------------- /homepage/images/datastructure.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/datastructure.png -------------------------------------------------------------------------------- /homepage/images/designmatrix.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/designmatrix.png -------------------------------------------------------------------------------- /homepage/images/example_node_connection.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/example_node_connection.png -------------------------------------------------------------------------------- /homepage/images/firstlevel.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/firstlevel.png -------------------------------------------------------------------------------- /homepage/images/flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/flow.png -------------------------------------------------------------------------------- /homepage/images/func2norm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/func2norm.png -------------------------------------------------------------------------------- /homepage/images/functional_activation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/functional_activation.png -------------------------------------------------------------------------------- /homepage/images/graph_colored.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/graph_colored.png -------------------------------------------------------------------------------- /homepage/images/graph_exec_detailed.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/graph_exec_detailed.png -------------------------------------------------------------------------------- /homepage/images/graph_exec_simple.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/graph_exec_simple.png -------------------------------------------------------------------------------- /homepage/images/graph_flat_detailed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/graph_flat_detailed.png -------------------------------------------------------------------------------- /homepage/images/graph_flat_simple.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/graph_flat_simple.png -------------------------------------------------------------------------------- /homepage/images/graph_hierarchical.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/graph_hierarchical.png -------------------------------------------------------------------------------- /homepage/images/graph_orig_detailed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/graph_orig_detailed.png 
-------------------------------------------------------------------------------- /homepage/images/graph_orig_notsimple.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/graph_orig_notsimple.png -------------------------------------------------------------------------------- /homepage/images/graph_orig_notsimple.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | %3 11 | 12 | 13 | metaflow.preproc 14 | 15 | metaflow.preproc 16 | 17 | 18 | metaflow.datasink.DataSink.io 19 | 20 | metaflow.datasink.DataSink.io 21 | 22 | 23 | metaflow.preproc->metaflow.datasink.DataSink.io 24 | 25 | 26 | 27 | 28 | metaflow.l1analysis 29 | 30 | metaflow.l1analysis 31 | 32 | 33 | metaflow.preproc->metaflow.l1analysis 34 | 35 | 36 | 37 | 38 | metaflow.l1analysis->metaflow.datasink.DataSink.io 39 | 40 | 41 | 42 | 43 | metaflow.selectfiles.SelectFiles.io 44 | 45 | metaflow.selectfiles.SelectFiles.io 46 | 47 | 48 | metaflow.selectfiles.SelectFiles.io->metaflow.preproc 49 | 50 | 51 | 52 | 53 | metaflow.getsubjectinfo.Function.utility 54 | 55 | metaflow.getsubjectinfo.Function.utility 56 | 57 | 58 | metaflow.getsubjectinfo.Function.utility->metaflow.l1analysis 59 | 60 | 61 | 62 | 63 | metaflow.infosource.IdentityInterface.utility 64 | 65 | metaflow.infosource.IdentityInterface.utility 66 | 67 | 68 | metaflow.infosource.IdentityInterface.utility->metaflow.preproc 69 | 70 | 71 | 72 | 73 | metaflow.infosource.IdentityInterface.utility->metaflow.l1analysis 74 | 75 | 76 | 77 | 78 | metaflow.infosource.IdentityInterface.utility->metaflow.selectfiles.SelectFiles.io 79 | 80 | 81 | 82 | 83 | metaflow.infosource.IdentityInterface.utility->metaflow.getsubjectinfo.Function.utility 84 | 85 | 86 | 87 | 88 | 89 | 
-------------------------------------------------------------------------------- /homepage/images/graph_orig_simple.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/graph_orig_simple.png -------------------------------------------------------------------------------- /homepage/images/graph_orig_simple.svg: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | %3 11 | 12 | 13 | l1analysis 14 | 15 | l1analysis 16 | 17 | 18 | datasink (io) 19 | 20 | datasink (io) 21 | 22 | 23 | l1analysis->datasink (io) 24 | 25 | 26 | 27 | 28 | selectfiles (io) 29 | 30 | selectfiles (io) 31 | 32 | 33 | preproc 34 | 35 | preproc 36 | 37 | 38 | selectfiles (io)->preproc 39 | 40 | 41 | 42 | 43 | preproc->l1analysis 44 | 45 | 46 | 47 | 48 | preproc->datasink (io) 49 | 50 | 51 | 52 | 53 | infosource (utility) 54 | 55 | infosource (utility) 56 | 57 | 58 | infosource (utility)->l1analysis 59 | 60 | 61 | 62 | 63 | infosource (utility)->selectfiles (io) 64 | 65 | 66 | 67 | 68 | infosource (utility)->preproc 69 | 70 | 71 | 72 | 73 | getsubjectinfo (utility) 74 | 75 | getsubjectinfo (utility) 76 | 77 | 78 | infosource (utility)->getsubjectinfo (utility) 79 | 80 | 81 | 82 | 83 | getsubjectinfo (utility)->l1analysis 84 | 85 | 86 | 87 | 88 | 89 | -------------------------------------------------------------------------------- /homepage/images/highpassfilter.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/highpassfilter.png -------------------------------------------------------------------------------- /homepage/images/kernel.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/kernel.png -------------------------------------------------------------------------------- /homepage/images/metaflow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/metaflow.png -------------------------------------------------------------------------------- /homepage/images/movement.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/movement.gif -------------------------------------------------------------------------------- /homepage/images/movement.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/movement.png -------------------------------------------------------------------------------- /homepage/images/neuroimaging.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/neuroimaging.png -------------------------------------------------------------------------------- /homepage/images/neuroimaging.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/neuroimaging.pptx -------------------------------------------------------------------------------- /homepage/images/neuroimaging1.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/neuroimaging1.png -------------------------------------------------------------------------------- /homepage/images/neuroimaging2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/neuroimaging2.png -------------------------------------------------------------------------------- /homepage/images/neuroimaging_nipype.dot: -------------------------------------------------------------------------------- 1 | digraph { 2 | brain[label="Human Brain",style=filled, color=red]; 3 | structuralData[label="anatomical data", shape=box3d,style=filled, color=black, colorscheme=greys7 fillcolor=2]; 4 | functionalData[label="functional data", shape=box3d,style=filled, color=black, colorscheme=greys7 fillcolor=2]; 5 | subgraph cluster_structuralflow { 6 | style=filled; 7 | fillcolor="#B4B4FF"; 8 | label="sMRI"; 9 | T1_image[label="NIfTi conversion (FreeSurfer)", style=filled, fillcolor="#E6E6FF"]; 10 | subgraph cluster_anatomy_preproc { 11 | edge [color="#FF0000"]; 12 | style=filled; 13 | fillcolor="#FFB4B4"; 14 | label="Preprocessing"; 15 | segmentation[label="Segmentation (FreeSurfer)", style=filled, fillcolor="#FFE6E6"]; 16 | } 17 | subgraph cluster_anatomy_analysis { 18 | edge [color="#00A300"]; 19 | style=filled; 20 | fillcolor="#B4FFB4"; 21 | label="2nd level analysis"; 22 | subgraph cluster_anatomy_1st { 23 | edge [color="#FF9721"]; 24 | style=filled; 25 | fillcolor="#FFFF8F"; 26 | label="ROI analysis"; 27 | roi[label="ROI extraction (FreeSurfer)", style=filled, fillcolor="#FFFFD8"]; 28 | anatomy_estimation[label="ROI analyse output (R)", style=filled, fillcolor="#FFFFD8"]; 29 | roi -> anatomy_estimation; 30 | } 31 | } 32 | } 33 | subgraph cluster_functionalflow { 34 | style=filled; 35 | 
fillcolor="#B4B4FF"; 36 | label="fMRI"; 37 | T2_image[label="NIfTi conversion (FreeSurfer)", style=filled, fillcolor="#E6E6FF"]; 38 | subgraph cluster_functional_preproc { 39 | edge [color="#FF0000"]; 40 | style=filled; 41 | fillcolor="#FFB4B4"; 42 | label="Preprocessing"; 43 | slicetiming[label="Slice Timing (SPM)", style=filled, fillcolor="#FFE6E6"]; 44 | realignment[label="Realignment (SPM)", style=filled, fillcolor="#FFE6E6"]; 45 | artifactdetect[label="Artifact Detection (ART)", style=filled, fillcolor="#FFE6E6"]; 46 | smoothing[label="Smoothing (FSL)", style=filled, fillcolor="#FFE6E6"]; 47 | slicetiming -> realignment; 48 | realignment -> artifactdetect; 49 | artifactdetect -> smoothing; 50 | } 51 | subgraph cluster_functional_analysis { 52 | edge [color="#00A300"]; 53 | style=filled; 54 | fillcolor="#B4FFB4"; 55 | label="functional analysis"; 56 | subgraph cluster_functional_1st { 57 | edge [color="#FF9721"]; 58 | style=filled; 59 | fillcolor="#FFFF8F"; 60 | label="1st level analysis"; 61 | modelspec1[label="Specify Design (Nipype)", style=filled, fillcolor="#FFFFD8"]; 62 | modelest1[label="Model Estimation (SPM)", style=filled, fillcolor="#FFFFD8"]; 63 | contrastspec1[label="Specify Contrasts (Nipype)", style=filled, fillcolor="#FFFFD8"]; 64 | contrast1[label="Contrast Estimation (SPM)", style=filled, fillcolor="#FFFFD8"]; 65 | thresh1[label="Threshold (FSL)", style=filled, fillcolor="#FFFFD8"]; 66 | 67 | modelspec1 -> modelest1 -> contrastspec1 -> contrast1 -> thresh1; 68 | 69 | } 70 | subgraph cluster_functional_2nd { 71 | edge [color="#FF9721"]; 72 | style=filled; 73 | fillcolor="#FFFF8F"; 74 | label="2nd level analysis"; 75 | modelspec2[label="Specify Design (Nipype)", style=filled, fillcolor="#FFFFD8"]; 76 | modelest2[label="Model Estimation (SPM)", style=filled, fillcolor="#FFFFD8"]; 77 | contrastspec2[label="Specify Contrasts (Nipype)", style=filled, fillcolor="#FFFFD8"]; 78 | contrast2[label="Contrast Estimation (SPM)", style=filled, 
fillcolor="#FFFFD8"]; 79 | thresh2[label="Threshold (FSL)", style=filled, fillcolor="#FFFFD8"]; 80 | 81 | modelspec2 -> modelest2 -> contrastspec2 -> contrast2 -> thresh2; 82 | } 83 | coregistration[label="Coregistration (FreeSurfer)", style=filled, fillcolor="#E6FFE6"]; 84 | normalization[label="Normalization (ANTs)", style=filled, fillcolor="#E6FFE6"]; 85 | template[label="Template (ANTs)", style=filled, fillcolor="#E6FFE6"]; 86 | template -> coregistration; 87 | } 88 | } 89 | 90 | brain -> structuralData -> T1_image; 91 | brain -> functionalData -> T2_image; 92 | T1_image -> segmentation; 93 | T1_image -> coregistration; 94 | T2_image -> slicetiming; 95 | contrast1 -> coregistration; 96 | smoothing -> modelspec1; 97 | coregistration -> roi; 98 | coregistration -> normalization; 99 | normalization -> modelspec2; 100 | segmentation -> roi; 101 | } 102 | -------------------------------------------------------------------------------- /homepage/images/neuroimaging_nipype.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/neuroimaging_nipype.png -------------------------------------------------------------------------------- /homepage/images/norm_ants_colored_complete.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/norm_ants_colored_complete.png -------------------------------------------------------------------------------- /homepage/images/norm_ants_colored_partial.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/norm_ants_colored_partial.png 
-------------------------------------------------------------------------------- /homepage/images/norm_spm_colored.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/norm_spm_colored.png -------------------------------------------------------------------------------- /homepage/images/normalization.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/normalization.png -------------------------------------------------------------------------------- /homepage/images/normvsnorm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/normvsnorm.png -------------------------------------------------------------------------------- /homepage/images/pvalues.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/pvalues.png -------------------------------------------------------------------------------- /homepage/images/realignment_bad.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/realignment_bad.png -------------------------------------------------------------------------------- /homepage/images/realignment_good.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/realignment_good.png -------------------------------------------------------------------------------- /homepage/images/rotation_40.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/rotation_40.gif -------------------------------------------------------------------------------- /homepage/images/rotation_40.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/rotation_40.png -------------------------------------------------------------------------------- /homepage/images/segmentation.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/segmentation.gif -------------------------------------------------------------------------------- /homepage/images/segmentation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/segmentation.png -------------------------------------------------------------------------------- /homepage/images/slicetiming.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/slicetiming.png -------------------------------------------------------------------------------- /homepage/images/slicetiming_small.gif: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/slicetiming_small.gif -------------------------------------------------------------------------------- /homepage/images/smoothed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/smoothed.png -------------------------------------------------------------------------------- /homepage/images/stimuli.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/stimuli.png -------------------------------------------------------------------------------- /homepage/images/subj2norm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/subj2norm.png -------------------------------------------------------------------------------- /homepage/images/subjects2normtemp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/subjects2normtemp.png -------------------------------------------------------------------------------- /homepage/images/time.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/time.png -------------------------------------------------------------------------------- /homepage/images/tractography.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/tractography.png -------------------------------------------------------------------------------- /homepage/images/tractography_small.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/tractography_small.gif -------------------------------------------------------------------------------- /homepage/images/voxel.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miykael/nipype-beginner-s-guide/4b2b6b730792a1351ae9c825d6d36f18db59468c/homepage/images/voxel.png -------------------------------------------------------------------------------- /homepage/index.rst: -------------------------------------------------------------------------------- 1 | .. important:: 2 | 3 | This guide hasn't been updated since January 2017 and is based on an older version of Nipype. The code in this guide is not tested against newer Nipype versions and might not work anymore. For a newer, more up to date and better introduction to Nipype, please check out the the `Nipype Tutorial `_. 4 | 5 | .. ######################################## 6 | # # 7 | # Nipype Beginner's Guide # 8 | # # 9 | # Author: Michael Notter # 10 | # miykaelnotter@gmail.com # 11 | # # 12 | ######################################## 13 | 14 | .. |start| image:: _static/start.png 15 | :width: 80pt 16 | .. |setup| image:: _static/setup.png 17 | :width: 80pt 18 | .. |nipype| image:: _static/nipype.png 19 | :width: 80pt 20 | .. |expert| image:: _static/expert.png 21 | :width: 80pt 22 | .. |support| image:: _static/support.png 23 | :width: 80pt 24 | .. 
|data| image:: _static/data.png 25 | :width: 60pt 26 | 27 | 28 | =================================== 29 | Welcome to Nipype Beginner's Guide! 30 | =================================== 31 | 32 | 33 | .. only:: latex 34 | 35 | .. toctree:: 36 | :maxdepth: 2 37 | 38 | nipype 39 | neuroimaging 40 | nipypeAndNeuroimaging 41 | installation 42 | prepareData 43 | firstSteps 44 | visualizePipeline 45 | firstLevel 46 | normalize 47 | secondLevel 48 | help 49 | links 50 | faq 51 | glossary 52 | 53 | 54 | Downloads 55 | ========= 56 | 57 | Download Nipype here: `Nipype Homepage `_. 58 | 59 | Download this beginner's guide as a PDF here: `Nipype Beginner's Guide `_. 60 | 61 | Download all scripts from this beginner's guide here:`Scripts `_. 62 | 63 | Download the dataset `DS102: Flanker task (event-related) `_ used as the tutorial dataset for this beginner's guide directly here: `ds102_raw.tgz `_. 64 | 65 | 66 | 67 | .. only:: html 68 | 69 | This Beginner's guide will teach you all you need to know for your first steps with Nipype. You will see that Nipype is a really practical and easy to learn neuroimaging toolbox, written in Python, that helps to connect many of the different softwares used in neuroimaging, such as SPM, FSL, FreeSurfer and AFNI. The goal of this Beginner's guide is to teach you the basics about Neuroimaging and to show you each step along the way of a complete neuroimaging analysis. By learning Nipype, you will become an expert in neuroimaging and be able to analyze your own dataset in no time. 70 | 71 | 72 | Neuroimaging and Nipype 73 | ======================= 74 | 75 | This part introduces you to Nipype, explains what it is and how it works. It will also introduce you to neuroimaging in general and tell you all you need to know for the analysis of a basic neuroimaging dataset. At the end, you should be able to understand what Nipype is, how it is working and why it is so useful in neuroimaging. 
76 | 77 | +----------+-------------------------------------------+ 78 | | |start| | .. toctree:: | 79 | | | :maxdepth: 2 | 80 | | | | 81 | | | nipype | 82 | | | neuroimaging | 83 | | | nipypeAndNeuroimaging | 84 | +----------+-------------------------------------------+ 85 | 86 | 87 | Get Nipype to run on your System 88 | ================================ 89 | 90 | This part is all about downloading and installing Nipype and its dependencies. It will also show you how to set up all necessary environment variables and prepare everything, so that at the end you will be ready to run Nipype on your system. 91 | 92 | +----------+-------------------------------------------+ 93 | | |setup| | .. toctree:: | 94 | | | :maxdepth: 2 | 95 | | | | 96 | | | installation | 97 | +----------+-------------------------------------------+ 98 | 99 | 100 | First steps with Nipype 101 | ======================= 102 | 103 | This part will show you how to use Nipype by analyzing an fMRI dataset. By going through a neuroimaging analysis step by step, you will learn all about Nipype, its building blocks and how to connect them to create your own analysis workflow. At the end you will be able to run your own neuroimaging analysis and make your first experiences with Nipype on real data. 104 | 105 | +----------+-------------------------------------------+ 106 | | |nipype| | .. toctree:: | 107 | | | :maxdepth: 2 | 108 | | | | 109 | | | prepareData | 110 | | | firstSteps | 111 | | | visualizePipeline | 112 | +----------+-------------------------------------------+ 113 | 114 | 115 | From beginner to expert 116 | ======================= 117 | 118 | This part contains many different implementations of Nipype. Amongst others, you will learn how to do a first and second level analysis, how to normalize your data, how to use Nipype in a more flexible way (e.g. 
import and reuse of other workflows), how to do a region of interest (ROI) analysis, how to do a surfaced based morphometry (SBM) analysis, how to use ANTs to create your own dataset template, how to quality control your data, how to use additional supporting toolboxes such as bips and mindboggle and more... 119 | 120 | +----------+-------------------------------------------+ 121 | | |expert| | .. toctree:: | 122 | | | :maxdepth: 2 | 123 | | | | 124 | | | firstLevel | 125 | | | normalize | 126 | | | secondLevel | 127 | +----------+-------------------------------------------+ 128 | 129 | 130 | Support 131 | ======= 132 | 133 | First things first: **Don't panic!** This part will show you how to tackle almost all problems you can encounter by using Nipype, iPython or this beginner's guide. And for everything else, there's always chocolate! 134 | 135 | +----------+-------------------------------------------+ 136 | ||support| | .. toctree:: | 137 | | | :maxdepth: 1 | 138 | | | | 139 | | | help | 140 | | | links | 141 | | | faq | 142 | | | glossary | 143 | | | | 144 | +----------+-------------------------------------------+ 145 | 146 | 147 | Downloads 148 | ========= 149 | 150 | +----------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ 151 | ||data| | Everything important to download can be found in this section. | 152 | | | | 153 | | | * Nipype can be downloaded here: `How to install Nipype `_. | 154 | | | * All Scripts used in this Beginner's Guide can be found on the Beginner's Guide `Github homepage `_. | 155 | | | * The dataset `DS102: Flanker task (event-related) `_ used as the tutorial dataset | 156 | | | for this beginner's guide can be directly downloaded here: `ds102_raw.tgz `_. | 157 | | | * This Beginner's Guide can be downloaded as a PDF | 158 | | | `here `_. 
| 159 | +----------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ 160 | 161 | -------------------------------------------------------------------------------- /homepage/links.rst: -------------------------------------------------------------------------------- 1 | .. important:: 2 | 3 | This guide hasn't been updated since January 2017 and is based on an older version of Nipype. The code in this guide is not tested against newer Nipype versions and might not work anymore. For a newer, more up to date and better introduction to Nipype, please check out the the `Nipype Tutorial `_. 4 | 5 | ============================================= 6 | Links to Nipype, Programming and Neuroimaging 7 | ============================================= 8 | 9 | 10 | Nipype 11 | ====== 12 | 13 | - `Nipype `_: The official homepage contains all about Nipype that you want to know. 14 | - `Michael Waskom `_ has written very nice introductions about `Workflows `_, `Interfaces `_ and `Iteration `_. 15 | - A short and very nice introduction to Nipype, written by by Satra Ghosh, can be found `here `_. 16 | 17 | 18 | Programming: Python, Git & more 19 | =============================== 20 | 21 | I highly recommend to use `Sublime Text 3 `_ to write and edit your scripts. It's a really good and cool looking text editor with many helpful things, like the `Anaconda Plugion `_ for example. 22 | 23 | 24 | Learn Python 25 | ------------ 26 | 27 | - `A Byte of Python `_: A very nice introduction to Python in general. 28 | - `A Crash Course in Python for Scientists `_: a very good introduction to Python and scientific programming (e.g. Numpy, Scipy, Matplotlib) 29 | - `Codecademy - Python `_: An interactive online training and introduction to Python. 30 | - `Learn Python the Hard Way `_: A very good step by step introduction to Python. 
31 | - `Python Scientific Lecture Notes `_: A very good and more detailed introduction to Python and scientific programming. 32 | - If you're looking for a Python based IDE like Eclipse or MATLAB, check out `Pycharm `_ or `Spyder `_. 33 | - `Programming with Python `_: This short introduction by *software carpentry* teaches you the basics of scientific programming on very practical examples. 34 | 35 | 36 | Learn Git 37 | --------- 38 | 39 | - `Got 15 minutes and want to learn Git? `_: Github's own git tutorial. It's fun and very short. 40 | - `Git Real `_ on `Code School `_: An interactive tutorial about GIT 41 | - `Top 10 Git Tutorials for Beginners `_ 42 | 43 | 44 | Learn Unix Shell 45 | ---------------- 46 | 47 | - `the Unix Shell `_: If you're new to Linux, here's a quick starter guide by software carpentry that teaches you the basics. 48 | 49 | 50 | Learn Programming 51 | ----------------- 52 | 53 | - A very good article that outlines what programming languages are and where they are coming from can be found on `IT Hare's Beginners Guide to Programming Languages `_. A big thank you to Sarah and the class of Mrs. Lowe from Colorado for recommending this site to me! 54 | 55 | - If you want to learn more about other programming languages such as HTML & CSS, Javascript, jQuery, PHP, Python and Ruby, check out `Beginner’s Resources to Learn Programming Languages `_. A big thank you to Mr. Tom Coner and his group for recommending the site to me. It's a great starting point to dive into any programming language. 56 | 57 | - A great resource to better understand many programming and coding related words can be found on `Software Programming and Coding Glossary for Kids `_. A big thanks to Katie and the Lyndhurst STEM Club for Girls for sharing this resource with me! The glossary gives a great overview of many important concepts and the homepage includes tons of other games and practice websites to play with. 
58 | 59 | 60 | Neuroimaging 61 | ============ 62 | 63 | - `Neurostars.org `_: If you have any questions about Neuroinformatics, this is the place to go! 64 | - `Design efficiency in FMRI `_: A nice and detailed guide on how to design a good fMRI study. 65 | -------------------------------------------------------------------------------- /homepage/nipype.rst: -------------------------------------------------------------------------------- 1 | .. important:: 2 | 3 | This guide hasn't been updated since January 2017 and is based on an older version of Nipype. The code in this guide is not tested against newer Nipype versions and might not work anymore. For a newer, more up to date and better introduction to Nipype, please check out the `Nipype Tutorial `_. 4 | 5 | ====================== 6 | Introduction to Nipype 7 | ====================== 8 | 9 | What is Nipype? 10 | =============== 11 | 12 | `Nipype `_ (Neuroimaging in Python - Pipelines and Interfaces) is an open-source, user-friendly, community-developed software package under the umbrella of `NiPy `_. Nipype allows you to pipeline your neuroimaging workflow in an intuitive way and enables you to use the software packages and algorithms you want to use, regardless of their programming language. This is possible because Nipype provides a uniform interface to many existing neuroimaging processing and analysis packages like `SPM `_, `FreeSurfer `_, `FSL `_, `AFNI `_, `ANTS `_, `Camino `_, `MRtrix `_, `Slicer `_, `MNE `_ and many more. 13 | 14 | Nipype allows you to easily combine all those heterogeneous software packages within a single workflow. This procedure gives you the opportunity to pick the best algorithm there is for the problem at hand and therefore allows you to profit from the advantages of any software package you like. 15 | 16 | Nipype is written in `Python `_, an easy to learn and very intuitive programming language. 
This means that your whole neuroimaging analysis can be easily specified using python scripts. It won't even take as many lines of code as you might fear. Nipype is very straightforward and easy to learn. As you will see, it is quite simple to combine processing steps using different software packages. Steps from previous analyses can be reused effortlessly and new approaches can be applied much faster. 17 | 18 | You're still concerned because you want to combine your own **bash**, **MATLAB**, **R** or **Python** scripts with Nipype? No problem! Even the creation of your own interface to your own software solution is straightforward and can be done in a rather short time. Thanks to `Github `_, there's also always a community standing behind you. 19 | 20 | Nipype provides an environment that encourages interactive exploration of algorithms. It allows you to make your research easily reproducible and lets you share your code with the community. 21 | 22 | 23 | A Short Example 24 | =============== 25 | 26 | Let's assume you want to do an Analysis that uses **AFNI** for the *Motion Correction*, **FreeSurfer** for the *Coregistration*, **ANTS** for the *Normalization*, **FSL** for the *Smoothing*, **Nipype** for the *Model Specification*, **SPM** for the *Model Estimation* and **SPM** for the *Statistical Inference*. Normally this would be a hell of a mess. Switching between multiple scripts in different programming languages with a lot of manual intervention. On top of all that, you want to do your analysis on multiple subjects, preferably as fast as possible, i.e., processing several subjects in parallel. With Nipype, this is no problem! You can do all this and much more. 27 | 28 | To illustrate the straightforwardness of an Nipype workflow and show how simply it can be created, look at the following example. This figure shows you a simplification of the analysis *workflow* just outlined. 29 | 30 | .. 
graphviz:: 31 | 32 | strict digraph { 33 | 34 | dataset[label="rawdata", shape=box3d,style=filled, color=black, colorscheme=greys7 fillcolor=2]; 35 | datastorage[label="output", shape=box3d,style=filled, color=black, colorscheme=greys7 fillcolor=2]; 36 | 37 | sub1[label="Subject1", style=filled, fillcolor="#E6E6FF"]; 38 | afni1[label="Motion Correction (AFNI) - Sub1", style=filled, fillcolor="#E6E6FF"]; 39 | coreg1[label="Coregistration (FreeSurfer) - Sub1", style=filled, fillcolor="#E6E6FF"]; 40 | norm1[label="Normalization (ANTS) - Sub1", style=filled, fillcolor="#E6E6FF"]; 41 | smooth1[label="Smoothing (FSL) - Sub1", style=filled, fillcolor="#E6E6FF"]; 42 | spec1[label="Model Specification (Nipype) - Sub1", style=filled, fillcolor="#E6E6FF"]; 43 | est1[label="Model Estimation (SPM) - Sub1", style=filled, fillcolor="#E6E6FF"]; 44 | stat1[label="Statistical Inference (SPM) - Sub1", style=filled, fillcolor="#E6E6FF"]; 45 | 46 | sub2[label="Subject2", style=filled, fillcolor="#FFE6E6"]; 47 | afni2[label="Motion Correction (AFNI) - Sub2", style=filled, fillcolor="#FFE6E6"]; 48 | coreg2[label="Coregistration (FreeSurfer) - Sub2", style=filled, fillcolor="#FFE6E6"]; 49 | norm2[label="Normalization (ANTS) - Sub2", style=filled, fillcolor="#FFE6E6"]; 50 | smooth2[label="Smoothing (FSL) - Sub2", style=filled, fillcolor="#FFE6E6"]; 51 | spec2[label="Model Specification (Nipype) - Sub2", style=filled, fillcolor="#FFE6E6"]; 52 | est2[label="Model Estimation (SPM) - Sub2", style=filled, fillcolor="#FFE6E6"]; 53 | stat2[label="Statistical Inference (SPM) - Sub2", style=filled, fillcolor="#FFE6E6"]; 54 | 55 | sub3[label="Subject3", style=filled, fillcolor="#E6FFE6"]; 56 | afni3[label="Motion Correction (AFNI) - Sub3", style=filled, fillcolor="#E6FFE6"]; 57 | coreg3[label="Coregistration (FreeSurfer) - Sub3", style=filled, fillcolor="#E6FFE6"]; 58 | norm3[label="Normalization (ANTS) - Sub3", style=filled, fillcolor="#E6FFE6"]; 59 | smooth3[label="Smoothing (FSL) - Sub3", 
style=filled, fillcolor="#E6FFE6"]; 60 | spec3[label="Model Specification (Nipype) - Sub3", style=filled, fillcolor="#E6FFE6"]; 61 | est3[label="Model Estimation (SPM) - Sub3", style=filled, fillcolor="#E6FFE6"]; 62 | stat3[label="Statistical Inference (SPM) - Sub3", style=filled, fillcolor="#E6FFE6"]; 63 | 64 | dataset -> sub1; 65 | dataset -> sub2; 66 | dataset -> sub3; 67 | 68 | subgraph flow1 { 69 | edge [color="#0000FF"]; 70 | sub1 -> afni1 -> coreg1 -> norm1 -> smooth1 -> spec1 -> est1 -> stat1; 71 | } 72 | 73 | subgraph flow2 { 74 | edge [color="#FF0000"]; 75 | sub2 -> afni2 -> coreg2 -> norm2 -> smooth2 -> spec2 -> est2 -> stat2; 76 | } 77 | 78 | subgraph flow3 { 79 | edge [color="#00A300"]; 80 | sub3 -> afni3 -> coreg3 -> norm3 -> smooth3 -> spec3 -> est3 -> stat3; 81 | } 82 | 83 | stat1 -> datastorage; 84 | stat2 -> datastorage; 85 | stat3 -> datastorage; 86 | 87 | } 88 | 89 | 90 | The code to create an Nipype workflow that specifies the steps illustrated in the figure above and can run all the steps would look something like this: 91 | 92 | .. code-block:: py 93 | :linenos: 94 | 95 | #Import modules 96 | import nipype 97 | import nipype.interfaces.afni as afni 98 | import nipype.interfaces.freesurfer as fs 99 | import nipype.interfaces.ants as ants 100 | import nipype.interfaces.fsl as fsl 101 | import nipype.interfaces.nipy as nipy 102 | import nipype.interfaces.spm as spm 103 | 104 | 105 | #Specify experiment specifc parameters 106 | experiment_dir = '~/experiment_folder' 107 | nameofsubjects = ['subject1','subject2','subject3'] 108 | 109 | #Where can the raw data be found? 110 | grabber = nipype.DataGrabber() 111 | grabber.inputs.base_directory = experiment_dir + '/data' 112 | grabber.inputs.subject_id = nameofsubjects 113 | 114 | #Where should the output data be stored at? 
115 | sink = nipype.DataSink() 116 | sink.inputs.base_directory = experiment_dir + '/output_folder' 117 | 118 | 119 | #Create a node for each step of the analysis 120 | 121 | #Motion Correction (AFNI) 122 | realign = afni.Retroicor() 123 | 124 | #Coregistration (FreeSurfer) 125 | coreg = fs.BBRegister() 126 | 127 | #Normalization (ANTS) 128 | normalize = ants.WarpTimeSeriesImageMultiTransform() 129 | 130 | #Smoothing (FSL) 131 | smooth = fsl.SUSAN() 132 | smooth.inputs.fwhm = 6.0 133 | 134 | #Model Specification (Nipype) 135 | modelspec = nipype.SpecifyModel() 136 | modelspec.inputs.input_units = 'secs' 137 | modelspec.inputs.time_repetition = 2.5 138 | modelspec.inputs.high_pass_filter_cutoff = 128. 139 | 140 | #Model Estimation (SPM) 141 | modelest = spm.EstimateModel() 142 | 143 | #Contrast Estimation (SPM) 144 | contrastest = spm.EstimateContrast() 145 | cont1 = ['human_faces', [1 0 0]] 146 | cont2 = ['animal_faces', [0 1 0]] 147 | contrastest.inputs.contrasts = [cont1, cont2] 148 | 149 | #Statistical Inference (SPM) 150 | threshold = spm.Threshold() 151 | threshold.inputs.use_fwe_correction = True 152 | threshold.inputs.extent_fdr_p_threshold = 0.05 153 | 154 | 155 | #Create a workflow to connect all those nodes 156 | analysisflow = nipype.Workflow() 157 | 158 | #Connect the nodes to each other 159 | analysisflow.connect([(grabber -> realign ), 160 | (realign -> coreg ), 161 | (coreg -> normalize ), 162 | (normalize -> smooth ), 163 | (smooth -> modelspec ), 164 | (modelspec -> modelest ), 165 | (modelest -> contrastest), 166 | (contrastest -> threshold ), 167 | (threshold -> sink ) 168 | ]) 169 | 170 | #Run the workflow in parallel 171 | analysisflow.run(mode='parallel') 172 | 173 | 174 | By using *multicore processing*, *SGE*, *PBS*, *Torque*, *HTCondor*, *LSF* or other plugins for parallel execution you will be able to reduce your computation time considerably. 
This means, that an analysis of 24 subjects where each takes one hour to process would normally take about one day, but it could be done on a single machine with eight processors in under about three hours. 175 | 176 | .. note:: 177 | The code above is of course a shortened and simplified version of the real code. But it gives you a good idea of what the code would look like, and how straightforward and readable the programming of a neuroimaging pipeline with Nipype is. 178 | 179 | 180 | Nipype's Architecture 181 | ===================== 182 | 183 | Nipype consists of many parts, but the most important ones are **Interfaces**, the **Workflow Engine** and the **Execution Plugins**. 184 | 185 | .. image:: images/architecture.png 186 | :align: center 187 | 188 | .. note:: 189 | For a deeper understanding of Nipype go either to `Nipype's main homepage `_ or read the official paper: Gorgolewski K, Burns CD, Madison C, Clark D, Halchenko YO, Waskom ML, Ghosh SS (2011) **Nipype: a flexible, lightweight and extensible neuroimaging data processing framework in Python.** *Front. Neuroinform. 5:13*. `http://dx.doi.org/10.3389/fninf.2011.00013 `_ 190 | 191 | 192 | Interfaces 193 | ********** 194 | 195 | Interfaces in the context of Nipype are program wrappers that enable Nipype, which runs in Python, to run a program or function in any other programming language. As a result, Python becomes the common denominator of all neuroimaging software packages and allows Nipype to easily connect them to each other. A short tutorial about interfaces can be found on the `official homepage `_. More practical examples will be given later in this beginner's guide. 196 | 197 | For a full list of software interfaces supported by Nipype go `here `_. 198 | 199 | 200 | Workflow Engine 201 | *************** 202 | 203 | The core of Nipype's architecture is the workflow engine. It consists of **Nodes**, **MapNodes** and **Workflows**, which can be interconnected in various ways. 
204 | 205 | * **Node**: A node provides the information -- parameters, filenames, etc. -- that is needed by an interface to run the program properly for a particular job, whether as part of a workflow or separately. 206 | * **MapNode**: A Mapnode is quite similar to a Node, but it differs because it takes multiple inputs of a single type to create a single output. For example, it might specify multiple DICOM files to create one NIfTI file. 207 | * **Workflow**: A workflow (also called a pipeline), is a directed acyclic graph (DAG) or forest of graphs whose nodes are of type Node, MapNode or Workflow and whose edges (lines connecting nodes) represent data flow. 208 | 209 | Each Node, MapNode or Workflow has (at least) one input field and (at least) one output field. Those fields specify the dataflow into and out of a Node, MapNode or Workflow. MapNodes use fields to specify multiple inputs (basically a list of input items). There they are called *iterfields* because the interface will iterate over the list of input items, and they have to be labeled as such to distinguish them from single-item fields. 210 | 211 | A very cool feature of a Nipype workflow are so called **iterables**. Iterables allow you to run a given workflow or subgraph several times with changing input values. For example, if you want to run an analysis pipeline on multiple subjects or with an FWHM smoothing kernel of 4mm, 6mm, and 8mm. This can easily be achieved with iterables and additionally allows you to do this all in parallel (simultaneous execution), if requested. 212 | 213 | Go to the documentation section of `Nipype's main homepage `_ to read more about `MapNode, iterfield, and iterables `_, `JoinNode, synchronize and itersource `_ and `much more `_. Nonetheless, a more detailed explanation will be given in a `later section `_ of this beginner's guide. 214 | 215 | .. 
note:: 216 | For more practical and extended examples of Nipype concepts see `Michael Waskom `_'s really cool Jupyter notebooks about `Interfaces `_, `Iteration `_ and `Workflows `_. 217 | 218 | 219 | Execution Plugins 220 | ***************** 221 | 222 | Plugins are components that describe how a workflow should be executed. They allow seamless execution across many architectures and make using parallel computation quite easy. 223 | 224 | On a local machine, you can use the plugin **Serial** for a linear, or serial, execution of your workflow. If your machine has more than one core, you can use the **Multicore** plugin for parallel execution of your workflow. On a cluster, you have the option of using plugins for: 225 | 226 | * **HTCondor** 227 | * **PBS, Torque, SGE, LSF** (native and via IPython) 228 | * **SSH** (via IPython) 229 | * **Soma Workflow** 230 | 231 | .. note:: 232 | Cluster operation often needs a special setup. You may wish to consult your cluster operators about which plugins are available. 233 | 234 | 235 | 236 | To show how easily this can be done, the following code shows how to run a workflow with different plugins: 237 | 238 | .. code-block:: py 239 | :linenos: 240 | 241 | # Normally calling run executes the workflow in series 242 | workflow.run() 243 | 244 | # But you can scale to parallel very easily. 
245 | # For example, to use multiple cores on your local machine 246 | workflow.run('MultiProc', plugin_args={'n_procs': 4}) 247 | 248 | # or to other job managers 249 | workflow.run('PBS', plugin_args={'qsub_args': '-q many'}) 250 | workflow.run('SGE', plugin_args={'qsub_args': '-q many'}) 251 | workflow.run('LSF', plugin_args={'qsub_args': '-q many'}) 252 | workflow.run('Condor') 253 | workflow.run('IPython') 254 | 255 | # or submit graphs as a whole 256 | workflow.run('PBSGraph', plugin_args={'qsub_args': '-q many'}) 257 | workflow.run('SGEGraph', plugin_args={'qsub_args': '-q many'}) 258 | workflow.run('CondorDAGMan') 259 | 260 | 261 | More about Plugins can be found on Nipype's main homepage under `Using Nipype Plugins `_. 262 | 263 | -------------------------------------------------------------------------------- /homepage/nipypeAndNeuroimaging.rst: -------------------------------------------------------------------------------- 1 | .. important:: 2 | 3 | This guide hasn't been updated since January 2017 and is based on an older version of Nipype. The code in this guide is not tested against newer Nipype versions and might not work anymore. For a newer, more up to date and better introduction to Nipype, please check out the the `Nipype Tutorial `_. 4 | 5 | ======================= 6 | Nipype and Neuroimaging 7 | ======================= 8 | 9 | As you've seen in the previous chapter, there are many steps involved in the analysis of neuroimaging data. And there are even more possibilities to combine them. And adding to all this complexity, there are often numerous different software packages for each step. This is where Nipype can help you. Changing the order of preprocessing or analysis steps is as simple as changing the flow of a workflow. 10 | 11 | 12 | Neuroimaging Workflow 13 | ===================== 14 | 15 | Let's get back to the steps involved in the analysis of fMRI data from the previous chapter. 
Keep in mind that this is only one possible way of preprocessing and analyzing fMRI data. But if we connect up all the different steps into one big workflow we end up with the following structure. 16 | 17 | .. only:: html 18 | 19 | .. image:: images/neuroimaging.png 20 | :align: center 21 | :width: 550pt 22 | 23 | 24 | .. only:: latex 25 | 26 | .. image:: images/neuroimaging1.png 27 | :align: center 28 | :width: 300pt 29 | 30 | .. image:: images/neuroimaging2.png 31 | :align: center 32 | :width: 300pt 33 | 34 | 35 | This all seems to be really big and complex. And what if you first want to do a motion correction and then a slice timing correction? Or add an additional step into an already established analysis? With Nipype, this is very easy. 36 | 37 | 38 | Nipype Workflow 39 | =============== 40 | 41 | Nipype enables you to create the exact workflow that you want and gives you the opportunity to switch between the software packages (e.g. `FreeSurfer`, `FSL`, `SPM`, `ANTs`, `AFNI`,...) as you like. The power to analyze your data exactly as you want it lies in your hands. 42 | 43 | Let's get back to the workflow above. In the world of Nipype, this neuroimaging workflow would look something like this: 44 | 45 | .. only:: html 46 | 47 | .. image:: images/neuroimaging_nipype.svg 48 | :align: center 49 | :width: 750pt 50 | 51 | .. only:: latex 52 | 53 | .. image:: images/neuroimaging_nipype.png 54 | :align: center 55 | :width: 325pt 56 | 57 | As you can see, each step of the process is represented by a node in the workflow (e.g. `Motion Correction`, `Coregistration`, `Normalization`, `Smoothing`, `Model Estimation`). And each of those nodes can come from a different software package (e.g. `FreeSurfer`, `FSL`, `SPM`, `ANTs`, `AFNI`, `Nipype`). The freedom to choose the software package to use, to guide the flow and sequential order of the execution is completely up to you. Even if you want to run a node with different parameters (e.g. 
`fwhm = 4 and 8`) this can be done with no problem. And the great thing about all this: all of those steps can be done in parallel! 58 | 59 | If you understand those concepts, you will be able to use Nipype in no time. Because this is all there is to know about Nipype. But before we'll start with the first Nipype script, let's first make sure that your system is set up correctly. All about how to check that and how to install all required software packages can be found in the next chapter of this Beginner's Guide. 60 | 61 | .. note:: 62 | This guide is meant as a general introduction. The implementation of Nipype is nearly unlimited and there is a lot of advanced knowledge that won't be covered by this guide. But you can look it up at various places on the `Nipype homepage `_. A lot of very good tutorials and examples about the usage of workflows for specific situations can be found here: `Tutorials and Examples `_. 63 | 64 | -------------------------------------------------------------------------------- /homepage/secondLevel.rst: -------------------------------------------------------------------------------- 1 | .. important:: 2 | 3 | This guide hasn't been updated since January 2017 and is based on an older version of Nipype. The code in this guide is not tested against newer Nipype versions and might not work anymore. For a newer, more up to date and better introduction to Nipype, please check out the `Nipype Tutorial `_. 4 | 5 | ======================================================== 6 | How To Build A Pipeline For A Second Level fMRI Analysis 7 | ======================================================== 8 | 9 | In this section you will learn how to create a workflow that does a **second level analysis** on fMRI data. There are again multiple ways how you can do this, but the most simple one is to check if your contrasts from the first level analysis are still significant on the group-level a.k.a. the 2nd level. 10 | 11 | .. 
note:: 12 | 13 | You can only do a **second level analysis** if you already have done a first level analysis, obviously! But more importantly, those first level contrasts have to be in a common reference space. Otherwise there is no way of actually comparing them with each other and getting valid results from them. Luckily, if you've done the previous step, you've already normalized your data (either with ANTs or SPM) to a template. 14 | 15 | 16 | Write your pipeline script 17 | ========================== 18 | 19 | If you've already done the previous sections, you know how this works. We first import necessary modules, define experiment specific parameters, create nodes, create a workflow and connect the nodes to it, we create an I/O stream and connect it to the workflow and finally run this workflow. 20 | 21 | 22 | Import modules and specify interface behaviors 23 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 24 | 25 | .. code-block:: py 26 | :linenos: 27 | 28 | from os.path import join as opj 29 | from nipype.interfaces.io import SelectFiles, DataSink 30 | from nipype.interfaces.spm import (OneSampleTTestDesign, EstimateModel, 31 | EstimateContrast, Threshold) 32 | from nipype.interfaces.utility import IdentityInterface 33 | from nipype.pipeline.engine import Workflow, Node 34 | 35 | # Specification to MATLAB 36 | from nipype.interfaces.matlab import MatlabCommand 37 | MatlabCommand.set_default_paths('/usr/local/MATLAB/R2014a/toolbox/spm12') 38 | MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") 39 | 40 | 41 | Define experiment specific parameters 42 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 43 | 44 | .. 
code-block:: py 45 | :linenos: 46 | 47 | experiment_dir = '~/nipype_tutorial' # location of experiment folder 48 | output_dir = 'output_fMRI_example_2nd_ants' # name of 2nd-level output folder 49 | input_dir_norm = 'output_fMRI_example_norm_ants'# name of norm output folder 50 | working_dir = 'workingdir_fMRI_example_2nd_ants'# name of working directory 51 | subject_list = ['sub001', 'sub002', 'sub003', 52 | 'sub004', 'sub005', 'sub006', 53 | 'sub007', 'sub008', 'sub009', 54 | 'sub010'] # list of subject identifiers 55 | contrast_list = ['con_0001', 'con_0002', 'con_0003', 56 | 'con_0004', 'ess_0005', 'ess_0006'] # list of contrast identifiers 57 | 58 | .. note:: 59 | Pay attention to the name of the ``input_dir_norm``. Depending on the way you normalized your data, ANTs or SPM, the folder name has either the ending ``_ants`` or ``_spm``. 60 | 61 | 62 | 63 | Create nodes 64 | ~~~~~~~~~~~~ 65 | 66 | We don't need many nodes for a simple second level analysis. In fact they are the same as the ones we used for the first level analysis. We create a simple T-Test, estimate it and look at a simple mean contrast, i.e. a contrast that shows what the group mean activation of a certain first level contrast is. 67 | 68 | .. code-block:: py 69 | :linenos: 70 | 71 | # One Sample T-Test Design - creates one sample T-Test Design 72 | onesamplettestdes = Node(OneSampleTTestDesign(), 73 | name="onesampttestdes") 74 | 75 | # EstimateModel - estimate the parameters of the model 76 | level2estimate = Node(EstimateModel(estimation_method={'Classical': 1}), 77 | name="level2estimate") 78 | 79 | # EstimateContrast - estimates simple group contrast 80 | level2conestimate = Node(EstimateContrast(group_contrast=True), 81 | name="level2conestimate") 82 | cont1 = ['Group', 'T', ['mean'], [1]] 83 | level2conestimate.inputs.contrasts = [cont1] 84 | 85 | 86 | Create the pipeline and connect nodes to it 87 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 88 | 89 | .. 
code-block:: py 90 | :linenos: 91 | 92 | 93 | # Specify 2nd-Level Analysis Workflow & Connect Nodes 94 | l2analysis = Workflow(name='l2analysis') 95 | l2analysis.base_dir = opj(experiment_dir, working_dir) 96 | 97 | # Connect up the 2nd-level analysis components 98 | l2analysis.connect([(onesamplettestdes, level2estimate, [('spm_mat_file', 99 | 'spm_mat_file')] ), 100 | (level2estimate, level2conestimate, [('spm_mat_file', 101 | 'spm_mat_file'), 102 | ('beta_images', 103 | 'beta_images'), 104 | ('residual_image', 105 | 'residual_image')]), 106 | ]) 107 | 108 | 109 | Establish Input & Output Stream 110 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 111 | 112 | The creation of the I/O stream is as usual. But because I showed you three ways to normalize your data in the previous section, be aware that you have to point the ``SelectFiles`` node to the right input folder. Your option for the ``SelectFiles`` input template are as follows: 113 | 114 | .. code-block:: py 115 | :linenos: 116 | 117 | # contrast template for ANTs normalization (complete) 118 | con_file = opj(input_dir_norm, 'warp_complete', 'sub*', 'warpall*', 119 | '{contrast_id}_trans.nii') 120 | 121 | # contrast template for ANTs normalization (partial) 122 | con_file = opj(input_dir_norm, 'warp_partial', 'sub*', 'apply2con*', 123 | '{contrast_id}_out_trans.nii.gz') 124 | 125 | # contrast template for SPM normalization 126 | con_file = opj(input_dir_norm, 'normalized', 'sub*', 127 | '*{contrast_id}_out.nii') 128 | 129 | 130 | .. note:: 131 | It is very important to notice that only contrast images (e.g. ``con``-images) can be used for a second-level group analysis. It is statistically incorrect to use statistic images, such as ``spmT``- or ``spmF``-images. 132 | 133 | 134 | The following example is adjusted for the situation where the normalization was done with ANTs. The code for the I/O stream looks as follows: 135 | 136 | .. 
code-block:: py 137 | :linenos: 138 | 139 | # Infosource - a function free node to iterate over the list of subject names 140 | infosource = Node(IdentityInterface(fields=['contrast_id']), 141 | name="infosource") 142 | infosource.iterables = [('contrast_id', contrast_list)] 143 | 144 | # SelectFiles - to grab the data (alternative to DataGrabber) 145 | con_file = opj(input_dir_norm, 'warp_complete', 'sub*', 'warpall*', 146 | '{contrast_id}_trans.nii') 147 | templates = {'cons': con_file} 148 | 149 | selectfiles = Node(SelectFiles(templates, 150 | base_directory=experiment_dir), 151 | name="selectfiles") 152 | 153 | # Datasink - creates output folder for important outputs 154 | datasink = Node(DataSink(base_directory=experiment_dir, 155 | container=output_dir), 156 | name="datasink") 157 | 158 | # Use the following DataSink output substitutions 159 | substitutions = [('_contrast_id_', '')] 160 | datasink.inputs.substitutions = substitutions 161 | 162 | # Connect SelectFiles and DataSink to the workflow 163 | l2analysis.connect([(infosource, selectfiles, [('contrast_id', 164 | 'contrast_id')]), 165 | (selectfiles, onesamplettestdes, [('cons', 'in_files')]), 166 | (level2conestimate, datasink, [('spm_mat_file', 167 | 'contrasts.@spm_mat'), 168 | ('spmT_images', 169 | 'contrasts.@T'), 170 | ('con_images', 171 | 'contrasts.@con')]), 172 | ]) 173 | 174 | 175 | If you've normalized your data with ANTs but did only the so called **partial** approach, the code above will not work and crash with the following message: 176 | 177 | .. code-block:: matlab 178 | :linenos: 179 | 180 | Item 'Scans', field 'val': Number of matching files (0) less than required (1). 181 | 182 | Standard error: 183 | MATLAB code threw an exception: 184 | ... 185 | Name:pyscript_onesamplettestdesign 186 | ... 187 | Interface OneSampleTTestDesign failed to run. 188 | 189 | 190 | Such errors are sometimes hard to read. 
What this message means is that SPM's ``onesamplettestdes`` tried to open an image-file but was only able to read out 0 scans, of the requested at least 1. This is a common message where SPM tries to read a zipped NIfTI file (ending with ``nii.gz``) and cannot unpack it. To solve this issue we only need to insert an additional ``Gunzip`` node in our pipeline and redirect the workflow through this new gunzip node before it goes to the ``onesamplettestdes`` node. So the new code looks as follows: 191 | 192 | .. code-block:: py 193 | :linenos: 194 | 195 | # Gunzip - unzip the contrast image 196 | from nipype.algorithms.misc import Gunzip 197 | from nipype.pipeline.engine import MapNode 198 | gunzip_con = MapNode(Gunzip(), name="gunzip_con", 199 | iterfield=['in_file']) 200 | 201 | # Connect SelectFiles and DataSink to the workflow 202 | l2analysis.connect([(infosource, selectfiles, [('contrast_id', 203 | 'contrast_id')]), 204 | (selectfiles, gunzip_con, [('cons', 'in_file')]), 205 | (gunzip_con, onesamplettestdes, [('out_file', 206 | 'in_files')]), 207 | (level2conestimate, datasink, [('spm_mat_file', 208 | 'contrasts.@spm_mat'), 209 | ('spmT_images', 210 | 'contrasts.@T'), 211 | ('con_images', 212 | 'contrasts.@con')]), 213 | ]) 214 | 215 | 216 | Run the pipeline and generate the graph 217 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 218 | 219 | .. code-block:: py 220 | :linenos: 221 | 222 | l2analysis.write_graph(graph2use='colored') 223 | l2analysis.run('MultiProc', plugin_args={'n_procs': 8}) 224 | 225 | 226 | .. hint:: 227 | 228 | You can download the code for this 2nd level pipeline as a script here: `example_fMRI_3_second_level.py `_ 229 | 230 | 231 | Visualize your pipeline 232 | ======================= 233 | 234 | The colored graph of the 2nd-level workflow looks as follows: 235 | 236 | .. only:: html 237 | 238 | .. image:: images/2nd_level_colored.png 239 | :width: 200pt 240 | :align: center 241 | 242 | .. only:: latex 243 | 244 | .. 
image:: images/2nd_level_colored.png 245 | :width: 125pt 246 | :align: center 247 | 248 | 249 | 250 | 251 | Resulting Folder Structure 252 | ========================== 253 | 254 | The resulting folder structure looks as follows: 255 | 256 | .. code-block:: sh 257 | 258 | output_fMRI_example_2nd 259 | |-- contrasts 260 | |-- con_0001 261 | | |-- con_0001.nii 262 | | |-- SPM.mat 263 | | |-- spmT_0001.nii 264 | |-- con_0002 265 | |-- con_0003 266 | |-- con_0004 267 | |-- ess_0005 268 | | |-- ess_0005.nii 269 | | |-- SPM.mat 270 | | |-- spmF_0005.nii 271 | |-- ess_0006 272 | -------------------------------------------------------------------------------- /homepage/tableofcontent.rst: -------------------------------------------------------------------------------- 1 | .. ######################################## 2 | # # 3 | # Nipype Beginner's Guide # 4 | # # 5 | # Author: Michael Notter # 6 | # miykaelnotter@gmail.com # 7 | # # 8 | ######################################## 9 | 10 | .. important:: 11 | 12 | This guide hasn't been updated since January 2017 and is based on an older version of Nipype. The code in this guide is not tested against newer Nipype versions and might not work anymore. For a newer, more up to date and better introduction to Nipype, please check out the the `Nipype Tutorial `_. 13 | 14 | ================ 15 | Table of content 16 | ================ 17 | 18 | Neuroimaging and Nipype 19 | ======================= 20 | 21 | .. toctree:: 22 | :maxdepth: 2 23 | 24 | nipype 25 | neuroimaging 26 | nipypeAndNeuroimaging 27 | 28 | 29 | Get Nipype to run on your System 30 | ================================ 31 | 32 | .. toctree:: 33 | :maxdepth: 2 34 | 35 | installation 36 | 37 | 38 | First steps with Nipype 39 | ======================= 40 | 41 | .. toctree:: 42 | :maxdepth: 2 43 | 44 | prepareData 45 | firstSteps 46 | visualizePipeline 47 | 48 | 49 | From beginner to expert 50 | ======================= 51 | .. 
toctree:: 52 | :maxdepth: 2 53 | 54 | firstLevel 55 | normalize 56 | secondLevel 57 | 58 | 59 | Support 60 | ======= 61 | 62 | .. toctree:: 63 | :maxdepth: 2 64 | 65 | help 66 | links 67 | faq 68 | glossary 69 | 70 | 71 | Downloads 72 | ========= 73 | 74 | * Download Nipype here: `Nipype Homepage `_. 75 | * Download this beginner's guide as a PDF here: `Nipype Beginner's Guide `_ 76 | * Download all scripts from this beginner's guide here: `Scripts `_ 77 | * Download the `Open fMRI dataset 'Flanker task (event-related)' `_ used in this tutorial here: `Open fMRI Dataset - ds102 `_ 78 | 79 | 80 | -------------------------------------------------------------------------------- /homepage/updateHomepage.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # This script updates the homepage (branch: gh-pages) 4 | # according the folder 'homepage' under the master branch 5 | 6 | # Clean up previous builds 7 | make clean 8 | 9 | # Create html files 10 | make html 11 | 12 | # Copy relevant html content into a temporary folder 13 | TMP_DIR=`mktemp -d` 14 | cp -r _build/html "$TMP_DIR"/. 15 | rm -rf _build 16 | 17 | # Switch to gh-pages branch 18 | cd .. 19 | git checkout gh-pages 20 | 21 | # Update content 22 | rm -rf * 23 | cp -r "$TMP_DIR"/html/* . 
24 | 25 | # Submit changes with current timestamp 26 | TIMESTAMP=`date +'%Y-%m-%d %H:%M:%S'` 27 | git add * 28 | git commit -a -m "Homepage update ${TIMESTAMP}" 29 | git push origin gh-pages 30 | 31 | # Remove temporary folder 32 | rm -rf "$TMP_DIR" 33 | 34 | # Go back to the folder 'homepage' on the master branch 35 | git checkout master 36 | cd homepage/ 37 | 38 | # Create PDF file 39 | make latexpdf 40 | 41 | # Update PDF file 42 | cp _build/latex/NipypeBeginnersGuide.pdf ../NipypeBeginnersGuide.pdf 43 | 44 | # Clean up latexpdf build 45 | make clean 46 | 47 | # Submit newest PDF with current timestamp to homepage 48 | git add ../NipypeBeginnersGuide.pdf 49 | git commit -m "PDF update ${TIMESTAMP}" 50 | git push origin master 51 | -------------------------------------------------------------------------------- /homepage/visualizePipeline.rst: -------------------------------------------------------------------------------- 1 | .. important:: 2 | 3 | This guide hasn't been updated since January 2017 and is based on an older version of Nipype. The code in this guide is not tested against newer Nipype versions and might not work anymore. For a newer, more up to date and better introduction to Nipype, please check out the the `Nipype Tutorial `_. 4 | 5 | =========================== 6 | How To Visualize A Pipeline 7 | =========================== 8 | 9 | The option to visualize your workflow is a great feature of Nipype. It allows you to see your analysis in one piece, control the connections between the nodes and check which input and output fields are connected. 10 | 11 | 12 | What kind of graph do you need? 13 | =============================== 14 | 15 | You can visualize your pipeline as soon as you have a workflow that contains any nodes and connections between them. To create a graph, simply use the function ``write_graph()`` on your workflow: 16 | 17 | .. 
code-block:: py 18 | 19 | workflow.write_graph(graph2use='flat') 20 | 21 | Nipype can create five different kinds of graphs by setting the variable ``graph2use`` to the following parameters: 22 | 23 | * ``orig`` shows only the main workflows and omits any subworkflows 24 | * ``flat`` shows all workflows, including any subworkflows 25 | * ``exec`` shows all workflows, including subworkflows and expands iterables into subgraphs 26 | * ``hierarchical`` shows all workflows, including subworkflows and also shows the hierarchical structure 27 | * ``colored`` gives you the same output as ``hierarchical`` but color codes the different levels and the connections within those levels according to their hierarchical depth. 28 | 29 | All types, except hierarchical and colored, create two graph files. The difference of those two files is in the level of detail they show. There is a **simple overview graph** called ``graph.dot`` which gives you the basic connections between nodes and a **more detailed overview graph** called ``graph_detailed.dot`` which additionally gives you the output and input fields of each node and the connections between them. The **hierarchical** and **colored graph** on the other side create only a simple overview graph. I mostly use **colored** graphs, as it gives you a fast and clear picture of your workflow structure. 30 | 31 | .. note:: 32 | The ``graph`` files can be found in the highest pipeline folder of your working directory. 33 | 34 | If graphviz is installed the dot files will automatically be converted into png-files. If not, take and load the dot files in any graphviz visualizer of your choice. 35 | 36 | 37 | Tweak your visualization 38 | ======================== 39 | 40 | There are two additional parameters ``format`` and ``simple_form`` that you can use to change your output graph. ``format`` can be used to change the output format of the image file to either ``png`` or ``svg``. 
``simple_form`` determines if the node name shown in the visualization is either of the form ``nodename (package)`` when set to ``True`` or ``nodename.Class.package`` when set to ``False``. 41 | 42 | .. code-block:: py 43 | 44 | workflow.write_graph(graph2use='colored', format='svg', simple_form=True) 45 | 46 | To illustrate, on the left you can see a simple graph of the visualization type **orig** when ``simple_form`` is set to ``True`` and on the right if it is set to ``False``. 47 | 48 | 49 | .. only:: html 50 | 51 | .. image:: images/graph_orig_simple.svg 52 | :height: 250pt 53 | 54 | .. image:: images/graph_orig_notsimple.svg 55 | :height: 250pt 56 | 57 | 58 | .. only:: latex 59 | 60 | .. image:: images/graph_orig_simple.png 61 | :height: 150pt 62 | 63 | .. image:: images/graph_orig_notsimple.png 64 | :height: 200pt 65 | 66 | 67 | Examples of each visualization type 68 | =================================== 69 | 70 | The graphs shown below are visualizations of the **first level analysis pipeline** or ``metaflow`` from the section: `How To Build A First Level Pipeline `_ 71 | 72 | 73 | ``orig`` - simple graph 74 | ~~~~~~~~~~~~~~~~~~~~~~~ 75 | 76 | The simple graph of the visualization type ``orig`` shows only the top layer, i.e. hierarchical highest workflows and nodes, of your workflow. In this case this is the ``metaflow``. Subworkflows such as ``preproc`` and ``l1analysis1`` are represented by a single node. 77 | 78 | .. only:: html 79 | 80 | .. image:: images/graph_orig_simple.svg 81 | :align: center 82 | :width: 300pt 83 | 84 | .. only:: latex 85 | 86 | .. image:: images/graph_orig_simple.png 87 | :align: center 88 | :width: 180pt 89 | 90 | 91 | ``orig`` - detailed graph 92 | ~~~~~~~~~~~~~~~~~~~~~~~~~ 93 | 94 | The detailed graph of the visualization type ``orig`` shows the ``metaflow`` to the same depth as the simple version above, but with a bit more information about input and output fields. 
Now you can see which output of which node is connected to which input of the following node. 95 | 96 | .. only:: html 97 | 98 | .. image:: images/graph_orig_detailed.svg 99 | :align: center 100 | :width: 600pt 101 | 102 | .. only:: latex 103 | 104 | .. image:: images/graph_orig_detailed.png 105 | :align: center 106 | :width: 465pt 107 | 108 | 109 | ``flat`` - simple graph 110 | ~~~~~~~~~~~~~~~~~~~~~~~ 111 | 112 | The simple graph of the visualization type ``flat`` shows all nodes of a workflow. As you can see, subworkflows such as ``preproc`` and ``l1analysis1`` are now expanded and represented by all their containing nodes. 113 | 114 | .. only:: html 115 | 116 | .. image:: images/graph_flat_simple.svg 117 | :align: center 118 | :width: 600pt 119 | 120 | .. only:: latex 121 | 122 | .. image:: images/graph_flat_simple.png 123 | :align: center 124 | :width: 450pt 125 | 126 | 127 | ``flat`` - detailed graph 128 | ~~~~~~~~~~~~~~~~~~~~~~~~~ 129 | 130 | The detailed graph of the visualization type ``flat`` shows the ``metaflow`` in all its glory. This graph shows all nodes, their inputs and outputs and how they are connected to each other. 131 | 132 | .. only:: html 133 | 134 | .. image:: images/graph_flat_detailed.svg 135 | :align: center 136 | :width: 600pt 137 | 138 | .. only:: latex 139 | 140 | .. image:: images/graph_flat_detailed.png 141 | :align: center 142 | :width: 450pt 143 | 144 | 145 | ``exec`` - simple graph 146 | ~~~~~~~~~~~~~~~~~~~~~~~ 147 | 148 | The detailed graph of the visualization type ``exec`` doesn't really show you anything different than the simple graph of the visualization type ``flat``. The advantage of the ``exec`` type lies in the detailed graph. 149 | 150 | .. only:: html 151 | 152 | .. image:: images/graph_exec_simple.svg 153 | :align: center 154 | :width: 600pt 155 | 156 | .. only:: latex 157 | 158 | .. 
image:: images/graph_exec_simple.png 159 | :align: center 160 | :width: 465pt 161 | 162 | 163 | ``exec`` - detailed graph 164 | ~~~~~~~~~~~~~~~~~~~~~~~~~ 165 | 166 | The detailed graph of the visualization type ``exec`` shows you the nodes of the ``metaflow`` with the same details as the visualization type ``flat`` would do. But additionally, all iterables are expanded so that you can see the full hierarchical and parallel structure of your analysis. In the following example the node ``selectfiles`` iterates over ``sub001``, ``sub002`` and ``sub003``. 167 | 168 | .. only:: html 169 | 170 | .. image:: images/graph_exec_detailed.svg 171 | :align: center 172 | :width: 600pt 173 | 174 | .. only:: latex 175 | 176 | .. image:: images/graph_exec_detailed.png 177 | :align: center 178 | :width: 465pt 179 | 180 | 181 | .. note:: 182 | 183 | As you can see from this example, every iteration creates a subgraph with its own index. In this case ``a0``, ``a1`` and ``a2``. Such an indexing structure is also maintained in the folders and subfolders of your working and output directory. 184 | 185 | 186 | ``hierarchical`` - simple graph 187 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 188 | 189 | The graph of the visualization type ``hierarchical`` shows the ``metaflow`` as seen with the visualization type ``flat`` but emphasizes the hierarchical structure of its subworkflows. This is done by surrounding each subworkflow with a box labeled with the name of the subworkflow. Additionally, each node with an iterable field will be shown as a gray box. 190 | 191 | .. only:: html 192 | 193 | .. image:: images/graph_hierarchical.svg 194 | :align: center 195 | :width: 600pt 196 | 197 | .. only:: latex 198 | 199 | .. image:: images/graph_hierarchical.png 200 | :align: center 201 | :width: 450pt 202 | 203 | 204 | In this example you see that the ``metaflow`` contains a ``preproc`` and a ``l1analysis`` workflow. 
205 | 206 | 207 | ``colored`` - simple graph 208 | ~~~~~~~~~~~~~~~~~~~~~~~~~~ 209 | 210 | The graph of the visualization type ``colored`` shows the ``metaflow`` as seen with the visualization type ``hierarchical`` but color codes the different ``hierarchical`` levels as well as the connections between and within those levels with different colors. 211 | 212 | .. only:: html 213 | 214 | .. image:: images/graph_colored.svg 215 | :align: center 216 | :width: 600pt 217 | 218 | .. only:: latex 219 | 220 | .. image:: images/graph_colored.png 221 | :align: center 222 | :width: 450pt 223 | -------------------------------------------------------------------------------- /scripts/example_fMRI_1_first_level.py: -------------------------------------------------------------------------------- 1 | ### 2 | # Import modules 3 | from os.path import join as opj 4 | from nipype.interfaces.afni import Despike 5 | from nipype.interfaces.freesurfer import (BBRegister, ApplyVolTransform, 6 | Binarize, MRIConvert, FSCommand) 7 | from nipype.interfaces.spm import (SliceTiming, Realign, Smooth, Level1Design, 8 | EstimateModel, EstimateContrast) 9 | from nipype.interfaces.utility import Function, IdentityInterface 10 | from nipype.interfaces.io import FreeSurferSource, SelectFiles, DataSink 11 | from nipype.algorithms.rapidart import ArtifactDetect 12 | from nipype.algorithms.misc import TSNR, Gunzip 13 | from nipype.algorithms.modelgen import SpecifySPMModel 14 | from nipype.pipeline.engine import Workflow, Node, MapNode 15 | 16 | # MATLAB - Specify path to current SPM and the MATLAB's default mode 17 | from nipype.interfaces.matlab import MatlabCommand 18 | MatlabCommand.set_default_paths('/usr/local/MATLAB/R2014a/toolbox/spm12') 19 | MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") 20 | 21 | # FreeSurfer - Specify the location of the freesurfer folder 22 | fs_dir = '~/nipype_tutorial/freesurfer' 23 | FSCommand.set_default_subjects_dir(fs_dir) 24 | 25 | 26 | ### 27 | # 
Specify variables 28 | experiment_dir = '~/nipype_tutorial' # location of experiment folder 29 | subject_list = ['sub001', 'sub002', 'sub003', 30 | 'sub004', 'sub005', 'sub006', 31 | 'sub007', 'sub008', 'sub009', 32 | 'sub010'] # list of subject identifiers 33 | output_dir = 'output_fMRI_example_1st' # name of 1st-level output folder 34 | working_dir = 'workingdir_fMRI_example_1st' # name of 1st-level working directory 35 | 36 | number_of_slices = 40 # number of slices in volume 37 | TR = 2.0 # time repetition of volume 38 | fwhm_size = 6 # size of FWHM in mm 39 | 40 | 41 | ### 42 | # Specify Preprocessing Nodes 43 | 44 | # Despike - Removes 'spikes' from the 3D+time input dataset 45 | despike = MapNode(Despike(outputtype='NIFTI'), 46 | name="despike", iterfield=['in_file']) 47 | 48 | # Slicetiming - correct for slice wise acquisition 49 | interleaved_order = range(1,number_of_slices+1,2) + range(2,number_of_slices+1,2) 50 | sliceTiming = Node(SliceTiming(num_slices=number_of_slices, 51 | time_repetition=TR, 52 | time_acquisition=TR-TR/number_of_slices, 53 | slice_order=interleaved_order, 54 | ref_slice=2), 55 | name="sliceTiming") 56 | 57 | # Realign - correct for motion 58 | realign = Node(Realign(register_to_mean=True), 59 | name="realign") 60 | 61 | # TSNR - remove polynomials 2nd order 62 | tsnr = MapNode(TSNR(regress_poly=2), 63 | name='tsnr', iterfield=['in_file']) 64 | 65 | # Artifact Detection - determine which of the images in the functional series 66 | # are outliers. This is based on deviation in intensity or movement. 
67 | art = Node(ArtifactDetect(norm_threshold=1, 68 | zintensity_threshold=3, 69 | mask_type='file', 70 | parameter_source='SPM', 71 | use_differences=[True, False]), 72 | name="art") 73 | 74 | # Gunzip - unzip functional 75 | gunzip = MapNode(Gunzip(), name="gunzip", iterfield=['in_file']) 76 | 77 | # Smooth - to smooth the images with a given kernel 78 | smooth = Node(Smooth(fwhm=fwhm_size), 79 | name="smooth") 80 | 81 | # FreeSurferSource - Data grabber specific for FreeSurfer data 82 | fssource = Node(FreeSurferSource(subjects_dir=fs_dir), 83 | run_without_submitting=True, 84 | name='fssource') 85 | 86 | # BBRegister - coregister a volume to the Freesurfer anatomical 87 | bbregister = Node(BBRegister(init='fsl', 88 | contrast_type='t2', 89 | out_fsl_file=True), 90 | name='bbregister') 91 | 92 | # Volume Transformation - transform the brainmask into functional space 93 | applyVolTrans = Node(ApplyVolTransform(inverse=True), 94 | name='applyVolTrans') 95 | 96 | # Binarize - binarize and dilate an image to create a brainmask 97 | binarize = Node(Binarize(min=0.5, 98 | dilate=1, 99 | out_type='nii'), 100 | name='binarize') 101 | 102 | 103 | ### 104 | # Specify Preprocessing Workflow & Connect Nodes 105 | 106 | # Create a preprocessing workflow 107 | preproc = Workflow(name='preproc') 108 | 109 | # Connect all components of the preprocessing workflow 110 | preproc.connect([(despike, sliceTiming, [('out_file', 'in_files')]), 111 | (sliceTiming, realign, [('timecorrected_files', 'in_files')]), 112 | (realign, tsnr, [('realigned_files', 'in_file')]), 113 | (tsnr, art, [('detrended_file', 'realigned_files')]), 114 | (realign, art, [('mean_image', 'mask_file'), 115 | ('realignment_parameters', 116 | 'realignment_parameters')]), 117 | (tsnr, gunzip, [('detrended_file', 'in_file')]), 118 | (gunzip, smooth, [('out_file', 'in_files')]), 119 | (realign, bbregister, [('mean_image', 'source_file')]), 120 | (fssource, applyVolTrans, [('brainmask', 'target_file')]), 121 | 
(bbregister, applyVolTrans, [('out_reg_file', 'reg_file')]), 122 | (realign, applyVolTrans, [('mean_image', 'source_file')]), 123 | (applyVolTrans, binarize, [('transformed_file', 'in_file')]), 124 | ]) 125 | 126 | 127 | ### 128 | # Specify 1st-Level Analysis Nodes 129 | 130 | # SpecifyModel - Generates SPM-specific Model 131 | modelspec = Node(SpecifySPMModel(concatenate_runs=False, 132 | input_units='secs', 133 | output_units='secs', 134 | time_repetition=TR, 135 | high_pass_filter_cutoff=128), 136 | name="modelspec") 137 | 138 | # Level1Design - Generates an SPM design matrix 139 | level1design = Node(Level1Design(bases={'hrf': {'derivs': [0, 0]}}, 140 | timing_units='secs', 141 | interscan_interval=TR, 142 | model_serial_correlations='AR(1)'), 143 | name="level1design") 144 | 145 | # EstimateModel - estimate the parameters of the model 146 | level1estimate = Node(EstimateModel(estimation_method={'Classical': 1}), 147 | name="level1estimate") 148 | 149 | # EstimateContrast - estimates contrasts 150 | conestimate = Node(EstimateContrast(), name="conestimate") 151 | 152 | # Volume Transformation - transform contrasts into anatomical space 153 | applyVolReg = MapNode(ApplyVolTransform(fs_target=True), 154 | name='applyVolReg', 155 | iterfield=['source_file']) 156 | 157 | # MRIConvert - to gzip output files 158 | mriconvert = MapNode(MRIConvert(out_type='niigz'), 159 | name='mriconvert', 160 | iterfield=['in_file']) 161 | 162 | 163 | ### 164 | # Specify 1st-Level Analysis Workflow & Connect Nodes 165 | 166 | # Initiation of the 1st-level analysis workflow 167 | l1analysis = Workflow(name='l1analysis') 168 | 169 | # Connect up the 1st-level analysis components 170 | l1analysis.connect([(modelspec, level1design, [('session_info', 171 | 'session_info')]), 172 | (level1design, level1estimate, [('spm_mat_file', 173 | 'spm_mat_file')]), 174 | (level1estimate, conestimate, [('spm_mat_file', 175 | 'spm_mat_file'), 176 | ('beta_images', 177 | 'beta_images'), 178 | 
('residual_image', 179 | 'residual_image')]), 180 | (conestimate, applyVolReg, [('con_images', 181 | 'source_file')]), 182 | (applyVolReg, mriconvert, [('transformed_file', 183 | 'in_file')]), 184 | ]) 185 | 186 | 187 | ### 188 | # Specify Meta-Workflow & Connect Sub-Workflows 189 | metaflow = Workflow(name='metaflow') 190 | metaflow.base_dir = opj(experiment_dir, working_dir) 191 | 192 | metaflow.connect([(preproc, l1analysis, [('realign.realignment_parameters', 193 | 'modelspec.realignment_parameters'), 194 | ('smooth.smoothed_files', 195 | 'modelspec.functional_runs'), 196 | ('art.outlier_files', 197 | 'modelspec.outlier_files'), 198 | ('binarize.binary_file', 199 | 'level1design.mask_image'), 200 | ('bbregister.out_reg_file', 201 | 'applyVolReg.reg_file'), 202 | ]), 203 | ]) 204 | 205 | 206 | ### 207 | # Specify Model - Condition, Onset, Duration, Contrast 208 | 209 | # Condition names 210 | condition_names = ['congruent', 'incongruent'] 211 | 212 | # Contrasts 213 | cont01 = ['congruent', 'T', condition_names, [1, 0]] 214 | cont02 = ['incongruent', 'T', condition_names, [0, 1]] 215 | cont03 = ['congruent vs incongruent', 'T', condition_names, [1, -1]] 216 | cont04 = ['incongruent vs congruent', 'T', condition_names, [-1, 1]] 217 | cont05 = ['Cond vs zero', 'F', [cont01, cont02]] 218 | cont06 = ['Diff vs zero', 'F', [cont03, cont04]] 219 | 220 | contrast_list = [cont01, cont02, cont03, cont04, cont05, cont06] 221 | 222 | # Function to get Subject specific condition information 223 | def get_subject_info(subject_id): 224 | from os.path import join as opj 225 | path = '~/nipype_tutorial/data/%s' % subject_id 226 | onset_info = [] 227 | for run in ['01', '02']: 228 | for cond in ['01', '02', '03', '04']: 229 | onset_file = opj(path, 'onset_run0%s_cond0%s.txt'%(run, cond)) 230 | with open(onset_file, 'rt') as f: 231 | for line in f: 232 | info = line.strip().split() 233 | if info[1] != '0.00': 234 | onset_info.append(['cond0%s'%cond, 235 | 'run0%s'%run, 236 | 
float(info[0])]) 237 | onset_run1_congruent = [] 238 | onset_run1_incongruent = [] 239 | onset_run2_congruent = [] 240 | onset_run2_incongruent = [] 241 | 242 | for info in onset_info: 243 | if info[1] == 'run001': 244 | if info[0] == 'cond001' or info[0] == 'cond002': 245 | onset_run1_congruent.append(info[2]) 246 | elif info[0] == 'cond003' or info[0] == 'cond004': 247 | onset_run1_incongruent.append(info[2]) 248 | if info[1] == 'run002': 249 | if info[0] == 'cond001' or info[0] == 'cond002': 250 | onset_run2_congruent.append(info[2]) 251 | elif info[0] == 'cond003' or info[0] == 'cond004': 252 | onset_run2_incongruent.append(info[2]) 253 | 254 | onset_list = [sorted(onset_run1_congruent), sorted(onset_run1_incongruent), 255 | sorted(onset_run2_congruent), sorted(onset_run2_incongruent)] 256 | 257 | from nipype.interfaces.base import Bunch 258 | condition_names = ['congruent', 'incongruent'] 259 | 260 | subjectinfo = [] 261 | for r in range(2): 262 | onsets = [onset_list[r*2], onset_list[r*2+1]] 263 | subjectinfo.insert(r, 264 | Bunch(conditions=condition_names, 265 | onsets=onsets, 266 | durations=[[0], [0]], 267 | amplitudes=None, 268 | tmod=None, 269 | pmod=None, 270 | regressor_names=None, 271 | regressors=None)) 272 | return subjectinfo 273 | 274 | # Get Subject Info - get subject specific condition information 275 | getsubjectinfo = Node(Function(input_names=['subject_id'], 276 | output_names=['subject_info'], 277 | function=get_subject_info), 278 | name='getsubjectinfo') 279 | 280 | 281 | ### 282 | # Input & Output Stream 283 | 284 | # Infosource - a function free node to iterate over the list of subject names 285 | infosource = Node(IdentityInterface(fields=['subject_id', 286 | 'contrasts'], 287 | contrasts=contrast_list), 288 | name="infosource") 289 | infosource.iterables = [('subject_id', subject_list)] 290 | 291 | # SelectFiles - to grab the data (alternativ to DataGrabber) 292 | templates = {'func': 'data/{subject_id}/run*.nii.gz'} 293 | selectfiles 
= Node(SelectFiles(templates, 294 | base_directory=experiment_dir), 295 | name="selectfiles") 296 | 297 | # Datasink - creates output folder for important outputs 298 | datasink = Node(DataSink(base_directory=experiment_dir, 299 | container=output_dir), 300 | name="datasink") 301 | 302 | # Use the following DataSink output substitutions 303 | substitutions = [('_subject_id_', ''), 304 | ('_despike', ''), 305 | ('_detrended', ''), 306 | ('_warped', '')] 307 | datasink.inputs.substitutions = substitutions 308 | 309 | # Connect Infosource, SelectFiles and DataSink to the main workflow 310 | metaflow.connect([(infosource, selectfiles, [('subject_id', 'subject_id')]), 311 | (infosource, preproc, [('subject_id', 312 | 'bbregister.subject_id'), 313 | ('subject_id', 314 | 'fssource.subject_id')]), 315 | (selectfiles, preproc, [('func', 'despike.in_file')]), 316 | (infosource, getsubjectinfo, [('subject_id', 'subject_id')]), 317 | (getsubjectinfo, l1analysis, [('subject_info', 318 | 'modelspec.subject_info')]), 319 | (infosource, l1analysis, [('contrasts', 320 | 'conestimate.contrasts')]), 321 | (preproc, datasink, [('realign.mean_image', 322 | 'preprocout.@mean'), 323 | ('realign.realignment_parameters', 324 | 'preprocout.@parameters'), 325 | ('art.outlier_files', 326 | 'preprocout.@outliers'), 327 | ('art.plot_files', 328 | 'preprocout.@plot'), 329 | ('binarize.binary_file', 330 | 'preprocout.@brainmask'), 331 | ('bbregister.out_reg_file', 332 | 'bbregister.@out_reg_file'), 333 | ('bbregister.out_fsl_file', 334 | 'bbregister.@out_fsl_file'), 335 | ('bbregister.registered_file', 336 | 'bbregister.@registered_file'), 337 | ]), 338 | (l1analysis, datasink, [('mriconvert.out_file', 339 | 'contrasts.@contrasts'), 340 | ('conestimate.spm_mat_file', 341 | 'contrasts.@spm_mat'), 342 | ('conestimate.spmT_images', 343 | 'contrasts.@T'), 344 | ('conestimate.con_images', 345 | 'contrasts.@con'), 346 | ]), 347 | ]) 348 | 349 | 350 | ### 351 | # Run Workflow 352 | 
###
# example_fMRI_2_normalize_ANTS_complete.py
# Normalize 1st-level contrast images to MNI space with ANTs, going through
# the subject's FreeSurfer anatomy: BBRegister (func -> anat) is concatenated
# with the ANTs anat -> template warp and applied in a single resampling step.

###
# Import modules
from os.path import join as opj
from nipype.interfaces.ants import Registration, ApplyTransforms
from nipype.interfaces.freesurfer import FSCommand, MRIConvert, BBRegister
from nipype.interfaces.c3 import C3dAffineTool
from nipype.interfaces.utility import IdentityInterface, Merge
from nipype.interfaces.io import SelectFiles, DataSink, FreeSurferSource
from nipype.pipeline.engine import Workflow, Node, MapNode
from nipype.interfaces.fsl import Info

# FreeSurfer - Specify the location of the freesurfer folder
fs_dir = '~/nipype_tutorial/freesurfer'
FSCommand.set_default_subjects_dir(fs_dir)


###
# Specify variables
experiment_dir = '~/nipype_tutorial'          # location of experiment folder
input_dir_1st = 'output_fMRI_example_1st'     # name of 1st-level output folder
output_dir = 'output_fMRI_example_norm_ants'  # name of norm output folder
working_dir = 'workingdir_fMRI_example_norm_ants'  # name of norm working directory
subject_list = ['sub001', 'sub002', 'sub003',
                'sub004', 'sub005', 'sub006',
                'sub007', 'sub008', 'sub009',
                'sub010']                     # list of subject identifiers

# location of template file
template = Info.standard_image('MNI152_T1_1mm_brain.nii.gz')


###
# Specify Normalization Nodes

# Registration - computes registration between subject's structural and MNI
# template (Rigid -> Affine -> SyN, multi-resolution).
antsreg = Node(Registration(args='--float',
                            collapse_output_transforms=True,
                            fixed_image=template,
                            initial_moving_transform_com=True,
                            num_threads=1,
                            output_inverse_warped_image=True,
                            output_warped_image=True,
                            sigma_units=['vox']*3,
                            transforms=['Rigid', 'Affine', 'SyN'],
                            terminal_output='file',
                            winsorize_lower_quantile=0.005,
                            winsorize_upper_quantile=0.995,
                            convergence_threshold=[1e-06],
                            convergence_window_size=[10],
                            metric=['MI', 'MI', 'CC'],
                            metric_weight=[1.0]*3,
                            number_of_iterations=[[1000, 500, 250, 100],
                                                  [1000, 500, 250, 100],
                                                  [100, 70, 50, 20]],
                            radius_or_number_of_bins=[32, 32, 4],
                            sampling_percentage=[0.25, 0.25, 1],
                            sampling_strategy=['Regular',
                                               'Regular',
                                               'None'],
                            shrink_factors=[[8, 4, 2, 1]]*3,
                            smoothing_sigmas=[[3, 2, 1, 0]]*3,
                            transform_parameters=[(0.1,),
                                                  (0.1,),
                                                  (0.1, 3.0, 0.0)],
                            use_histogram_matching=True,
                            write_composite_transform=True),
               name='antsreg')

# FreeSurferSource - Data grabber specific for FreeSurfer data
fssource = Node(FreeSurferSource(subjects_dir=fs_dir),
                run_without_submitting=True,
                name='fssource')

# Convert FreeSurfer's MGZ format into NIfTI format
convert2nii = Node(MRIConvert(out_type='nii'), name='convert2nii')

# Coregister the median to the surface
bbregister = Node(BBRegister(init='fsl',
                             contrast_type='t2',
                             out_fsl_file=True),
                  name='bbregister')

# Convert the BBRegister transformation to ANTS ITK format
convert2itk = Node(C3dAffineTool(fsl2ras=True,
                                 itk_transform=True),
                   name='convert2itk')

# Concatenate BBRegister's and ANTS' transforms into a list.
# FIX: dropped the bogus `iterfield=['in2']` argument - `iterfield` is a
# MapNode-only parameter and is silently ignored when passed to a plain Node.
merge = Node(Merge(2), name='mergexfm')

# Transform the contrast images. First to anatomical and then to the target
warpall = MapNode(ApplyTransforms(args='--float',
                                  input_image_type=3,
                                  interpolation='Linear',
                                  invert_transform_flags=[False, False],
                                  num_threads=1,
                                  reference_image=template,
                                  terminal_output='file'),
                  name='warpall', iterfield=['input_image'])

# Transform the mean image. First to anatomical and then to the target
warpmean = Node(ApplyTransforms(args='--float',
                                input_image_type=3,
                                interpolation='Linear',
                                invert_transform_flags=[False, False],
                                num_threads=1,
                                reference_image=template,
                                terminal_output='file'),
                name='warpmean')


###
# Specify Normalization Workflow & Connect Nodes

# Initiation of the ANTS normalization workflow
normflow = Workflow(name='normflow')
normflow.base_dir = opj(experiment_dir, working_dir)

# Connect up ANTS normalization components.
# Transform order in `merge`: in1 = anat->MNI composite, in2 = func->anat ITK
# affine, so ApplyTransforms applies func->anat first, then anat->MNI.
normflow.connect([(fssource, convert2nii, [('T1', 'in_file')]),
                  (convert2nii, convert2itk, [('out_file', 'reference_file')]),
                  (bbregister, convert2itk, [('out_fsl_file',
                                              'transform_file')]),
                  (convert2itk, merge, [('itk_transform', 'in2')]),
                  (antsreg, merge, [('composite_transform',
                                     'in1')]),
                  (merge, warpmean, [('out', 'transforms')]),
                  (merge, warpall, [('out', 'transforms')]),
                  ])


###
# Input & Output Stream

# Infosource - a function free node to iterate over the list of subject names
infosource = Node(IdentityInterface(fields=['subject_id']),
                  name="infosource")
infosource.iterables = [('subject_id', subject_list)]

# SelectFiles - to grab the data (alternative to DataGrabber)
anat_file = opj('freesurfer', '{subject_id}', 'mri/brain.mgz')
func_file = opj(input_dir_1st, 'contrasts', '{subject_id}',
                '_mriconvert*/*_out.nii.gz')
func_orig_file = opj(input_dir_1st, 'contrasts', '{subject_id}', '[ce]*.nii')
mean_file = opj(input_dir_1st, 'preprocout', '{subject_id}', 'mean*.nii')

templates = {'anat': anat_file,
             'func': func_file,
             'func_orig': func_orig_file,
             'mean': mean_file,
             }

selectfiles = Node(SelectFiles(templates,
                               base_directory=experiment_dir),
                   name="selectfiles")

# Datasink - creates output folder for important outputs
datasink = Node(DataSink(base_directory=experiment_dir,
                         container=output_dir),
                name="datasink")

# Use the following DataSink output substitutions
substitutions = [('_subject_id_', ''),
                 ('_apply2con', 'apply2con'),
                 ('_warpall', 'warpall')]
datasink.inputs.substitutions = substitutions

# Connect SelectFiles and DataSink to the workflow
normflow.connect([(infosource, selectfiles, [('subject_id', 'subject_id')]),
                  (infosource, fssource, [('subject_id', 'subject_id')]),
                  (infosource, bbregister, [('subject_id', 'subject_id')]),
                  (selectfiles, bbregister, [('mean', 'source_file')]),
                  (selectfiles, antsreg, [('anat', 'moving_image')]),
                  (selectfiles, convert2itk, [('mean', 'source_file')]),
                  (selectfiles, warpall, [('func_orig', 'input_image')]),
                  (selectfiles, warpmean, [('mean', 'input_image')]),
                  (antsreg, datasink, [('warped_image',
                                        'antsreg.@warped_image'),
                                       ('inverse_warped_image',
                                        'antsreg.@inverse_warped_image'),
                                       ('composite_transform',
                                        'antsreg.@transform'),
                                       ('inverse_composite_transform',
                                        'antsreg.@inverse_transform')]),
                  (warpall, datasink, [('output_image', 'warp_complete.@warpall')]),
                  (warpmean, datasink, [('output_image', 'warp_complete.@warpmean')]),
                  ])


###
# Run Workflow
normflow.write_graph(graph2use='colored')
normflow.run('MultiProc', plugin_args={'n_procs': 8})
###
# example_fMRI_2_normalize_ANTS_partial.py
# "Partial" ANTs normalization: warp 1st-level contrast images straight from
# the anatomical registration (no BBRegister func->anat step).

###
# Import modules
# FIX: was `from opj import join as opj` - there is no module named `opj`;
# the alias comes from os.path, as in every sibling script.
from os.path import join as opj
from nipype.interfaces.ants import Registration, ApplyTransforms
from nipype.interfaces.freesurfer import FSCommand, MRIConvert, BBRegister
from nipype.interfaces.c3 import C3dAffineTool
from nipype.interfaces.utility import IdentityInterface, Merge
from nipype.interfaces.io import SelectFiles, DataSink, FreeSurferSource
from nipype.pipeline.engine import Workflow, Node, MapNode
from nipype.interfaces.fsl import Info

# FreeSurfer - Specify the location of the freesurfer folder
fs_dir = '~/nipype_tutorial/freesurfer'
FSCommand.set_default_subjects_dir(fs_dir)


###
# Specify variables
experiment_dir = '~/nipype_tutorial'          # location of experiment folder
input_dir_1st = 'output_fMRI_example_1st'     # name of 1st-level output folder
output_dir = 'output_fMRI_example_norm_ants'  # name of norm output folder
working_dir = 'workingdir_fMRI_example_norm_ants'  # name of norm working directory
subject_list = ['sub001', 'sub002', 'sub003',
                'sub004', 'sub005', 'sub006',
                'sub007', 'sub008', 'sub009',
                'sub010']                     # list of subject identifiers

# location of template file
template = Info.standard_image('MNI152_T1_1mm_brain.nii.gz')


###
# Specify Normalization Nodes

# Registration - computes registration between subject's structural and MNI template.
antsreg = Node(Registration(args='--float',
                            collapse_output_transforms=True,
                            fixed_image=template,
                            initial_moving_transform_com=True,
                            num_threads=1,
                            output_inverse_warped_image=True,
                            output_warped_image=True,
                            sigma_units=['vox']*3,
                            transforms=['Rigid', 'Affine', 'SyN'],
                            terminal_output='file',
                            winsorize_lower_quantile=0.005,
                            winsorize_upper_quantile=0.995,
                            convergence_threshold=[1e-06],
                            convergence_window_size=[10],
                            metric=['MI', 'MI', 'CC'],
                            metric_weight=[1.0]*3,
                            number_of_iterations=[[1000, 500, 250, 100],
                                                  [1000, 500, 250, 100],
                                                  [100, 70, 50, 20]],
                            radius_or_number_of_bins=[32, 32, 4],
                            sampling_percentage=[0.25, 0.25, 1],
                            sampling_strategy=['Regular',
                                               'Regular',
                                               'None'],
                            shrink_factors=[[8, 4, 2, 1]]*3,
                            smoothing_sigmas=[[3, 2, 1, 0]]*3,
                            transform_parameters=[(0.1,),
                                                  (0.1,),
                                                  (0.1, 3.0, 0.0)],
                            use_histogram_matching=True,
                            write_composite_transform=True),
               name='antsreg')

# Apply Transformation - applies the normalization matrix to contrast images
apply2con = MapNode(ApplyTransforms(args='--float',
                                    input_image_type=3,
                                    interpolation='Linear',
                                    invert_transform_flags=[False],
                                    num_threads=1,
                                    reference_image=template,
                                    terminal_output='file'),
                    name='apply2con', iterfield=['input_image'])

# Apply Transformation - applies the normalization matrix to the mean image
apply2mean = Node(ApplyTransforms(args='--float',
                                  input_image_type=3,
                                  interpolation='Linear',
                                  invert_transform_flags=[False],
                                  num_threads=1,
                                  reference_image=template,
                                  terminal_output='file'),
                  name='apply2mean')


###
# Specify Normalization Workflow & Connect Nodes

# Initiation of the ANTS normalization workflow
normflow = Workflow(name='normflow')
# FIX: was `opj.join(...)` - `opj` is already os.path.join, so it is called
# directly; `opj.join` would raise AttributeError on a function object.
normflow.base_dir = opj(experiment_dir, working_dir)

# Connect up ANTS normalization components
normflow.connect([(antsreg, apply2con, [('composite_transform', 'transforms')]),
                  (antsreg, apply2mean, [('composite_transform',
                                          'transforms')])
                  ])


###
# Input & Output Stream

# Infosource - a function free node to iterate over the list of subject names
infosource = Node(IdentityInterface(fields=['subject_id']),
                  name="infosource")
infosource.iterables = [('subject_id', subject_list)]

# SelectFiles - to grab the data (alternative to DataGrabber)
anat_file = opj('freesurfer', '{subject_id}', 'mri/brain.mgz')
func_file = opj(input_dir_1st, 'contrasts', '{subject_id}',
                '_mriconvert*/*_out.nii.gz')
func_orig_file = opj(input_dir_1st, 'contrasts', '{subject_id}', '[ce]*.nii')
mean_file = opj(input_dir_1st, 'preprocout', '{subject_id}', 'mean*.nii')

templates = {'anat': anat_file,
             'func': func_file,
             'func_orig': func_orig_file,
             'mean': mean_file,
             }

selectfiles = Node(SelectFiles(templates,
                               base_directory=experiment_dir),
                   name="selectfiles")

# Datasink - creates output folder for important outputs
datasink = Node(DataSink(base_directory=experiment_dir,
                         container=output_dir),
                name="datasink")

# Use the following DataSink output substitutions
substitutions = [('_subject_id_', ''),
                 ('_apply2con', 'apply2con'),
                 ('_warpall', 'warpall')]
datasink.inputs.substitutions = substitutions

# Connect SelectFiles and DataSink to the workflow
normflow.connect([(infosource, selectfiles, [('subject_id', 'subject_id')]),
                  (selectfiles, apply2con, [('func', 'input_image')]),
                  (selectfiles, apply2mean, [('mean', 'input_image')]),
                  (selectfiles, antsreg, [('anat', 'moving_image')]),
                  (antsreg, datasink, [('warped_image',
                                        'antsreg.@warped_image'),
                                       ('inverse_warped_image',
                                        'antsreg.@inverse_warped_image'),
                                       ('composite_transform',
                                        'antsreg.@transform'),
                                       ('inverse_composite_transform',
                                        'antsreg.@inverse_transform')]),
                  (apply2con, datasink, [('output_image',
                                          'warp_partial.@con')]),
                  (apply2mean, datasink, [('output_image',
                                           'warp_partial.@mean')]),
                  ])


###
# Run Workflow
normflow.write_graph(graph2use='colored')
normflow.run('MultiProc', plugin_args={'n_procs': 8})
###
# example_fMRI_2_normalize_SPM.py
# Normalize structural and 1st-level contrast images to MNI space with
# SPM12's unified segmentation (Normalize12, estwrite).

###
# Import modules
from os.path import join as opj
from nipype.interfaces.spm import Normalize12
from nipype.interfaces.utility import IdentityInterface
from nipype.interfaces.io import SelectFiles, DataSink
from nipype.algorithms.misc import Gunzip
from nipype.pipeline.engine import Workflow, Node, MapNode

# Specification to MATLAB
from nipype.interfaces.matlab import MatlabCommand
MatlabCommand.set_default_paths('/usr/local/MATLAB/R2014a/toolbox/spm12')
MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash")


###
# Specify variables
experiment_dir = '~/nipype_tutorial'         # location of experiment folder
input_dir_1st = 'output_fMRI_example_1st'    # name of 1st-level output folder
output_dir = 'output_fMRI_example_norm_spm'  # name of norm output folder
working_dir = 'workingdir_fMRI_example_norm_spm'  # name of working directory
subject_list = ['sub001', 'sub002', 'sub003',
                'sub004', 'sub005', 'sub006',
                'sub007', 'sub008', 'sub009',
                'sub010']                    # list of subject identifiers

# location of template in form of a tissue probability map to normalize to
template = '/usr/local/MATLAB/R2014a/toolbox/spm12/tpm/TPM.nii'


###
# Specify Normalization Nodes

# Gunzip - unzip the structural image (SPM cannot read .nii.gz)
gunzip_struct = Node(Gunzip(), name="gunzip_struct")

# Gunzip - unzip the contrast images, one MapNode iteration per file
gunzip_con = MapNode(Gunzip(), name="gunzip_con",
                     iterfield=['in_file'])

# Normalize - normalizes functional and structural images to the MNI template
normalize = Node(Normalize12(jobtype='estwrite',
                             tpm=template,
                             write_voxel_sizes=[1, 1, 1]),
                 name="normalize")

###
# Specify Normalization-Workflow & Connect Nodes
normflow = Workflow(name='normflow')
normflow.base_dir = opj(experiment_dir, working_dir)

# Connect up SPM normalization components
# (FIX: comment previously said "ANTS" - copy-paste leftover from the ANTS
# variant of this script.)
normflow.connect([(gunzip_struct, normalize, [('out_file', 'image_to_align')]),
                  (gunzip_con, normalize, [('out_file', 'apply_to_files')]),
                  ])


###
# Input & Output Stream

# Infosource - a function free node to iterate over the list of subject names
infosource = Node(IdentityInterface(fields=['subject_id']),
                  name="infosource")
infosource.iterables = [('subject_id', subject_list)]

# SelectFiles - to grab the data (alternative to DataGrabber)
anat_file = opj('data', '{subject_id}', 'struct.nii.gz')
con_file = opj(input_dir_1st, 'contrasts', '{subject_id}',
               '_mriconvert*/*_out.nii.gz')
templates = {'anat': anat_file,
             'con': con_file,
             }
selectfiles = Node(SelectFiles(templates,
                               base_directory=experiment_dir),
                   name="selectfiles")

# Datasink - creates output folder for important outputs
datasink = Node(DataSink(base_directory=experiment_dir,
                         container=output_dir),
                name="datasink")

# Use the following DataSink output substitutions
substitutions = [('_subject_id_', '')]
datasink.inputs.substitutions = substitutions

# Connect SelectFiles and DataSink to the workflow
normflow.connect([(infosource, selectfiles, [('subject_id', 'subject_id')]),
                  (selectfiles, gunzip_struct, [('anat', 'in_file')]),
                  (selectfiles, gunzip_con, [('con', 'in_file')]),
                  (normalize, datasink, [('normalized_files',
                                          'normalized.@files'),
                                         ('normalized_image',
                                          'normalized.@image'),
                                         ('deformation_field',
                                          'normalized.@field'),
                                         ]),
                  ])

###
# Run Workflow
normflow.write_graph(graph2use='colored')
normflow.run('MultiProc', plugin_args={'n_procs': 8})
###
# example_fMRI_3_second_level.py
# Group-level (2nd-level) analysis: a one-sample t-test over the ANTs-
# normalized contrast images of all subjects, run once per contrast.

###
# Import modules
from os.path import join as opj
from nipype.interfaces.io import SelectFiles, DataSink
from nipype.interfaces.spm import (OneSampleTTestDesign, EstimateModel,
                                   EstimateContrast, Threshold)
from nipype.interfaces.utility import IdentityInterface
from nipype.pipeline.engine import Workflow, Node

# Specification to MATLAB
from nipype.interfaces.matlab import MatlabCommand
MatlabCommand.set_default_paths('/usr/local/MATLAB/R2014a/toolbox/spm12')
MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash")


###
# Specify variables
experiment_dir = '~/nipype_tutorial'             # location of experiment folder
output_dir = 'output_fMRI_example_2nd_ants'      # name of 2nd-level output folder
input_dir_norm = 'output_fMRI_example_norm_ants' # name of norm output folder
working_dir = 'workingdir_fMRI_example_2nd_ants' # name of working directory
subject_list = ['sub001', 'sub002', 'sub003',
                'sub004', 'sub005', 'sub006',
                'sub007', 'sub008', 'sub009',
                'sub010']                        # list of subject identifiers
contrast_list = ['con_0001', 'con_0002', 'con_0003',
                 'con_0004', 'ess_0005', 'ess_0006']  # list of contrast identifiers


###
# Specify 2nd-Level Analysis Nodes

# Design node - builds the one-sample t-test design matrix
onesamplettestdes = Node(OneSampleTTestDesign(),
                         name="onesampttestdes")

# Model-estimation node - classical (ReML) parameter estimation
level2estimate = Node(EstimateModel(estimation_method={'Classical': 1}),
                      name="level2estimate")

# Contrast-estimation node - a single group-mean T contrast
level2conestimate = Node(EstimateContrast(group_contrast=True),
                         name="level2conestimate")
group_mean_contrast = ['Group', 'T', ['mean'], [1]]
level2conestimate.inputs.contrasts = [group_mean_contrast]


###
# Specify 2nd-Level Analysis Workflow & Connect Nodes
l2analysis = Workflow(name='l2analysis')
l2analysis.base_dir = opj(experiment_dir, working_dir)


###
# Input & Output Stream

# Infosource - iterates the whole group analysis over the contrast ids
infosource = Node(IdentityInterface(fields=['contrast_id']),
                  name="infosource")
infosource.iterables = [('contrast_id', contrast_list)]

# SelectFiles - collects the warped contrast image of every subject
con_file = opj(input_dir_norm, 'warp_complete', 'sub*', 'warpall*',
               '{contrast_id}_trans.nii')
templates = {'cons': con_file}
selectfiles = Node(SelectFiles(templates,
                               base_directory=experiment_dir),
                   name="selectfiles")

# Datasink - stores the group-level results
datasink = Node(DataSink(base_directory=experiment_dir,
                         container=output_dir),
                name="datasink")

# Strip the iterable prefix from the output folder names
substitutions = [('_contrast_id_', '')]
datasink.inputs.substitutions = substitutions

# Wire the whole pipeline in a single connect call:
# infosource -> selectfiles -> design -> estimate -> contrast -> datasink
l2analysis.connect([(infosource, selectfiles, [('contrast_id',
                                                'contrast_id')]),
                    (selectfiles, onesamplettestdes, [('cons', 'in_files')]),
                    (onesamplettestdes, level2estimate, [('spm_mat_file',
                                                          'spm_mat_file')]),
                    (level2estimate, level2conestimate, [('spm_mat_file',
                                                          'spm_mat_file'),
                                                         ('beta_images',
                                                          'beta_images'),
                                                         ('residual_image',
                                                          'residual_image')]),
                    (level2conestimate, datasink, [('spm_mat_file',
                                                    'contrasts.@spm_mat'),
                                                   ('spmT_images',
                                                    'contrasts.@T'),
                                                   ('con_images',
                                                    'contrasts.@con')]),
                    ])


###
# Run Workflow
l2analysis.write_graph(graph2use='colored')
l2analysis.run('MultiProc', plugin_args={'n_procs': 8})
#!/bin/bash
# tutorial_1_create_dataset.sh
# Unpack the OpenfMRI ds102 tarball and copy the first ten subjects
# (anatomy, two BOLD runs, behavior and onset files) into the tutorial
# data folder, then remove the temporary unpack directory.
#
# FIX: all variable expansions are now double-quoted so the script does
# not break when the home/tutorial path contains spaces.

# Specify important variables
ZIP_FILE=~/Downloads/ds102_raw.tgz          #location of download file
TUTORIAL_DIR=~/nipype_tutorial              #location of experiment folder
TMP_DIR=$TUTORIAL_DIR/tmp                   #location of temporary folder
DATA_DIR=$TUTORIAL_DIR/data                 #location of data folder

## To download the dataset to the Download folder use the following code:
#wget https://openfmri.s3.amazonaws.com/tarballs/ds102_raw.tgz ~/Downloads

# Unzip ds102 dataset into TMP_DIR
mkdir -p "$TMP_DIR"
tar -zxvf "$ZIP_FILE" -C "$TMP_DIR"

# Copy data of first ten subjects into DATA_DIR
for id in $(seq -w 1 10)
do
    echo "Creating dataset for subject: sub0$id"
    mkdir -p "$DATA_DIR/sub0$id"
    cp "$TMP_DIR/ds102/sub0$id/anatomy/highres001.nii.gz" \
       "$DATA_DIR/sub0$id/struct.nii.gz"

    for session in run001 run002
    do
        cp "$TMP_DIR/ds102/sub0$id/BOLD/task001_$session/bold.nii.gz" \
           "$DATA_DIR/sub0$id/$session.nii.gz"
        cp "$TMP_DIR/ds102/sub0$id/behav/task001_$session/behavdata.txt" \
           "$DATA_DIR/sub0$id/behavdata_$session.txt"

        for con_id in {1..4}
        do
            cp "$TMP_DIR/ds102/sub0$id/model/model001/onsets/task001_$session/cond00$con_id.txt" \
               "$DATA_DIR/sub0$id/onset_${session}_cond00$con_id.txt"
        done
    done
    echo "sub0$id done."
done

# Copy information about demographics, conditions and tasks into DATA_DIR
cp "$TMP_DIR/ds102/demographics.txt" "$DATA_DIR/demographics.txt"
# (glob deliberately unquoted so the shell expands the model001 files)
cp "$TMP_DIR"/ds102/models/model001/* "$DATA_DIR"/.

# Delete the temporary folder
rm -rf "$TMP_DIR"
#!/bin/bash
# tutorial_2_recon_shell.sh
# Run FreeSurfer's recon-all surface reconstruction for the first ten
# tutorial subjects (shell equivalent of tutorial_2_recon_python.py).
#
# FIX: variable expansions are double-quoted so paths with spaces survive.

# Specify important variables
export TUTORIAL_DIR=~/nipype_tutorial               #location of experiment folder
export DATA_DIR=$TUTORIAL_DIR/data                  #location of data folder
export SUBJECTS_DIR=$TUTORIAL_DIR/freesurfer        #location of freesurfer folder

for id in $(seq -w 1 10)
do
    echo "working on sub0$id"
    # recon-all expects the input volume as mri/orig/001.mgz
    mkdir -p "$SUBJECTS_DIR/sub0$id/mri/orig"
    mri_convert "$DATA_DIR/sub0$id/struct.nii.gz" \
                "$SUBJECTS_DIR/sub0$id/mri/orig/001.mgz"
    recon-all -all -subjid "sub0$id" #-nuintensitycor-3T
    echo "sub0$id finished"
done
###
# tutorial_3_first_steps.py
# SPM preprocessing pipeline: gunzip -> slice-timing -> realign ->
# artifact detection + smoothing, iterated over subjects and sessions.

###
# Import modules
from os.path import join as opj
from nipype.interfaces.spm import SliceTiming, Realign, Smooth
from nipype.interfaces.utility import IdentityInterface
from nipype.interfaces.io import SelectFiles, DataSink
from nipype.algorithms.rapidart import ArtifactDetect
from nipype.algorithms.misc import Gunzip
from nipype.pipeline.engine import Workflow, Node


###
# Specify variables
experiment_dir = '~/nipype_tutorial'          # location of experiment folder
subject_list = ['sub001', 'sub002', 'sub003',
                'sub004', 'sub005', 'sub006',
                'sub007', 'sub008', 'sub009',
                'sub010']                     # list of subject identifiers
session_list = ['run001', 'run002']           # list of session identifiers

output_dir = 'output_firstSteps'              # name of output folder
working_dir = 'workingdir_firstSteps'         # name of working directory

number_of_slices = 40                         # number of slices in volume
TR = 2.0                                      # time repetition of volume
smoothing_size = 8                            # size of FWHM in mm


###
# Specify Nodes

# Gunzip - unzip functional
gunzip = Node(Gunzip(), name="gunzip")

# Slicetiming - correct for slice wise acquisition
# FIX: wrap the ranges in list() - in Python 3, `range` objects do not
# support `+` concatenation (the bare form only worked under Python 2).
# Odd slices first, then even slices (ascending interleaved acquisition).
interleaved_order = (list(range(1, number_of_slices + 1, 2)) +
                     list(range(2, number_of_slices + 1, 2)))
sliceTiming = Node(SliceTiming(num_slices=number_of_slices,
                               time_repetition=TR,
                               time_acquisition=TR-TR/number_of_slices,
                               slice_order=interleaved_order,
                               ref_slice=2),
                   name="sliceTiming")

# Realign - correct for motion
realign = Node(Realign(register_to_mean=True),
               name="realign")

# Artifact Detection - determine which of the images in the functional series
# are outliers. This is based on deviation in intensity or movement.
art = Node(ArtifactDetect(norm_threshold=1,
                          zintensity_threshold=3,
                          mask_type='spm_global',
                          parameter_source='SPM'),
           name="art")

# Smooth - to smooth the images with a given kernel
smooth = Node(Smooth(fwhm=smoothing_size),
              name="smooth")


###
# Specify Workflows & Connect Nodes

# Create a preprocessing workflow
preproc = Workflow(name='preproc')
preproc.base_dir = opj(experiment_dir, working_dir)

# Connect all components of the preprocessing workflow
preproc.connect([(gunzip, sliceTiming, [('out_file', 'in_files')]),
                 (sliceTiming, realign, [('timecorrected_files', 'in_files')]),
                 (realign, art, [('realigned_files', 'realigned_files'),
                                 ('mean_image', 'mask_file'),
                                 ('realignment_parameters',
                                  'realignment_parameters')]),
                 (realign, smooth, [('realigned_files', 'in_files')]),
                 ])


###
# Input & Output Stream

# Infosource - a function free node to iterate over the list of subject names
infosource = Node(IdentityInterface(fields=['subject_id',
                                            'session_id']),
                  name="infosource")
infosource.iterables = [('subject_id', subject_list),
                        ('session_id', session_list)]

# SelectFiles
templates = {'func': 'data/{subject_id}/{session_id}.nii.gz'}
selectfiles = Node(SelectFiles(templates,
                               base_directory=experiment_dir),
                   name="selectfiles")

# Datasink
datasink = Node(DataSink(base_directory=experiment_dir,
                         container=output_dir),
                name="datasink")

# Use the following DataSink output substitutions
# FIX: '_subject_id' -> '_subject_id_' so the trailing underscore is removed
# from output folder names, consistent with every sibling script.
substitutions = [('_subject_id_', ''),
                 ('_session_id_', '')]
datasink.inputs.substitutions = substitutions

# Connect SelectFiles and DataSink to the workflow
preproc.connect([(infosource, selectfiles, [('subject_id', 'subject_id'),
                                            ('session_id', 'session_id')]),
                 (selectfiles, gunzip, [('func', 'in_file')]),
                 (realign, datasink, [('mean_image', 'realign.@mean'),
                                      ('realignment_parameters',
                                       'realign.@parameters'),
                                      ]),
                 (smooth, datasink, [('smoothed_files', 'smooth')]),
                 (art, datasink, [('outlier_files', 'art.@outliers'),
                                  ('plot_files', 'art.@plot'),
                                  ]),
                 ])


###
# Run Workflow
preproc.write_graph(graph2use='flat')
preproc.run('MultiProc', plugin_args={'n_procs': 8})