├── .coveragerc ├── .gitattributes ├── .gitignore ├── .mailmap ├── .travis.yml ├── AUTHOR ├── LICENSE ├── MANIFEST.in ├── README.rst ├── THANKS ├── doc ├── .gitignore ├── Makefile ├── README.txt ├── _static │ ├── nipy.css │ └── reggie2.png ├── _templates │ └── layout.html ├── api │ └── index.rst ├── bibtex │ ├── README.txt │ └── vtk.bib ├── conf.py ├── devel │ ├── code_discussions │ │ ├── brainvisa_repositories.rst │ │ ├── comparisons │ │ │ ├── index.rst │ │ │ └── vtk_datasets.rst │ │ ├── coordmap_notes.rst │ │ ├── image_ordering.rst │ │ ├── index.rst │ │ ├── pipelining_api.rst │ │ ├── refactoring │ │ │ ├── imagelists.rst │ │ │ └── index.rst │ │ ├── registration_api.rst │ │ ├── repository_api.rst │ │ ├── repository_design.rst │ │ ├── simple_viewer.rst │ │ ├── understanding_affines.rst │ │ └── usecases │ │ │ ├── batching.rst │ │ │ ├── images.rst │ │ │ ├── index.rst │ │ │ ├── resampling.rst │ │ │ └── transformations.rst │ ├── development_quickstart.rst │ ├── doctests_preprocessor.rst │ ├── guidelines │ │ ├── changelog.rst │ │ ├── commit_codes.rst │ │ ├── compiling_windows.rst │ │ ├── coverage_testing.rst │ │ ├── debugging.rst │ │ ├── elegant.py │ │ ├── gitwash │ │ │ ├── branch_list.png │ │ │ ├── branch_list_compare.png │ │ │ ├── configure_git.rst │ │ │ ├── development_workflow.rst │ │ │ ├── dot2_dot3.rst │ │ │ ├── following_latest.rst │ │ │ ├── forking_button.png │ │ │ ├── forking_hell.rst │ │ │ ├── git_development.rst │ │ │ ├── git_install.rst │ │ │ ├── git_intro.rst │ │ │ ├── git_links.inc │ │ │ ├── git_resources.rst │ │ │ ├── index.rst │ │ │ ├── patching.rst │ │ │ ├── pull_button.png │ │ │ └── set_up_fork.rst │ │ ├── howto_document.rst │ │ ├── index.rst │ │ ├── make_release.rst │ │ ├── open_source_devel.rst │ │ ├── optimization.rst │ │ ├── sphinx_helpers.rst │ │ └── testing.rst │ ├── images.rst │ ├── index.rst │ ├── install │ │ ├── debian.rst │ │ ├── fedora.rst │ │ ├── index.rst │ │ ├── windows.rst │ │ └── windows_scipy_build.rst │ ├── planning │ │ ├── TODO.rst │ 
│ ├── index.rst │ │ └── roadmap.rst │ └── tools │ │ ├── index.rst │ │ ├── tricked_out_emacs.rst │ │ └── virtualenv-tutor.rst ├── documentation.rst ├── faq │ ├── documentation_faq.rst │ ├── index.rst │ ├── johns_bsd_pitch.rst │ ├── licensing.rst │ └── why.rst ├── glossary.rst ├── history.rst ├── index.rst ├── labs │ ├── datasets.rst │ ├── datasets │ │ ├── viz_volume_data.py │ │ ├── viz_volume_field.py │ │ ├── viz_volume_grid.py │ │ ├── viz_volume_img.py │ │ ├── volume_data.jpg │ │ ├── volume_field.jpg │ │ ├── volume_grid.jpg │ │ └── volume_img.jpg │ ├── enn.rst │ ├── index.rst │ ├── mask.rst │ ├── plots │ │ ├── enn_demo.py │ │ └── surrogate_array.py │ ├── simul_activation.rst │ ├── viz.png │ └── viz.rst ├── license.rst ├── links_names.txt ├── mission.rst ├── mission.txt ├── publications.rst ├── references │ └── brainpy_abstract.rst ├── sphinxext │ ├── README.txt │ ├── autosummary_generate.py │ ├── ipython_console_highlighting.py │ └── numpy_ext │ │ ├── __init__.py │ │ ├── docscrape.py │ │ ├── docscrape_sphinx.py │ │ └── numpydoc.py └── users │ ├── basic_io.rst │ ├── coordinate_map.rst │ ├── glm_spec.rst │ ├── index.rst │ ├── install_data.rst │ ├── installation.rst │ ├── introduction.rst │ ├── math_coordmap.rst │ ├── plots │ ├── amplitudes.py │ ├── block.py │ ├── event.py │ ├── event_amplitude.py │ ├── hrf.py │ ├── hrf_delta.py │ ├── hrf_different.py │ ├── neuronal_block.py │ ├── neuronal_event.py │ ├── random_amplitudes.py │ ├── random_amplitudes_times.py │ └── sinusoidal.py │ ├── scipy_orientation.rst │ └── tutorial.rst ├── examples ├── affine_registration.py └── space_time_realign.py ├── nireg ├── NOTES_ELF ├── TODO.txt ├── __init__.py ├── _register.c ├── _register.h ├── _register.pyx ├── affine.py ├── chain_transform.py ├── cubic_spline.c ├── cubic_spline.h ├── externals │ ├── __init__.py │ ├── argparse.py │ ├── configobj.py │ ├── setup.py │ ├── six.py │ ├── transforms3d │ │ ├── __init__.py │ │ ├── quaternions.py │ │ ├── setup.py │ │ ├── taitbryan.py │ │ ├── 
tests │ │ │ ├── __init__.py │ │ │ ├── samples.py │ │ │ ├── test_quaternions.py │ │ │ └── test_taitbryan.py │ │ └── utils.py │ └── validate.py ├── groupwise_registration.py ├── histogram_registration.py ├── joint_histogram.c ├── joint_histogram.h ├── np_distutils_monkey.py ├── optimizer.py ├── polyaffine.c ├── polyaffine.h ├── polyaffine.py ├── resample.py ├── setup.py ├── similarity_measures.py ├── slicetiming │ ├── __init__.py │ ├── setup.py │ ├── tests │ │ ├── __init__.py │ │ └── test_timefuncs.py │ └── timefuncs.py ├── testing │ ├── __init__.py │ ├── anatomical.nii.gz │ ├── functional.nii.gz │ └── setup.py ├── tests │ ├── __init__.py │ ├── test_affine.py │ ├── test_chain_transforms.py │ ├── test_cubic_spline.py │ ├── test_fmri_realign4d.py │ ├── test_histogram_registration.py │ ├── test_polyaffine.py │ ├── test_register.py │ ├── test_resample.py │ ├── test_slice_timing.py │ └── test_transform.py ├── transform.py ├── type_check.py ├── wichmann_prng.c └── wichmann_prng.h ├── requirements.txt ├── scripts ├── nipy_3dto4d ├── nipy_4d_realign ├── nipy_4dto3d ├── nipy_diagnose ├── nipy_tsdiffana ├── scripting.py └── test_scripting.py ├── setup.cfg ├── setup.py ├── setup_egg.py ├── site.cfg.mingw32 └── tools ├── refresh_readme.py ├── travis_tools.sh └── upload-gh-pages.sh /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | source = nipy 4 | include = */nipy/* 5 | omit = 6 | */nipy/fixes/* 7 | */nipy/externals/* 8 | */benchmarks/* 9 | */bench/* 10 | */setup.py 11 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | nipy/COMMIT_INFO.txt export-subst 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Editor temporary/working/backup files 
# 2 | ######################################### 3 | .#* 4 | [#]*# 5 | *~ 6 | *$ 7 | *.bak 8 | *.diff 9 | *.org 10 | .project 11 | *.rej 12 | .settings/ 13 | .*.sw[nop] 14 | .sw[nop] 15 | *.tmp 16 | *.orig 17 | 18 | # Not sure what the next two are for 19 | *.kpf 20 | *-stamp 21 | 22 | # Compiled source # 23 | ################### 24 | *.a 25 | *.com 26 | *.class 27 | *.dll 28 | *.exe 29 | *.o 30 | *.py[oc] 31 | *.so 32 | *.pyd 33 | 34 | # Packages # 35 | ############ 36 | # it's better to unpack these files and commit the raw source 37 | # git has its own built in compression methods 38 | *.7z 39 | *.bz2 40 | *.bzip2 41 | *.dmg 42 | *.gz 43 | *.iso 44 | *.jar 45 | *.rar 46 | *.tar 47 | *.tbz2 48 | *.tgz 49 | *.zip 50 | 51 | # Python files # 52 | ################ 53 | MANIFEST 54 | build/ 55 | _build 56 | dist/ 57 | *.egg-info 58 | .shelf/ 59 | .tox/ 60 | .coverage 61 | .buildbot.patch 62 | 63 | # Logs and databases # 64 | ###################### 65 | *.log 66 | *.sql 67 | *.sqlite 68 | 69 | # OS generated files # 70 | ###################### 71 | .gdb_history 72 | .DS_Store? 73 | ehthumbs.db 74 | Icon? 
75 | Thumbs.db 76 | 77 | # Things specific to this project # 78 | ################################### 79 | __config__.py 80 | doc/api/generated 81 | doc/build/ 82 | doc/manual 83 | cythonize.dat 84 | -------------------------------------------------------------------------------- /.mailmap: -------------------------------------------------------------------------------- 1 | Alexis Roche Alexis ROCHE 2 | Ariel Rokem arokem 3 | Benjamin Thyreau benjamin.thyreau <> 4 | Benjamin Thyreau benji2@decideur.info <> 5 | Bertrand Thirion Bertrand THIRION 6 | Bertrand Thirion bertrand.thirion <> 7 | Bertrand Thirion bthirion 8 | Christopher Burns Chris 9 | Christopher Burns cburns <> 10 | Cindee Madison Cindee Madison 11 | Cindee Madison cindee.madison <> 12 | Cindee Madison cindeem <> 13 | Cindee Madison cindeem 14 | Eleftherios Garyfallidis 15 | Erik Ziegler erikz 16 | Fabian Pedregosa 17 | Fernando Perez fdo.perez <> 18 | Gael Varoquaux Gael varoquaux 19 | Gael Varoquaux GaelVaroquaux 20 | Gael Varoquaux GaelVaroquaux 21 | Gael Varoquaux gvaroquaux 22 | Gael Varoquaux varoquau 23 | Jarrod Millman Jarrod Millman 24 | Jarrod Millman jarrod.millman <> 25 | Jean-Baptiste Poline JB 26 | Jean-Baptiste Poline jbpoline 27 | Jonathan Taylor jonathan.taylor <> 28 | Jonathan Taylor jtaylo 29 | Martin Bergtholdt 30 | Matthew Brett matthew.brett <> 31 | Matthew Brett mb312 32 | Matthieu Brucher 33 | Merlin Keller Merlin KELLER 34 | Merlin Keller keller 35 | Tom Waite twaite 36 | Virgile Fritsch VirgileFritsch 37 | Virgile Fritsch Fritsch 38 | 39 | # and below the ones to fill out 40 | Paris Sprint Account 41 | Philippe CIUCIU 42 | Thomas VINCENT <20100thomas@gmail.com> 43 | alan 44 | brian.hawthorne <> 45 | davclark <> 46 | denis.riviere <> 47 | michael.castelle <> 48 | mike.trumpis <> 49 | sebastien.meriaux <> 50 | tim.leslie <> 51 | yann.cointepas <> 52 | -------------------------------------------------------------------------------- /.travis.yml: 
-------------------------------------------------------------------------------- 1 | language: python 2 | cache: 3 | directories: 4 | - $HOME/.cache/pip 5 | env: 6 | global: 7 | - DEPENDS="numpy scipy sympy matplotlib nibabel" 8 | - INSTALL_TYPE="setup" 9 | python: 10 | - 2.6 11 | - 3.2 12 | - 3.3 13 | - 3.4 14 | matrix: 15 | include: 16 | - python: 2.7 17 | env: 18 | - COVERAGE=--with-coverage 19 | before_install: 20 | - sudo apt-get install -qq libatlas-dev libatlas-base-dev gfortran libpng-dev 21 | - pip install --no-index -f http://wheels2.astropy.org -f https://nipy.bic.berkeley.edu/scipy_installers/travis 22 | scipy matplotlib; 23 | - pip install nibabel 24 | - if [ "${COVERAGE}" == "--with-coverage" ]; then pip install coverage; pip install 25 | coveralls; fi 26 | install: 27 | - python setup.py install 28 | script: 29 | - mkdir for_testing 30 | - cd for_testing 31 | - if [ "${COVERAGE}" == "--with-coverage" ]; then cp ../.coveragerc .; fi 32 | - $PYTHON ../tools/nipnost $COVERAGE `python -c "import os; import nireg; print(os.path.dirname(nireg.__file__))"` 33 | after_success: 34 | - if [ "${COVERAGE}" == "--with-coverage" ]; then coveralls; fi 35 | deploy: 36 | provider: pypi 37 | user: alexis-roche 38 | password: 39 | secure: JVVrRUGF+7AZmW92IyVOpyUPsPyZC+3J5I03JHq//kW/GInxojoUsBXeLkXgsVgBrxMKG3KjRrReEl4gRRxF3jN6Sk9fjqCGPXHgRsRYJECUWl+JcJk96uRCgn9BjawVU4gXrw8vQWAuauke0RYFTem+riRgPb5HEoeAzG5TyNaBe9Lip/g+mNUdJXVB35fgDQDisWXBzBwmv5RAr36eB5Q0PzpGzTj80aUeN3PAlvM51HGUtTyQuLQA4ZyMppVkt7+d+nXzefkqMbwbahCglg+K1G0oixdqMuUIpiY2+VbLhgDrynq0uk2JO6enCnmkGJlTzEZyRpPO0UfEAQ2pqcVchnG19O9dAw/QuLPBTaRS53UQkrgwil1n4DTjsUiS+weGQxze13Dt76Duh/Yyou611rxEqHrF0TSlQcACuj5dcOC2Y0SburfxBVzaOto1Bdb2Oc00DF6racePZL4S3Kc0symMm3uNHsok+A7RW/tAylY48n9/pc9hvKiHzzhr4pP1EQJx6DglWUQ+PHw3jOGopbiZpqH8V5Qg1GwXz38uP6ViWXLBj+4VhYp/TriXMO6+b7Ptektsz1U2KS+gPHH/Mtw6fd5YFBNfq0TP7Qq2lZ8/WB7Y6T/ZtNV6cAzh6kwgEmbOd6FgzeaBgYqOxIeE7q+d44+GyF7C75X57QE= 40 | on: 41 | tags: true 42 | branch: master 43 | 
-------------------------------------------------------------------------------- /AUTHOR: -------------------------------------------------------------------------------- 1 | Alexis Roche 2 | Matthew Brett 3 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2006-2015, NIPY Developers 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are 6 | met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | * Redistributions in binary form must reproduce the above 12 | copyright notice, this list of conditions and the following 13 | disclaimer in the documentation and/or other materials provided 14 | with the distribution. 15 | 16 | * Neither the name of the NIPY Developers nor the names of any 17 | contributors may be used to endorse or promote products derived 18 | from this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 21 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 22 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 23 | A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT 24 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 25 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 26 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 27 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 28 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 29 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 30 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 31 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include AUTHOR COPYING Makefile* MANIFEST.in setup* README.* THANKS 2 | include Changelog TODO 3 | include *.py 4 | include site.* 5 | recursive-include nipy *.c *.h *.pyx *.pxd 6 | recursive-include lib *.c *.h *.pyx *.pxd remake 7 | recursive-include scripts * 8 | recursive-include tools * 9 | # put this stuff back into setup.py (package_data) once I'm enlightened 10 | # enough to accomplish this herculean task 11 | recursive-include nipy/algorithms/tests/data * 12 | include nipy/testing/*.nii.gz 13 | include nipy/algorithms/diagnostics/tests/data/*.mat 14 | include nipy/algorithms/statistics/models/tests/*.bin 15 | include nipy/modalities/fmri/tests/*.npz 16 | include nipy/modalities/fmri/tests/*.mat 17 | include nipy/COMMIT_INFO.txt 18 | include LICENSE 19 | graft examples 20 | graft doc 21 | global-exclude *~ *.swp *.pyc 22 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | .. -*- rest -*- 2 | .. vim:syntax=rst 3 | 4 | ===== 5 | NIREG 6 | ===== 7 | 8 | Image registration package for Python. 9 | 10 | 11 | Website 12 | ======= 13 | 14 | Current information can always be found at the `NIPY project website 15 | `_. 
16 | 17 | Mailing Lists 18 | ============= 19 | 20 | For questions on how to use nipy or on making code contributions, please see 21 | the ``neuroimaging`` mailing list: 22 | 23 | https://mail.python.org/mailman/listinfo/neuroimaging 24 | 25 | Please report bugs at github issues: 26 | 27 | https://github.com/nipy/nireg/issues 28 | 29 | You can see the list of current proposed changes at: 30 | 31 | https://github.com/nipy/nireg/pulls 32 | 33 | Code 34 | ==== 35 | 36 | You can find our sources and single-click downloads: 37 | 38 | * `Main repository`_ on Github; 39 | * Documentation_ for all releases and current development tree; 40 | * Download the `current development version`_ as a tar/zip file; 41 | * Downloads of all `available releases`_. 42 | 43 | .. _main repository: http://github.com/nipy/nireg 44 | .. _Documentation: http://nipy.org/nipy 45 | .. _current development version: https://github.com/nipy/nireg/archive/master.zip 46 | .. _available releases: http://pypi.python.org/pypi/nireg 47 | 48 | Tests 49 | ===== 50 | 51 | To run nipy's tests, you will need to install the nose_ Python testing 52 | package. Then:: 53 | 54 | python -c "import nireg; nireg.test()" 55 | 56 | 57 | Dependencies 58 | ============ 59 | 60 | 61 | To run NIREG, you will need: 62 | ======= 63 | * python_ >= 2.5 (tested with 2.5, 2.6, 2.7, 3.2, 3.3) 64 | * numpy_ >= 1.2 65 | * scipy_ >= 0.7.0 66 | * nibabel_ >= 1.2 67 | 68 | You will probably also like to have: 69 | 70 | * ipython_ for interactive work 71 | * matplotlib_ for 2D plotting 72 | * mayavi_ for 3D plotting 73 | 74 | .. _python: http://python.org 75 | .. _numpy: http://numpy.scipy.org 76 | .. _scipy: http://www.scipy.org 77 | .. _nibabel: http://nipy.org/nibabel 78 | .. _ipython: http://ipython.org 79 | .. _matplotlib: http://matplotlib.org 80 | .. _mayavi: http://code.enthought.com/projects/mayavi/ 81 | .. 
_nose: http://nose.readthedocs.org/en/latest 82 | 83 | License 84 | ======= 85 | 86 | We use the 3-clause BSD license; the full license is in the file ``LICENSE`` 87 | in the nipy distribution. 88 | -------------------------------------------------------------------------------- /THANKS: -------------------------------------------------------------------------------- 1 | NIPY is an open source project for neuroimaging analysis using Python. It is a 2 | community project. Many people have contributed to NIPY, in code development, 3 | and they are (mainly) listed in the AUTHOR file. Others have contributed 4 | greatly in code review, discussion, and financial support. Below is a partial 5 | list. If you've been left off, please let us know (neuroimaging at 6 | python.org), and we'll add you. 7 | 8 | Michael Castelle 9 | Philippe Ciuciu 10 | Dav Clark 11 | Yann Cointepas 12 | Mark D'Esposito 13 | Denis Riviere 14 | Karl Young 15 | -------------------------------------------------------------------------------- /doc/.gitignore: -------------------------------------------------------------------------------- 1 | labs/generated/ 2 | -------------------------------------------------------------------------------- /doc/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | DIST_DIR = dist 5 | 6 | # You can set these variables from the command line. 7 | SPHINXOPTS = #-q # suppress all output but warnings 8 | SPHINXBUILD = sphinx-build 9 | PAPER = 10 | 11 | # Internal variables. 12 | PAPEROPT_a4 = -D latex_paper_size=a4 13 | PAPEROPT_letter = -D latex_paper_size=letter 14 | ALLSPHINXOPTS = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
15 | 16 | .PHONY: help clean pdf all dist htmlonly api html pickle htmlhelp latex changes linkcheck doctest 17 | 18 | help: 19 | @echo "Please use \`make ' where is one of" 20 | @echo " html make HTML and API documents" 21 | @echo " htmlonly make HTML documents only" 22 | @echo " api make API documents only" 23 | @echo " latex make LaTeX documents (you can set\ 24 | PAPER=a4 or PAPER=letter)" 25 | @echo " all make HTML, API and PDF documents" 26 | @echo " clean remove all generated documents" 27 | @echo 28 | @echo " linkcheck check all external links for integrity" 29 | @echo " doctest run doctests in reST files" 30 | @echo " pdf make and run the PDF generation" 31 | @echo " dist make and put results in $DIST_DIR/" 32 | @echo " gitwash-update update git workflow from source repo" 33 | 34 | # Commented these out, wasn't clear if we'd use these targets or not. 35 | # @echo " pickle to make pickle files (usable by e.g. sphinx-web)" 36 | # @echo " htmlhelp to make HTML files and a HTML help project" 37 | # @echo " changes to make an overview over all changed/added/deprecated items" 38 | 39 | clean: 40 | -rm -rf build/* $(DIST_DIR)/* *~ api/generated labs/generated 41 | -rm -f manual 42 | 43 | pdf: latex 44 | cd build/latex && make all-pdf 45 | 46 | all: html pdf 47 | 48 | dist: clean all 49 | mkdir -p $(DIST_DIR) 50 | ln build/latex/nipy*.pdf $(DIST_DIR) 51 | cp -a build/html/* $(DIST_DIR) 52 | @echo "Build finished. Final docs are in $(DIST_DIR)" 53 | 54 | htmlonly: 55 | mkdir -p build/html build/doctrees 56 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) build/html 57 | @echo 58 | @echo "Build finished. The HTML pages are in build/html." 59 | 60 | api: 61 | python ../tools/build_modref_templates.py 62 | @echo "Build API docs finished." 63 | 64 | html: api htmlonly 65 | -ln -s build manual 66 | @echo "Build HTML and API finished." 
67 | 68 | gitwash-update: 69 | python ../tools/gitwash_dumper.py devel/guidelines nipy \ 70 | --github-user=nipy \ 71 | --project-url=http://nipy.org/nipy \ 72 | --project-ml-url=https://mail.python.org/mailman/listinfo/neuroimaging 73 | 74 | pickle: 75 | mkdir -p build/pickle build/doctrees 76 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) build/pickle 77 | @echo 78 | @echo "Build finished; now you can process the pickle files or run" 79 | @echo " sphinx-web build/pickle" 80 | @echo "to start the sphinx-web server." 81 | 82 | htmlhelp: 83 | mkdir -p build/htmlhelp build/doctrees 84 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) build/htmlhelp 85 | @echo 86 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 87 | ".hhp project file in build/htmlhelp." 88 | 89 | latex: api 90 | mkdir -p build/latex build/doctrees 91 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) build/latex 92 | # Clear bug for longtable column output in sphinx 93 | python ../tools/fix_longtable.py build/latex/nipy.tex 94 | @echo 95 | @echo "Build finished; the LaTeX files are in build/latex." 96 | @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ 97 | "run these through (pdf)latex." 98 | 99 | changes: 100 | mkdir -p build/changes build/doctrees 101 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) build/changes 102 | @echo 103 | @echo "The overview file is in build/changes." 104 | 105 | linkcheck: 106 | mkdir -p build/linkcheck build/doctrees 107 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) build/linkcheck 108 | @echo 109 | @echo "Link check complete; look for any errors in the above output " \ 110 | "or in build/linkcheck/output.txt." 111 | 112 | doctest: 113 | mkdir -p build/doctest build/doctrees 114 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) build/doctest 115 | @echo 116 | @echo "The overview file is in build/doctest." 
117 | -------------------------------------------------------------------------------- /doc/README.txt: -------------------------------------------------------------------------------- 1 | ==================== 2 | Nipy Documentation 3 | ==================== 4 | 5 | This is the top level build directory for the nipy documentation. All 6 | of the documentation is written using Sphinx_, a python documentation 7 | system built on top of reST_. 8 | 9 | Dependencies 10 | ============ 11 | 12 | In order to build the documentation, 13 | you must have: 14 | 15 | * Sphinx 1.0 or greater 16 | * nipy and all its dependencies so that nipy can import 17 | * matplotlib 18 | * latex (for the PNG mathematics graphics) 19 | * graphviz (for the inheritance diagrams) 20 | 21 | This directory contains: 22 | 23 | * Makefile - the build script to build the HTML or PDF docs. Type 24 | ``make help`` for a list of options. 25 | 26 | * users - the user documentation. 27 | 28 | * devel - documentation for developers. 29 | 30 | * faq - frequently asked questions 31 | 32 | * api - placeholders to automatically generate the api documentation 33 | 34 | * www - source files for website only reST documentss which should not 35 | go in the generated PDF documentation. 36 | 37 | * links_names.txt - reST document with hyperlink targets for common 38 | links used throughout the documentation 39 | 40 | * .rst files - some top-level documentation source files 41 | 42 | * conf.py - the sphinx configuration. 43 | 44 | * sphinxext - some extensions to sphinx to handle math, ipython syntax 45 | highlighting, numpy_ docstring 46 | parsing, and autodocs. 47 | 48 | * _static - used by the sphinx build system. 49 | 50 | * _templates - used by the sphinx build system. 51 | 52 | 53 | Building the documentation 54 | -------------------------- 55 | 56 | Instructions for building the documentation are in the file: 57 | ``devel/guidelines/howto_document.rst`` 58 | 59 | .. 
Since this README.txt is not processed by Sphinx during the 60 | .. documentation build, I've included the links directly so it is at 61 | .. least a valid reST doc. 62 | 63 | .. _Sphinx: http://sphinx.pocoo.org/ 64 | .. _reST: http://docutils.sourceforge.net/rst.html 65 | .. _numpy: http://www.scipy.org/NumPy 66 | 67 | .. vim: ft=rst 68 | -------------------------------------------------------------------------------- /doc/_static/reggie2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipy/nireg/6ed32f2830ff6ebc1860519dc630ebdf8e969dcf/doc/_static/reggie2.png -------------------------------------------------------------------------------- /doc/_templates/layout.html: -------------------------------------------------------------------------------- 1 | {% extends "!layout.html" %} 2 | {% set title = 'Neuroimaging in Python' %} 3 | 4 | {% block rootrellink %} 5 |
  • NIPY home
  • 6 | {% endblock %} 7 | 8 | 9 | {% block extrahead %} 10 | 12 | {% endblock %} 13 | 14 | {% block header %} 15 | 19 | {% endblock %} 20 | 21 | {# This block gets put at the top of the sidebar #} 22 | {% block sidebarlogo %} 23 | 24 | 25 |

    Site Navigation

    26 |
    30 | 31 |

    NIPY Community

    32 | 42 | 43 |

    Github repo

    44 | 48 | {% endblock %} 49 | 50 | {# I had to copy the whole search block just to change the rendered text, 51 | so it doesn't mention modules or classes #} 52 | {%- block sidebarsearch %} 53 | {%- if pagename != "search" %} 54 | 55 | 68 | 69 | 80 | 81 | 82 | {%- endif %} 83 | 84 | {# The sidebarsearch block is the last one available in the default sidebar() 85 | macro, so the only way to add something to the bottom of the sidebar is to 86 | put it here, at the end of the sidebarsearch block (before it closes). 87 | #} 88 | 89 | {%- endblock %} 90 | -------------------------------------------------------------------------------- /doc/api/index.rst: -------------------------------------------------------------------------------- 1 | .. _api-index: 2 | 3 | ##### 4 | API 5 | ##### 6 | 7 | .. only:: html 8 | 9 | :Release: |version| 10 | :Date: |today| 11 | 12 | .. include:: generated/gen.rst 13 | -------------------------------------------------------------------------------- /doc/bibtex/README.txt: -------------------------------------------------------------------------------- 1 | .. Using -*- rst -*- (ReST) mode for emacs editing 2 | .. We don't expect this file to appear in the output documentation 3 | 4 | =============== 5 | Bibtex folder 6 | =============== 7 | 8 | This folder is for bibtex bibliographies, for citations in NIPY 9 | documentation. At the moment there is no standard bibtex mechanism in 10 | sphinx_, but we keep be the bibs here, waiting for the time that this is 11 | done. They also provide the sources for script conversion to ReST_. 
12 | 13 | For script conversion, we have used: http://code.google.com/p/bibstuff/ 14 | 15 | For example, let's say in your ReST_ page ``example.rst`` you have 16 | something like this:: 17 | 18 | I here cite the VTK book [VTK4]_ 19 | 20 | and you've got a bibtex entry starting ``@book{VTK4,`` in a file 21 | ``vtk.bib``, then you could run this command:: 22 | 23 | bib4txt.py -i example.rst vtk.bib 24 | 25 | which would output, to the terminal, the ReST_ text you could add to the 26 | bottom of ``example.rst`` to create the reference. 27 | -------------------------------------------------------------------------------- /doc/bibtex/vtk.bib: -------------------------------------------------------------------------------- 1 | @book{VTK4, 2 | author={Will Schroeder and Ken Martin and Bill Lorensen}, 3 | title={{The Visualization Toolkit--An Object-Oriented Approach To 3D 4 | Graphics}}, 5 | publisher={Kitware, Inc.}, 6 | edition={Fourth}, 7 | year={2006} 8 | } 9 | -------------------------------------------------------------------------------- /doc/devel/code_discussions/comparisons/index.rst: -------------------------------------------------------------------------------- 1 | .. _comparisons: 2 | 3 | ================= 4 | Software Design 5 | ================= 6 | 7 | .. only:: html 8 | 9 | :Release: |version| 10 | :Date: |today| 11 | 12 | .. toctree:: 13 | :maxdepth: 2 14 | 15 | vtk_datasets 16 | -------------------------------------------------------------------------------- /doc/devel/code_discussions/index.rst: -------------------------------------------------------------------------------- 1 | .. _code-discussions: 2 | 3 | ================ 4 | Code discussions 5 | ================ 6 | 7 | These are some developer discussions about design of code in NIPY. 8 | 9 | .. only:: html 10 | 11 | :Release: |version| 12 | :Date: |today| 13 | 14 | .. 
toctree:: 15 | :maxdepth: 2 16 | 17 | understanding_affines 18 | image_ordering 19 | registration_api 20 | repository_design 21 | brainvisa_repositories 22 | repository_api 23 | pipelining_api 24 | simple_viewer 25 | usecases/index 26 | refactoring/index 27 | comparisons/index 28 | -------------------------------------------------------------------------------- /doc/devel/code_discussions/pipelining_api.rst: -------------------------------------------------------------------------------- 1 | .. _pipelining_api: 2 | 3 | ================================== 4 | What would pipelining look like? 5 | ================================== 6 | 7 | Imagine a repository that is a modified version of the one in :ref:`repository_api` 8 | 9 | Then:: 10 | 11 | my_repo = SubjectRepository('/some/structured/file/system') 12 | my_designmaker = MyDesignParser() # Takes parameters from subject to create design 13 | my_pipeline = Pipeline([ 14 | realignerfactory('fsl'), 15 | slicetimerfactory('nipy', 'linear'), 16 | coregisterfactory('fsl', 'flirt'), 17 | normalizerfactory('spm'), 18 | filterfactory('nipy', 'smooth', 8), 19 | designfactory('nipy', my_designmaker), 20 | ]) 21 | 22 | my_analysis = SubjectAnalysis(my_repo, subject_pipeline=my_pipeline) 23 | my_analysis.do() 24 | my_analysis.archive() 25 | 26 | -------------------------------------------------------------------------------- /doc/devel/code_discussions/refactoring/imagelists.rst: -------------------------------------------------------------------------------- 1 | ======================== 2 | Refactoring imagelists 3 | ======================== 4 | 5 | Usecases for ImageList 6 | ====================== 7 | 8 | Thus far only used in anger in 9 | :mod:`nipy.modalities.fmri.fmristat.model`, similarly in 10 | :mod:`nipy.modalities.fmri.spm.model`. 
11 | 12 | From that file, an object ``obj`` of class :class:`FmriImageList` must: 13 | 14 | * return 4D array from ``np.asarray(obj)``, such that the first axis 15 | (axis 0) is the axis over which the model is applied 16 | * be indexable such that ``obj[0]`` returns an Image instance, with 17 | valid ``shape`` and ``coordmap`` attributes for a time-point 3D volume 18 | in the 4D time-series. 19 | * have an attribute ``volume_start_times`` giving times of the start of 20 | each of the volumes in the 4D time series. 21 | * Return the number of volumes in the time-series from ``len(obj)`` 22 | -------------------------------------------------------------------------------- /doc/devel/code_discussions/refactoring/index.rst: -------------------------------------------------------------------------------- 1 | .. _refactoring_index: 2 | 3 | ====================== 4 | Defining use cases 5 | ====================== 6 | 7 | .. toctree:: 8 | :maxdepth: 2 9 | 10 | imagelists 11 | -------------------------------------------------------------------------------- /doc/devel/code_discussions/registration_api.rst: -------------------------------------------------------------------------------- 1 | ========================= 2 | Registration API Design 3 | ========================= 4 | 5 | This contains design ideas for the end-user api when registering images in nipy. 6 | 7 | We want to provide a simple api, but with enough flexibility to allow 8 | users to changes various components of the pipeline. We will also 9 | provide various **Standard** scripts that perform typical pipelines. 
10 | 11 | The pluggable script:: 12 | 13 | func_img = load_image(filename) 14 | anat_img = load_image(filename) 15 | interpolator = SplineInterpolator(order=3) 16 | metric = NormalizedMutualInformation() 17 | optimizer = Powell() 18 | strategy = RegistrationStrategy(interpolator, metric, optimizer) 19 | w2w = strategy.apply(img_fixed, img_moving) 20 | 21 | To apply the transform and resample the image:: 22 | 23 | new_img = resample(img_moving, w2w, interp=interpolator) 24 | 25 | Or:: 26 | 27 | new_img = Image(img_moving, w2w*img_moving.coordmap) 28 | 29 | Transform Multiplication 30 | ------------------------ 31 | 32 | The multiplication order is important and coordinate systems must 33 | *make sense*. The *output coordinates* of the mapping on the 34 | right-hand of the operator, must match the *input coordinates* of the 35 | mapping on the left-hand side of the operator. 36 | 37 | For example, imageA has a mapping from voxels-to-world (v2w), imageB 38 | has a mapping from world-to-world (w2w). So the output of imageA, 39 | *world*, maps to the input of imageB, *world*. We would compose a new 40 | mapping (transform) from these mappings like this:: 41 | 42 | new_coordmap = imageB.coordmap * imageA.coordmap 43 | 44 | If one tried to compose a mapping in the other order, an error should 45 | be raised as the code would detect a mismatch of trying to map output 46 | coordinates from imageB, *world* to the input coordinates of imageA, 47 | *voxels*:: 48 | 49 | new_coordmap = imageA.coordmap * imageB.coordmap 50 | raise ValueError!!! 51 | 52 | Note: We should consider a meaningful error message to help people 53 | quickly correct this mistake. 54 | 55 | One way to remember this ordering is to think of composing functions. 56 | If these were functions, the output of the first function to evaluate 57 | (imageA.coordmap) is passed as input to the second function 58 | (imageB.coordmap). 
And therefore they must match:: 59 | 60 | new_coordmap = imageB.coordmap(imageA.coordmap()) 61 | 62 | Matching Coordinate Systems 63 | --------------------------- 64 | 65 | We need to make sure we can detect mismatched coordinate mappings. 66 | The CoordinateSystem class has a check for equality (__eq__ method) 67 | based on the axis and name attributes. Long-term this may not be 68 | robust enough, but it's a starting place. We should write tests for 69 | failing cases of this, if they don't already exists. 70 | 71 | CoordinateMap 72 | ------------- 73 | 74 | Recall the CoordinateMap defines a mapping between two coordinate 75 | systems, an input coordinate system and an output coordinate system. 76 | One example of this would be a mapping from voxel space to scanner 77 | space. In a Nifti1 header we would have an affine transform to apply 78 | this mapping. The *input coordinates* would be voxel space, the 79 | *output coordinates* would be world space, and the affine transform 80 | provides the mapping between them. 81 | 82 | -------------------------------------------------------------------------------- /doc/devel/code_discussions/repository_api.rst: -------------------------------------------------------------------------------- 1 | .. _repository_api: 2 | 3 | Repository API 4 | ============== 5 | 6 | See also :ref:`repository_design` and :ref:`brainvisa_repositories` 7 | 8 | FMRI datasets often have the structure: 9 | 10 | * Group (sometimes) e.g. Patients, Controls 11 | 12 | * Subject e.g. Subject1, Subject2 13 | 14 | * Session e.g. 
Sess1, Sess2
_repository_design: 2 | 3 | =================== 4 | Repository design 5 | =================== 6 | 7 | See also :ref:`repository_api` and :ref:`brainvisa_repositories` 8 | 9 | For the NIPY system, there seems to be interest for the following: 10 | 11 | * Easy distributed computing 12 | * Easy scripting, replicating the same analysis on different data 13 | * Flexibility - easy of inter-operation with other brain imaging systems 14 | 15 | At a minimum, this seems to entail the following requirements for the 16 | NIPY repository system: 17 | 18 | * Unique identifiers of data, which can be abstracted from the most 19 | local or convenient data storage 20 | * A mechanism for mapping the canonical data model(s) from NIPY to an 21 | arbitrary, and potentially even inconsistent repository structure 22 | * A set of semantic primitives / metadata slots, enabling for example: 23 | * "all scans from this subject" 24 | * "the first scan from every subject in the control group" 25 | * "V1 localizer scans from all subjects" 26 | * "Extract the average timecourse for each subject from the ROI 27 | defined by all voxels with t > 0.005 in the V1 localizer scan for 28 | that subject" 29 | 30 | These problems are not unique to the problem of brain imaging data, 31 | and in many cases have been treated in the domains of database design, 32 | geospatial and space telescope data, and the semantic web. 33 | Technologies of particular interest include: 34 | 35 | * HDF5 - the basis of MINC 2.0 (and potentially NIFTII 2), the most 36 | recent development in the more general CDF / HDF series (and very 37 | highly regarded). There are excellent python binding available in 38 | `PyTables `_. 39 | * Relational database design - it would be nice to efficiently select 40 | data based on any arbitrary subset of attributes associated with 41 | that data. 42 | * The notion of `URI `_ developed under 43 | the guidance of the w3c. Briefly, a URI consists of: 44 | 45 | * An authority (i.e. 
a domain name controlled by a particular 46 | entity) 47 | * A path - a particular resource specified by that authority 48 | * Abstraction from storage (as opposed to a URL) - a URI does not 49 | necessarily include the information necessary for retrieving the 50 | data referred to, though it may. 51 | 52 | * Ways of dealing with hierarchical data as developed in the XML field 53 | (though these strategies could be implemented potentially in other 54 | hierarchical data formats - even filesystems). 55 | 56 | Note that incorporation of any of the above ideas does not require the 57 | use of the actual technology referenced. For example, relational 58 | queries can be made in PyTables in many cases **more efficiently** 59 | than in a relational database by storing everything in a single 60 | denormalized table. This data structure tends to be more efficient 61 | than the equivalent normalized relational database format in the cases 62 | where a single data field is much larger than the others (as is the 63 | case with the data array in brain imaging data). That said, adherance 64 | to standards allows us to leverage existing code which may be tuned to 65 | a degree that would be beyond the scope of this project (for example, 66 | fast Xpath query libraries, as made available via lxml in Python). 67 | -------------------------------------------------------------------------------- /doc/devel/code_discussions/simple_viewer.rst: -------------------------------------------------------------------------------- 1 | Simple image viewer 2 | ------------------- 3 | 4 | Other attempts 5 | -------------- 6 | 7 | http://biomag.wikidot.com/mri-tools 8 | http://code.google.com/p/dicompyler 9 | https://cirl.berkeley.edu/svn/cburns/trunk/nifti_viewer 10 | -------------------------------------------------------------------------------- /doc/devel/code_discussions/usecases/batching.rst: -------------------------------------------------------------------------------- 1 | .. 
_batching: 2 | 3 | ================== 4 | Batching use cases 5 | ================== 6 | 7 | Using the nipy_ framework for creating scripts to process whole 8 | datasets, for example movement correction, coregistration of 9 | functional to structural (intermodality), smoothing, statistics, 10 | inference. 11 | 12 | .. include:: ../../../links_names.txt 13 | -------------------------------------------------------------------------------- /doc/devel/code_discussions/usecases/index.rst: -------------------------------------------------------------------------------- 1 | .. _usecases_index: 2 | 3 | ====================== 4 | Defining use cases 5 | ====================== 6 | 7 | .. toctree:: 8 | :maxdepth: 2 9 | 10 | transformations 11 | images 12 | resampling 13 | batching 14 | 15 | -------------------------------------------------------------------------------- /doc/devel/code_discussions/usecases/resampling.rst: -------------------------------------------------------------------------------- 1 | .. _resampling: 2 | 3 | ======================= 4 | Resampling use cases 5 | ======================= 6 | 7 | Use cases for image resampling. See also :ref:`images`. 8 | 9 | -------------------------------------------------------------------------------- /doc/devel/development_quickstart.rst: -------------------------------------------------------------------------------- 1 | .. _development-quickstart: 2 | 3 | ======================== 4 | Development quickstart 5 | ======================== 6 | 7 | Source Code 8 | =========== 9 | 10 | NIPY uses github_ for our code hosting. For immediate access to 11 | the source code, see the `nipy github`_ site. 12 | 13 | Checking out the latest version 14 | =============================== 15 | 16 | To check out the latest version of nipy you need git_:: 17 | 18 | git clone git://github.com/nipy/nipy.git 19 | 20 | There are two methods to install a development version of nipy. 
For 21 | both methods, build the extensions in place:: 22 | 23 | python setup.py build_ext --inplace 24 | 25 | Then you can either: 26 | 27 | #. Create a symbolic link in your *site-packages* directory to the inplace 28 | build of your source. The advantage of this method is it does not require 29 | any modifications of your PYTHONPATH. 30 | 31 | #. Place the source directory in your PYTHONPATH. 32 | 33 | With either method, all of the modifications made to your source tree 34 | will be picked up when nipy is imported. 35 | 36 | Getting data files 37 | ================== 38 | 39 | See :ref:`data_files`. 40 | 41 | Guidelines 42 | ========== 43 | 44 | We have adopted many developer guidelines in an effort to make 45 | development easy, and the source code readable, consistent and robust. 46 | Many of our guidelines are adopted from the scipy_ / numpy_ community. 47 | We welcome new developers to the effort, if you're interested in 48 | developing code or documentation please join the `nipy mailing list`_ 49 | and introduce yourself. If you plan to do any code development, we 50 | ask that you take a look at the following guidelines. We do our best 51 | to follow these guidelines ourselves: 52 | 53 | * :ref:`howto_document` : Documentation is critical. This document 54 | describes the documentation style, syntax, and tools we use. 55 | 56 | * `Numpy/Scipy Coding Style Guidelines: 57 | `_ 58 | This is the coding style we strive to maintain. 59 | 60 | * :ref:`development-workflow` : This describes our process for version control. 61 | 62 | * :ref:`testing` : We've adopted a rigorous testing framework. 63 | 64 | * :ref:`optimization`: "premature optimization is the root of all 65 | evil." 66 | 67 | .. _trunk_download: 68 | 69 | Submitting a patch 70 | ================== 71 | 72 | The preferred method to submit a patch is to create a branch of nipy on 73 | your machine, modify the code and make a patch or patches. 
Then email 74 | the `nipy mailing list`_ and we will review your code and hopefully 75 | apply (merge) your patch. See the instructions for 76 | :ref:`making-patches`. 77 | 78 | If you do not wish to use git and github, please feel free to 79 | file a bug report and submit a patch or email the 80 | `nipy mailing list`_. 81 | 82 | Bug reports 83 | =========== 84 | 85 | If you find a bug in nipy, please submit a bug report at the `nipy 86 | bugs`_ github site so that we can fix it. 87 | 88 | 89 | .. include:: ../links_names.txt 90 | -------------------------------------------------------------------------------- /doc/devel/guidelines/changelog.rst: -------------------------------------------------------------------------------- 1 | .. _changelog: 2 | 3 | =============== 4 | The ChangeLog 5 | =============== 6 | 7 | **NOTE:** We have not kepted up with our ChangeLog. This is here for 8 | future reference. We will be more diligent with this when we have 9 | regular software releases. 10 | 11 | If you are a developer with commit access, **please** fill a proper 12 | ChangeLog entry per significant change. The SVN commit messages may 13 | be shorter (though a brief summary is appreciated), but a detailed 14 | ChangeLog is critical. It gives us a history of what has happened, 15 | allows us to write release notes at each new release, and is often the 16 | only way to backtrack on the rationale for a change (as the diff will 17 | only show the change, not **why** it happened). 18 | 19 | Please skim the existing ChangeLog for an idea of the proper level of 20 | detail (you don't have to write a novel about a patch). 21 | 22 | The existing ChangeLog is generated using (X)Emacs' fantastic 23 | ChangeLog mode: all you have to do is position the cursor in the 24 | function/method where the change was made, and hit 'C-x 4 a'. XEmacs 25 | automatically opens the ChangeLog file, mark a dated/named point, and 26 | creates an entry pre-titled with the file and function name. 
It 27 | doesn't get any better than this. If you are not using (X)Emacs, 28 | please try to follow the same convention so we have a readable, 29 | organized ChangeLog. 30 | 31 | To get your name in the ChangeLog, set this in your .emacs file: 32 | 33 | (setq user-full-name "Your Name") 34 | (setq user-mail-address "youradddress@domain.com") 35 | 36 | Feel free to obfuscate or omit the address, but at least leave your 37 | name in. For user contributions, try to give credit by name on 38 | patches or significant ideas, but please do an @ -> -AT- replacement 39 | in the email addresses (users have asked for this in the past). 40 | -------------------------------------------------------------------------------- /doc/devel/guidelines/commit_codes.rst: -------------------------------------------------------------------------------- 1 | .. _commit-codes: 2 | 3 | Commit message codes 4 | --------------------- 5 | 6 | Please prefix all commit summaries with one (or more) of the following labels. 7 | This should help others to easily classify the commits into meaningful 8 | categories: 9 | 10 | * *BF* : bug fix 11 | * *RF* : refactoring 12 | * *ENH* : new feature or extended functionality 13 | * *BW* : addresses backward-compatibility 14 | * *OPT* : optimization 15 | * *BK* : breaks something and/or tests fail 16 | * *DOC*: for all kinds of documentation related commits 17 | * *TEST* : for adding or changing tests 18 | * *STY* : PEP8 conformance, whitespace changes etc that do not affect 19 | function. 20 | * *WIP* : Work in progress; please try and avoid using this one, and rebase 21 | incomplete changes into functional units using e.g. ``git rebase -i`` 22 | 23 | So your commit message might look something like this:: 24 | 25 | TEST: relax test threshold slightly 26 | 27 | Attempted fix for failure on windows test run when arrays are in fact 28 | very close (within 6 dp). 
29 | 30 | Keeping up a habit of doing this is useful because it makes it much easier to 31 | see at a glance which changes are likely to be important when you are looking 32 | for sources of bugs, fixes, large refactorings or new features. 33 | 34 | Pull request codes 35 | ------------------ 36 | 37 | When you submit a pull request to github, github will ask you for a summary. If 38 | your code is not ready to merge, but you want to get feedback, please consider 39 | using ``WIP - me working on image design`` or similar for the title of your pull 40 | request. That way we will all know that it's not yet ready to merge and that 41 | you may be interested in more fundamental comments about design. 42 | 43 | When you think the pull request is ready to merge, change the title (using the 44 | *Edit* button) to something like ``MRG - my work on image design``. 45 | -------------------------------------------------------------------------------- /doc/devel/guidelines/compiling_windows.rst: -------------------------------------------------------------------------------- 1 | Some notes on compiling on windows with Visual Studio 2 | ----------------------------------------------------- 3 | 4 | I followed instructions here: 5 | 6 | http://wiki.cython.org/64BitCythonExtensionsOnWindows 7 | 8 | First I downloaded and installed from here: 9 | 10 | http://download.microsoft.com/download/2/E/9/2E911956-F90F-4BFB-8231-E292A7B6F287/GRMSDKX_EN_DVD.iso 11 | 12 | via here: http://www.microsoft.com/en-us/download/details.aspx?id=18950#instructions 13 | 14 | Then I got Visual Studio 2008 from here: 15 | 16 | http://www.microsoft.com/en-us/download/details.aspx?id=14597 17 | 18 | (file ``vcsetup.exe``) with hints from here: 19 | 20 | http://docs.python.org/devguide/setup.html#windows 21 | http://bugs.python.org/issue16161 22 | -------------------------------------------------------------------------------- /doc/devel/guidelines/coverage_testing.rst: 
-------------------------------------------------------------------------------- 1 | 2 | Coverage Testing 3 | ---------------- 4 | 5 | Coverage testing is a technique used to see how much of the code is 6 | exercised by the unit tests. It is important to remember that a high 7 | level of coverage is a necessary but not sufficient condition for 8 | having effective tests. Coverage testing can be useful for identifying 9 | whole functions or classes which are not tested, or for finding 10 | certain conditions which are never tested. 11 | 12 | This is an excellent task for nose_ - the automated test runner we are 13 | using. Nose can run the `python coverage tester`_. First make sure 14 | you have the coverage tester installed on your system. Download the 15 | tarball from the link, extract and install ``python setup.py 16 | install``. Or on Ubuntu you can install from apt-get: ``sudo apt-get 17 | install python-coverage``. 18 | 19 | Run nose with coverage testing arguments:: 20 | 21 | nosetests -sv --with-coverage path_to_code 22 | 23 | For example, this command:: 24 | 25 | nosetests -sv --with-coverage test_coordinate_map.py 26 | 27 | will report the following:: 28 | 29 | Name Stmts Exec Cover Missing 30 | ----------------------------------------------------------------------------- 31 | nipy 21 14 66% 70-74, 88-89 32 | nipy.core 4 4 100% 33 | nipy.core.reference 8 8 100% 34 | nipy.core.reference.array_coords 100 90 90% 133-134, 148-151, 220, 222, 235, 242 35 | nipy.core.reference.coordinate_map 188 187 99% 738 36 | nipy.core.reference.coordinate_system 61 61 100% 37 | nipy.core.reference.slices 34 34 100% 38 | nipy.core.transforms 0 0 100% 39 | nipy.core.transforms.affines 14 14 100% 40 | 41 | 42 | The coverage report will cover any python source module imported after 43 | the start of the test. This can be noisy and difficult to focus on 44 | the specific module for which you are writing nosetests. 
For 45 | instance, the above report also included coverage of most of 46 | ``numpy``. To focus the coverage report, you can provide nose with 47 | the specific package you would like output from using the 48 | ``--cover-package``. For example, in writing tests for the 49 | coordinate_map module:: 50 | 51 | nosetests --with-coverage --cover-package=nipy.core.reference.coordinate_map test_coordinate_map.py 52 | 53 | Since that's a lot to type, I wrote a tool called ``sneeze`` to that 54 | simplifies coverage testing with nose. 55 | 56 | 57 | Sneeze 58 | ^^^^^^ 59 | 60 | Sneeze runs nose with coverage testing and reports only the package 61 | the test module is testing. It requires the test module follow a 62 | simple naming convention: 63 | 64 | #. Prefix ``test_`` 65 | #. The package name you are testing 66 | #. Suffix ``.py`` 67 | 68 | For example, the test module for the ``coordinate_map`` module is 69 | named ``test_coordinate_map.py``. Then testing coverage is as simple as:: 70 | 71 | sneeze.py test_coordinate_map.py 72 | 73 | Sneeze is included in the ``tools`` directory in the nipy_ 74 | source. Simply run the ``setup.py`` to install sneeze in your local 75 | bin directory. 76 | 77 | 78 | .. include:: ../../links_names.txt 79 | -------------------------------------------------------------------------------- /doc/devel/guidelines/debugging.rst: -------------------------------------------------------------------------------- 1 | =========== 2 | Debugging 3 | =========== 4 | 5 | Some options are: 6 | 7 | Run in ipython 8 | -------------- 9 | 10 | As in:: 11 | 12 | In [1]: run mymodule.py 13 | ... (somecrash) 14 | In [2]: %debug 15 | 16 | Then diagnose, using the workspace that comes up, which has the 17 | context of the crash. 18 | 19 | You can also do:: 20 | 21 | In [1] %pdb on 22 | In [2]: run mymodule.py 23 | ... (somecrash) 24 | 25 | At that point you will be automatically dropped into the the workspace 26 | in the context of the error. 
This is very similar to the matlab 27 | ``dbstop if error`` command. 28 | 29 | See the `ipython manual`_ , and 30 | `debugging in ipython `_ 31 | for more detail. 32 | 33 | Embed ipython in crashing code 34 | ------------------------------ 35 | 36 | Often it is not possible to run the code directly from ipython using 37 | the ``run`` command. For example, the code may be called from some 38 | other system such as sphinx_. In that case you can embed. At the 39 | point that you want ipython to open with the context available for 40 | instrospection, add:: 41 | 42 | from IPython.Shell import IPShellEmbed 43 | ipshell = IPShellEmbed() 44 | ipshell() 45 | 46 | See 47 | `embedding ipython `_ 48 | for more detail. 49 | 50 | .. include:: ../../links_names.txt 51 | -------------------------------------------------------------------------------- /doc/devel/guidelines/elegant.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | import matplotlib.pyplot as plt 4 | plt.plot([1,2,3], [4,5,6]) 5 | plt.ylabel('some more numbers') 6 | 7 | -------------------------------------------------------------------------------- /doc/devel/guidelines/gitwash/branch_list.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipy/nireg/6ed32f2830ff6ebc1860519dc630ebdf8e969dcf/doc/devel/guidelines/gitwash/branch_list.png -------------------------------------------------------------------------------- /doc/devel/guidelines/gitwash/branch_list_compare.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipy/nireg/6ed32f2830ff6ebc1860519dc630ebdf8e969dcf/doc/devel/guidelines/gitwash/branch_list_compare.png -------------------------------------------------------------------------------- 
/doc/devel/guidelines/gitwash/configure_git.rst: -------------------------------------------------------------------------------- 1 | .. _configure-git: 2 | 3 | =============== 4 | Configure git 5 | =============== 6 | 7 | .. _git-config-basic: 8 | 9 | Overview 10 | ======== 11 | 12 | Your personal git_ configurations are saved in the ``.gitconfig`` file in 13 | your home directory. 14 | Here is an example ``.gitconfig`` file:: 15 | 16 | [user] 17 | name = Your Name 18 | email = you@yourdomain.example.com 19 | 20 | [alias] 21 | ci = commit -a 22 | co = checkout 23 | st = status -a 24 | stat = status -a 25 | br = branch 26 | wdiff = diff --color-words 27 | 28 | [core] 29 | editor = vim 30 | 31 | [merge] 32 | summary = true 33 | 34 | You can edit this file directly or you can use the ``git config --global`` 35 | command:: 36 | 37 | git config --global user.name "Your Name" 38 | git config --global user.email you@yourdomain.example.com 39 | git config --global alias.ci "commit -a" 40 | git config --global alias.co checkout 41 | git config --global alias.st "status -a" 42 | git config --global alias.stat "status -a" 43 | git config --global alias.br branch 44 | git config --global alias.wdiff "diff --color-words" 45 | git config --global core.editor vim 46 | git config --global merge.summary true 47 | 48 | To set up on another computer, you can copy your ``~/.gitconfig`` file, 49 | or run the commands above. 50 | 51 | In detail 52 | ========= 53 | 54 | user.name and user.email 55 | ------------------------ 56 | 57 | It is good practice to tell git_ who you are, for labeling any changes 58 | you make to the code. 
The simplest way to do this is from the command 59 | line:: 60 | 61 | git config --global user.name "Your Name" 62 | git config --global user.email you@yourdomain.example.com 63 | 64 | This will write the settings into your git configuration file, which 65 | should now contain a user section with your name and email:: 66 | 67 | [user] 68 | name = Your Name 69 | email = you@yourdomain.example.com 70 | 71 | Of course you'll need to replace ``Your Name`` and ``you@yourdomain.example.com`` 72 | with your actual name and email address. 73 | 74 | Aliases 75 | ------- 76 | 77 | You might well benefit from some aliases to common commands. 78 | 79 | For example, you might well want to be able to shorten ``git checkout`` 80 | to ``git co``. Or you may want to alias ``git diff --color-words`` 81 | (which gives a nicely formatted output of the diff) to ``git wdiff`` 82 | 83 | The following ``git config --global`` commands:: 84 | 85 | git config --global alias.ci "commit -a" 86 | git config --global alias.co checkout 87 | git config --global alias.st "status -a" 88 | git config --global alias.stat "status -a" 89 | git config --global alias.br branch 90 | git config --global alias.wdiff "diff --color-words" 91 | 92 | will create an ``alias`` section in your ``.gitconfig`` file with contents 93 | like this:: 94 | 95 | [alias] 96 | ci = commit -a 97 | co = checkout 98 | st = status -a 99 | stat = status -a 100 | br = branch 101 | wdiff = diff --color-words 102 | 103 | Editor 104 | ------ 105 | 106 | You may also want to make sure that your editor of choice is used :: 107 | 108 | git config --global core.editor vim 109 | 110 | Merging 111 | ------- 112 | 113 | To enforce summaries when doing merges (``~/.gitconfig`` file again):: 114 | 115 | [merge] 116 | log = true 117 | 118 | Or from the command line:: 119 | 120 | git config --global merge.log true 121 | 122 | 123 | .. 
include:: git_links.inc 124 | -------------------------------------------------------------------------------- /doc/devel/guidelines/gitwash/dot2_dot3.rst: -------------------------------------------------------------------------------- 1 | .. _dot2-dot3: 2 | 3 | ======================================== 4 | Two and three dots in difference specs 5 | ======================================== 6 | 7 | Thanks to Yarik Halchenko for this explanation. 8 | 9 | Imagine a series of commits A, B, C, D... Imagine that there are two 10 | branches, *topic* and *master*. You branched *topic* off *master* when 11 | *master* was at commit 'E'. The graph of the commits looks like this:: 12 | 13 | 14 | A---B---C topic 15 | / 16 | D---E---F---G master 17 | 18 | Then:: 19 | 20 | git diff master..topic 21 | 22 | will output the difference from G to C (i.e. with effects of F and G), 23 | while:: 24 | 25 | git diff master...topic 26 | 27 | would output just differences in the topic branch (i.e. only A, B, and 28 | C). 29 | -------------------------------------------------------------------------------- /doc/devel/guidelines/gitwash/following_latest.rst: -------------------------------------------------------------------------------- 1 | .. _following-latest: 2 | 3 | ============================= 4 | Following the latest source 5 | ============================= 6 | 7 | These are the instructions if you just want to follow the latest 8 | *nipy* source, but you don't need to do any development for now. 9 | 10 | The steps are: 11 | 12 | * :ref:`install-git` 13 | * get local copy of the git repository from github_ 14 | * update local copy from time to time 15 | 16 | Get the local copy of the code 17 | ============================== 18 | 19 | From the command line:: 20 | 21 | git clone git://github.com/nipy/nipy.git 22 | 23 | You now have a copy of the code tree in the new ``nipy`` directory. 
24 | 25 | Updating the code 26 | ================= 27 | 28 | From time to time you may want to pull down the latest code. Do this with:: 29 | 30 | cd nipy 31 | git pull 32 | 33 | The tree in ``nipy`` will now have the latest changes from the initial 34 | repository. 35 | 36 | .. include:: git_links.inc 37 | -------------------------------------------------------------------------------- /doc/devel/guidelines/gitwash/forking_button.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipy/nireg/6ed32f2830ff6ebc1860519dc630ebdf8e969dcf/doc/devel/guidelines/gitwash/forking_button.png -------------------------------------------------------------------------------- /doc/devel/guidelines/gitwash/forking_hell.rst: -------------------------------------------------------------------------------- 1 | .. _forking: 2 | 3 | ========================================== 4 | Making your own copy (fork) of nipy 5 | ========================================== 6 | 7 | You need to do this only once. The instructions here are very similar 8 | to the instructions at http://help.github.com/forking/ - please see that 9 | page for more detail. We're repeating some of it here just to give the 10 | specifics for the nipy_ project, and to suggest some default names. 11 | 12 | Set up and configure a github_ account 13 | ====================================== 14 | 15 | If you don't have a github_ account, go to the github_ page, and make one. 16 | 17 | You then need to configure your account to allow write access - see the 18 | ``Generating SSH keys`` help on `github help`_. 19 | 20 | Create your own forked copy of nipy_ 21 | ========================================= 22 | 23 | #. Log into your github_ account. 24 | #. Go to the nipy_ github home at `nipy github`_. 25 | #. Click on the *fork* button: 26 | 27 | .. 
image:: forking_button.png 28 | 29 | Now, after a short pause and some 'Hardcore forking action', you 30 | should find yourself at the home page for your own forked copy of nipy_. 31 | 32 | .. include:: git_links.inc 33 | 34 | -------------------------------------------------------------------------------- /doc/devel/guidelines/gitwash/git_development.rst: -------------------------------------------------------------------------------- 1 | .. _git-development: 2 | 3 | ===================== 4 | Git for development 5 | ===================== 6 | 7 | Contents: 8 | 9 | .. toctree:: 10 | :maxdepth: 2 11 | 12 | forking_hell 13 | set_up_fork 14 | configure_git 15 | development_workflow 16 | 17 | -------------------------------------------------------------------------------- /doc/devel/guidelines/gitwash/git_install.rst: -------------------------------------------------------------------------------- 1 | .. _install-git: 2 | 3 | ============= 4 | Install git 5 | ============= 6 | 7 | Overview 8 | ======== 9 | 10 | ================ ============= 11 | Debian / Ubuntu ``sudo apt-get install git-core`` 12 | Fedora ``sudo yum install git-core`` 13 | Windows Download and install msysGit_ 14 | OS X Use the git-osx-installer_ 15 | ================ ============= 16 | 17 | In detail 18 | ========= 19 | 20 | See the git_ page for the most recent information. 21 | 22 | Have a look at the github_ install help pages available from `github help`_ 23 | 24 | There are good instructions here: http://book.git-scm.com/2_installing_git.html 25 | 26 | .. include:: git_links.inc 27 | -------------------------------------------------------------------------------- /doc/devel/guidelines/gitwash/git_intro.rst: -------------------------------------------------------------------------------- 1 | ============== 2 | Introduction 3 | ============== 4 | 5 | These pages describe a git_ and github_ workflow for the nipy_ 6 | project. 
7 | 8 | There are several different workflows here, for different ways of 9 | working with *nipy*. 10 | 11 | This is not a comprehensive git_ reference, it's just a workflow for our 12 | own project. It's tailored to the github_ hosting service. You may well 13 | find better or quicker ways of getting stuff done with git_, but these 14 | should get you started. 15 | 16 | For general resources for learning git_ see :ref:`git-resources`. 17 | 18 | .. include:: git_links.inc 19 | -------------------------------------------------------------------------------- /doc/devel/guidelines/gitwash/git_links.inc: -------------------------------------------------------------------------------- 1 | .. This (-*- rst -*-) format file contains commonly used link targets 2 | and name substitutions. It may be included in many files, 3 | therefore it should only contain link targets and name 4 | substitutions. Try grepping for "^\.\. _" to find plausible 5 | candidates for this list. 6 | 7 | .. NOTE: reST targets are 8 | __not_case_sensitive__, so only one target definition is needed for 9 | nipy, NIPY, Nipy, etc... 10 | 11 | .. PROJECTNAME placeholders 12 | .. _PROJECTNAME: http://neuroimaging.scipy.org 13 | .. _`PROJECTNAME github`: http://github.com/nipy 14 | .. _`PROJECTNAME mailing list`: https://mail.python.org/mailman/listinfo/neuroimaging 15 | 16 | .. nipy 17 | .. _nipy: http://nipy.org/nipy 18 | .. _`nipy github`: http://github.com/nipy/nipy 19 | .. _`nipy mailing list`: https://mail.python.org/mailman/listinfo/neuroimaging 20 | 21 | .. ipython 22 | .. _ipython: http://ipython.scipy.org 23 | .. _`ipython github`: http://github.com/ipython/ipython 24 | .. _`ipython mailing list`: http://mail.scipy.org/mailman/listinfo/IPython-dev 25 | 26 | .. dipy 27 | .. _dipy: http://nipy.org/dipy 28 | .. _`dipy github`: http://github.com/Garyfallidis/dipy 29 | .. _`dipy mailing list`: https://mail.python.org/mailman/listinfo/neuroimaging 30 | 31 | .. nibabel 32 | .. 
_nibabel: http://nipy.org/nibabel 33 | .. _`nibabel github`: http://github.com/nipy/nibabel 34 | .. _`nibabel mailing list`: https://mail.python.org/mailman/listinfo/neuroimaging 35 | 36 | .. marsbar 37 | .. _marsbar: http://marsbar.sourceforge.net 38 | .. _`marsbar github`: http://github.com/matthew-brett/marsbar 39 | .. _`MarsBaR mailing list`: https://lists.sourceforge.net/lists/listinfo/marsbar-users 40 | 41 | .. git stuff 42 | .. _git: http://git-scm.com/ 43 | .. _github: http://github.com 44 | .. _github help: http://help.github.com 45 | .. _msysgit: http://code.google.com/p/msysgit/downloads/list 46 | .. _git-osx-installer: http://code.google.com/p/git-osx-installer/downloads/list 47 | .. _subversion: http://subversion.tigris.org/ 48 | .. _git cheat sheet: http://github.com/guides/git-cheat-sheet 49 | .. _pro git book: http://progit.org/ 50 | .. _git svn crash course: http://git-scm.com/course/svn.html 51 | .. _learn.github: http://learn.github.com/ 52 | .. _network graph visualizer: http://github.com/blog/39-say-hello-to-the-network-graph-visualizer 53 | .. _git user manual: http://www.kernel.org/pub/software/scm/git/docs/user-manual.html 54 | .. _git tutorial: http://www.kernel.org/pub/software/scm/git/docs/gittutorial.html 55 | .. _git community book: http://book.git-scm.com/ 56 | .. _git ready: http://www.gitready.com/ 57 | .. _git casts: http://www.gitcasts.com/ 58 | .. _Fernando's git page: http://www.fperez.org/py4science/git.html 59 | .. _git magic: http://www-cs-students.stanford.edu/~blynn/gitmagic/index.html 60 | .. _git concepts: http://www.eecs.harvard.edu/~cduan/technical/git/ 61 | .. _git clone: http://www.kernel.org/pub/software/scm/git/docs/git-clone.html 62 | .. _git checkout: http://www.kernel.org/pub/software/scm/git/docs/git-checkout.html 63 | .. _git commit: http://www.kernel.org/pub/software/scm/git/docs/git-commit.html 64 | .. _git push: http://www.kernel.org/pub/software/scm/git/docs/git-push.html 65 | .. 
_git pull: http://www.kernel.org/pub/software/scm/git/docs/git-pull.html 66 | .. _git add: http://www.kernel.org/pub/software/scm/git/docs/git-add.html 67 | .. _git status: http://www.kernel.org/pub/software/scm/git/docs/git-status.html 68 | .. _git diff: http://www.kernel.org/pub/software/scm/git/docs/git-diff.html 69 | .. _git log: http://www.kernel.org/pub/software/scm/git/docs/git-log.html 70 | .. _git branch: http://www.kernel.org/pub/software/scm/git/docs/git-branch.html 71 | .. _git remote: http://www.kernel.org/pub/software/scm/git/docs/git-remote.html 72 | .. _git config: http://www.kernel.org/pub/software/scm/git/docs/git-config.html 73 | .. _why the -a flag?: http://www.gitready.com/beginner/2009/01/18/the-staging-area.html 74 | .. _git staging area: http://www.gitready.com/beginner/2009/01/18/the-staging-area.html 75 | .. _tangled working copy problem: http://tomayko.com/writings/the-thing-about-git 76 | .. _git management: http://kerneltrap.org/Linux/Git_Management 77 | .. _linux git workflow: http://www.mail-archive.com/dri-devel@lists.sourceforge.net/msg39091.html 78 | .. _git parable: http://tom.preston-werner.com/2009/05/19/the-git-parable.html 79 | .. _git foundation: http://matthew-brett.github.com/pydagogue/foundation.html 80 | 81 | .. other stuff 82 | .. _python: http://www.python.org 83 | -------------------------------------------------------------------------------- /doc/devel/guidelines/gitwash/git_resources.rst: -------------------------------------------------------------------------------- 1 | .. _git-resources: 2 | 3 | ================ 4 | git_ resources 5 | ================ 6 | 7 | Tutorials and summaries 8 | ======================= 9 | 10 | * `github help`_ has an excellent series of how-to guides. 11 | * `learn.github`_ has an excellent series of tutorials 12 | * The `pro git book`_ is a good in-depth book on git. 13 | * A `git cheat sheet`_ is a page giving summaries of common commands. 
14 | * The `git user manual`_ 15 | * The `git tutorial`_ 16 | * The `git community book`_ 17 | * `git ready`_ - a nice series of tutorials 18 | * `git casts`_ - video snippets giving git how-tos. 19 | * `git magic`_ - extended introduction with intermediate detail 20 | * The `git parable`_ is an easy read explaining the concepts behind git. 21 | * Our own `git foundation`_ expands on the `git parable`_. 22 | * Fernando Perez' git page - `Fernando's git page`_ - many links and tips 23 | * A good but technical page on `git concepts`_ 24 | * `git svn crash course`_: git_ for those of us used to subversion_ 25 | 26 | Advanced git workflow 27 | ===================== 28 | 29 | There are many ways of working with git_; here are some posts on the 30 | rules of thumb that other projects have come up with: 31 | 32 | * Linus Torvalds on `git management`_ 33 | * Linus Torvalds on `linux git workflow`_ . Summary; use the git tools 34 | to make the history of your edits as clean as possible; merge from 35 | upstream edits as little as possible in branches where you are doing 36 | active development. 37 | 38 | Manual pages online 39 | =================== 40 | 41 | You can get these on your own machine with (e.g) ``git help push`` or 42 | (same thing) ``git push --help``, but, for convenience, here are the 43 | online manual pages for some common commands: 44 | 45 | * `git add`_ 46 | * `git branch`_ 47 | * `git checkout`_ 48 | * `git clone`_ 49 | * `git commit`_ 50 | * `git config`_ 51 | * `git diff`_ 52 | * `git log`_ 53 | * `git pull`_ 54 | * `git push`_ 55 | * `git remote`_ 56 | * `git status`_ 57 | 58 | .. include:: git_links.inc 59 | -------------------------------------------------------------------------------- /doc/devel/guidelines/gitwash/index.rst: -------------------------------------------------------------------------------- 1 | .. _using-git: 2 | 3 | Working with *nipy* source code 4 | ====================================== 5 | 6 | Contents: 7 | 8 | .. 
toctree:: 9 | :maxdepth: 2 10 | 11 | git_intro 12 | git_install 13 | following_latest 14 | patching 15 | git_development 16 | git_resources 17 | 18 | 19 | -------------------------------------------------------------------------------- /doc/devel/guidelines/gitwash/patching.rst: -------------------------------------------------------------------------------- 1 | ================ 2 | Making a patch 3 | ================ 4 | 5 | You've discovered a bug or something else you want to change in nipy_ - excellent! 6 | 7 | You've worked out a way to fix it - even better! 8 | 9 | You want to tell us about it - best of all! 10 | 11 | The easiest way is to make a *patch* or set of patches. Here we explain 12 | how. Making a patch is the simplest and quickest, but if you're going 13 | to be doing anything more than simple quick things, please consider 14 | following the :ref:`git-development` model instead. 15 | 16 | .. _making-patches: 17 | 18 | Making patches 19 | ============== 20 | 21 | Overview 22 | -------- 23 | 24 | :: 25 | 26 | # tell git who you are 27 | git config --global user.email you@yourdomain.example.com 28 | git config --global user.name "Your Name Comes Here" 29 | # get the repository if you don't have it 30 | git clone git://github.com/nipy/nipy.git 31 | # make a branch for your patching 32 | cd nipy 33 | git branch the-fix-im-thinking-of 34 | git checkout the-fix-im-thinking-of 35 | # hack, hack, hack 36 | # Tell git about any new files you've made 37 | git add somewhere/tests/test_my_bug.py 38 | # commit work in progress as you go 39 | git commit -am 'BF - added tests for Funny bug' 40 | # hack hack, hack 41 | git commit -am 'BF - added fix for Funny bug' 42 | # make the patch files 43 | git format-patch -M -C master 44 | 45 | Then, send the generated patch files to the `nipy mailing list`_ - where we will thank you warmly. 46 | 47 | In detail 48 | --------- 49 | 50 | #. 
Tell git_ who you are so it can label the commits you've made:: 51 | 52 | git config --global user.email you@yourdomain.example.com 53 | git config --global user.name "Your Name Comes Here" 54 | 55 | #. If you don't already have one, clone a copy of the nipy_ repository:: 56 | 57 | git clone git://github.com/nipy/nipy.git 58 | cd nipy 59 | 60 | #. Make a 'feature branch'. This will be where you work on your bug 61 | fix. It's nice and safe and leaves you with access to an unmodified 62 | copy of the code in the main branch:: 63 | 64 | git branch the-fix-im-thinking-of 65 | git checkout the-fix-im-thinking-of 66 | 67 | #. Do some edits, and commit them as you go:: 68 | 69 | # hack, hack, hack 70 | # Tell git about any new files you've made 71 | git add somewhere/tests/test_my_bug.py 72 | # commit work in progress as you go 73 | git commit -am 'BF - added tests for Funny bug' 74 | # hack hack, hack 75 | git commit -am 'BF - added fix for Funny bug' 76 | 77 | Note the ``-am`` options to ``commit``. The ``m`` flag just signals 78 | that you're going to type a message on the command line. The ``a`` 79 | flag - you can just take on faith - or see `why the -a flag?`_. 80 | 81 | #. When you have finished, check you have committed all your changes:: 82 | 83 | git status 84 | 85 | #. Finally, make your commits into patches. You want all the commits 86 | since you branched from the ``master`` branch:: 87 | 88 | git format-patch -M -C master 89 | 90 | You will now have several files named for the commits:: 91 | 92 | 0001-BF-added-tests-for-Funny-bug.patch 93 | 0002-BF-added-fix-for-Funny-bug.patch 94 | 95 | Send these files to the `nipy mailing list`_. 
96 | 97 | When you are done, to switch back to the main copy of the code, just 98 | return to the ``master`` branch:: 99 | 100 | git checkout master 101 | 102 | Moving from patching to development 103 | =================================== 104 | 105 | If you find you have done some patches, and you have one or more feature 106 | branches, you will probably want to switch to development mode. You can 107 | do this with the repository you have. 108 | 109 | Fork the nipy_ repository on github_ - :ref:`forking`. Then:: 110 | 111 | # checkout and refresh master branch from main repo 112 | git checkout master 113 | git pull origin master 114 | # rename pointer to main repository to 'upstream' 115 | git remote rename origin upstream 116 | # point your repo to default read / write to your fork on github 117 | git remote add origin git@github.com:your-user-name/nipy.git 118 | # push up any branches you've made and want to keep 119 | git push origin the-fix-im-thinking-of 120 | 121 | Then you can, if you want, follow the :ref:`development-workflow`. 122 | 123 | .. include:: git_links.inc 124 | -------------------------------------------------------------------------------- /doc/devel/guidelines/gitwash/pull_button.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipy/nireg/6ed32f2830ff6ebc1860519dc630ebdf8e969dcf/doc/devel/guidelines/gitwash/pull_button.png -------------------------------------------------------------------------------- /doc/devel/guidelines/gitwash/set_up_fork.rst: -------------------------------------------------------------------------------- 1 | .. _set-up-fork: 2 | 3 | ================== 4 | Set up your fork 5 | ================== 6 | 7 | First you follow the instructions for :ref:`forking`. 
This means that we can't
58 | 59 | Just for your own satisfaction, show yourself that you now have a new 60 | 'remote', with ``git remote -v show``, giving you something like:: 61 | 62 | upstream git://github.com/nipy/nipy.git (fetch) 63 | upstream git://github.com/nipy/nipy.git (push) 64 | origin git@github.com:your-user-name/nipy.git (fetch) 65 | origin git@github.com:your-user-name/nipy.git (push) 66 | 67 | .. include:: git_links.inc 68 | 69 | -------------------------------------------------------------------------------- /doc/devel/guidelines/howto_document.rst: -------------------------------------------------------------------------------- 1 | .. _howto_document: 2 | 3 | ============================ 4 | How to write documentation 5 | ============================ 6 | 7 | Nipy_ uses the Sphinx_ documentation generating tool. Sphinx 8 | translates reST_ formatted documents into html and pdf documents. All 9 | our documents and docstrings are in reST format, this allows us to 10 | have both human-readable docstrings when viewed in ipython_, and 11 | web and print quality documentation. 12 | 13 | 14 | Building the documentation 15 | -------------------------- 16 | 17 | You need to have Sphinx_ (version 0.6.2 or above) and graphviz_ (version 18 | 2.20 or greater). 19 | 20 | The ``Makefile`` (in the top-level doc directory) automates the 21 | generation of the documents. To make the HTML documents:: 22 | 23 | make html 24 | 25 | For PDF documentation do:: 26 | 27 | make pdf 28 | 29 | The built documentation is then placed in a ``build/html`` or 30 | ``build/latex`` subdirectories. 31 | 32 | For more options, type:: 33 | 34 | make help 35 | 36 | Viewing the documentation 37 | ------------------------- 38 | 39 | We also build our website using sphinx_. All of the documentation in 40 | the ``docs`` directory is included on the website. There are a few 41 | files that are website only and these are placed in the ``www`` 42 | directory. 
Additional reST references:
project, here are some links I've found.
Until 19 | that need arises, we will follow the great advice from these fellow 20 | programmers: 21 | 22 | 23 | Kent Beck: 24 | "First make it work. Then make it right. Then make it fast." 25 | 26 | `Donald Knuth on optimization 27 | `_: 28 | 29 | "We should forget about small efficiencies, say about 97% of the 30 | time: premature optimization is the root of all evil." 31 | 32 | 33 | Tim Hochberg, from the Numpy list:: 34 | 35 | 0. Think about your algorithm. 36 | 1. Vectorize your inner loop. 37 | 2. Eliminate temporaries 38 | 3. Ask for help 39 | 4. Recode in C. 40 | 5. Accept that your code will never be fast. 41 | 42 | Step zero should probably be repeated after every other step ;) 43 | 44 | 45 | .. include:: ../../links_names.txt 46 | -------------------------------------------------------------------------------- /doc/devel/images.rst: -------------------------------------------------------------------------------- 1 | =================== 2 | Describing images 3 | =================== 4 | 5 | Here we set out what we think an image is and how it should work in our 6 | code. We are largely following the nifti_ standard. 7 | 8 | What is an image? 9 | ================= 10 | 11 | An image is the association of a block (array) of spatial data, with the 12 | relationship of the position of that data to some continuous space. 13 | 14 | Therefore an image contains: 15 | 16 | * an array 17 | * a spatial transformation describing the position of the data in the 18 | array relative to some space. 19 | 20 | An image always has 3 spatial dimensions. It can have other dimensions, 21 | such as time. 22 | 23 | A slice from a 3D image is also a 3D image, but with one dimension of 24 | the image having length 1. 25 | 26 | The transformation is spatial and refers to exactly three dimensions. 
assert world_position.shape == (3,)
sudo apt-get install build-essential
14 | 15 | Requirements:: 16 | 17 | yum install gcc-c++ 18 | yum install python-devel 19 | yum install numpy scipy 20 | yum install sympy 21 | yum install atlas-devel 22 | 23 | Options:: 24 | 25 | yum install ipython 26 | yum install python-matplotlib 27 | 28 | For getting the code via version control:: 29 | 30 | yum install git-core 31 | 32 | Then follow the instructions at :ref:`trunk_download` 33 | -------------------------------------------------------------------------------- /doc/devel/install/index.rst: -------------------------------------------------------------------------------- 1 | .. _distribution-installs: 2 | 3 | ================================================ 4 | Developer installs for different distributions 5 | ================================================ 6 | 7 | .. only:: html 8 | 9 | :Release: |version| 10 | :Date: |today| 11 | 12 | .. toctree:: 13 | :maxdepth: 2 14 | 15 | debian 16 | fedora 17 | windows 18 | 19 | -------------------------------------------------------------------------------- /doc/devel/install/windows.rst: -------------------------------------------------------------------------------- 1 | ================================ 2 | Development install on windows 3 | ================================ 4 | 5 | The easy way - a super-package 6 | ------------------------------ 7 | 8 | The easiest way to get the dependencies is to install PythonXY_ or the 9 | `Enthought Tool Suite`_ . This gives you MinGW_, Python_, Numpy_, 10 | Scipy_, ipython_ and matplotlib_ (and much more). 11 | 12 | The hard way - by components 13 | ---------------------------- 14 | 15 | If instead you want to do it by component, try the instructions below. 
16 | 17 | Requirements: 18 | 19 | * Download and install MinGW_ 20 | * Download and install the windows binary for Python_ 21 | * Download and install the Numpy_ and Scipy_ binaries 22 | * Download and install Sympy_ 23 | 24 | Options: 25 | 26 | * Download and install ipython_, being careful to follow the windows 27 | installation instructions 28 | * Download and install matplotlib_ 29 | 30 | Alternatively, if you are very brave, you may want to install numpy / scipy from 31 | source - see our maybe out of date :ref:`windows_scipy_build` for details. 32 | 33 | Getting and installing NIPY 34 | --------------------------- 35 | 36 | You will next need to get the NIPY code via version control: 37 | 38 | * Download and install the windows binary for git_ 39 | * Go to the windows menu, find the ``git`` menu, and run ``git`` in a 40 | windows terminal. 41 | 42 | You should now be able to follow the instructions in 43 | :ref:`trunk_download`, but with the following modifications: 44 | 45 | Running the build / install 46 | --------------------------- 47 | 48 | Here we assume that you do *not* have the Microsoft visual C tools, you 49 | did not use the ETS_ package (which sets the compiler for you) and *are* 50 | using a version of MinGW_ to compile NIPY. 51 | 52 | First, for the ``python setup.py`` steps, you will need to add the 53 | ``--compiler=mingw32`` flag, like this:: 54 | 55 | python setup.py build --compiler=mingw32 install 56 | 57 | Note that, with this setup you cannot do inplace (developer) installs 58 | (like ``python setup.py build_ext --inplace``) because of a six-legged 59 | python packaging feature that does not allow the compiler options (here 60 | ``--compiler=mingw32``) to be passed from the ``build_ext`` command. 
61 | 62 | If you want to be able to do that, add these lines to your ``distutils.cfg`` file :: 63 | 64 | [build] 65 | compiler=mingw32 66 | 67 | [config] 68 | compiler = mingw32 69 | 70 | See http://docs.python.org/install/#inst-config-files for details on 71 | this file. After you've done this, you can run the standard ``python 72 | setup.py build_ext --inplace`` command. 73 | 74 | The command line from Windows 75 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 76 | 77 | The default windows XP command line ``cmd`` is very basic. You might 78 | consider using the Cygwin_ bash shell, or you may want to use the 79 | ipython_ shell to work in. For system commands use the ``!`` escape, 80 | like this, from the ipython prompt:: 81 | 82 | !python setup.py build --compiler=mingw32 83 | 84 | 85 | .. include:: ../../links_names.txt 86 | -------------------------------------------------------------------------------- /doc/devel/planning/TODO.rst: -------------------------------------------------------------------------------- 1 | .. _todo: 2 | 3 | =========================== 4 | TODO for nipy development 5 | =========================== 6 | 7 | This document will serve to organize current development work on nipy. 8 | It will include current sprint items, future feature ideas, and design 9 | discussions, etc... 10 | 11 | Documentation 12 | ============= 13 | 14 | * Create NIPY sidebar with links to all project related websites. 15 | * Create a Best Practices document. 16 | * Create a rst doc for *Request a review* process. 17 | 18 | Tutorials 19 | --------- 20 | 21 | Tutorials are an excellent way to document and test the software. 22 | Some ideas for tutorials to write in our Sphinx documentation (in no 23 | specific order): 24 | 25 | * Slice timing 26 | * Image resampling 27 | * Image IO 28 | * Registration using SPM/FSL 29 | * FMRI analysis 30 | * Making one 4D image from many 3D images, and vice versa. Document 31 | ImageList and FmriImageList. 
32 | * Apply SPM registration .mat to a NIPY image. 33 | 34 | * Create working example out of this TRAC `pca 35 | `_ 36 | page. Should also be a rest document. 37 | 38 | * Add analysis pipeline(s) blueprint. 39 | 40 | 41 | Bugs 42 | ==== 43 | 44 | These should be moved to the nipy_ bug section on github. Placed 45 | here until they can be input. 46 | 47 | * Fix possible precision error in 48 | fixes.scipy.ndimage.test_registration function 49 | test_autoalign_nmi_value_2. See FIXME. 50 | 51 | * Fix error in test_segment test_texture2 functions 52 | (fixes.scipy.ndimage). See FIXME. 53 | 54 | * import nipy.algorithms is very slow! Find and fix. The 55 | shared library is slow. 56 | 57 | * base class for all new-style classes should be *object*; preliminary 58 | search with ``grin "class +[a-zA-Z0-9]+ *:"`` 59 | 60 | Refactorings 61 | ============ 62 | 63 | * image.save function should accept filename or file-like object. If 64 | I have an open file I would like to be able to pass that in also, 65 | instead of fp.name. Happens in test code a lot. 66 | 67 | * image._open function should accept Image objects in addition to 68 | ndarrays and filenames. Currently the save function has to call 69 | np.asarray(img) to get the data array out of the image and pass them 70 | to _open in order to create the output image. 71 | 72 | * Add dtype options when saving. When saving images it uses the native 73 | dtype for the system. Should be able to specify this. in the 74 | test_file_roundtrip, self.img is a uint8, but is saved to tmpfile as 75 | float64. Adding this would allow us to save images without the 76 | scaling being applied. 77 | 78 | * In image._open(url, ...), should we test if the "url" is a PyNiftiIO 79 | object already? This was in the tests from 'old code' and passed:: 80 | 81 | new = Image(self.img._data, self.img.grid) 82 | 83 | img._data is a PyNIftiIO object. It works, but we should verify 84 | it's harmless otherwise prevent it from happening. 
85 | 86 | * Look at image.merge_image function. Is it still needed? Does it 87 | fit into the current api? 88 | 89 | * FmriImageList.emptycopy() - Is there a better way to do this? 90 | Matthew proposed possibly implementing Gael's dress/undress metadata 91 | example. 92 | 93 | * Verify documentation of the image generators. Create a simple 94 | example using them. 95 | 96 | * Use python 2.5 feature of being able to reset the generator? 97 | 98 | * Add test data where volumes contain intensity ramps. Slice with 99 | generator and test ramp values. 100 | 101 | * Implement `fmriimagelist blueprint 102 | `_. 103 | 104 | Code Design Thoughts 105 | ==================== 106 | 107 | A central location to dump thoughts that could be shared by the 108 | developers and tracked easily. 109 | 110 | Future Features 111 | =============== 112 | 113 | Put ideas here for features nipy should have but are not part of our 114 | current development. These features will eventually be added to a 115 | weekly sprint log. 116 | 117 | * Auto backup script for nipy repos to run as weekly cron job. We 118 | should setup a machine to perform regular branch builds and tests. 119 | This would also provide an on-site backup. 120 | 121 | * See if we can add bz2 support to nifticlib. 122 | 123 | * Should image.load have an optional squeeze keyword to squeeze a 4D 124 | image with one frame into a 3D image? 125 | 126 | 127 | .. include:: ../../links_names.txt 128 | -------------------------------------------------------------------------------- /doc/devel/planning/index.rst: -------------------------------------------------------------------------------- 1 | .. _development_planning: 2 | 3 | ====================== 4 | Development Planning 5 | ====================== 6 | 7 | .. only:: html 8 | 9 | :Release: |version| 10 | :Date: |today| 11 | 12 | .. 
toctree:: 13 | :maxdepth: 2 14 | 15 | roadmap.rst 16 | TODO.rst 17 | 18 | -------------------------------------------------------------------------------- /doc/devel/planning/roadmap.rst: -------------------------------------------------------------------------------- 1 | .. _roadmap: 2 | 3 | ============== 4 | Nipy roadmap 5 | ============== 6 | 7 | We plan to release a prototype of NIPY_ by the Summer of 2009. This 8 | will include a full FMRI analysis, 2D visualization, and integration 9 | with other packages for spatial processing (SPM_ and FSL_). We will 10 | continue to improve our documentation and tutorials with the aim of 11 | providing a full introduction to neuroimaging analysis. 12 | 13 | We will also extend our collaborations with other neuroimaging groups, 14 | integrating more functionality into NIPY and providing better 15 | interoperability with other packages. This will include the design 16 | and implementation of a pipeline/batching system, integration of 17 | registration algorithms, and improved 2D and 3D visualization. 18 | 19 | 20 | .. include:: ../../links_names.txt 21 | 22 | 23 | -------------------------------------------------------------------------------- /doc/devel/tools/index.rst: -------------------------------------------------------------------------------- 1 | .. _developer_tools: 2 | 3 | ================= 4 | Developer Tools 5 | ================= 6 | 7 | .. only:: html 8 | 9 | :Release: |version| 10 | :Date: |today| 11 | 12 | .. toctree:: 13 | :maxdepth: 2 14 | 15 | tricked_out_emacs 16 | virtualenv-tutor -------------------------------------------------------------------------------- /doc/documentation.rst: -------------------------------------------------------------------------------- 1 | .. _documentation-main: 2 | 3 | ==================== 4 | NIPY documentation 5 | ==================== 6 | 7 | .. only:: html 8 | 9 | :Release: |version| 10 | :Date: |today| 11 | 12 | Contents: 13 | 14 | .. 
toctree:: 15 | :maxdepth: 2 16 | 17 | users/index.rst 18 | labs/index.rst 19 | devel/index.rst 20 | faq/index.rst 21 | api/index.rst 22 | publications 23 | license 24 | 25 | .. only:: html 26 | 27 | * :ref:`genindex` 28 | * :ref:`modindex` 29 | * :ref:`search` 30 | 31 | -------------------------------------------------------------------------------- /doc/faq/documentation_faq.rst: -------------------------------------------------------------------------------- 1 | .. _documentation_faq: 2 | 3 | =================== 4 | Documentation FAQ 5 | =================== 6 | 7 | .. _installing_graphviz_on_OSX: 8 | 9 | Installing graphviz on OSX 10 | -------------------------- 11 | 12 | The easiest way I found to do this was using MacPorts_, all other 13 | methods caused python exceptions when attempting to write out the pngs 14 | in the inheritance_diagram.py functions. Just do:: 15 | 16 | sudo port install graphviz 17 | 18 | And make sure your macports directory (``/opt/local/bin``) is in your PATH. 19 | 20 | Error writing output on OSX 21 | --------------------------- 22 | 23 | If you are getting an error during the **writing output...** phase of 24 | the documentation build you may have a problem with your graphviz_ 25 | install. The error may look something like:: 26 | 27 | **writing output...** about api/generated/gen 28 | api/generated/nipy 29 | api/generated/nipy.algorithms.fwhm Format: "png" not 30 | recognized. Use one of: canon cmap cmapx cmapx_np dia dot eps fig 31 | hpgl imap imap_np ismap mif mp pcl pic plain plain-ext ps ps2 svg 32 | svgz tk vml vmlz vtx xdot 33 | 34 | ... 35 | 36 | Exception occurred: 37 | 38 | File "/Users/cburns/src/nipy-repo/trunk-dev/doc/sphinxext/ 39 | inheritance_diagram.py", line 238, in generate_dot 40 | (name, self._format_node_options(this_node_options))) 41 | 42 | IOError: [Errno 32] Broken pipe 43 | 44 | Try installing graphviz using MacPorts_. See the 45 | :ref:`installing_graphviz_on_OSX` for instructions. 
46 | 47 | 48 | Sphinx and reST gotchas 49 | ----------------------- 50 | 51 | Docstrings 52 | ^^^^^^^^^^ 53 | 54 | Sphinx_ and reST_ can be very picky about whitespace. For example, in 55 | the docstring below the *Parameters* section will render correctly, 56 | where the *Returns* section will not. By correctly I mean Sphinx will 57 | insert a link to the CoordinateSystem class in place of the 58 | cross-reference *:class:`CoordinateSystem`*. The *Returns* section 59 | will be rendered exactly as shown below with the *:class:* identifier 60 | and the backticks around CoordinateSystem. This section fails because 61 | of the missing whitespace between ``product_coord_system`` and the 62 | colon ``:``. 63 | 64 | :: 65 | 66 | Parameters 67 | ---------- 68 | coord_systems : sequence of :class:`CoordinateSystem` 69 | 70 | Returns 71 | ------- 72 | product_coord_system: :class:`CoordinateSystem` 73 | 74 | 75 | 76 | .. include:: ../links_names.txt 77 | -------------------------------------------------------------------------------- /doc/faq/index.rst: -------------------------------------------------------------------------------- 1 | .. _faq-index: 2 | 3 | ===== 4 | FAQ 5 | ===== 6 | 7 | .. only:: html 8 | 9 | :Release: |version| 10 | :Date: |today| 11 | 12 | Frequently asked questions about nipy 13 | 14 | .. toctree:: 15 | :maxdepth: 2 16 | 17 | why 18 | licensing 19 | documentation_faq 20 | -------------------------------------------------------------------------------- /doc/faq/licensing.rst: -------------------------------------------------------------------------------- 1 | .. _licensing: 2 | 3 | =========== 4 | Licensing 5 | =========== 6 | 7 | How do you spell licence? 
8 | ------------------------- 9 | 10 | If you are British you spell it differently from Americans, sometimes: 11 | 12 | http://www.tiscali.co.uk/reference/dictionaries/english/data/d0082350.html 13 | 14 | As usual the American spelling rule (always use *s*) was less painful 15 | and arbitrary, so I (MB) went for that. 16 | 17 | Why did you choose BSD? 18 | ----------------------- 19 | 20 | We have chosen BSD licensing, for compatibility with SciPy, and to 21 | increase input from developers in industry. Wherever possible we will 22 | keep packages that can have BSD licensing separate from packages 23 | needing a GPL license. 24 | 25 | Our choices were between: 26 | 27 | * :term:`BSD` 28 | * :term:`GPL` 29 | 30 | John Hunter made the argument for the BSD license in 31 | :ref:`johns-bsd-pitch`, and we agree. Richard Stallman makes the case 32 | for the GPL here: http://www.gnu.org/licenses/why-not-lgpl.html 33 | 34 | How does the BSD license affect our relationship to other projects? 35 | ------------------------------------------------------------------- 36 | 37 | The BSD license allows other projects with virtually any license, 38 | including GPL, to use our code. BSD makes it more likely that we will 39 | attract support from companies, including open-source software 40 | companies, such as Enthought_ and Kitware_. 41 | 42 | Any part of our code that uses (links to) GPL code, should be in 43 | a separable package. 44 | 45 | Note that we do not have this problem with :term:`LGPL`, which allows 46 | us to link without ourselves having a GPL. 47 | 48 | What license does the NIH prefer? 49 | --------------------------------- 50 | 51 | The NIH asks that software written with NIH money can be 52 | commercialized. Quoting from: `NIH NATIONAL CENTERS FOR BIOMEDICAL 53 | COMPUTING 54 | `_ 55 | grant application document: 56 | 57 | A software dissemination plan must be included in the application. 
58 | There is no prescribed single license for software produced in this 59 | project. However NIH does have goals for software dissemination, 60 | and reviewers will be instructed to evaluate the dissemination plan 61 | relative to these goals: 62 | 63 | 1. The software should be freely available to biomedical researchers 64 | and educators in the non-profit sector, such as institutions of 65 | education, research institutes, and government laboratories. 66 | 67 | 2. The terms of software availability should permit the 68 | commercialization of enhanced or customized versions of the software, 69 | or incorporation of the software or pieces of it into other software 70 | packages. 71 | 72 | There is more discussion of licensing in this `na-mic presentation 73 | `_. 74 | See also these links (from the presentation): 75 | 76 | * http://www.rosenlaw.com/oslbook.htm 77 | * http://www.opensource.org 78 | * http://wiki.na-mic.org/Wiki/index.php/NAMIC_Wiki:Community_Licensing 79 | 80 | So far this might suggest that the NIH would prefer at least a 81 | BSD-like license, but the NIH has supported several GPL'd projects in 82 | imaging, :term:`AFNI` being the most obvious example. 83 | 84 | 85 | .. include:: ../links_names.txt 86 | -------------------------------------------------------------------------------- /doc/history.rst: -------------------------------------------------------------------------------- 1 | =================== 2 | A history of NIPY 3 | =================== 4 | 5 | Sometime around 2002, Jonathan Taylor started writing BrainSTAT, a 6 | Python version of Keith Worsley's FmriSTAT package. 7 | 8 | In 2004, Jarrod Millman and Matthew Brett decided that they wanted to 9 | write a grant to build a new neuroimaging analysis package in Python. 10 | Soon afterwards, they found that Jonathan had already started, and 11 | merged efforts. At first we called this project *BrainPy*. Later we 12 | changed the name to NIPY. 
13 | 14 | In 2005, Jarrod, Matthew and Jonathan, along with Mark D'Esposito, 15 | Fernando Perez, John Hunter, Jean-Baptiste Poline, and Tom Nichols, 16 | submitted the first NIPY grant to the NIH. It was not successful. 17 | 18 | In 2006, Jarrod and Mark submitted a second grant, based on the first. 19 | The NIH gave us 3 years of funding for two programmers. We hired two 20 | programmers in 2007 - Christopher Burns and Tom Waite - and began work on 21 | refactoring the code. 22 | 23 | Meanwhile, the team at Neurospin, Paris, started to refactor their FFF 24 | code to work better with python and NIPY. This work was by Alexis 25 | Roche, Bertrand Thirion, and Benjamin Thyreau, with some help and 26 | advice from Fernando Perez. 27 | 28 | In 2008, Fernando Perez and Matthew Brett started work full-time at 29 | the UC Berkeley `Brain Imaging Center `_. 30 | Matthew in particular came to work on NIPY. 31 | -------------------------------------------------------------------------------- /doc/index.rst: -------------------------------------------------------------------------------- 1 | .. _about_nipy: 2 | 3 | ==== 4 | NIPY 5 | ==== 6 | 7 | NIPY is a python project for analysis of structural and functional 8 | neuroimaging data. 9 | 10 | Please see our :ref:`documentation-main` and feel free to hold us to the 11 | high ideals of :ref:`nipy-mission`. 12 | 13 | *The NIPY team* 14 | 15 | .. We need the following toctree directive to include the documentation 16 | .. in the document hierarchy - see http://sphinx.pocoo.org/concepts.html 17 | .. 
toctree:: 18 | :hidden: 19 | 20 | documentation 21 | -------------------------------------------------------------------------------- /doc/labs/datasets/viz_volume_data.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | """ 4 | Use Mayavi to visualize the structure of a VolumeData 5 | """ 6 | 7 | from enthought.mayavi import mlab 8 | import numpy as np 9 | 10 | x, y, z, s = np.random.random((4, 20)) 11 | 12 | mlab.figure(1, fgcolor=(0, 0, 0), bgcolor=(1, 1, 1)) 13 | mlab.clf() 14 | 15 | src = mlab.pipeline.scalar_scatter(x, y, z, s) 16 | sgrid = mlab.pipeline.delaunay3d(src) 17 | 18 | mlab.pipeline.surface(sgrid, opacity=0.4) 19 | mlab.pipeline.surface(mlab.pipeline.extract_edges(sgrid), color=(0, 0, 0)) 20 | mlab.pipeline.glyph(sgrid, mode='cube', scale_factor=0.05, scale_mode='none') 21 | mlab.savefig('volume_data.jpg') 22 | mlab.show() 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /doc/labs/datasets/viz_volume_field.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | """ 4 | Use Mayavi to visualize the structure of a VolumeData 5 | """ 6 | 7 | from enthought.mayavi import mlab 8 | import numpy as np 9 | 10 | s = np.random.random((5, 5, 5)) 11 | 12 | # Put the side at 0 13 | 14 | s[0, ...] = 0 15 | s[-1, ...] = 0 16 | s[:, 0, :] = 0 17 | s[:, -1, :] = 0 18 | s[..., 0] = 0 19 | s[..., -1] = 0 20 | 21 | mlab.figure(1, fgcolor=(0, 0, 0), bgcolor=(1, 1, 1)) 22 | mlab.clf() 23 | 24 | src = mlab.pipeline.scalar_field(s) 25 | 26 | mlab.pipeline.volume(src, vmin=0, vmax=0.9) 27 | # We save as a different filename than the one used, as we modify the 28 | # curves. 
29 | mlab.savefig('volume_field_raw.jpg') 30 | mlab.show() 31 | 32 | 33 | 34 | -------------------------------------------------------------------------------- /doc/labs/datasets/viz_volume_grid.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | """ 4 | Use Mayavi to visualize the structure of a VolumeGrid 5 | """ 6 | 7 | from enthought.mayavi import mlab 8 | import numpy as np 9 | 10 | from enthought.tvtk.api import tvtk 11 | 12 | dims = (4, 4, 4) 13 | x, y, z = np.mgrid[0.:dims[0], 0:dims[1], 0:dims[2]] 14 | x = np.reshape(x.T, (-1,)) 15 | y = np.reshape(y.T, (-1,)) 16 | z = np.reshape(z.T, (-1,)) 17 | y += 0.3*np.sin(x) 18 | z += 0.4*np.cos(x) 19 | x += 0.05*y**3 20 | sgrid = tvtk.StructuredGrid(dimensions=(dims[0], dims[1], dims[2])) 21 | sgrid.points = np.c_[x, y, z] 22 | s = np.random.random((dims[0]*dims[1]*dims[2])) 23 | sgrid.point_data.scalars = np.ravel(s.copy()) 24 | sgrid.point_data.scalars.name = 'scalars' 25 | 26 | mlab.figure(1, fgcolor=(0, 0, 0), bgcolor=(1, 1, 1)) 27 | mlab.clf() 28 | 29 | mlab.pipeline.surface(sgrid, opacity=0.4) 30 | mlab.pipeline.surface(mlab.pipeline.extract_edges(sgrid), color=(0, 0, 0)) 31 | mlab.pipeline.glyph(sgrid, mode='cube', scale_factor=0.2, scale_mode='none') 32 | mlab.savefig('volume_grid.jpg') 33 | mlab.show() 34 | 35 | 36 | 37 | -------------------------------------------------------------------------------- /doc/labs/datasets/viz_volume_img.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | """ 4 | Use Mayavi to visualize the structure of a VolumeImg 5 | """ 6 | 7 | from enthought.mayavi import mlab 8 | import numpy as np 9 | 10 | rand = np.random.RandomState(1) 11 | data = rand.random_sample((5, 4, 
4)) 12 | 13 | mlab.figure(1, fgcolor=(0, 0, 0), bgcolor=(1, 1, 1)) 14 | mlab.clf() 15 | 16 | src = mlab.pipeline.scalar_field(data) 17 | src.image_data.spacing = (0.5, 1, 0.7) 18 | src.image_data.update_data() 19 | 20 | mlab.pipeline.surface(src, opacity=0.4) 21 | mlab.pipeline.surface(mlab.pipeline.extract_edges(src), color=(0, 0, 0)) 22 | mlab.pipeline.glyph(src, mode='cube', scale_factor=0.2, scale_mode='none') 23 | mlab.savefig('volume_img.jpg') 24 | mlab.show() 25 | 26 | 27 | -------------------------------------------------------------------------------- /doc/labs/datasets/volume_data.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipy/nireg/6ed32f2830ff6ebc1860519dc630ebdf8e969dcf/doc/labs/datasets/volume_data.jpg -------------------------------------------------------------------------------- /doc/labs/datasets/volume_field.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipy/nireg/6ed32f2830ff6ebc1860519dc630ebdf8e969dcf/doc/labs/datasets/volume_field.jpg -------------------------------------------------------------------------------- /doc/labs/datasets/volume_grid.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipy/nireg/6ed32f2830ff6ebc1860519dc630ebdf8e969dcf/doc/labs/datasets/volume_grid.jpg -------------------------------------------------------------------------------- /doc/labs/datasets/volume_img.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipy/nireg/6ed32f2830ff6ebc1860519dc630ebdf8e969dcf/doc/labs/datasets/volume_img.jpg -------------------------------------------------------------------------------- /doc/labs/enn.rst: -------------------------------------------------------------------------------- 1 | 2 | Empirical null 3 | ============== 4 | 5 | .. 
currentmodule:: nipy.algorithms.statistics.empirical_pvalue 6 | 7 | The :mod:`nipy.algorithms.statistics.empirical_pvalue` module contains a class 8 | that fits a Gaussian model to the central part of a histogram, following 9 | Schwartzman et al, 2009. This is typically necessary to estimate an FDR when one 10 | is not certain that the data behaves as a standard normal under H_0. 11 | 12 | The `NormalEmpiricalNull` class learns its null distribution on the data 13 | provided at initialisation. Two different methods can be used to set a threshold 14 | from the null distribution: the :meth:`NormalEmpiricalNull.threshold` method 15 | returns the threshold for a given false discovery rate, and thus accounts for 16 | multiple comparisons with the given dataset; the 17 | :meth:`NormalEmpiricalNull.uncorrected_threshold` returns the threshold for a 18 | given uncorrected p-value, and as such does not account for multiple 19 | comparisons. 20 | 21 | Example 22 | ------- 23 | 24 | If we use the empirical normal null estimator on a two Gaussian mixture 25 | distribution, with a central Gaussian, and a wide one, it uses the central 26 | distribution as a null hypothesis, and returns the threshold following which the 27 | data can be claimed to belong to the wide Gaussian: 28 | 29 | .. plot:: labs/plots/enn_demo.py 30 | :include-source: 31 | 32 | The threshold evaluated with the :meth:`NormalEmpiricalNull.threshold` method is 33 | around 2.8 (using the default p-value of 0.05). The 34 | :meth:`NormalEmpiricalNull.uncorrected_threshold` returns, for the same p-value, 35 | a threshold of 1.9. It is necessary to use a higher p-value with uncorrected 36 | comparisons. 37 | 38 | Class documentation 39 | ------------------- 40 | 41 | .. autoclass:: NormalEmpiricalNull 42 | :members: 43 | 44 | .. 
automethod:: __init__ 45 | 46 | ____ 47 | 48 | **Reference**: Schwartzman et al., NeuroImage 44 (2009) 71--82 49 | 50 | -------------------------------------------------------------------------------- /doc/labs/index.rst: -------------------------------------------------------------------------------- 1 | 2 | 3 | NeuroSpin tools 4 | =============== 5 | 6 | The package ``nipy.labs`` hosts some tools that were originally developed at 7 | NeuroSpin, France. The list below also includes routines for estimating the 8 | empirical null, moved from ``nipy.labs`` to ``nipy.algorithms.statistics``. 9 | 10 | 11 | .. toctree:: 12 | 13 | mask.rst 14 | enn.rst 15 | viz.rst 16 | simul_activation.rst 17 | datasets.rst 18 | 19 | -------------------------------------------------------------------------------- /doc/labs/mask.rst: -------------------------------------------------------------------------------- 1 | 2 | Mask-extraction utilities 3 | ========================== 4 | 5 | .. currentmodule:: nipy.labs.utils.mask 6 | 7 | The module :mod:`nipy.labs.utils.mask` contains utilities to extract 8 | brain masks from fMRI data: 9 | 10 | .. autosummary:: 11 | :toctree: generated 12 | 13 | compute_mask 14 | compute_mask_files 15 | compute_mask_sessions 16 | 17 | The :func:`compute_mask_files` and :func:`compute_mask_sessions` 18 | functions work with Nifti files rather than numpy ndarrays. This is 19 | convenient to reduce memory pressure when working with long time series, 20 | as there is no need to store the whole series in memory. 
21 | 22 | -------------------------------------------------------------------------------- /doc/labs/plots/enn_demo.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | import numpy as np 4 | 5 | from nipy.algorithms.statistics.empirical_pvalue import NormalEmpiricalNull 6 | 7 | x = np.c_[np.random.normal(size=1e4), 8 | np.random.normal(scale=4, size=1e4)] 9 | 10 | enn = NormalEmpiricalNull(x) 11 | enn.threshold(verbose=True) 12 | -------------------------------------------------------------------------------- /doc/labs/plots/surrogate_array.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | import numpy as np 4 | 5 | import pylab as pl 6 | 7 | from nipy.labs.utils.simul_multisubject_fmri_dataset import \ 8 | surrogate_2d_dataset 9 | 10 | pos = np.array([[10, 10], 11 | [14, 20], 12 | [23, 18]]) 13 | ampli = np.array([4, 5, 2]) 14 | 15 | # First generate some noiseless data 16 | noiseless_data = surrogate_2d_dataset(n_subj=1, noise_level=0, spatial_jitter=0, 17 | signal_jitter=0, pos=pos, ampli=ampli) 18 | 19 | pl.figure(figsize=(10, 3)) 20 | pl.subplot(1, 4, 1) 21 | pl.imshow(noiseless_data[0]) 22 | pl.title('Noise-less data') 23 | 24 | # Second, generate some group data, with default noise parameters 25 | group_data = surrogate_2d_dataset(n_subj=3, pos=pos, ampli=ampli) 26 | 27 | pl.subplot(1, 4, 2) 28 | pl.imshow(group_data[0]) 29 | pl.title('Subject 1') 30 | pl.subplot(1, 4, 3) 31 | pl.title('Subject 2') 32 | pl.imshow(group_data[1]) 33 | pl.subplot(1, 4, 4) 34 | pl.title('Subject 3') 35 | pl.imshow(group_data[2]) 36 | -------------------------------------------------------------------------------- /doc/labs/simul_activation.rst: 
-------------------------------------------------------------------------------- 1 | 2 | Generating simulated activation maps 3 | ===================================== 4 | 5 | .. currentmodule:: nipy.labs.utils.simul_multisubject_fmri_dataset 6 | 7 | The module :mod:`nipy.labs.utils.simul_multisubject_fmri_dataset` 8 | contains a various functions to create simulated activation maps in two, three 9 | and four dimensions. A 2D example is :func:`surrogate_2d_dataset`. The 10 | functions can position various activations and add noise, both as background 11 | noise and jitter in the activation positions and amplitude. 12 | 13 | These functions can be useful to test methods. 14 | 15 | Example 16 | -------- 17 | 18 | .. plot:: labs/plots/surrogate_array.py 19 | :include-source: 20 | 21 | 22 | Function documentation 23 | ------------------------- 24 | 25 | .. autofunction:: surrogate_2d_dataset 26 | 27 | .. autofunction:: surrogate_3d_dataset 28 | 29 | .. autofunction:: surrogate_4d_dataset 30 | -------------------------------------------------------------------------------- /doc/labs/viz.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipy/nireg/6ed32f2830ff6ebc1860519dc630ebdf8e969dcf/doc/labs/viz.png -------------------------------------------------------------------------------- /doc/labs/viz.rst: -------------------------------------------------------------------------------- 1 | 2 | Plotting of activation maps 3 | =========================== 4 | 5 | .. currentmodule:: nipy.labs.viz_tools.activation_maps 6 | 7 | The module :mod:`nipy.labs.viz` provides functions to plot 8 | visualization of activation maps in a non-interactive way. 9 | 10 | 2D cuts of an activation map can be plotted and superimposed on an 11 | anatomical map using matplotlib_. In addition, Mayavi2_ can be used to 12 | plot 3D maps, using volumetric rendering. 
Some emphasis is made on 13 | automatic choice of default parameters, such as cut coordinates, to give 14 | a sensible view of a map in a purely automatic way, for instance to save 15 | a summary of the output of a calculation. 16 | 17 | .. _matplotlib: http://matplotlib.sourceforge.net 18 | 19 | .. _Mayavi2: http://code.enthought.com/projects/mayavi 20 | 21 | .. warning:: 22 | 23 | The content of the module will change over time, as neuroimaging 24 | volumetric data structures are used instead of plain numpy arrays. 25 | 26 | An example 27 | ---------- 28 | 29 | :: 30 | 31 | from nipy.labs.viz import plot_map, mni_sform, coord_transform 32 | 33 | # First, create a fake activation map: a 3D image in MNI space with 34 | # a large rectangle of activation around Broca Area 35 | import numpy as np 36 | mni_sform_inv = np.linalg.inv(mni_sform) 37 | # Color an asymmetric rectangle around Broca area: 38 | x, y, z = -52, 10, 22 39 | x_map, y_map, z_map = coord_transform(x, y, z, mni_sform_inv) 40 | map = np.zeros((182, 218, 182)) 41 | map[x_map-30:x_map+30, y_map-3:y_map+3, z_map-10:z_map+10] = 1 42 | 43 | # We use a masked array to add transparency to the parts that we are 44 | # not interested in: 45 | thresholded_map = np.ma.masked_less(map, 0.5) 46 | 47 | # And now, visualize it: 48 | plot_map(thresholded_map, mni_sform, cut_coords=(x, y, z), vmin=0.5) 49 | 50 | This creates the following image: 51 | 52 | .. image:: viz.png 53 | 54 | The same plot can be obtained fully automatically, by letting 55 | :func:`plot_map` find the activation threshold and the cut coordinates:: 56 | 57 | plot_map(map, mni_sform, threshold='auto') 58 | 59 | In this simple example, the code will easily detect the bar as activation 60 | and position the cut at the center of the bar. 61 | 62 | `nipy.labs.viz` functions 63 | ------------------------- 64 | 65 | .. autosummary:: 66 | :toctree: generated 67 | 68 | plot_map 69 | 70 | 71 | 3D plotting utilities 72 | --------------------- 73 | 74 | .. 
currentmodule:: nipy.labs.viz_tools.maps_3d 75 | 76 | The module :mod:`nipy.labs.viz3d` can be used as helpers to 77 | represent neuroimaging volumes with Mayavi2_. 78 | 79 | .. autosummary:: 80 | :toctree: generated 81 | 82 | plot_map_3d 83 | plot_anat_3d 84 | 85 | For more versatile visualizations the core idea is that given a 3D map 86 | and an affine, the data is exposed in Mayavi as a volumetric source, with 87 | world space coordinates corresponding to figure coordinates. 88 | Visualization modules can be applied on this data source as explained in 89 | the `Mayavi manual 90 | `_ 91 | 92 | .. autosummary:: 93 | :toctree: generated 94 | 95 | affine_img_src 96 | 97 | -------------------------------------------------------------------------------- /doc/license.rst: -------------------------------------------------------------------------------- 1 | .. _nipy-license: 2 | 3 | ======================== 4 | NIPY License Information 5 | ======================== 6 | 7 | .. _nipy-software-license: 8 | 9 | Software License 10 | ----------------- 11 | 12 | Except where otherwise noted, all NIPY software is licensed under a 13 | `revised BSD license `_. 14 | 15 | See our :ref:`licensing` page for more details. 16 | 17 | .. _nipy-documentation-license: 18 | 19 | Documentation License 20 | --------------------- 21 | 22 | Except where otherwise noted, all NIPY documentation is licensed under a 23 | `Creative Commons Attribution 3.0 License `_. 24 | 25 | All code fragments in the documentation are licensed under our 26 | software license. 27 | -------------------------------------------------------------------------------- /doc/mission.rst: -------------------------------------------------------------------------------- 1 | .. _nipy-mission: 2 | 3 | =================== 4 | What is NIPY for? 5 | =================== 6 | 7 | .. 
include:: mission.txt 8 | 9 | *The NIPY team* 10 | 11 | -------------------------------------------------------------------------------- /doc/mission.txt: -------------------------------------------------------------------------------- 1 | The purpose of NIPY is to make it easier to do better brain imaging 2 | research. We believe that neuroscience ideas and analysis ideas 3 | develop together. Good ideas come from understanding; understanding comes 4 | from clarity, and clarity must come from well-designed teaching 5 | materials and well-designed software. The software must be designed 6 | as a natural extension of the underlying ideas. 7 | 8 | We aim to build software that is: 9 | 10 | * clearly written 11 | * clearly explained 12 | * a good fit for the underlying ideas 13 | * a natural home for collaboration 14 | 15 | We hope that, if we fail to do this, you will let us know. We will 16 | try and make it better. 17 | 18 | -------------------------------------------------------------------------------- /doc/publications.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Publications 3 | ============ 4 | 5 | Peer-reviewed Publications 6 | -------------------------- 7 | 8 | K. Jarrod Millman, M. Brett, `"Analysis of Functional Magnetic Resonance 9 | Imaging in Python," `_ 10 | Computing in Science and Engineering, vol. 9, no. 3, pp. 52-55, May/June, 2007. 11 | 12 | Posters 13 | ------- 14 | 15 | Taylor JE, Worsley K, Brett M, Cointepas Y, Hunter J, Millman KJ, 16 | Poline J-B, Perez F. “BrainPy: an open source environment for the 17 | analysis and visualization of human brain data.” Meeting of the 18 | Organization for Human Brain Mapping, 2005. See the 19 | :ref:`BrainPy HBM abstract `. 20 | 21 | -------------------------------------------------------------------------------- /doc/references/brainpy_abstract.rst: -------------------------------------------------------------------------------- 1 | .. 
_brainpy-hbm-abstract: 2 | 3 | ============================ 4 | BrainPy HBM abstract, 2005 5 | ============================ 6 | 7 | This is the abstract describing the BrainPy / NIPY project from 8 | the `HBM2005 `_ conference. 9 | 10 | BrainPy: an open source environment for the analysis and visualization of human brain data 11 | ========================================================================================== 12 | 13 | Jonathan Taylor (1), Keith Worsley (2), Matthew Brett (3), Yann 14 | Cointepas (4), John Hunter (5), Jarrod Millman (3), Jean-Baptiste 15 | Poline (4), Fernando Perez (6) 16 | 17 | 1. Dept. of Statistics, Stanford University, U.S.A. 18 | 2. Dept. of Mathematics and Statistics, !McGill University, Canada 19 | 3. Department of Neuroscience, University of California, Berkeley, U.S.A 20 | 4. Service Hospitalier Frédéric Joliot, France 21 | 5. Complex Systems Laboratory, University of Chicago, U.S.A. 22 | 6. Department of Applied Mathematics, University of Colorado at Boulder, U.S.A. 23 | 24 | Objective 25 | --------- 26 | 27 | What follows are the goals of BrainPy, a multi-center project to 28 | provide an open source environment for the analysis and visualization 29 | of human brain data built on top of python. While the project is still 30 | in its initial stages, packages for file I/O, script support as well 31 | as single subject fMRI and random effects group comparisons model are 32 | currently available. 33 | 34 | Methods 35 | ------- 36 | 37 | Scientific computing has evolved over the last two decades in two 38 | broad directions. One, there has been a movement to the use of 39 | high-level interface languages that glue existing high-performance 40 | libraries into an accessible, scripted, interactive environment, eg 41 | IDL, matlab. Two, there has been a shift to open algorithms and 42 | software because this development process leads to better code, and 43 | because it more consistent with the scientific method. 
44 | 45 | Results & Discussion 46 | -------------------- 47 | 48 | The proposed environment includes the following: 49 | 50 | * We intend to provide users with an open source environment which is 51 | interoperable with current packages such as SPM and AFNI, both at a 52 | file I/O level and, where possible, interactively (e.g. pymat -- 53 | calling matlab/SPM from python). 54 | * Read/write/conversion support for all major imaging formats and 55 | packages (SPM/ANALYZE, :term:`FSL`, :term:`AFNI`, MINC, NIFTI, and 56 | :term:`VoxBo` 57 | * Low-level access to data through an interactive shell, which is 58 | important for developing new analysis methods, as well as 59 | high-level access through GUIs for specialized tasks using standard 60 | python tools. 61 | * Visualization of results using pre-existing tools such as 62 | :term:`BrainVisa`, as well as support for development of new tools 63 | using VTK. 64 | * Support for MATLAB style numeric packages (Numarray) and plotting 65 | (matplotlib_). 66 | * Support for EEG analysis including EEG/MEG/fMRI fusion analysis. 67 | * Support for spatio-temporal wavelet analysis 68 | (`PhiWave `_) 69 | 70 | Conclusions 71 | ----------- 72 | 73 | BrainPy is an open-source environment for the analysis and 74 | visualization of neuroimaging data built on top of python. 75 | 76 | .. include:: ../links_names.txt 77 | -------------------------------------------------------------------------------- /doc/sphinxext/README.txt: -------------------------------------------------------------------------------- 1 | =================== 2 | Sphinx Extensions 3 | =================== 4 | 5 | Thesea are a few sphinx extensions we are using to build the nipy 6 | documentation. In this file we list where they each come from, since we intend 7 | to always push back upstream any modifications or improvements we make to them. 
* From matplotlib:
49 | """ 50 | 51 | name = 'IPython console session' 52 | aliases = ['ipython'] 53 | mimetypes = ['text/x-ipython-console'] 54 | input_prompt = re.compile("(In \[[0-9]+\]: )|( \.\.\.+:)") 55 | output_prompt = re.compile("(Out\[[0-9]+\]: )|( \.\.\.+:)") 56 | continue_prompt = re.compile(" \.\.\.+:") 57 | tb_start = re.compile("\-+") 58 | 59 | def get_tokens_unprocessed(self, text): 60 | pylexer = PythonLexer(**self.options) 61 | tblexer = PythonTracebackLexer(**self.options) 62 | 63 | curcode = '' 64 | insertions = [] 65 | for match in line_re.finditer(text): 66 | line = match.group() 67 | input_prompt = self.input_prompt.match(line) 68 | continue_prompt = self.continue_prompt.match(line.rstrip()) 69 | output_prompt = self.output_prompt.match(line) 70 | if line.startswith("#"): 71 | insertions.append((len(curcode), 72 | [(0, Comment, line)])) 73 | elif input_prompt is not None: 74 | insertions.append((len(curcode), 75 | [(0, Generic.Prompt, input_prompt.group())])) 76 | curcode += line[input_prompt.end():] 77 | elif continue_prompt is not None: 78 | insertions.append((len(curcode), 79 | [(0, Generic.Prompt, continue_prompt.group())])) 80 | curcode += line[continue_prompt.end():] 81 | elif output_prompt is not None: 82 | insertions.append((len(curcode), 83 | [(0, Generic.Output, output_prompt.group())])) 84 | curcode += line[output_prompt.end():] 85 | else: 86 | if curcode: 87 | for item in do_insertions(insertions, 88 | pylexer.get_tokens_unprocessed(curcode)): 89 | yield item 90 | curcode = '' 91 | insertions = [] 92 | yield match.start(), Generic.Output, line 93 | if curcode: 94 | for item in do_insertions(insertions, 95 | pylexer.get_tokens_unprocessed(curcode)): 96 | yield item 97 | 98 | #----------------------------------------------------------------------------- 99 | # Register the extension as a valid pygments lexer 100 | highlighting.lexers['ipython'] = IPythonConsoleLexer() 101 | 
-------------------------------------------------------------------------------- /doc/sphinxext/numpy_ext/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipy/nireg/6ed32f2830ff6ebc1860519dc630ebdf8e969dcf/doc/sphinxext/numpy_ext/__init__.py -------------------------------------------------------------------------------- /doc/users/basic_io.rst: -------------------------------------------------------------------------------- 1 | .. basic_data_io: 2 | 3 | =============== 4 | Basic Data IO 5 | =============== 6 | 7 | Accessing images using nipy: 8 | 9 | While Nifti_ is the primary file format Analyze images (with associated .mat 10 | file), and MINC files can also be read. 11 | 12 | Load Image from File 13 | ==================== 14 | 15 | Get a filename for an example file. ``anatfile`` gives a filename for a small 16 | testing image in the nipy distribution: 17 | 18 | >>> from nipy.testing import anatfile 19 | 20 | Load the file from disk: 21 | 22 | >>> from nipy import load_image 23 | >>> myimg = load_image(anatfile) 24 | >>> myimg.shape 25 | (33, 41, 25) 26 | >>> myimg.affine 27 | array([[ -2., 0., 0., 32.], 28 | [ 0., 2., 0., -40.], 29 | [ 0., 0., 2., -16.], 30 | [ 0., 0., 0., 1.]]) 31 | 32 | Access Data into an Array 33 | ========================= 34 | 35 | This allows the user to access data as a numpy array. 36 | 37 | >>> mydata = myimg.get_data() 38 | >>> mydata.shape 39 | (33, 41, 25) 40 | >>> mydata.ndim 41 | 3 42 | 43 | Save image to a File 44 | ==================== 45 | 46 | >>> from nipy import save_image 47 | >>> newimg = save_image(myimg, 'newmyfile.nii') 48 | 49 | Create Image from an Array 50 | =========================== 51 | 52 | This will have a generic affine-type CoordinateMap with unit voxel sizes. 
53 | 54 | >>> import numpy as np 55 | >>> from nipy.core.api import Image, vox2mni 56 | >>> rawarray = np.zeros((43,128,128)) 57 | >>> arr_img = Image(rawarray, vox2mni(np.eye(4))) 58 | >>> arr_img.shape 59 | (43, 128, 128) 60 | 61 | Coordinate map 62 | ============== 63 | 64 | Images have a Coordinate Map. 65 | 66 | The Coordinate Map contains information defining the input (domain) and output 67 | (range) Coordinate Systems of the image, and the mapping between the two 68 | Coordinate systems. The *input* coordinate system is the *voxel* coordinate 69 | system, and the *output* coordinate system is the *world* coordinate system. 70 | 71 | >>> newimg.coordmap 72 | AffineTransform( 73 | function_domain=CoordinateSystem(coord_names=('i', 'j', 'k'), name='voxels', coord_dtype=float64), 74 | function_range=CoordinateSystem(coord_names=('aligned-x=L->R', 'aligned-y=P->A', 'aligned-z=I->S'), name='aligned', coord_dtype=float64), 75 | affine=array([[ -2., 0., 0., 32.], 76 | [ 0., 2., 0., -40.], 77 | [ 0., 0., 2., -16.], 78 | [ 0., 0., 0., 1.]]) 79 | ) 80 | 81 | See :ref:`coordinate_map` for more detail. 82 | 83 | .. include:: ../links_names.txt 84 | -------------------------------------------------------------------------------- /doc/users/index.rst: -------------------------------------------------------------------------------- 1 | .. _users-guide-index: 2 | 3 | .. This is the source doc for the nipy users guide. The users guide 4 | includes the FAQ (a directory below), and glossary. 5 | 6 | ============ 7 | User Guide 8 | ============ 9 | 10 | .. only:: html 11 | 12 | :Release: |version| 13 | :Date: |today| 14 | 15 | .. toctree:: 16 | :maxdepth: 2 17 | 18 | introduction 19 | installation 20 | scipy_orientation 21 | tutorial.rst 22 | ../glossary 23 | 24 | .. 
only:: html 25 | 26 | * :ref:`genindex` 27 | * :ref:`modindex` 28 | * :ref:`search` 29 | -------------------------------------------------------------------------------- /doc/users/introduction.rst: -------------------------------------------------------------------------------- 1 | .. _introduction: 2 | 3 | ============== 4 | Introduction 5 | ============== 6 | 7 | As you can see, we do not yet have much of a user guide for NIPY. We 8 | are spending all our effort in developing the building blocks of the 9 | code, and we have not yet returned to a guide to how to use it. 10 | 11 | We are starting to write general :ref:`tutorial-index`, that include 12 | introductions to how to use NIPY code to run analyses. 13 | 14 | .. toctree:: 15 | :maxdepth: 2 16 | 17 | ../mission 18 | ../history 19 | -------------------------------------------------------------------------------- /doc/users/plots/amplitudes.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | """ 4 | This figure is meant to represent an event-type design with 5 | events at times [0,4,8,12,16] and amplitudes [0,1.1,2.3,0.9,0.3]. 6 | """ 7 | 8 | import pylab 9 | import numpy as np 10 | 11 | pylab.scatter([0,4,8,12,16], [0,1.1,2.3,0.9,0.3], c='r', marker='o') 12 | 13 | a = pylab.gca() 14 | a.set_yticks([0,2]) 15 | a.set_xlabel('Time') 16 | a.set_ylabel('Amplitude') 17 | 18 | -------------------------------------------------------------------------------- /doc/users/plots/block.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | """ 4 | This figure is meant to represent an event-type with Faces presented 5 | at times [0,4,8,12,16] and Objects presented at [2,6,10,14,18]. 
6 | 7 | There are two values for Y: one for 'Face' and one for 'Object' 8 | """ 9 | 10 | import pylab 11 | import numpy as np 12 | 13 | for t in [0,4,8,12,16]: 14 | pylab.plot([t,t+0.5], [1,1], c='r', label='Face', linewidth=3) 15 | for t in [2,6,10,14,18]: 16 | pylab.plot([t,t+0.5], [0,0], c='b', label='Object', linewidth=3) 17 | 18 | 19 | a = pylab.gca() 20 | a.set_ylim([-0.1,1.1]) 21 | a.set_yticks([0,1]) 22 | a.set_yticklabels(['Object', 'Face']) 23 | a.set_xlim([-0.5,10]) 24 | a.set_xlabel('Time') 25 | 26 | -------------------------------------------------------------------------------- /doc/users/plots/event.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | """ 4 | This figure is meant to represent an event-type with Faces presented 5 | at times [0,4,8,12,16] and Objects presented at [2,6,10,14,18]. 6 | 7 | There are two values for Y: one for 'Face' and one for 'Object' 8 | """ 9 | 10 | import pylab 11 | import numpy as np 12 | 13 | pylab.scatter([0,4,8,12,16], [1,1,1,1,1], c='r', marker='o', label='Face') 14 | pylab.scatter([2,6,10,14,18], [0,0,0,0,0], c='b', marker='o', label='Object') 15 | 16 | a = pylab.gca() 17 | a.set_ylim([-0.1,1.1]) 18 | a.set_yticks([0,1]) 19 | a.set_yticklabels(['Object', 'Face']) 20 | a.set_xlim([-0.5,10]) 21 | a.set_xlabel('Time') 22 | 23 | -------------------------------------------------------------------------------- /doc/users/plots/event_amplitude.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | import numpy as np 4 | import pylab 5 | 6 | from nipy.modalities.fmri.utils import events, Symbol, lambdify_t 7 | from nipy.modalities.fmri.hrf import glover 8 | 9 | # Symbol for amplitude 10 | a = Symbol('a') 11 | 12 
| # Some event onsets regularly spaced 13 | onsets = np.linspace(0,50,6) 14 | 15 | # Make amplitudes from onset times (greater as function of time) 16 | amplitudes = onsets[:] 17 | 18 | # Flip even numbered amplitudes 19 | amplitudes = amplitudes * ([-1, 1] * 3) 20 | 21 | # Make event functions 22 | evs = events(onsets, amplitudes=amplitudes, g=a + 0.5 * a**2, f=glover) 23 | 24 | # Real valued function for symbolic events 25 | real_evs = lambdify_t(evs) 26 | 27 | # Time points at which to sample 28 | t_samples = np.linspace(0,60,601) 29 | 30 | pylab.plot(t_samples, real_evs(t_samples), c='r') 31 | for onset, amplitude in zip(onsets, amplitudes): 32 | pylab.plot([onset, onset],[0, 25 * amplitude], c='b') 33 | 34 | pylab.show() 35 | -------------------------------------------------------------------------------- /doc/users/plots/hrf.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | """ 4 | Plot of the canonical Glover HRF 5 | """ 6 | 7 | import numpy as np 8 | 9 | from nipy.modalities.fmri import hrf, utils 10 | 11 | import matplotlib.pyplot as plt 12 | 13 | # hrf.glover is a symbolic function; get a function of time to work on arrays 14 | hrf_func = utils.lambdify_t(hrf.glover(utils.T)) 15 | 16 | t = np.linspace(0,25,200) 17 | plt.plot(t, hrf_func(t)) 18 | a=plt.gca() 19 | a.set_xlabel(r'$t$') 20 | a.set_ylabel(r'$h_{can}(t)$') 21 | -------------------------------------------------------------------------------- /doc/users/plots/hrf_delta.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | """ 4 | This plot demonstrates a neuronal model that is a sum 5 | of delta functions times coefficient values 6 | """ 7 | 8 | import matplotlib.pyplot as plt 9 | 10 
| # Coefficients for a and b 11 | ba = 1 12 | bb = -2 13 | 14 | # Times for a and b 15 | ta = [0,4,8,12,16] 16 | tb = [2,6,10,14,18] 17 | 18 | for t in ta: 19 | plt.plot([t,t],[0,ba],c='r') 20 | for t in tb: 21 | plt.plot([t,t],[0,bb],c='b') 22 | 23 | a = plt.gca() 24 | a.set_xlabel(r'$t$') 25 | a.set_ylabel(r'$n(t)$') 26 | -------------------------------------------------------------------------------- /doc/users/plots/hrf_different.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | """ 4 | This example uses a different HRF for different event types 5 | """ 6 | 7 | import numpy as np 8 | 9 | import matplotlib.pyplot as plt 10 | 11 | from nipy.modalities.fmri import hrf 12 | from nipy.modalities.fmri.utils import T, lambdify_t 13 | 14 | 15 | # HRFs as functions of (symbolic) time 16 | glover = hrf.glover(T) 17 | afni = hrf.afni(T) 18 | 19 | ta = [0,4,8,12,16]; tb = [2,6,10,14,18] 20 | ba = 1; bb = -2 21 | na = ba * sum([glover.subs(T, T - t) for t in ta]) 22 | nb = bb * sum([afni.subs(T, T - t) for t in tb]) 23 | 24 | nav = lambdify_t(na) 25 | nbv = lambdify_t(nb) 26 | 27 | t = np.linspace(0,30,200) 28 | plt.plot(t, nav(t), c='r', label='Face') 29 | plt.plot(t, nbv(t), c='b', label='Object') 30 | plt.plot(t, nbv(t)+nav(t), c='g', label='Combined') 31 | 32 | for t in ta: 33 | plt.plot([t,t],[0,ba*0.5],c='r') 34 | for t in tb: 35 | plt.plot([t,t],[0,bb*0.5],c='b') 36 | plt.plot([0,30], [0,0],c='#000000') 37 | plt.legend() 38 | 39 | plt.show() 40 | -------------------------------------------------------------------------------- /doc/users/plots/neuronal_block.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | """ 4 | This figure is meant to represent the 
neuronal block model 5 | with Faces at times [0,4,8,12,16] and Objects presented at [2,6,10,14,18] 6 | each presented for 0.5 seconds 7 | and a coefficient of +1 for Faces, -2 for Objects. 8 | 9 | """ 10 | 11 | import pylab 12 | import numpy as np 13 | 14 | 15 | from sympy import Symbol, Piecewise, lambdify 16 | ta = [0,4,8,12,16]; tb = [2,6,10,14,18] 17 | ba = Symbol('ba'); bb = Symbol('bb'); t = Symbol('t') 18 | fa = sum([Piecewise((0, (t<_t)), ((t-_t)/0.5, (t<_t+0.5)), (1, (t >= _t+0.5))) for _t in ta])*ba 19 | fb = sum([Piecewise((0, (t<_t)), ((t-_t)/0.5, (t<_t+0.5)), (1, (t >= _t+0.5))) for _t in tb])*bb 20 | N = fa+fb 21 | 22 | Nn = N.subs(ba,1) 23 | Nn = Nn.subs(bb,-2) 24 | 25 | NNl = lambdify(t, Nn) 26 | 27 | tt = np.linspace(-1,21,121) 28 | pylab.plot(tt, [NNl(float(_t)) for _t in tt]) 29 | 30 | a = pylab.gca() 31 | a.set_ylim([-5.5,1.5]) 32 | a.set_ylabel('Neuronal (cumulative)') 33 | a.set_xlabel('Time') 34 | 35 | pylab.show() 36 | -------------------------------------------------------------------------------- /doc/users/plots/neuronal_event.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | """ 4 | This figure is meant to represent the neuronal event-related model 5 | 6 | and a coefficient of +1 for Faces, -2 for Objects. 
7 | 8 | """ 9 | 10 | import pylab 11 | import numpy as np 12 | 13 | 14 | from sympy import Symbol, Heaviside, lambdify 15 | ta = [0,4,8,12,16]; tb = [2,6,10,14,18] 16 | ba = Symbol('ba'); bb = Symbol('bb'); t = Symbol('t') 17 | fa = sum([Heaviside(t-_t) for _t in ta]) * ba 18 | fb = sum([Heaviside(t-_t) for _t in tb]) * bb 19 | N = fa+fb 20 | 21 | Nn = N.subs(ba,1) 22 | Nn = Nn.subs(bb,-2) 23 | 24 | Nn = lambdify(t, Nn) 25 | 26 | tt = np.linspace(-1,21,1201) 27 | pylab.step(tt, [Nn(_t) for _t in tt]) 28 | 29 | a = pylab.gca() 30 | a.set_ylim([-5.5,1.5]) 31 | a.set_ylabel('Neuronal (cumulative)') 32 | a.set_xlabel('Time') 33 | 34 | pylab.show() 35 | -------------------------------------------------------------------------------- /doc/users/plots/random_amplitudes.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | """ 4 | This figure is meant to represent an event-type design with 5 | events at times [0,4,8,12,16] and random amplitudes 6 | centered at [0,1.1,2.3,0.9,0.3]. 7 | """ 8 | 9 | import pylab 10 | import numpy as np 11 | 12 | for t, y in zip([0,4,8,12,16], [0,1.1,2.3,0.9,0.3]): 13 | pylab.plot([t,t], [y-0.1,y+0.1], c='r', linewidth=3) 14 | 15 | a = pylab.gca() 16 | a.set_yticks([0,2]) 17 | a.set_xlim([-1,18]) 18 | a.set_xlabel('Time') 19 | a.set_ylabel('Amplitude') 20 | 21 | -------------------------------------------------------------------------------- /doc/users/plots/random_amplitudes_times.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | """ 4 | This figure is meant to represent an event-type design with 5 | events at random times centered [0,2,4,6,8] and random 6 | amplitudes centered [0,1.1,2.3,0.9,0.3]. 
7 | """ 8 | 9 | import pylab, matplotlib 10 | import numpy as np 11 | 12 | 13 | for t, y in zip([0,4,8,12,16], [0,1.1,2.3,0.9,0.3]): 14 | dt = np.array([-0.5,0.5,0.5,-0.5]) 15 | dy = np.array([-0.1,-0.1,0.1,0.1]) 16 | pylab.fill(t+dt,y+dy, 'r') 17 | 18 | a = pylab.gca() 19 | a.set_yticks([0,2]) 20 | a.set_xlim([-1,18]) 21 | a.set_xlabel('Time') 22 | a.set_ylabel('Amplitude') 23 | 24 | -------------------------------------------------------------------------------- /doc/users/plots/sinusoidal.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | """ 4 | This figure is meant to represent a continuous 5 | stimulus having two features, Orientation and Contrast 6 | """ 7 | 8 | 9 | import pylab 10 | import numpy as np 11 | 12 | t = np.linspace(0,10,1000) 13 | o = np.sin(2*np.pi*(t+1)) * np.exp(-t/10) 14 | c = np.sin(2*np.pi*(t+0.2)/4) * np.exp(-t/12) 15 | 16 | pylab.plot(t, o, label='Orientation') 17 | pylab.plot(t, c+2.1, label='Contrast') 18 | pylab.legend() 19 | 20 | a = pylab.gca() 21 | a.set_yticks([]) 22 | a.set_xlabel('Time') 23 | -------------------------------------------------------------------------------- /doc/users/scipy_orientation.rst: -------------------------------------------------------------------------------- 1 | ============================== 2 | Geography of the Scipy world 3 | ============================== 4 | 5 | in which we briefly describe the various components you are likely to 6 | come across when writing scientific python software in general, and NIPY 7 | code in particular. 8 | 9 | Numpy 10 | ===== 11 | 12 | NumPy_ is the basic Python array-manipulation package. It allows you to 13 | create, slice and manipulate N-D arrays at near C speed. 
It also has 14 | basic arithmetical and mathematical functions (such as sum, mean, and 15 | log, exp, sin, cos), matrix multiplication (``numpy.dot``), Fourier 16 | transforms (``numpy.fft``) and basic linear algebra ``numpy.linalg``. 17 | 18 | SciPy 19 | ===== 20 | 21 | Scipy_ is a large umbrella project that builds on Numpy (and depends on 22 | it). It includes a variety of high level science and engineering 23 | modules together as a single package. There are extended modules for 24 | linear algebra (including wrappers to BLAS and LAPACK), optimization, 25 | integration, sparse matrices, special functions, FFTs, signal and image 26 | processing, genetic algorithms, ODE solvers, and others. 27 | 28 | Matplotlib 29 | ========== 30 | 31 | Matplotlib_ is a 2D plotting package that depends on NumPy_. It has a 32 | simple matlab-like plotting syntax that makes it relatively easy to 33 | create good-looking plots, histograms and images with a small amount of 34 | code. As well as this simplified Matlab-like syntax, There is also a 35 | more powerful and flexible object-oriented interface. 36 | 37 | Ipython 38 | ======= 39 | 40 | Ipython_ is an interactive shell for python that has various features of 41 | the interactive shell of Matlab, Mathematica and R. It works 42 | particularly well with Matplotlib_, but is also an essential tool for 43 | interactive code development and code exploration. It contains 44 | libraries for creainteracting with parallel jobs on clusters or over 45 | several CPU cores in a fairly transparent way. 46 | 47 | Cython 48 | ====== 49 | 50 | Cython_ is a development language that allows you to write a combination 51 | of Python and C-like syntax to generate Python extensions. It is 52 | especially good for linking C libraries to Python in a readable way. 
It 53 | is also an excellent choice for optimization of Python code, because it 54 | allows you to drop down to C or C-like code at your bottlenecks without 55 | losing much of the readability of Python. 56 | 57 | Mayavi 58 | ====== 59 | 60 | Mayavi_ is a high-level python interface to the VTK_ plotting 61 | libraries. 62 | 63 | .. include:: ../links_names.txt 64 | -------------------------------------------------------------------------------- /doc/users/tutorial.rst: -------------------------------------------------------------------------------- 1 | .. _tutorial-index: 2 | 3 | =========== 4 | Tutorials 5 | =========== 6 | 7 | .. toctree:: 8 | :maxdepth: 2 9 | 10 | basic_io 11 | coordinate_map 12 | glm_spec 13 | 14 | -------------------------------------------------------------------------------- /examples/affine_registration.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 ts=4 sw=4 et: 4 | """ 5 | This script requires the nipy-data package to run. It is an example of 6 | inter-subject affine registration using two MR-T1 images from the 7 | sulcal 2000 database acquired at CEA, SHFJ, Orsay, France. The source 8 | is 'ammon' and the target is 'anubis'. Running it will result in a 9 | resampled ammon image being created in the current directory. 
10 | """ 11 | from __future__ import print_function # Python 2/3 compatibility 12 | 13 | from optparse import OptionParser 14 | import time 15 | 16 | import numpy as np 17 | import nibabel as nb 18 | 19 | from nireg import HistogramRegistration, resample 20 | from nipy.utils import example_data 21 | 22 | print('Scanning data directory...') 23 | 24 | # Input images are provided with the nipy-data package 25 | source = 'ammon' 26 | target = 'anubis' 27 | source_file = example_data.get_filename('neurospin', 'sulcal2000', 28 | 'nobias_' + source + '.nii.gz') 29 | target_file = example_data.get_filename('neurospin', 'sulcal2000', 30 | 'nobias_' + target + '.nii.gz') 31 | 32 | # Parse arguments 33 | parser = OptionParser(description=__doc__) 34 | 35 | doc_similarity = 'similarity measure: cc (correlation coefficient), \ 36 | cr (correlation ratio), crl1 (correlation ratio in L1 norm), \ 37 | mi (mutual information), nmi (normalized mutual information), \ 38 | pmi (Parzen mutual information), dpmi (discrete Parzen mutual \ 39 | information). Default is crl1.' 40 | 41 | doc_renormalize = 'similarity renormalization: default, ml or nml.' 42 | 43 | doc_interp = 'interpolation method: tri (trilinear), pv (partial volume), \ 44 | rand (random). Default is pv.' 45 | 46 | doc_optimizer = 'optimization method: simplex, powell, steepest, cg, bfgs. \ 47 | Default is powell.' 
48 | 49 | doc_tol = 'numerical tolerance on similarity values: default is 0.01' 50 | 51 | parser.add_option('-s', '--similarity', dest='similarity', 52 | help=doc_similarity) 53 | parser.add_option('-r', '--renormalize', dest='renormalize', 54 | help=doc_renormalize) 55 | parser.add_option('-i', '--interp', dest='interp', 56 | help=doc_interp) 57 | parser.add_option('-o', '--optimizer', dest='optimizer', 58 | help=doc_optimizer) 59 | parser.add_option('-t', '--tol', dest='tol', 60 | help=doc_tol) 61 | opts, args = parser.parse_args() 62 | 63 | 64 | # Optional arguments 65 | similarity = 'crl1' 66 | renormalize = 'default' 67 | interp = 'pv' 68 | optimizer = 'powell' 69 | if opts.similarity is not None: 70 | similarity = opts.similarity 71 | if opts.renormalize is not None: 72 | renormalize = bool(int(opts.renormalize)) 73 | if opts.interp is not None: 74 | interp = opts.interp 75 | if opts.optimizer is not None: 76 | optimizer = opts.optimizer 77 | if not opts.tol == None: 78 | tol = float(opts.tol) 79 | else: 80 | tol = 1e-2 81 | 82 | # Print messages 83 | print('Source brain: %s' % source) 84 | print('Target brain: %s' % target) 85 | print('Similarity measure: %s' % similarity) 86 | print('Renormalization: %s' % renormalize) 87 | print('Interpolation: %s' % interp) 88 | print('Optimizer: %s' % optimizer) 89 | print('Tolerance: %f' % tol) 90 | 91 | # Get data 92 | print('Fetching image data...') 93 | I = nb.load(source_file) 94 | J = nb.load(target_file) 95 | 96 | # Perform affine registration 97 | # The output is an array-like object such that 98 | # np.asarray(T) is a customary 4x4 matrix 99 | print('Setting up registration...') 100 | tic = time.time() 101 | R = HistogramRegistration(I, J, similarity=similarity, interp=interp, 102 | renormalize=renormalize) 103 | T = R.optimize('affine', optimizer=optimizer, xtol=tol, ftol=tol) 104 | toc = time.time() 105 | print(' Registration time: %f sec' % (toc - tic)) 106 | 107 | # Resample source image 108 | 
print('Resampling source image...') 109 | tic = time.time() 110 | #It = resample2(I, J.coordmap, T.inv(), J.shape) 111 | It = resample(I, T.inv(), reference=J) 112 | toc = time.time() 113 | print(' Resampling time: %f sec' % (toc - tic)) 114 | 115 | # Save resampled source 116 | outroot = source + '_TO_' + target 117 | outimg = outroot + '.nii.gz' 118 | print ('Saving resampled source in: %s' % outimg) 119 | nb.save(It, outimg) 120 | 121 | # Save transformation matrix 122 | """ 123 | outparams = outroot + '.npy' 124 | np.save(outparams, np.asarray(T)) 125 | """ 126 | -------------------------------------------------------------------------------- /examples/space_time_realign.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 ts=4 sw=4 et: 4 | """ 5 | This script requires the nipy-data package to run. It is an example of 6 | simultaneous motion correction and slice timing correction in 7 | multi-session fMRI data from the FIAC 2005 dataset. Specifically, it 8 | uses the first two sessions of subject 'fiac0'. 9 | 10 | Usage: 11 | python space_time_realign.py 12 | 13 | Two images will be created in the working directory for the realigned series:: 14 | 15 | rarun1.nii 16 | rarun2.nii 17 | 18 | Author: Alexis Roche, 2009. 
19 | """ 20 | from __future__ import print_function # Python 2/3 compatibility 21 | 22 | import os 23 | from os.path import split as psplit, abspath 24 | import numpy as np 25 | import nibabel as nb 26 | from nireg import SpaceTimeRealign 27 | from nipy.utils import example_data 28 | 29 | # Input images are provided with the nipy-data package 30 | runnames = [example_data.get_filename('fiac', 'fiac0', run + '.nii.gz') 31 | for run in ('run1', 'run2')] 32 | runs = [nb.load(run) for run in runnames] 33 | 34 | # Spatio-temporal realigner assuming interleaved ascending slice order 35 | R = SpaceTimeRealign(runs, tr=2.5, slice_times='asc_alt_2', slice_info=2) 36 | 37 | # If you are not sure what the above is doing, you can alternatively 38 | # declare slice times explicitly using the following equivalent code 39 | """ 40 | tr = 2.5 41 | nslices = runs[0].shape[2] 42 | slice_times = (tr / float(nslices)) *\ 43 | np.argsort(range(0, nslices, 2) + range(1, nslices, 2)) 44 | print('Slice times: %s' % slice_times) 45 | R = SpaceTimeRealign(runs, tr=tr, slice_times=slice_times, slice_info=2) 46 | """ 47 | 48 | # Estimate motion within- and between-sessions 49 | R.estimate(refscan=None) 50 | 51 | # Resample data on a regular space+time lattice using 4d interpolation 52 | # Save images 53 | cwd = abspath(os.getcwd()) 54 | print('Saving results in: %s' % cwd) 55 | for i in range(len(runs)): 56 | corr_run = R.resample(i) 57 | fname = 'ra' + psplit(runnames[i])[1] 58 | nb.save(corr_run, fname) 59 | -------------------------------------------------------------------------------- /nireg/NOTES_ELF: -------------------------------------------------------------------------------- 1 | 2 | 3 | Notes 4 | 5 | neurospin/registration 6 | 7 | registration/ 8 | 9 | __init__.py 10 | registration.py 11 | iconic_registration (intensity based, joint histogram) 12 | renamed joint registration 13 | takes from and two images and compute joint histogram 14 | groupwise_registration.py (motion 
correction in fmri) 15 | register a set of images 16 | sum of square differences 17 | not using joint histogram 18 | affine.py (discribes a general 3d affine transformation and its parametrization) 19 | class affine 20 | params=s(-1)xv12 s: pre_cond 21 | radius for the preconditioner is in translation coordinates 22 | check for rigidity 23 | class 24 | 25 | grid_transform.py (discrete displacements of the from grid) 26 | cubic_spline.c (same results as ndimage) 27 | wichmann_prng.c (only for the random interplation) 28 | iconic.c to be renamed to histogram.c 29 | 30 | 31 | 32 | 33 | interpolating the histogram 34 | avoids the problem of casting the intensity 35 | 36 | in C assumes the joint histogram is a signed short array (16bit) 37 | 38 | clamp 39 | 40 | Make independent tests with checks starting from different registrations. 41 | Sensible default for the focus function 42 | What should we do when outside the fov? 43 | 44 | 45 | 46 | 47 | -------------------------------------------------------------------------------- /nireg/TODO.txt: -------------------------------------------------------------------------------- 1 | * 'permuted' svd in affine.py 2 | * rename rotation, scaling, shearing appropriately 3 | * spline transform object 4 | * log-euclidean transform object ??? 5 | * Levenberg-Marquardt 6 | * Affine transform creation 7 | 8 | -------------------------------------------- 9 | 10 | Transform objects 11 | 12 | Transform 13 | | 14 | --> Affine 15 | | 16 | --> Rigid, Similarity, ... 17 | | 18 | --> GridTransform 19 | | 20 | --> SplineTransform 21 | 22 | | 23 | --> PolyAffine 24 | | 25 | --> PolyRigid, PolySimilarity, ... 26 | 27 | 28 | ChainTransform 29 | 30 | Any registration method should take a generic transform argument 31 | having an `apply` method and a `param` attribute or property. 32 | 33 | Internally, it may create a ChainTransform object to represent 34 | voxel-to-voxel transforms or other kinds of compositions. 
The 35 | transform supplied by the user should be optimizable (have a `param` 36 | attribute). 37 | 38 | -------------------------------------------------------------------------------- /nireg/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 ts=4 sw=4 et: 4 | from .resample import resample 5 | from .histogram_registration import (HistogramRegistration, clamp, 6 | ideal_spacing, interp_methods) 7 | from .affine import (threshold, rotation_mat2vec, rotation_vec2mat, to_matrix44, 8 | preconditioner, inverse_affine, subgrid_affine, Affine, 9 | Affine2D, Rigid, Rigid2D, Similarity, Similarity2D, 10 | affine_transforms) 11 | from .groupwise_registration import (interp_slice_times, scanner_coords, 12 | make_grid, Image4d, Realign4dAlgorithm, 13 | resample4d, adjust_subsampling, 14 | single_run_realign4d, realign4d, 15 | SpaceTimeRealign, 16 | Realign4d) 17 | 18 | from numpy.testing import Tester 19 | test = Tester().test 20 | bench = Tester().bench 21 | -------------------------------------------------------------------------------- /nireg/_register.h: -------------------------------------------------------------------------------- 1 | #define PY_ARRAY_UNIQUE_SYMBOL _register_ARRAY_API 2 | -------------------------------------------------------------------------------- /nireg/chain_transform.py: -------------------------------------------------------------------------------- 1 | """ Chain transforms """ 2 | 3 | from .affine import Affine 4 | 5 | 6 | class ChainTransform(object): 7 | def __init__(self, optimizable, pre=None, post=None): 8 | """ Create chain transform instance 9 | 10 | Parameters 11 | ---------- 12 | optimizable : array or Transform 13 | Transform that we are optimizing. If this is an array, then assume 14 | it's an affine matrix. 
15 | pre : None or array or Transform, optional 16 | If not None, a transform that should be applied to points before 17 | applying the `optimizable` transform. If an array, then assume it's 18 | an affine matrix. 19 | post : None or Transform, optional 20 | If not None, a transform that should be applied to points after 21 | applying any `pre` transform, and then the `optimizable` 22 | transform. If an array, assume it's an affine matrix 23 | """ 24 | if not hasattr(optimizable, 'param'): 25 | raise ValueError('Input transform should be optimizable') 26 | if not hasattr(optimizable, 'apply'): 27 | optimizable = Affine(optimizable) 28 | if not hasattr(pre, 'apply'): 29 | pre = Affine(pre) 30 | if not hasattr(post, 'apply'): 31 | post = Affine(post) 32 | self.optimizable = optimizable 33 | self.pre = pre 34 | self.post = post 35 | 36 | def apply(self, pts): 37 | """ Apply full transformation to points `pts` 38 | 39 | If there are N points, then `pts` will be N by 3 40 | 41 | Parameters 42 | ---------- 43 | pts : array-like 44 | array of points 45 | 46 | Returns 47 | ------- 48 | transformed_pts : array 49 | N by 3 array of transformed points 50 | """ 51 | composed = self.post.compose(self.optimizable.compose(self.pre)) 52 | return composed.apply(pts) 53 | 54 | def _set_param(self, param): 55 | self.optimizable.param = param 56 | def _get_param(self): 57 | return self.optimizable.param 58 | param = property(_get_param, _set_param, None, 'get/set param') 59 | -------------------------------------------------------------------------------- /nireg/cubic_spline.h: -------------------------------------------------------------------------------- 1 | /*! 2 | 3 | \file cubic_spline.h 4 | \brief Cubic spline transformation and interpolation. 5 | \author Alexis Roche 6 | \date 2003 7 | 8 | Compute the cubic spline coefficients of regularly sampled signals 9 | and perform interpolation. 
The cubic spline transform is implemented 10 | from the recursive algorithm described in: 11 | 12 | M. Unser, "Splines : a perfect fit for signal/image processing ", 13 | IEEE Signal Processing Magazine, Nov. 1999. Web page: 14 | http://bigwww.epfl.ch/publications/unser9902.html Please check the 15 | erratum. 16 | 17 | */ 18 | 19 | 20 | #ifndef CUBIC_SPLINE 21 | #define CUBIC_SPLINE 22 | 23 | #ifdef __cplusplus 24 | extern "C" { 25 | #endif 26 | 27 | #include 28 | 29 | /* 30 | * Use extension numpy symbol table 31 | */ 32 | #define NO_IMPORT_ARRAY 33 | #include "_register.h" 34 | 35 | #include 36 | 37 | /*! 38 | \brief Cubic spline basis function 39 | \param x input value 40 | */ 41 | extern double cubic_spline_basis(double x); 42 | /*! 43 | \brief Cubic spline transform of a one-dimensional signal 44 | \param src input signal 45 | \param res output signal (same size) 46 | */ 47 | extern void cubic_spline_transform(PyArrayObject* res, const PyArrayObject* src); 48 | 49 | extern double cubic_spline_sample1d(double x, const PyArrayObject* coef, 50 | int mode); 51 | extern double cubic_spline_sample2d(double x, double y, const PyArrayObject* coef, 52 | int mode_x, int mode_y); 53 | extern double cubic_spline_sample3d(double x, double y, double z, const PyArrayObject* coef, 54 | int mode_x, int mode_y, int mode_z); 55 | extern double cubic_spline_sample4d(double x, double y, double z, double t, const PyArrayObject* coef, 56 | int mode_x, int mode_y, int mode_z, int mode_t); 57 | extern void cubic_spline_resample3d(PyArrayObject* im_resampled, const PyArrayObject* im, 58 | const double* Tvox, 59 | int mode_x, int mode_y, int mode_z); 60 | 61 | 62 | 63 | #ifdef __cplusplus 64 | } 65 | #endif 66 | 67 | #endif 68 | -------------------------------------------------------------------------------- /nireg/externals/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | # emacs: -*- mode: 
python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 ts=4 sw=4 et: 4 | # init for externals package 5 | from . import argparse 6 | from . import configobj 7 | -------------------------------------------------------------------------------- /nireg/externals/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 ts=4 sw=4 et: 4 | def configuration(parent_package='',top_path=None): 5 | from numpy.distutils.misc_util import Configuration 6 | config = Configuration('externals', parent_package, top_path) 7 | 8 | config.add_subpackage('transforms3d') 9 | 10 | return config 11 | 12 | if __name__ == '__main__': 13 | from numpy.distutils.core import setup 14 | setup(**configuration(top_path='').todict()) 15 | -------------------------------------------------------------------------------- /nireg/externals/transforms3d/__init__.py: -------------------------------------------------------------------------------- 1 | ''' Copies from transforms3d package 2 | 3 | Please see github.com/matthew-brett/transforms3d 4 | ''' 5 | from __future__ import absolute_import 6 | 7 | from . 
import quaternions 8 | -------------------------------------------------------------------------------- /nireg/externals/transforms3d/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 ts=4 sw=4 et: 4 | def configuration(parent_package='',top_path=None): 5 | from numpy.distutils.misc_util import Configuration 6 | config = Configuration('transforms3d', parent_package, top_path) 7 | config.add_subpackage('tests') 8 | 9 | return config 10 | 11 | if __name__ == '__main__': 12 | from numpy.distutils.core import setup 13 | setup(**configuration(top_path='').todict()) 14 | -------------------------------------------------------------------------------- /nireg/externals/transforms3d/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Init to make tests directory into package 2 | -------------------------------------------------------------------------------- /nireg/externals/transforms3d/tests/samples.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import numpy as np 3 | 4 | from ..utils import inique, permuted_signs, permuted_with_signs 5 | 6 | from ..taitbryan import euler2mat 7 | 8 | # Regular points around a sphere 9 | _r13 = np.sqrt(1/3.0) 10 | _r12 = np.sqrt(0.5) 11 | sphere_points = ( 12 | tuple(inique(permuted_with_signs([1, 0, 0]))) + 13 | tuple(inique(permuted_with_signs([_r12, _r12, 0]))) + 14 | tuple(inique(permuted_signs([_r13, _r13, _r13]))) 15 | ) 16 | 17 | # Example rotations ''' 18 | euler_tuples = [] 19 | params = np.arange(-np.pi,np.pi,np.pi/2) 20 | euler_tuples = tuple((x, y, z) 21 | for x in params 22 | for y in params 23 | for z in params) 24 | 25 | euler_mats = tuple(euler2mat(*t) for t in euler_tuples) 26 | 27 | 
-------------------------------------------------------------------------------- /nireg/externals/transforms3d/utils.py: -------------------------------------------------------------------------------- 1 | ''' Utilities for transforms3d ''' 2 | from __future__ import absolute_import 3 | 4 | import math 5 | from itertools import permutations 6 | 7 | import numpy as np 8 | 9 | 10 | def normalized_vector(vec): 11 | ''' Return vector divided by Euclidean (L2) norm 12 | 13 | See :term:`unit vector` and :term:`Euclidean norm` 14 | 15 | Parameters 16 | ---------- 17 | vec : array-like shape (3,) 18 | 19 | Returns 20 | ------- 21 | nvec : array shape (3,) 22 | vector divided by L2 norm 23 | 24 | Examples 25 | -------- 26 | >>> vec = [1, 2, 3] 27 | >>> l2n = np.sqrt(np.dot(vec, vec)) 28 | >>> nvec = normalized_vector(vec) 29 | >>> np.allclose(np.array(vec) / l2n, nvec) 30 | True 31 | >>> vec = np.array([[1, 2, 3]]) 32 | >>> vec.shape 33 | (1, 3) 34 | >>> normalized_vector(vec).shape 35 | (3,) 36 | ''' 37 | vec = np.asarray(vec).squeeze() 38 | return vec / math.sqrt((vec**2).sum()) 39 | 40 | 41 | def vector_norm(vec): 42 | ''' Return vector Euclidaan (L2) norm 43 | 44 | See :term:`unit vector` and :term:`Euclidean norm` 45 | 46 | Parameters 47 | ---------- 48 | vec : array-like shape (3,) 49 | 50 | Returns 51 | ------- 52 | norm : scalar 53 | 54 | Examples 55 | -------- 56 | >>> vec = [1, 2, 3] 57 | >>> l2n = np.sqrt(np.dot(vec, vec)) 58 | >>> nvec = vector_norm(vec) 59 | >>> np.allclose(nvec, np.sqrt(np.dot(vec, vec))) 60 | True 61 | ''' 62 | vec = np.asarray(vec) 63 | return math.sqrt((vec**2).sum()) 64 | 65 | 66 | def inique(iterable): 67 | ''' Generate unique elements from `iterable` 68 | 69 | Parameters 70 | ---------- 71 | iterable : iterable 72 | 73 | Returns 74 | ------- 75 | gen : generator 76 | generator that yields unique elements from `iterable` 77 | 78 | Examples 79 | -------- 80 | >>> tuple(inique([0, 1, 2, 0, 2, 3])) 81 | (0, 1, 2, 3) 82 | ''' 83 | history = 
[] 84 | for val in iterable: 85 | if val not in history: 86 | history.append(val) 87 | yield val 88 | 89 | 90 | def permuted_signs(seq): 91 | ''' Generate permuted signs for sequence `seq` 92 | 93 | Parameters 94 | ---------- 95 | seq : sequence 96 | 97 | Returns 98 | ------- 99 | gen : generator 100 | generator returning `seq` with signs permuted 101 | 102 | Examples 103 | -------- 104 | >>> tuple(permuted_signs([1, -2, 0])) 105 | ((1, -2, 0), (1, -2, 0), (1, 2, 0), (1, 2, 0), (-1, -2, 0), (-1, -2, 0), (-1, 2, 0), (-1, 2, 0)) 106 | ''' 107 | seq = tuple(seq) 108 | n = len(seq) 109 | for fs in inique(permutations([1]*n + [-1]*n, n)): 110 | yield tuple(e * f for e, f in zip(seq, fs)) 111 | 112 | 113 | def permuted_with_signs(seq): 114 | ''' Return all permutations of `seq` with all sign permutations 115 | 116 | Parameters 117 | ---------- 118 | seq : sequence 119 | 120 | Returns 121 | ------- 122 | gen : generator 123 | generator returning permutations and sign permutations 124 | 125 | Examples 126 | -------- 127 | >>> tuple(permuted_with_signs((1,2))) 128 | ((1, 2), (1, -2), (-1, 2), (-1, -2), (2, 1), (2, -1), (-2, 1), (-2, -1)) 129 | ''' 130 | for pseq in permutations(seq): 131 | for sseq in permuted_signs(pseq): 132 | yield sseq 133 | -------------------------------------------------------------------------------- /nireg/joint_histogram.h: -------------------------------------------------------------------------------- 1 | /* 2 | @author Alexis Roche 3 | @date 1997-2009 4 | 5 | Intensity-based texture analysis and image registration for 2D or 3D 6 | images [BETA VERSION]. 7 | 8 | All computations are fed with the voxel-to-voxel transformation 9 | relating two images, so you do not need the voxel sizes. 
10 | */ 11 | 12 | #ifndef JOINT_HISTOGRAM 13 | #define JOINT_HISTOGRAM 14 | 15 | #ifdef __cplusplus 16 | extern "C" { 17 | #endif 18 | 19 | #include 20 | 21 | /* 22 | * Use extension numpy symbol table 23 | */ 24 | #define NO_IMPORT_ARRAY 25 | #include "_register.h" 26 | 27 | #include 28 | 29 | /* 30 | Update a pre-allocated joint histogram. Important notice: in all 31 | computations, H will be assumed C-contiguous. 32 | 33 | This means that it is contiguous and that, in C convention 34 | (row-major order, i.e. column indices are fastest): 35 | 36 | i (source intensities) are row indices 37 | j (target intensities) are column indices 38 | 39 | interp: 40 | 0 - PV interpolation 41 | 1 - TRILINEAR interpolation 42 | <0 - RANDOM interpolation with seed=-interp 43 | */ 44 | extern int joint_histogram(PyArrayObject* H, 45 | unsigned int clampI, 46 | unsigned int clampJ, 47 | PyArrayIterObject* iterI, 48 | const PyArrayObject* imJ_padded, 49 | const PyArrayObject* Tvox, 50 | long interp); 51 | 52 | extern int L1_moments(double* n_, double* median_, double* dev_, 53 | const PyArrayObject* H); 54 | 55 | 56 | #ifdef __cplusplus 57 | } 58 | #endif 59 | 60 | #endif 61 | -------------------------------------------------------------------------------- /nireg/np_distutils_monkey.py: -------------------------------------------------------------------------------- 1 | # Standard library imports 2 | from os.path import join as pjoin, dirname 3 | from distutils.dep_util import newer_group 4 | from distutils.errors import DistutilsError 5 | 6 | from numpy.distutils.misc_util import appendpath 7 | from numpy.distutils import log 8 | 9 | 10 | def generate_a_pyrex_source(self, base, ext_name, source, extension): 11 | ''' Monkey patch for numpy build_src.build_src method 12 | 13 | Uses Cython instead of Pyrex. 
14 | 15 | Assumes Cython is present 16 | ''' 17 | if self.inplace: 18 | target_dir = dirname(base) 19 | else: 20 | target_dir = appendpath(self.build_src, dirname(base)) 21 | target_file = pjoin(target_dir, ext_name + '.c') 22 | depends = [source] + extension.depends 23 | if self.force or newer_group(depends, target_file, 'newer'): 24 | import Cython.Compiler.Main 25 | log.info("cythonc:> %s" % (target_file)) 26 | self.mkpath(target_dir) 27 | options = Cython.Compiler.Main.CompilationOptions( 28 | defaults=Cython.Compiler.Main.default_options, 29 | include_path=extension.include_dirs, 30 | output_file=target_file) 31 | cython_result = Cython.Compiler.Main.compile(source, 32 | options=options) 33 | if cython_result.num_errors != 0: 34 | raise DistutilsError("%d errors while compiling %r with Cython" \ 35 | % (cython_result.num_errors, source)) 36 | return target_file 37 | 38 | 39 | from numpy.distutils.command import build_src 40 | build_src.build_src.generate_a_pyrex_source = generate_a_pyrex_source 41 | -------------------------------------------------------------------------------- /nireg/polyaffine.c: -------------------------------------------------------------------------------- 1 | #include "polyaffine.h" 2 | 3 | #include 4 | #include 5 | 6 | #define TINY 1e-200 7 | 8 | 9 | static double _gaussian(double* xyz, double* center, double* sigma) 10 | { 11 | double aux, d2 = 0.0; 12 | int i; 13 | 14 | for (i=0; i<3; i++) { 15 | aux = xyz[i] - center[i]; 16 | aux /= sigma[i]; 17 | d2 += aux*aux; 18 | } 19 | 20 | return exp(-.5*d2); 21 | } 22 | 23 | /* Compute: y += w*x */ 24 | static void _add_weighted_affine(double* y, const double* x, double w) 25 | { 26 | int i; 27 | 28 | for (i=0; i<12; i++) 29 | y[i] += w*x[i]; 30 | 31 | return; 32 | } 33 | 34 | /* Compute: y = mat*x */ 35 | static void _apply_affine(double *y, const double* mat, const double* x, double W) 36 | { 37 | y[0] = mat[0]*x[0]+mat[1]*x[1]+mat[2]*x[2]+mat[3]; 38 | y[1] = 
mat[4]*x[0]+mat[5]*x[1]+mat[6]*x[2]+mat[7]; 39 | y[2] = mat[8]*x[0]+mat[9]*x[1]+mat[10]*x[2]+mat[11]; 40 | 41 | if (Windex < iter_xyz->size) { 81 | 82 | xyz = PyArray_ITER_DATA(iter_xyz); 83 | PyArray_ITER_RESET(iter_centers); 84 | PyArray_ITER_RESET(iter_affines); 85 | memset((void*)mat, 0, bytes_mat); 86 | W = 0.0; 87 | 88 | /* Loop over centers */ 89 | while(iter_centers->index < iter_centers->size) { 90 | center = PyArray_ITER_DATA(iter_centers); 91 | affine = PyArray_ITER_DATA(iter_affines); 92 | w = _gaussian(xyz, center, sigma); 93 | W += w; 94 | _add_weighted_affine(mat, affine, w); 95 | PyArray_ITER_NEXT(iter_centers); 96 | PyArray_ITER_NEXT(iter_affines); 97 | } 98 | 99 | /* Apply matrix */ 100 | _apply_affine(t_xyz, mat, xyz, W); 101 | memcpy((void*)xyz, (void*)t_xyz, bytes_xyz); 102 | 103 | /* Update xyz iterator */ 104 | PyArray_ITER_NEXT(iter_xyz); 105 | } 106 | 107 | /* Free memory */ 108 | Py_XDECREF(iter_xyz); 109 | Py_XDECREF(iter_centers); 110 | Py_XDECREF(iter_affines); 111 | 112 | return; 113 | } 114 | 115 | 116 | 117 | -------------------------------------------------------------------------------- /nireg/polyaffine.h: -------------------------------------------------------------------------------- 1 | #ifndef POLYAFFINE 2 | #define POLYAFFINE 3 | 4 | #ifdef __cplusplus 5 | extern "C" { 6 | #endif 7 | 8 | #include 9 | 10 | /* 11 | * Use extension numpy symbol table 12 | */ 13 | #define NO_IMPORT_ARRAY 14 | #include "_register.h" 15 | 16 | #include 17 | 18 | extern void apply_polyaffine(PyArrayObject* XYZ, 19 | const PyArrayObject* Centers, 20 | const PyArrayObject* Affines, 21 | const PyArrayObject* Sigma); 22 | 23 | 24 | #ifdef __cplusplus 25 | } 26 | #endif 27 | 28 | #endif 29 | -------------------------------------------------------------------------------- /nireg/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from __future__ import print_function 3 | 
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 4 | # vi: set ft=python sts=4 ts=4 sw=4 et: 5 | import os 6 | 7 | import np_distutils_monkey # ensure cythonization with numpy version > 1.10 8 | 9 | def configuration(parent_package='', top_path=None): 10 | 11 | from numpy.distutils.misc_util import Configuration 12 | 13 | config = Configuration('nireg', parent_package, top_path) 14 | config.add_subpackage('tests') 15 | config.add_include_dirs(config.name.replace('.', os.sep)) 16 | config.add_extension( 17 | '_register', 18 | sources=['_register.pyx', 19 | 'joint_histogram.c', 20 | 'wichmann_prng.c', 21 | 'cubic_spline.c', 22 | 'polyaffine.c']) 23 | config.add_subpackage('externals') 24 | config.add_subpackage('slicetiming') 25 | config.add_subpackage('testing') 26 | config.add_subpackage('tests') 27 | return config 28 | 29 | 30 | if __name__ == '__main__': 31 | print('This is the wrong setup.py file to run') 32 | -------------------------------------------------------------------------------- /nireg/slicetiming/__init__.py: -------------------------------------------------------------------------------- 1 | # Init for slicetiming subpackage 2 | """ Slicetiming subpackage 3 | 4 | The standard nipy method of slice timing is implemented in 5 | :mod:`nipy.algorithms.registration.groupwise_registration`. 
6 | 7 | This subpackage is a placeholder for other slice timing methods, and for utility 8 | functions for slice timing 9 | """ 10 | -------------------------------------------------------------------------------- /nireg/slicetiming/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from __future__ import print_function 3 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 4 | # vi: set ft=python sts=4 ts=4 sw=4 et: 5 | import os 6 | 7 | 8 | def configuration(parent_package='', top_path=None): 9 | 10 | from numpy.distutils.misc_util import Configuration 11 | 12 | config = Configuration('slicetiming', parent_package, top_path) 13 | config.add_subpackage('tests') 14 | config.add_include_dirs(config.name.replace('.', os.sep)) 15 | return config 16 | 17 | 18 | if __name__ == '__main__': 19 | print('This is the wrong setup.py file to run') 20 | -------------------------------------------------------------------------------- /nireg/slicetiming/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Init for slicetiming tests 2 | -------------------------------------------------------------------------------- /nireg/slicetiming/tests/test_timefuncs.py: -------------------------------------------------------------------------------- 1 | """ Testing timefuncs module 2 | """ 3 | 4 | from __future__ import division, print_function, absolute_import 5 | 6 | import numpy as np 7 | 8 | from numpy.testing import (assert_almost_equal, 9 | assert_array_equal) 10 | 11 | from nose.tools import (assert_true, assert_false, assert_raises, 12 | assert_equal, assert_not_equal) 13 | 14 | 15 | from .. import timefuncs as tf 16 | 17 | 18 | def test_ascending(): 19 | tr = 2. 
20 | for func in (tf.st_01234, tf.ascending): 21 | for n_slices in (10, 11): 22 | assert_almost_equal( 23 | func(n_slices, tr), 24 | np.arange(n_slices) / n_slices * tr) 25 | assert_array_equal( 26 | np.argsort(func(5, 1)), [0, 1, 2, 3, 4]) 27 | assert_equal(tf.SLICETIME_FUNCTIONS[func.__name__], func) 28 | 29 | 30 | def test_descending(): 31 | tr = 2. 32 | for func in (tf.st_43210, tf.descending): 33 | for n_slices in (10, 11): 34 | assert_almost_equal( 35 | func(n_slices, tr), 36 | np.arange(n_slices-1, -1, -1) / n_slices * tr) 37 | assert_array_equal( 38 | np.argsort(func(5, 1)), [4, 3, 2, 1, 0]) 39 | assert_equal(tf.SLICETIME_FUNCTIONS[func.__name__], func) 40 | 41 | 42 | def test_asc_alt_2(): 43 | tr = 2. 44 | for func in (tf.st_02413, tf.asc_alt_2): 45 | assert_almost_equal( 46 | func(10, tr) / tr * 10, 47 | [0, 5, 1, 6, 2, 7, 3, 8, 4, 9]) 48 | assert_almost_equal( 49 | func(11, tr) / tr * 11, 50 | [0, 6, 1, 7, 2, 8, 3, 9, 4, 10, 5]) 51 | assert_array_equal( 52 | np.argsort(func(5, 1)), [0, 2, 4, 1, 3]) 53 | assert_equal(tf.SLICETIME_FUNCTIONS[func.__name__], func) 54 | 55 | 56 | def test_desc_alt_2(): 57 | tr = 2. 58 | for func in (tf.st_42031, tf.desc_alt_2): 59 | assert_almost_equal( 60 | func(10, tr) / tr * 10, 61 | [9, 4, 8, 3, 7, 2, 6, 1, 5, 0]) 62 | assert_almost_equal( 63 | func(11, tr) / tr * 11, 64 | [5, 10, 4, 9, 3, 8, 2, 7, 1, 6, 0]) 65 | assert_array_equal( 66 | np.argsort(func(5, 1)), [4, 2, 0, 3, 1]) 67 | assert_equal(tf.SLICETIME_FUNCTIONS[func.__name__], func) 68 | 69 | 70 | def test_asc_alt_2_1(): 71 | tr = 2. 72 | for func in (tf.st_13024, tf.asc_alt_2_1): 73 | assert_almost_equal( 74 | func(10, tr) / tr * 10, 75 | [5, 0, 6, 1, 7, 2, 8, 3, 9, 4]) 76 | assert_almost_equal( 77 | func(11, tr) / tr * 11, 78 | [5, 0, 6, 1, 7, 2, 8, 3, 9, 4, 10]) 79 | assert_array_equal( 80 | np.argsort(func(5, 1)), [1, 3, 0, 2, 4]) 81 | assert_equal(tf.SLICETIME_FUNCTIONS[func.__name__], func) 82 | 83 | 84 | def test_asc_alt_siemens(): 85 | tr = 2. 
86 | for func in (tf.st_odd0_even1, tf.asc_alt_siemens): 87 | assert_almost_equal( 88 | func(10, tr) / tr * 10, 89 | [5, 0, 6, 1, 7, 2, 8, 3, 9, 4]) 90 | assert_almost_equal( 91 | func(11, tr) / tr * 11, 92 | [0, 6, 1, 7, 2, 8, 3, 9, 4, 10, 5]) 93 | assert_array_equal( 94 | np.argsort(func(5, 1)), [0, 2, 4, 1, 3]) 95 | assert_equal(tf.SLICETIME_FUNCTIONS[func.__name__], func) 96 | 97 | 98 | def test_asc_alt_half(): 99 | tr = 2. 100 | for func in (tf.st_03142, tf.asc_alt_half): 101 | assert_almost_equal( 102 | func(10, tr) / tr * 10, 103 | [0, 2, 4, 6, 8, 1, 3, 5, 7, 9]) 104 | assert_almost_equal( 105 | func(11, tr) / tr * 11, 106 | [0, 2, 4, 6, 8, 10, 1, 3, 5, 7, 9]) 107 | assert_array_equal( 108 | np.argsort(func(5, 1)), [0, 3, 1, 4, 2]) 109 | assert_equal(tf.SLICETIME_FUNCTIONS[func.__name__], func) 110 | 111 | 112 | def test_desc_alt_half(): 113 | tr = 2. 114 | for func in (tf.st_41302, tf.desc_alt_half): 115 | assert_almost_equal( 116 | func(10, tr) / tr * 10, 117 | [9, 7, 5, 3, 1, 8, 6, 4, 2, 0]) 118 | assert_almost_equal( 119 | func(11, tr) / tr * 11, 120 | [9, 7, 5, 3, 1, 10, 8, 6, 4, 2, 0]) 121 | assert_array_equal( 122 | np.argsort(func(5, 1)), [4, 1, 3, 0, 2]) 123 | assert_equal(tf.SLICETIME_FUNCTIONS[func.__name__], func) 124 | 125 | 126 | def test_number_names(): 127 | for func in ( 128 | tf.st_01234, 129 | tf.st_43210, 130 | tf.st_02413, 131 | tf.st_42031, 132 | tf.st_13024, 133 | tf.st_03142, 134 | tf.st_41302): 135 | name = func.__name__ 136 | assert_equal(tf.SLICETIME_FUNCTIONS[name], func) 137 | assert_equal(tf.SLICETIME_FUNCTIONS[name[3:]], func) 138 | -------------------------------------------------------------------------------- /nireg/testing/__init__.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set ft=python sts=4 ts=4 sw=4 et: 3 | """The testing directory contains a small set of imaging files to be 4 | used for 
doctests only. 5 | """ 6 | import os 7 | 8 | #__all__ = ['funcfile', 'anatfile'] 9 | 10 | # Discover directory path 11 | filepath = os.path.abspath(__file__) 12 | basedir = os.path.dirname(filepath) 13 | funcfile = os.path.join(basedir, 'functional.nii.gz') 14 | anatfile = os.path.join(basedir, 'anatomical.nii.gz') 15 | 16 | -------------------------------------------------------------------------------- /nireg/testing/anatomical.nii.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipy/nireg/6ed32f2830ff6ebc1860519dc630ebdf8e969dcf/nireg/testing/anatomical.nii.gz -------------------------------------------------------------------------------- /nireg/testing/functional.nii.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nipy/nireg/6ed32f2830ff6ebc1860519dc630ebdf8e969dcf/nireg/testing/functional.nii.gz -------------------------------------------------------------------------------- /nireg/testing/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 ts=4 sw=4 et: 4 | 5 | def configuration(parent_package='',top_path=None): 6 | from numpy.distutils.misc_util import Configuration 7 | config = Configuration('testing', parent_package, top_path) 8 | config.add_data_files('*.nii.gz') 9 | return config 10 | 11 | 12 | if __name__ == '__main__': 13 | from numpy.distutils.core import setup 14 | setup(**configuration(top_path='').todict()) 15 | -------------------------------------------------------------------------------- /nireg/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Init to make test directory a package 2 | 
-------------------------------------------------------------------------------- /nireg/tests/test_cubic_spline.py: -------------------------------------------------------------------------------- 1 | """ Testing 2 | """ 3 | from numpy.testing import assert_array_almost_equal 4 | from nose.tools import assert_true, assert_equal, assert_raises 5 | 6 | import numpy as np 7 | 8 | from .._register import (_cspline_transform, 9 | _cspline_sample1d, 10 | _cspline_sample4d) 11 | 12 | 13 | 14 | def test_sample1d(): 15 | a = np.random.rand(100) 16 | c = _cspline_transform(a) 17 | x = np.arange(100) 18 | b = np.zeros(100) 19 | b = _cspline_sample1d(b, c, x) 20 | assert_array_almost_equal(a, b) 21 | b = _cspline_sample1d(b, c, x, mode='nearest') 22 | assert_array_almost_equal(a, b) 23 | 24 | 25 | def test_sample4d(): 26 | a = np.random.rand(4, 5, 6, 7) 27 | c = _cspline_transform(a) 28 | x = np.mgrid[0:4, 0:5, 0:6, 0:7] 29 | b = np.zeros(a.shape) 30 | args = list(x) 31 | b = _cspline_sample4d(b, c, *args) 32 | assert_array_almost_equal(a, b) 33 | args = list(x) + ['nearest' for i in range(4)] 34 | b = _cspline_sample4d(b, c, *args) 35 | assert_array_almost_equal(a, b) 36 | -------------------------------------------------------------------------------- /nireg/tests/test_polyaffine.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import numpy as np 3 | 4 | from ..polyaffine import PolyAffine 5 | from ..affine import Affine 6 | 7 | 8 | def random_affine(): 9 | T = np.eye(4) 10 | T[0:3, 0:4] = np.random.rand(3, 4) 11 | return T 12 | 13 | 14 | def id_affine(): 15 | return np.eye(4) 16 | 17 | 18 | NCENTERS = 5 19 | NPTS = 100 20 | 21 | centers = [np.random.rand(3) for i in range(NCENTERS)] 22 | raf = random_affine() 23 | affines = [raf for i in range(NCENTERS)] 24 | #affines = [id_affine() for i in range(NCENTERS)] 25 | sigma = 1.0 26 | xyz = np.random.rand(NPTS, 3) 27 | 28 | # test 1: crach test 
create polyaffine transform 29 | T = PolyAffine(centers, affines, sigma) 30 | 31 | # test 2: crash test apply method 32 | t = T.apply(xyz) 33 | 34 | # test 3: check apply does nice job 35 | c = np.array(centers) 36 | tc = T.apply(c) 37 | qc = np.array([np.dot(a[0:3, 0:3], b) + a[0:3, 3]\ 38 | for a, b in zip(affines, centers)]) 39 | 40 | # test 4: crash test compose method 41 | A = Affine(random_affine()) 42 | TA = T.compose(A) 43 | 44 | # test 5: crash test left compose method 45 | AT = A.compose(T) 46 | 47 | z = AT.apply(xyz) 48 | za = A.compose(Affine(raf)).apply(xyz) 49 | -------------------------------------------------------------------------------- /nireg/tests/test_register.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 ts=4 sw=4 et: 4 | 5 | import numpy as np 6 | from nibabel import load 7 | 8 | from ..testing import anatfile 9 | from ..histogram_registration import HistogramRegistration 10 | 11 | from numpy.testing import assert_array_almost_equal 12 | 13 | anat_img = load(anatfile) 14 | 15 | def test_registers(): 16 | # Test registration to self returns identity 17 | for cost, interp, affine_type in (('cc', 'pv', 'rigid'), 18 | ('cc', 'tri', 'rigid'), 19 | ('cc', 'rand', 'rigid'), 20 | ('cc', 'pv', 'similarity'), 21 | ('cc', 'pv', 'affine'), 22 | ('cr', 'pv', 'rigid'), 23 | ('cr', 'pv', 'rigid'), 24 | ('crl1', 'pv', 'rigid'), 25 | ('mi', 'pv', 'rigid'), 26 | ('nmi', 'pv', 'rigid'), 27 | ): 28 | R = HistogramRegistration(anat_img, anat_img, 29 | similarity=cost, 30 | interp=interp, 31 | spacing=[2,2,2]) 32 | affine = R.optimize(affine_type) 33 | yield assert_array_almost_equal, affine.as_affine(), np.eye(4), 2 34 | -------------------------------------------------------------------------------- /nireg/tests/test_resample.py: 
def _test_resample(arr, interp_orders):
    """Resample `arr` under the identity transform; check it is unchanged.

    Runs the check once per interpolation order, both with the implicit
    reference grid and with the reference passed explicitly.
    """
    src_img = Nifti1Image(arr, np.eye(4))
    identity = Affine()
    reference = (src_img.shape, src_img.get_affine())
    expected = src_img.get_data()
    for order in interp_orders:
        # Identity transform, implicit reference grid.
        resampled = resample(src_img, identity, interp_order=order)
        assert_array_almost_equal(resampled.get_data(), expected)
        # Same again, with the reference grid given explicitly.
        resampled = resample(src_img, identity, reference=reference,
                             interp_order=order)
        assert_array_almost_equal(resampled.get_data(), expected)
def check_stc(true_signal, corrected_signal, ref_slice=0,
              rtol=1e-5, atol=1e-5):
    """ Check a slice-time corrected 4D signal against the true signal.

    Parameters
    ----------
    true_signal : array, shape (I, J, n_slices, T)
        Ground-truth signal.
    corrected_signal : array, shape (I, J, n_slices, T)
        Output of slice-timing correction to be validated.
    ref_slice : int, optional
        Spatial index of the reference slice. This slice needs no temporal
        interpolation, so it must match the truth almost exactly.
    rtol, atol : float, optional
        Relative / absolute tolerances for the non-reference slices.
    """
    n_slices = true_signal.shape[2]
    # The reference slice should be more or less perfect
    assert_almost_equal(
        corrected_signal[..., ref_slice, :],
        true_signal[..., ref_slice, :])
    # The other slices should be more or less right
    for sno in range(n_slices):
        if sno == ref_slice:
            continue # We checked this one
        # The first and last time points are excluded — presumably because
        # interpolation at the series boundaries is least reliable (TODO
        # confirm against the interpolation scheme).
        arr0 = true_signal[..., sno, 1:-1]
        arr1 = corrected_signal[..., sno, 1:-1]
        # Intermediate test matrices for debugging
        abs_diff = np.abs(arr0 - arr1)
        rel_diff = np.abs((arr0 / arr1) - 1)
        abs_fails = abs_diff > atol
        rel_fails = rel_diff > rtol
        fails = abs_fails & rel_fails
        # abs_only / rel_only are unused on the success path; they exist so a
        # debugger can inspect which elements failed and by how much.
        abs_only = abs_diff[fails]
        rel_only = rel_diff[fails]
        assert_true(np.allclose(arr0, arr1, rtol=rtol, atol=atol))
56 | first_signal = big_data[..., 0:n_vol_slices:n_slices] 57 | for name, time_to_slice in ( 58 | ('ascending', list(range(n_slices))), 59 | ('descending', list(range(n_slices)[::-1])), 60 | ('asc_alt_2', (list(range(0, n_slices, 2)) + 61 | list(range(1, n_slices, 2)))), 62 | ('desc_alt_2', (list(range(0, n_slices, 2)) + 63 | list(range(1, n_slices, 2)))[::-1]) 64 | ): 65 | slice_to_time = np.argsort(time_to_slice) 66 | acquired_signal = np.zeros_like(first_signal) 67 | for space_sno, time_sno in enumerate(slice_to_time): 68 | acquired_signal[..., space_sno, :] = \ 69 | big_data[..., space_sno, time_sno:n_vol_slices:n_slices] 70 | # do STC - minimizer will fail 71 | acquired_image = Nifti1Image(acquired_signal, np.eye(4)) 72 | stc = SpaceTimeRealign(acquired_image, TR, name, 2) 73 | stc.estimate(refscan=None, loops=1, between_loops=1, optimizer='steepest') 74 | # Check no motion estimated 75 | assert_array_equal([t.param for t in stc._transforms[0]], 0) 76 | corrected = stc.resample()[0].get_data() 77 | # check we approximate first time slice with correction 78 | assert_false(np.allclose(acquired_signal, corrected, rtol=1e-3, 79 | atol=0.1)) 80 | check_stc(first_signal, corrected, ref_slice=slice_to_time[0], 81 | rtol=5e-4, atol=1e-6) 82 | -------------------------------------------------------------------------------- /nireg/tests/test_transform.py: -------------------------------------------------------------------------------- 1 | """ Testing 2 | """ 3 | from __future__ import absolute_import 4 | 5 | import numpy as np 6 | 7 | from ..transform import Transform 8 | 9 | from numpy.testing import (assert_array_almost_equal, 10 | assert_array_equal) 11 | 12 | from nose.tools import assert_true, assert_equal, assert_raises 13 | 14 | def test_transform(): 15 | t = Transform(lambda x : x+1) 16 | pts = np.random.normal(size=(10,3)) 17 | assert_array_equal(t.apply(pts), pts+1) 18 | assert_raises(AttributeError, getattr, t, 'param') 19 | tm1 = Transform(lambda x : x-1) 20 
class Transform(object):
    """ A default transformation class

    This class specifies the tiny API. That is, the class should implement:

    * obj.param - the transformation exposed as a set of parameters. Changing
      param should change the transformation
    * obj.apply(pts) - accepts (N,3) array-like of points in 3 dimensions,
      returns an (N, 3) array of transformed points
    * obj.compose(xform) - accepts another object implementing ``apply``, and
      returns a new transformation object, where the resulting transformation
      is the composition of the ``obj`` transform onto the ``xform`` transform.
    """
    def __init__(self, func):
        # The wrapped callable does all the work; this class only forwards.
        self.func = func

    def apply(self, pts):
        """ Apply the wrapped transformation to `pts`, return the result. """
        transformed = self.func(pts)
        return transformed

    def compose(self, other):
        """ Return a new Transform applying `other` first, then this one. """
        def composed(pts):
            return self.apply(other.apply(pts))
        return Transform(composed)

    @property
    def param(self):
        # A generic transform has no parameter vector; raising AttributeError
        # lets callers treat `param` as simply absent (hasattr is False).
        raise AttributeError('No param for generic transform')
52 | """ 53 | if accept_none: 54 | if x is None: 55 | return 56 | try: 57 | shape = (int(s), ) 58 | except: 59 | shape = tuple(s) 60 | try: 61 | y = np.asarray(x) 62 | ok_type = _check_type(y[0], t) 63 | ok_shape = (y.shape == shape) 64 | except: 65 | raise ValueError('Argument should be convertible to ndarray') 66 | if not ok_type: 67 | raise ValueError('Array values should be convertible to %s' % t) 68 | if not ok_shape: 69 | raise ValueError('Array shape should be equivalent to %s' % shape) 70 | 71 | -------------------------------------------------------------------------------- /nireg/wichmann_prng.c: -------------------------------------------------------------------------------- 1 | #include "wichmann_prng.h" 2 | 3 | #include 4 | 5 | /* 6 | Assumption to be verified: 7 | ix, iy, iz, it should be set to values between 1 and 400000 8 | */ 9 | void prng_seed(int seed, prng_state* rng) 10 | { 11 | double r, rmax=(double)RAND_MAX; 12 | int imax = 400000; 13 | srand(seed); 14 | 15 | r = (double)rand()/rmax; 16 | rng->ix = (int)(imax*r); 17 | r = (double)rand()/rmax; 18 | rng->iy = (int)(imax*r); 19 | r = (double)rand()/rmax; 20 | rng->iz = (int)(imax*r); 21 | r = (double)rand()/rmax; 22 | rng->it = (int)(imax*r); 23 | 24 | return; 25 | } 26 | 27 | 28 | double prng_double(prng_state* rng) 29 | { 30 | double W; 31 | 32 | rng->ix = 11600 * (rng->ix % 185127) - 10379 * (rng->ix / 185127); 33 | rng->iy = 47003 * (rng->iy % 45688) - 10479 * (rng->iy / 45688); 34 | rng->iz = 23000 * (rng->iz % 93368) - 19423 * (rng->iz / 93368); 35 | rng->it = 33000 * (rng->it % 65075) - 8123 * (rng->it / 65075); 36 | 37 | if (rng->ix < 0) 38 | rng->ix = rng->ix + 2147483579; 39 | if (rng->iy < 0) 40 | rng->iy = rng->iy + 2147483543; 41 | if (rng->iz < 0) 42 | rng->iz = rng->iz + 2147483423; 43 | if (rng->it < 0) 44 | rng->it = rng->it + 2147483123; 45 | 46 | W = rng->ix/2147483579. + rng->iy/2147483543. 47 | + rng->iz/2147483423. 
+ rng->it/2147483123.; 48 | 49 | return W - (int)W; 50 | } 51 | 52 | -------------------------------------------------------------------------------- /nireg/wichmann_prng.h: -------------------------------------------------------------------------------- 1 | #ifndef WICHMANN_PRNG 2 | #define WICHMANN_PRNG 3 | 4 | #ifdef __cplusplus 5 | extern "C" { 6 | #endif 7 | 8 | /* 9 | B.A. Wichmann, I.D. Hill, Generating good pseudo-random numbers, 10 | Computational Statistics & Data Analysis, Volume 51, Issue 3, 1 11 | December 2006, Pages 1614-1622, ISSN 0167-9473, DOI: 12 | 10.1016/j.csda.2006.05.019. 13 | */ 14 | 15 | typedef struct { 16 | int ix; 17 | int iy; 18 | int iz; 19 | int it; 20 | } prng_state; 21 | 22 | extern void prng_seed(int seed, prng_state* rng); 23 | extern double prng_double(prng_state* prng); 24 | 25 | #ifdef __cplusplus 26 | } 27 | #endif 28 | 29 | #endif 30 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # See nipy/info.py for requirement definitions 2 | numpy>=1.6.0 3 | scipy>=0.9.0 4 | sympy>=0.7.0 5 | nibabel>=1.2.0 6 | -------------------------------------------------------------------------------- /scripts/nipy_3dto4d: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 ts=4 sw=4 et: 4 | DESCRIP = 'Read 3D image files and write a 4D file' 5 | EPILOG = \ 6 | '''nipy_3dto4d will take a series of 3D nifti images in any format readable by 7 | nibabel and concatenate them into a 4D image, and write the image with format 8 | guessed from the output image filename. You can set the filename with the 9 | ``--out-4d`` parameter, or we make a filename from the input names. 
10 | ''' 11 | 12 | import os 13 | from os.path import join as pjoin 14 | 15 | from nipy.externals.argparse import (ArgumentParser, 16 | RawDescriptionHelpFormatter) 17 | 18 | import nibabel as nib 19 | 20 | 21 | def do_3d_to_4d(filenames, check_affines=True): 22 | imgs = [] 23 | for fname in filenames: 24 | img = nib.load(fname) 25 | imgs.append(img) 26 | return nib.concat_images(imgs, check_affines=check_affines) 27 | 28 | 29 | def main(): 30 | parser = ArgumentParser(description=DESCRIP, 31 | epilog=EPILOG, 32 | formatter_class=RawDescriptionHelpFormatter) 33 | parser.add_argument('in_filenames', type=str, 34 | nargs='+', 35 | help='3D image filenames') 36 | parser.add_argument('--out-4d', type=str, 37 | help='4D output image name') 38 | parser.add_argument('--check-affines', type=bool, 39 | default=True, 40 | help='False if you want to ignore differences ' 41 | 'in affines between the 3D images, True if you ' 42 | 'want to raise an error for significant ' 43 | 'differences (default is True)') 44 | # parse the command line 45 | args = parser.parse_args() 46 | # get input 3ds 47 | filenames = args.in_filenames 48 | # affine check 49 | check_affines = args.check_affines 50 | # get output name 51 | out_fname = args.out_4d 52 | if out_fname is None: 53 | pth, fname = os.path.split(filenames[0]) 54 | froot, ext = os.path.splitext(fname) 55 | if ext in ('.gz', '.bz2'): 56 | gz = ext 57 | froot, ext = os.path.splitext(froot) 58 | else: 59 | gz = '' 60 | out_fname = pjoin(pth, froot + '_4d' + ext + gz) 61 | img4d = do_3d_to_4d(filenames, check_affines=check_affines) 62 | nib.save(img4d, out_fname) 63 | 64 | 65 | if __name__ == '__main__': 66 | main() 67 | -------------------------------------------------------------------------------- /scripts/nipy_4d_realign: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 
ts=4 sw=4 et: 4 | 5 | """This is a wrapper of SpaceTimeRealign 6 | 7 | Based on: 8 | 9 | Alexis Roche (2011) A Four-Dimensional Registration Algorithm With Application to Joint Correction of Motion and Slice Timing in fMRI. IEEE Trans. Med. Imaging 30(8): 1546-1554 10 | 11 | """ 12 | import os 13 | import os.path as op 14 | import nipy.algorithms.registration as reg 15 | import nipy.externals.argparse as argparse 16 | 17 | parser = argparse.ArgumentParser() 18 | 19 | parser.add_argument('TR', type=float, metavar='Float', help="""The TR of the measurement""") 20 | 21 | parser.add_argument('input', type=str, metavar='File', 22 | help="""Path to a nifti file, or to a folder containing nifti files. If a path to a folder is provided, the order of motion correction will be np.sort(list_of_files). The outputs will be '*_mc.par' (containing 3 translation and three rotation parameters) and '*_mc.nii.gz' containing the motion corrected data (unless 'apply' is set to False)""") 23 | 24 | parser.add_argument('--slice_order', type=str, metavar='String', 25 | help="""The order of slice aquisition {'ascending', 'descending' (default), or the name of a function from `nipy.algorithms.slicetiming.timefuncs`}""", default='descending') 26 | 27 | parser.add_argument('--slice_dim', type=int, metavar='Int', help="""Integer 28 | denoting the axis in `images` that is the slice axis. In a 4D image, this will 29 | often be axis = 2 (default).""", default=2) 30 | 31 | parser.add_argument('--slice_dir', type=int, metavar='Int', help=""" 1 if the 32 | slices were acquired slice 0 first (default), slice -1 last, or -1 if acquire slice -1 first, slice 0 last.""", default=1) 33 | 34 | parser.add_argument('--make_figure', type=bool, metavar='Bool', 35 | help="""Whether to generate a '.png' figure with the motion parameters across runs. {True, False}. 
if __name__ == '__main__':
    if args.save_path == 'none':
        # Default: write outputs next to the input file(s).
        save_path = op.split(args.input)[0]
    else:
        save_path = args.save_path

    xform = reg.space_time_realign(args.input, float(args.TR),
                                   slice_order=args.slice_order,
                                   slice_dim=int(args.slice_dim),
                                   slice_dir=int(args.slice_dir),
                                   apply=True,  # We always apply the xform in the cli
                                   make_figure=args.make_figure,
                                   out_name=save_path)

    if args.save_params:
        # BUG FIX: the original used the Python 2-only builtin ``file()``,
        # which raises NameError on Python 3. Use open() in a ``with`` block
        # so the handle is closed even if writing fails part-way.
        with open(op.join(save_path, 'mc.par'), 'w') as f:
            # One line per volume: 3 Euler rotations then 3 translations,
            # tab separated.
            for x in xform:
                euler_rot = reg.aff2euler(x.as_affine())
                for r in euler_rot:
                    f.write('%s\t' % r)
                for t in x.translation[:-1]:
                    f.write('%s\t' % t)
                f.write('%s\n' % x.translation[-1])
def main():
    """ Split a 4D image into per-volume 3D nifti files.

    Output files are named ``<root>_NNNN.nii`` alongside the input, or
    under ``--out-path`` when that option is given.
    """
    parser = ArgumentParser(description=DESCRIP,
                            epilog=EPILOG,
                            formatter_class=RawDescriptionHelpFormatter)
    parser.add_argument('filename', type=str,
                        help='4D image filename')
    parser.add_argument('--out-path', type=str,
                        help='path for output image files')
    args = parser.parse_args()
    # Load the 4D series and break it into one image per volume.
    volumes = nib.four_to_three(nib.load(args.filename))
    # Strip a compression suffix first, then the image extension, to get
    # the bare filename root.
    root, ext = splitext(args.filename)
    if ext in ('.gz', '.bz2'):
        root, ext = splitext(root)
    if args.out_path is not None:
        # Redirect output to the requested directory, keeping the basename.
        root = pjoin(args.out_path, psplit(root)[1])
    for vol_no, vol_img in enumerate(volumes):
        nib.save(vol_img, '%s_%04d.nii' % (root, vol_no))