├── .dockerignore ├── .env ├── .gitignore ├── .gitmodules ├── .travis.yml ├── Dockerfile ├── LICENSE.txt ├── MANIFEST.in ├── README.md ├── doc ├── Makefile ├── conf.py ├── index.rst └── source │ ├── modules.rst │ ├── neuropythy.commands.rst │ ├── neuropythy.datasets.rst │ ├── neuropythy.freesurfer.rst │ ├── neuropythy.geometry.rst │ ├── neuropythy.graphics.rst │ ├── neuropythy.hcp.rst │ ├── neuropythy.io.rst │ ├── neuropythy.java.rst │ ├── neuropythy.mri.rst │ ├── neuropythy.registration.rst │ ├── neuropythy.rst │ ├── neuropythy.test.rst │ ├── neuropythy.util.rst │ └── neuropythy.vision.rst ├── docker-compose.yml ├── docker ├── build.sh ├── help.txt ├── jupyter_notebook_config.py ├── main.sh └── npythyrc ├── neuropythy ├── __init__.py ├── __main__.py ├── commands │ ├── __init__.py │ ├── atlas.py │ ├── benson14_retinotopy.py │ ├── register_retinotopy.py │ ├── retinotopy.py │ └── surface_to_image.py ├── datasets │ ├── __init__.py │ ├── benson_winawer_2018.py │ ├── core.py │ ├── hcp.py │ ├── hcp_lines.py │ └── visual_performance_fields.py ├── freesurfer │ ├── __init__.py │ └── core.py ├── geometry │ ├── __init__.py │ ├── mesh.py │ └── util.py ├── graphics │ ├── __init__.py │ └── core.py ├── hcp │ ├── __init__.py │ ├── core.py │ └── files.py ├── io │ ├── __init__.py │ └── core.py ├── java │ └── __init__.py ├── lib │ ├── data │ │ ├── fs_LR │ │ │ ├── lh.atlasroi.164k_fs_LR.shape.gii │ │ │ ├── lh.atlasroi.32k_fs_LR.shape.gii │ │ │ ├── lh.atlasroi.59k_fs_LR.shape.gii │ │ │ ├── rh.atlasroi.164k_fs_LR.shape.gii │ │ │ ├── rh.atlasroi.32k_fs_LR.shape.gii │ │ │ └── rh.atlasroi.59k_fs_LR.shape.gii │ │ ├── fsaverage │ │ │ └── surf │ │ │ │ ├── lh.benson14_angle.v4_0.mgz │ │ │ │ ├── lh.benson14_eccen.v4_0.mgz │ │ │ │ ├── lh.benson14_retinotopy.v4_0.sphere.reg │ │ │ │ ├── lh.benson14_sigma.v4_0.mgz │ │ │ │ ├── lh.benson14_varea.v4_0.mgz │ │ │ │ ├── lh.glasser16_atlas.v1_0.mgz │ │ │ │ ├── lh.rosenke18_vcatlas.v1_0.mgz │ │ │ │ ├── lh.wang15_fplbl.v1_0.mgz │ │ │ │ ├── 
lh.wang15_mplbl.v1_0.mgz │ │ │ │ ├── rh.benson14_angle.v4_0.mgz │ │ │ │ ├── rh.benson14_eccen.v4_0.mgz │ │ │ │ ├── rh.benson14_retinotopy.v4_0.sphere.reg │ │ │ │ ├── rh.benson14_sigma.v4_0.mgz │ │ │ │ ├── rh.benson14_varea.v4_0.mgz │ │ │ │ ├── rh.glasser16_atlas.v1_0.mgz │ │ │ │ ├── rh.rosenke18_vcatlas.v1_0.mgz │ │ │ │ ├── rh.wang15_fplbl.v1_0.mgz │ │ │ │ └── rh.wang15_mplbl.v1_0.mgz │ │ ├── fsaverage_sym │ │ │ └── surf │ │ │ │ ├── lh.benson14_angle.v1_0.mgz │ │ │ │ ├── lh.benson14_angle.v2_0.mgz │ │ │ │ ├── lh.benson14_angle.v2_1.mgz │ │ │ │ ├── lh.benson14_angle.v2_5.mgz │ │ │ │ ├── lh.benson14_angle.v3_0.mgz │ │ │ │ ├── lh.benson14_anorm.v1_0.mgz │ │ │ │ ├── lh.benson14_eccen.v1_0.mgz │ │ │ │ ├── lh.benson14_eccen.v2_0.mgz │ │ │ │ ├── lh.benson14_eccen.v2_1.mgz │ │ │ │ ├── lh.benson14_eccen.v2_5.mgz │ │ │ │ ├── lh.benson14_eccen.v3_0.mgz │ │ │ │ ├── lh.benson14_enorm.v1_0.mgz │ │ │ │ ├── lh.benson14_retinotopy.v3_0.sphere.reg │ │ │ │ ├── lh.benson14_sigma.v3_0.mgz │ │ │ │ ├── lh.benson14_varea.v1.0.mgz │ │ │ │ ├── lh.benson14_varea.v2_0.mgz │ │ │ │ ├── lh.benson14_varea.v2_1.mgz │ │ │ │ ├── lh.benson14_varea.v2_5.mgz │ │ │ │ └── lh.benson14_varea.v3_0.mgz │ │ └── hcp_lines_osftree.json.gz │ ├── models │ │ ├── lh.benson17.fmm.gz │ │ ├── rh.benson17.fmm.gz │ │ └── v123.fmm.gz │ └── projections │ │ ├── lh.occipital_pole.mp.json │ │ └── rh.occipital_pole.mp.json ├── math │ ├── __init__.py │ └── core.py ├── mri │ ├── __init__.py │ ├── core.py │ └── images.py ├── optimize │ ├── __init__.py │ └── core.py ├── plans │ ├── __init__.py │ ├── core.py │ └── prfclean.py ├── registration │ ├── __init__.py │ └── core.py ├── test │ ├── __init__.py │ ├── math.py │ └── optimize.py ├── util │ ├── __init__.py │ ├── conf.py │ ├── core.py │ ├── filemap.py │ └── labels.py └── vision │ ├── __init__.py │ ├── cmag.py │ ├── models.py │ └── retinotopy.py ├── pyproject.toml ├── requirements-dev.txt ├── requirements-dev27.txt ├── requirements.txt ├── setup.cfg └── setup.py /.dockerignore: 
-------------------------------------------------------------------------------- 1 | .git 2 | .ipynb_checkpoints 3 | .DS_Store 4 | dist 5 | Dockerfile 6 | .gitignore 7 | .dockerignore 8 | -------------------------------------------------------------------------------- /.env: -------------------------------------------------------------------------------- 1 | SUBJECTS_DIR=/dev/null 2 | HCP_SUBJECTS_DIR=/dev/null 3 | NPYTHY_DATA_CACHE_ROOT=/dev/null 4 | NPYTHY_EXT_DIR=/dev/null 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore all the compiled files 2 | *.pyc 3 | 4 | # And these particular notebook 5 | scratch.ipynb 6 | debug.ipynb 7 | 8 | # This is a backup 9 | *-backup 10 | .first-commit-backup 11 | 12 | # Ignore the meta directories 13 | build 14 | dist 15 | *.egg-info 16 | .ipynb_checkpoints 17 | 18 | # My push script is local 19 | push.sh 20 | 21 | # These are necessary for the docker, but too big to store on github 22 | docker/required_subjects.tar.gz 23 | docker/required_subjects 24 | 25 | # sphinx 26 | .build 27 | .template 28 | .static -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "neuropythy/lib/nben"] 2 | path = neuropythy/lib/nben 3 | url = https://github.com/noahbenson/nben 4 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | sudo: required 3 | dist: xenial 4 | 5 | python: 6 | - 3.6 7 | 8 | install: 9 | - if [ "$TRAVIS_PYTHON_VERSION" == "2.7" ]; then travis_retry pip install -r requirements-dev27.txt; else travis_retry pip install -r requirements-dev.txt; fi 10 | - if [ "$TRAVIS_PYTHON_VERSION" == "2.7" ]; then 
travis_retry pip install 'pytest>4.0'; else travis_retry pip install 'pytest>=5.2'; fi 11 | - travis_retry pip install -e . 12 | 13 | script: 14 | - travis_wait python -m unittest neuropythy.test 15 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # This Dockerfile constructs a docker image that contains an installation 2 | # of the Neuropythy library. 3 | # 4 | # Example build: 5 | # docker build --no-cache --tag nben/neuropythy `pwd` 6 | # 7 | # (but really, use docker-compose up instead). 8 | # 9 | 10 | # Start with the Jupyter scipy notebook docker-image. 11 | # We tag this to a specific version so that we're assured of future success. 12 | #FROM jupyter/scipy-notebook:lab-3.4.3 13 | FROM jupyter/scipy-notebook:python-3.9.13 14 | 15 | # Note the Maintainer. 16 | MAINTAINER Noah C. Benson 17 | 18 | # Initial Root Operations ###################################################### 19 | USER root 20 | 21 | # Install things that require apt. 22 | RUN apt-get update \ 23 | && apt-get install -y --no-install-recommends curl \ 24 | && apt-get install -y default-jdk fonts-open-sans 25 | 26 | # Make some global directories in the user's name also 27 | RUN mkdir -p /data/required_subjects \ 28 | && chown -R root:root /data/required_subjects \ 29 | && chmod -R 775 /data/required_subjects 30 | RUN mkdir -p /data/hcp \ 31 | && chown $NB_USER /data /data/hcp \ 32 | && chmod 775 /data /data/hcp 33 | 34 | # Download the required FreeSurfer subjects. 
35 | RUN curl -L -o /data/required_subjects/fsaverage.tar.gz \ 36 | https://github.com/noahbenson/neuropythy/wiki/files/fsaverage.tar.gz \ 37 | && cd /data/required_subjects \ 38 | && tar zxf fsaverage.tar.gz \ 39 | && rm fsaverage.tar.gz 40 | RUN curl -L -o /data/required_subjects/fsaverage_sym.tar.gz \ 41 | https://github.com/noahbenson/neuropythy/wiki/files/fsaverage_sym.tar.gz \ 42 | && cd /data/required_subjects \ 43 | && tar zxf fsaverage_sym.tar.gz \ 44 | && rm ./fsaverage_sym.tar.gz 45 | 46 | 47 | # Initial User Operations ###################################################### 48 | USER $NB_USER 49 | 50 | # Install our Python dependencies. 51 | RUN eval "$(command conda shell.bash hook)" \ 52 | && conda activate \ 53 | && conda install --yes py4j nibabel s3fs pip \ 54 | && conda install --yes -c conda-forge ipywidgets widgetsnbextension \ 55 | ipyvolume nodejs \ 56 | jupyter_contrib_nbextensions \ 57 | && conda install --yes -c pytorch pytorch torchvision 58 | RUN eval "$(command conda shell.bash hook)" \ 59 | && conda activate \ 60 | && pip install 'setuptools == 69.0.0' 61 | 62 | # We need to do some extra work for ipyvolume to work in jupyter-labs 63 | # and with nbextensions. 64 | RUN eval "$(command conda shell.bash hook)" \ 65 | && conda activate \ 66 | && jupyter labextension install @jupyter-widgets/jupyterlab-manager \ 67 | && jupyter labextension install ipyvolume \ 68 | && jupyter labextension install jupyter-threejs 69 | RUN eval "$(command conda shell.bash hook)" \ 70 | && conda activate \ 71 | && jupyter nbextension enable collapsible_headings/main \ 72 | && jupyter nbextension enable select_keymap/main \ 73 | && jupyter nbextension enable --py --user widgetsnbextension \ 74 | && jupyter nbextension enable --py --user pythreejs \ 75 | && jupyter nbextension enable --py --user ipywebrtc \ 76 | && jupyter nbextension enable --py --user ipyvolume 77 | 78 | # Install the helvetica neue font (for figures). 
79 | RUN mkdir -p ~/.local/share/fonts/helvetica_neue_tmp 80 | RUN curl -L -o ~/.local/share/fonts/helvetica_neue_tmp/helveticaneue.zip \ 81 | https://github.com/noahbenson/neuropythy/wiki/files/helveticaneue.zip \ 82 | && cd ~/.local/share/fonts/helvetica_neue_tmp \ 83 | && unzip helveticaneue.zip \ 84 | && mv *.ttf .. \ 85 | && cd .. \ 86 | && rm -r ~/.local/share/fonts/helvetica_neue_tmp \ 87 | && fc-cache -f -v \ 88 | && rm -r ~/.cache/matplotlib 89 | 90 | # Install Neuropythy from the current directory. 91 | RUN mkdir /home/$NB_USER/neuropythy 92 | COPY ./setup.py ./setup.cfg ./MANIFEST.in ./LICENSE.txt ./README.md \ 93 | ./requirements-dev.txt ./requirements.txt \ 94 | /home/$NB_USER/neuropythy/ 95 | COPY ./neuropythy /home/$NB_USER/neuropythy/neuropythy 96 | RUN eval "$(command conda shell.bash hook)" \ 97 | && conda activate \ 98 | && cd /home/$NB_USER/neuropythy \ 99 | && pip3 install -r ./requirements-dev.txt \ 100 | && python3 setup.py install 101 | 102 | 103 | # Final Root Operations ######################################################## 104 | USER root 105 | 106 | # Copy the README, license, help, and script files over. 107 | COPY LICENSE.txt /LICENSE.txt 108 | COPY README.md /README.md 109 | COPY docker/main.sh /main.sh 110 | COPY docker/help.txt /help.txt 111 | RUN chmod 755 /main.sh 112 | RUN chmod 644 /help.txt /README.md /LICENSE.txt 113 | 114 | 115 | # Final User Operations ######################################################## 116 | USER $NB_USER 117 | 118 | # Copy over some files... 119 | RUN mkdir -p /home/$NB_USER/.jupyter 120 | COPY ./docker/npythyrc /home/$NB_USER/.npythyrc 121 | COPY ./docker/jupyter_notebook_config.py /home/$NB_USER/.jupyter/ 122 | 123 | # Mark the entrypoint. 
124 | ENTRYPOINT ["tini", "-g", "--", "/main.sh"] 125 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include neuropythy/lib/nben/target/nben-standalone.jar 2 | include neuropythy/lib/models/v123.fmm.gz 3 | include neuropythy/lib/models/lh.benson17.fmm.gz 4 | include neuropythy/lib/models/rh.benson17.fmm.gz 5 | include neuropythy/lib/projections/lh.occipital_pole.mp.json 6 | include neuropythy/lib/projections/rh.occipital_pole.mp.json 7 | include neuropythy/lib/data/fsaverage/surf/lh.benson14_angle.v4_0.mgz 8 | include neuropythy/lib/data/fsaverage/surf/lh.benson14_sigma.v4_0.mgz 9 | include neuropythy/lib/data/fsaverage/surf/lh.benson14_varea.v4_0.mgz 10 | include neuropythy/lib/data/fsaverage/surf/lh.benson14_eccen.v4_0.mgz 11 | include neuropythy/lib/data/fsaverage/surf/rh.benson14_retinotopy.v4_0.sphere.reg 12 | include neuropythy/lib/data/fsaverage/surf/lh.wang15_mplbl.v1_0.mgz 13 | include neuropythy/lib/data/fsaverage/surf/lh.wang15_fplbl.v1_0.mgz 14 | include neuropythy/lib/data/fsaverage/surf/rh.benson14_varea.v4_0.mgz 15 | include neuropythy/lib/data/fsaverage/surf/rh.benson14_eccen.v4_0.mgz 16 | include neuropythy/lib/data/fsaverage/surf/rh.wang15_mplbl.v1_0.mgz 17 | include neuropythy/lib/data/fsaverage/surf/rh.wang15_fplbl.v1_0.mgz 18 | include neuropythy/lib/data/fsaverage/surf/rh.benson14_angle.v4_0.mgz 19 | include neuropythy/lib/data/fsaverage/surf/rh.benson14_sigma.v4_0.mgz 20 | include neuropythy/lib/data/fsaverage/surf/lh.benson14_retinotopy.v4_0.sphere.reg 21 | include neuropythy/lib/data/fsaverage/surf/lh.rosenke18_vcatlas.v1_0.mgz 22 | include neuropythy/lib/data/fsaverage/surf/rh.rosenke18_vcatlas.v1_0.mgz 23 | include neuropythy/lib/data/fsaverage/surf/lh.glasser16_atlas.v1_0.mgz 24 | include neuropythy/lib/data/fsaverage/surf/rh.glasser16_atlas.v1_0.mgz 25 | include 
neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v2_0.mgz 26 | include neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v2_1.mgz 27 | include neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v3_0.mgz 28 | include neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_enorm.v1_0.mgz 29 | include neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v2_5.mgz 30 | include neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v3_0.mgz 31 | include neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v1_0.mgz 32 | include neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v3_0.mgz 33 | include neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v2_5.mgz 34 | include neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_sigma.v3_0.mgz 35 | include neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v2_5.mgz 36 | include neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v1_0.mgz 37 | include neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v2_0.mgz 38 | include neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v2_1.mgz 39 | include neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v2_0.mgz 40 | include neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v2_1.mgz 41 | include neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v1.0.mgz 42 | include neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_anorm.v1_0.mgz 43 | include neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_retinotopy.v3_0.sphere.reg 44 | include neuropythy/lib/data/fs_LR/lh.atlasroi.32k_fs_LR.shape.gii 45 | include neuropythy/lib/data/fs_LR/rh.atlasroi.32k_fs_LR.shape.gii 46 | include neuropythy/lib/data/fs_LR/lh.atlasroi.59k_fs_LR.shape.gii 47 | include neuropythy/lib/data/fs_LR/rh.atlasroi.59k_fs_LR.shape.gii 48 | include neuropythy/lib/data/fs_LR/lh.atlasroi.164k_fs_LR.shape.gii 49 | include neuropythy/lib/data/fs_LR/rh.atlasroi.164k_fs_LR.shape.gii 50 | include neuropythy/lib/data/hcp_lines_osftree.json.gz 51 | 
-------------------------------------------------------------------------------- /doc/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SOURCEDIR = . 8 | BUILDDIR = .build 9 | 10 | # Put it first so that "make" without argument is like "make help". 11 | help: 12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 13 | 14 | .PHONY: help Makefile 15 | 16 | # Catch-all target: route all unknown targets to Sphinx using the new 17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 18 | %: Makefile 19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 20 | -------------------------------------------------------------------------------- /doc/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration file for the Sphinx documentation builder. 4 | # 5 | # This file does only contain a selection of the most common options. For a 6 | # full list see the documentation: 7 | # http://www.sphinx-doc.org/en/master/config 8 | 9 | # -- Path setup -------------------------------------------------------------- 10 | 11 | # If extensions (or modules to document with autodoc) are in another directory, 12 | # add these directories to sys.path here. If the directory is relative to the 13 | # documentation root, use os.path.abspath to make it absolute, like shown here. 14 | # 15 | import os 16 | import sys 17 | sys.path.insert(0, os.path.abspath('.')) 18 | sys.path.insert(0, os.path.abspath('../')) 19 | 20 | 21 | # -- Project information ----------------------------------------------------- 22 | 23 | project = 'neuropythy' 24 | copyright = '2018, Noah C. Benson' 25 | author = 'Noah C. 
Benson' 26 | 27 | # The short X.Y version 28 | version = '' 29 | # The full version, including alpha/beta/rc tags 30 | release = '' 31 | 32 | 33 | # -- General configuration --------------------------------------------------- 34 | 35 | # If your documentation needs a minimal Sphinx version, state it here. 36 | # 37 | # needs_sphinx = '1.0' 38 | 39 | # Add any Sphinx extension module names here, as strings. They can be 40 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 41 | # ones. 42 | extensions = [ 43 | 'sphinx.ext.autodoc', 44 | 'sphinx.ext.viewcode', 45 | ] 46 | 47 | # Add any paths that contain templates here, relative to this directory. 48 | templates_path = ['.templates'] 49 | 50 | # The suffix(es) of source filenames. 51 | # You can specify multiple suffix as a list of string: 52 | # 53 | # source_suffix = ['.rst', '.md'] 54 | source_suffix = '.rst' 55 | 56 | # The master toctree document. 57 | master_doc = 'index' 58 | 59 | # The language for content autogenerated by Sphinx. Refer to documentation 60 | # for a list of supported languages. 61 | # 62 | # This is also used if you do content translation via gettext catalogs. 63 | # Usually you set "language" from the command line for these cases. 64 | language = None 65 | 66 | # List of patterns, relative to source directory, that match files and 67 | # directories to ignore when looking for source files. 68 | # This pattern also affects html_static_path and html_extra_path. 69 | exclude_patterns = ['.build', 'Thumbs.db', '.DS_Store'] 70 | 71 | # The name of the Pygments (syntax highlighting) style to use. 72 | pygments_style = None 73 | 74 | 75 | # -- Options for HTML output ------------------------------------------------- 76 | 77 | # The theme to use for HTML and HTML Help pages. See the documentation for 78 | # a list of builtin themes. 79 | # 80 | html_theme = 'sphinx_rtd_theme' 81 | 82 | # Theme options are theme-specific and customize the look and feel of a theme 83 | # further. 
For a list of options available for each theme, see the 84 | # documentation. 85 | # 86 | # html_theme_options = {} 87 | 88 | # Add any paths that contain custom static files (such as style sheets) here, 89 | # relative to this directory. They are copied after the builtin static files, 90 | # so a file named "default.css" will overwrite the builtin "default.css". 91 | html_static_path = ['.static'] 92 | 93 | # Custom sidebar templates, must be a dictionary that maps document names 94 | # to template names. 95 | # 96 | # The default sidebars (for documents that don't match any pattern) are 97 | # defined by theme itself. Builtin themes are using these templates by 98 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 99 | # 'searchbox.html']``. 100 | # 101 | # html_sidebars = {} 102 | 103 | 104 | # -- Options for HTMLHelp output --------------------------------------------- 105 | 106 | # Output file base name for HTML help builder. 107 | htmlhelp_basename = 'neuropythydoc' 108 | 109 | 110 | # -- Options for LaTeX output ------------------------------------------------ 111 | 112 | latex_elements = { 113 | # The paper size ('letterpaper' or 'a4paper'). 114 | # 115 | # 'papersize': 'letterpaper', 116 | 117 | # The font size ('10pt', '11pt' or '12pt'). 118 | # 119 | # 'pointsize': '10pt', 120 | 121 | # Additional stuff for the LaTeX preamble. 122 | # 123 | # 'preamble': '', 124 | 125 | # Latex figure (float) alignment 126 | # 127 | # 'figure_align': 'htbp', 128 | } 129 | 130 | # Grouping the document tree into LaTeX files. List of tuples 131 | # (source start file, target name, title, 132 | # author, documentclass [howto, manual, or own class]). 133 | latex_documents = [ 134 | (master_doc, 'neuropythy.tex', 'neuropythy Documentation', 135 | 'Noah C. Benson', 'manual'), 136 | ] 137 | 138 | 139 | # -- Options for manual page output ------------------------------------------ 140 | 141 | # One entry per manual page. 
List of tuples 142 | # (source start file, name, description, authors, manual section). 143 | man_pages = [ 144 | (master_doc, 'neuropythy', 'neuropythy Documentation', 145 | [author], 1) 146 | ] 147 | 148 | 149 | # -- Options for Texinfo output ---------------------------------------------- 150 | 151 | # Grouping the document tree into Texinfo files. List of tuples 152 | # (source start file, target name, title, author, 153 | # dir menu entry, description, category) 154 | texinfo_documents = [ 155 | (master_doc, 'neuropythy', 'neuropythy Documentation', 156 | author, 'neuropythy', 'One line description of project.', 157 | 'Miscellaneous'), 158 | ] 159 | 160 | 161 | # -- Options for Epub output ------------------------------------------------- 162 | 163 | # Bibliographic Dublin Core info. 164 | epub_title = project 165 | 166 | # The unique identifier of the text. This can be a ISBN number 167 | # or the project homepage. 168 | # 169 | # epub_identifier = '' 170 | 171 | # A unique identification for the text. 172 | # 173 | # epub_uid = '' 174 | 175 | # A list of files that should not be packed into the epub file. 176 | epub_exclude_files = ['search.html'] 177 | 178 | 179 | # -- Extension configuration ------------------------------------------------- 180 | -------------------------------------------------------------------------------- /doc/index.rst: -------------------------------------------------------------------------------- 1 | .. neuropythy documentation master file, created by 2 | sphinx-quickstart on Tue Sep 18 18:26:48 2018. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Documentation for `neuropythy `_ 7 | ========================================================================== 8 | 9 | .. 
toctree:: 10 | :maxdepth: 2 11 | :caption: Contents: 12 | 13 | 14 | Indices and tables 15 | ================== 16 | 17 | * :ref:`genindex` 18 | * :ref:`modindex` 19 | * :ref:`search` 20 | -------------------------------------------------------------------------------- /doc/source/modules.rst: -------------------------------------------------------------------------------- 1 | neuropythy 2 | ========== 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | neuropythy 8 | -------------------------------------------------------------------------------- /doc/source/neuropythy.commands.rst: -------------------------------------------------------------------------------- 1 | neuropythy.commands package 2 | =========================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | neuropythy.commands.benson14\_retinotopy module 8 | ----------------------------------------------- 9 | 10 | .. automodule:: neuropythy.commands.benson14_retinotopy 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | neuropythy.commands.register\_retinotopy module 16 | ----------------------------------------------- 17 | 18 | .. automodule:: neuropythy.commands.register_retinotopy 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | neuropythy.commands.surface\_to\_image module 24 | --------------------------------------------- 25 | 26 | .. automodule:: neuropythy.commands.surface_to_image 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | 32 | Module contents 33 | --------------- 34 | 35 | .. 
automodule:: neuropythy.commands 36 | :members: 37 | :undoc-members: 38 | :show-inheritance: 39 | -------------------------------------------------------------------------------- /doc/source/neuropythy.datasets.rst: -------------------------------------------------------------------------------- 1 | neuropythy.datasets package 2 | =========================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | neuropythy.datasets.benson\_winawer\_2018 module 8 | ------------------------------------------------ 9 | 10 | .. automodule:: neuropythy.datasets.benson_winawer_2018 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | neuropythy.datasets.core module 16 | ------------------------------- 17 | 18 | .. automodule:: neuropythy.datasets.core 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | 24 | Module contents 25 | --------------- 26 | 27 | .. automodule:: neuropythy.datasets 28 | :members: 29 | :undoc-members: 30 | :show-inheritance: 31 | -------------------------------------------------------------------------------- /doc/source/neuropythy.freesurfer.rst: -------------------------------------------------------------------------------- 1 | neuropythy.freesurfer package 2 | ============================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | neuropythy.freesurfer.core module 8 | --------------------------------- 9 | 10 | .. automodule:: neuropythy.freesurfer.core 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | 16 | Module contents 17 | --------------- 18 | 19 | .. 
automodule:: neuropythy.freesurfer 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | -------------------------------------------------------------------------------- /doc/source/neuropythy.geometry.rst: -------------------------------------------------------------------------------- 1 | neuropythy.geometry package 2 | =========================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | neuropythy.geometry.mesh module 8 | ------------------------------- 9 | 10 | .. automodule:: neuropythy.geometry.mesh 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | neuropythy.geometry.util module 16 | ------------------------------- 17 | 18 | .. automodule:: neuropythy.geometry.util 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | 24 | Module contents 25 | --------------- 26 | 27 | .. automodule:: neuropythy.geometry 28 | :members: 29 | :undoc-members: 30 | :show-inheritance: 31 | -------------------------------------------------------------------------------- /doc/source/neuropythy.graphics.rst: -------------------------------------------------------------------------------- 1 | neuropythy.graphics package 2 | =========================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | neuropythy.graphics.core module 8 | ------------------------------- 9 | 10 | .. automodule:: neuropythy.graphics.core 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | 16 | Module contents 17 | --------------- 18 | 19 | .. automodule:: neuropythy.graphics 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | -------------------------------------------------------------------------------- /doc/source/neuropythy.hcp.rst: -------------------------------------------------------------------------------- 1 | neuropythy.hcp package 2 | ====================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | neuropythy.hcp.core module 8 | -------------------------- 9 | 10 | .. 
automodule:: neuropythy.hcp.core 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | neuropythy.hcp.files module 16 | --------------------------- 17 | 18 | .. automodule:: neuropythy.hcp.files 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | 24 | Module contents 25 | --------------- 26 | 27 | .. automodule:: neuropythy.hcp 28 | :members: 29 | :undoc-members: 30 | :show-inheritance: 31 | -------------------------------------------------------------------------------- /doc/source/neuropythy.io.rst: -------------------------------------------------------------------------------- 1 | neuropythy.io package 2 | ===================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | neuropythy.io.core module 8 | ------------------------- 9 | 10 | .. automodule:: neuropythy.io.core 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | 16 | Module contents 17 | --------------- 18 | 19 | .. automodule:: neuropythy.io 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | -------------------------------------------------------------------------------- /doc/source/neuropythy.java.rst: -------------------------------------------------------------------------------- 1 | neuropythy.java package 2 | ======================= 3 | 4 | Module contents 5 | --------------- 6 | 7 | .. automodule:: neuropythy.java 8 | :members: 9 | :undoc-members: 10 | :show-inheritance: 11 | -------------------------------------------------------------------------------- /doc/source/neuropythy.mri.rst: -------------------------------------------------------------------------------- 1 | neuropythy.mri package 2 | ====================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | neuropythy.mri.core module 8 | -------------------------- 9 | 10 | .. automodule:: neuropythy.mri.core 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | 16 | Module contents 17 | --------------- 18 | 19 | .. 
automodule:: neuropythy.mri 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | -------------------------------------------------------------------------------- /doc/source/neuropythy.registration.rst: -------------------------------------------------------------------------------- 1 | neuropythy.registration package 2 | =============================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | neuropythy.registration.core module 8 | ----------------------------------- 9 | 10 | .. automodule:: neuropythy.registration.core 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | 16 | Module contents 17 | --------------- 18 | 19 | .. automodule:: neuropythy.registration 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | -------------------------------------------------------------------------------- /doc/source/neuropythy.rst: -------------------------------------------------------------------------------- 1 | neuropythy package 2 | ================== 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. toctree:: 8 | 9 | neuropythy.commands 10 | neuropythy.datasets 11 | neuropythy.freesurfer 12 | neuropythy.geometry 13 | neuropythy.graphics 14 | neuropythy.hcp 15 | neuropythy.io 16 | neuropythy.java 17 | neuropythy.mri 18 | neuropythy.registration 19 | neuropythy.test 20 | neuropythy.util 21 | neuropythy.vision 22 | 23 | Module contents 24 | --------------- 25 | 26 | .. automodule:: neuropythy 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | -------------------------------------------------------------------------------- /doc/source/neuropythy.test.rst: -------------------------------------------------------------------------------- 1 | neuropythy.test package 2 | ======================= 3 | 4 | Module contents 5 | --------------- 6 | 7 | .. 
automodule:: neuropythy.test 8 | :members: 9 | :undoc-members: 10 | :show-inheritance: 11 | -------------------------------------------------------------------------------- /doc/source/neuropythy.util.rst: -------------------------------------------------------------------------------- 1 | neuropythy.util package 2 | ======================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | neuropythy.util.conf module 8 | --------------------------- 9 | 10 | .. automodule:: neuropythy.util.conf 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | neuropythy.util.core module 16 | --------------------------- 17 | 18 | .. automodule:: neuropythy.util.core 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | 24 | Module contents 25 | --------------- 26 | 27 | .. automodule:: neuropythy.util 28 | :members: 29 | :undoc-members: 30 | :show-inheritance: 31 | -------------------------------------------------------------------------------- /doc/source/neuropythy.vision.rst: -------------------------------------------------------------------------------- 1 | neuropythy.vision package 2 | ========================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | neuropythy.vision.cmag module 8 | ----------------------------- 9 | 10 | .. automodule:: neuropythy.vision.cmag 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | neuropythy.vision.models module 16 | ------------------------------- 17 | 18 | .. automodule:: neuropythy.vision.models 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | neuropythy.vision.optimize module 24 | --------------------------------- 25 | 26 | .. automodule:: neuropythy.vision.optimize 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | neuropythy.vision.retinotopy module 32 | ----------------------------------- 33 | 34 | .. 
automodule:: neuropythy.vision.retinotopy 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | 40 | Module contents 41 | --------------- 42 | 43 | .. automodule:: neuropythy.vision 44 | :members: 45 | :undoc-members: 46 | :show-inheritance: 47 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | services: 3 | neuropythy: 4 | build: . 5 | ports: 6 | - "8888:8888" 7 | environment: 8 | - HCP_AUTO_RELEASE="$HCP_AUTO_RELEASE" 9 | - HCP_AUTO_DATABASE="$HCP_AUTO_DATABASE" 10 | - HCP_CREDENTIALS="$HCP_CREDENTIALS" 11 | - HCP_AUTO_DOWNLOAD="$HCP_AUTO_DOWNLOAD" 12 | - HCP_AUTO_PATH="/data/hcp/subjects" 13 | - HCP_SUBJECTS_DIR="/data/hcp/subjects" 14 | - SUBJECTS_DIR="/data/freesurfer_subjects" 15 | - NPYTHY_DATA_CACHE_ROOT="/data/cache" 16 | volumes: 17 | - "$SUBJECTS_DIR:/data/freesurfer_subjects" 18 | - "$HCP_SUBJECTS_DIR:/data/hcp/subjects" 19 | - "$NPYTHY_DATA_CACHE_ROOT:/data/cache" 20 | - "$NPYTHY_EXT_DIR:/data/ext" 21 | -------------------------------------------------------------------------------- /docker/build.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | updir="`dirname \"$PWD\"`" 4 | docker build --no-cache --tag nben/neuropythy "$updir" 5 | -------------------------------------------------------------------------------- /docker/help.txt: -------------------------------------------------------------------------------- 1 | General Information 2 | ================================================================================ 3 | Github Repository: https://github.com/noahbenson/neuropythy 4 | Wiki: https://github.com/noahbenson/neuropythy/wiki 5 | Author: Noah C. 
Benson 6 | License: Affero GPLv3 7 | 8 | Neuropythy Docker Invocation 9 | ================================================================================ 10 | The neuropythy docker can be invoked in a number of ways, which are described in 11 | more detail below. 12 | 13 | * docker run --rm -it nben/neuropythy README 14 | Print the neuropythy README.md file to standard output. 15 | * docker run --rm -it nben/neuropythy LICENSE 16 | Print the neuropythy license to standard output. 17 | * docker run -it nben/neuropythy bash 18 | Execute a bash shell for the user "jovyan". 19 | * docker run -it -p8888:8888 nben/neuropythy notebook 20 | Start an interactive jupyter notebook running on port 8888. 21 | * docker run --rm -it nben/neuropythy 22 | Execute a neuropythy command. These commands include atlas, 23 | benson14_retinotopy, register_retinotopy, and surface_to_image. All 24 | commands print a help message if given the argument "--help" or "-h". 25 | 26 | Jupyter Notebooks and docker-compose 27 | ================================================================================ 28 | If you plan to use neuropythy as a Jupyter computation environment, it is much 29 | easier to checkout the neuropythy repository from github and run the 30 | `docker-compose up` command. Instructions for this are given in the neuropythy 31 | README.md file; see https://github.com/noahbenson/neuropythy 32 | 33 | Configuration 34 | ================================================================================ 35 | In all invocation cases, environment variables that can typically be used to 36 | configure neuropythy can also be used to configure the docker-container. See 37 | [this page](https://github.com/noahbenson/neuropythy/wiki/Configuration) for an 38 | explanation of these variables. Note that you can also mount a custom .npythyrc 39 | in the user (jovyan) home directory, but this is not recommended. 
The 40 | environment variables should be correct *inside* the docker container, not in 41 | the external filesystem. Note that recommended usage is to not pass these 42 | environment variables into the docker and rather to mount them at predefined 43 | locations; see the section on volumes below. 44 | 45 | If you plan to use neuropythy's auto-downloading features for Human Connectome 46 | Project (HCP) data, then you will need to give neuropythy a valid set of Amazon 47 | S3 credentials (see https://github.com/noahbenson/neuropythy/wiki/Configuration 48 | for more information on configuring neuropythy to work with the HCP). Note that 49 | while you can give neuropythy the filename of a file containing the HCP S3 50 | credentials--with the docker you must also ensure that this file can be read 51 | inside the docker-container. Alternately, you can provide your credentials as 52 | a string ":" or set them inside the docker using code such as: 53 | ``` 54 | import neuropythy as ny 55 | ny.config['hcp_credentials'] = (key, secret) 56 | ``` 57 | 58 | Volumes 59 | ================================================================================ 60 | Neuropythy can interact with a number of directories for various kinds of data; 61 | these are detailed below. 62 | 63 | * /data/freesurfer_subjects (or /freesurfer_subjects) 64 | If you intend to operate on FreeSurfer subjects, then you should mount your 65 | subjects directory in this directory. 
#! /bin/bash
#
# This script is run inside the neuropythy docker and simply invokes neuropythy's main function.
# By Noah C. Benson

set -eo pipefail

# A few things we do first:
# (1) Make sure SUBJECTS_DIR is setup correctly.
# BUGFIX: the original tested `if ! [ -d /data/required_subjects ]`, which added the baked-in
# required-subjects directory to the path only when it did NOT exist (and cleared the path when
# it did).  The positive test below matches the pattern used in sections (2) and (3).
if [ -d /data/required_subjects ]
then SUBJECTS_DIR="/data/required_subjects"
else SUBJECTS_DIR=""
fi
if [ -d /data/freesurfer_subjects ]
then SUBJECTS_DIR="/data/freesurfer_subjects:$SUBJECTS_DIR"
else mkdir -p /data/local/freesurfer_subjects
     SUBJECTS_DIR="/data/local/freesurfer_subjects:$SUBJECTS_DIR"
fi
if [ -d /freesurfer_subjects ]
then SUBJECTS_DIR="/freesurfer_subjects:$SUBJECTS_DIR"
fi
if [ -d /subjects ]
then SUBJECTS_DIR="/subjects:$SUBJECTS_DIR"
fi

# (2) Make sure the HCP_SUBJECTS_DIR is set correctly
if [ -d /data/hcp/subjects ]
then HCP_SUBJECTS_DIR="/data/hcp/subjects"
else mkdir -p /data/local/hcp/subjects
     HCP_SUBJECTS_DIR="/data/local/hcp/subjects"
fi
if [ -d /hcp_subjects ]
then HCP_SUBJECTS_DIR="/hcp_subjects:$HCP_SUBJECTS_DIR"
fi
# (3) Make sure the cache is set correctly
NPYTHY_DATA_CACHE_ROOT="/data/cache"
if [ -d /data/cache ]
then NPYTHY_DATA_CACHE_ROOT="/data/cache"
else mkdir -p /data/local/cache
     NPYTHY_DATA_CACHE_ROOT="/data/local/cache"
fi

export SUBJECTS_DIR
export HCP_SUBJECTS_DIR
export HCP_CREDENTIALS
export NPYTHY_DATA_CACHE_ROOT

# Okay, now interpret the inputs/args
# (note: `=` is used instead of the bash-only `==` inside [ ] for POSIX portability)
if   [ "$1" = "help" ] || [ "$1" = "-h" ] || [ "$1" = "-help" ] || [ "$1" = "--help" ]
then exec more /help.txt
elif [ "$1" = "README" ] || [ "$1" = "readme" ]
then exec cat /README.md
elif [ "$1" = "LICENSE" ] || [ "$1" = "license" ]
then exec cat /LICENSE.txt
elif [ "$1" = "bash" ]
then exec /bin/bash
elif [ "$1" = "notebook" ] || [ -z "$1" ]
then exec /usr/local/bin/start-notebook.sh
fi

# Okay, now invoke neuropythy
exec python -m neuropythy.__main__ "$@"
####################################################################################################
# __init__.py

'''Tools for analyzing and registering cortical meshes.'''

submodules = ('neuropythy.util.conf',
              'neuropythy.util.core',
              'neuropythy.util.filemap',
              'neuropythy.util.labels',
              'neuropythy.util',
              'neuropythy.math.core',
              'neuropythy.math',
              'neuropythy.java',
              'neuropythy.io.core',
              'neuropythy.io',
              'neuropythy.geometry.util',
              'neuropythy.geometry.mesh',
              'neuropythy.geometry',
              'neuropythy.optimize.core',
              'neuropythy.optimize',
              'neuropythy.mri.core',
              'neuropythy.mri.images',
              'neuropythy.mri',
              'neuropythy.freesurfer.core',
              'neuropythy.freesurfer',
              'neuropythy.hcp.files',
              'neuropythy.hcp.core',
              'neuropythy.hcp',
              'neuropythy.registration.core',
              'neuropythy.registration',
              'neuropythy.vision.models',
              'neuropythy.vision.retinotopy',
              'neuropythy.vision.cmag',
              'neuropythy.vision',
              'neuropythy.graphics.core',
              'neuropythy.graphics',
              'neuropythy.datasets.core',
              'neuropythy.datasets.benson_winawer_2018',
              'neuropythy.datasets.hcp',
              'neuropythy.datasets.visual_performance_fields',
              'neuropythy.datasets.hcp_lines',
              'neuropythy.datasets',
              'neuropythy.plans.core',
              'neuropythy.plans.prfclean',
              'neuropythy.plans',
              # BUGFIX: this entry was 'neuropythy.commands.surface_to_ribbon', a module that
              # does not exist in the package (the command lives in surface_to_image.py), and
              # 'neuropythy.commands.retinotopy' (imported by commands/__init__.py) was missing
              # entirely; stale/missing names here are silently skipped by reload_neuropythy.
              'neuropythy.commands.surface_to_image',
              'neuropythy.commands.retinotopy',
              'neuropythy.commands.benson14_retinotopy',
              'neuropythy.commands.register_retinotopy',
              'neuropythy.commands.atlas',
              'neuropythy.commands')
'''neuropythy.submodules is a tuple of all the sub-modules of neuropythy in a loadable order.'''

def reload_neuropythy():
    '''
    reload_neuropythy() reloads all of the modules of neuropythy and returns the reloaded
    neuropythy module. This is similar to reload(neuropythy) except that it reloads all the
    neuropythy submodules prior to reloading neuropythy.

    Example:
      import neuropythy as ny
      # ... some nonsense that breaks the library ...
      ny = ny.reload_neuropythy()
    '''
    import sys
    # Python 3 moved reload() out of the builtins; prefer importlib (imp is deprecated).
    if sys.version_info[0] >= 3:
        try: from importlib import reload
        except ImportError: from imp import reload
    # Submodules that have never been imported are skipped; reload() updates sys.modules
    # itself, but we rebind explicitly so the order in `submodules` is respected.
    for mdl in submodules:
        if mdl in sys.modules:
            sys.modules[mdl] = reload(sys.modules[mdl])
    return reload(sys.modules['neuropythy'])
def main(argv):
    '''
    Run the neuropythy sub-command named by argv[0], forwarding argv[1:] to it.

    Returns 0 when argv is empty, 1 (after printing a message to stderr) when the
    named command is unknown, and otherwise the command's own integer exit code.
    '''
    if not argv:
        return 0
    (cmd, cmd_args) = (argv[0], argv[1:])
    if cmd in commands:
        return commands[cmd](cmd_args)
    sys.stderr.write("The given command '%s' not recognized.\n" % cmd)
    return 1
import surface_to_image as _s2i 11 | from . import atlas as _atl 12 | from . import retinotopy as _ret 13 | 14 | # The commands that can be run by main: 15 | commands = _pyr.m( 16 | atlas = _atl.main, 17 | retinotopy = _ret.main, 18 | register_retinotopy = _reg.main, 19 | benson14_retinotopy = _b14.main, 20 | surface_to_image = _s2i.main) 21 | 22 | __all__ = ['commands'] 23 | 24 | -------------------------------------------------------------------------------- /neuropythy/commands/benson14_retinotopy.py: -------------------------------------------------------------------------------- 1 | #################################################################################################### 2 | # main/benson2014_retinotopy.py 3 | # The code for the function that handles the registration of retinotopy 4 | # By Noah C. Benson 5 | 6 | from __future__ import print_function 7 | 8 | import numpy as np 9 | import scipy as sp 10 | import nibabel as nib 11 | import nibabel.freesurfer.io as fsio 12 | import nibabel.freesurfer.mghformat as fsmgh 13 | import os, sys, six, pimms 14 | 15 | from ..freesurfer import (subject, add_subject_path) 16 | from ..vision import (predict_retinotopy, retinotopy_model, clean_retinotopy) 17 | from .. import io as nyio 18 | from ..mri import (is_image, to_image, image_clear) 19 | 20 | info = \ 21 | ''' 22 | The benson14_retinotopy command can be used to project the anatomically defined 23 | template of retinotopy to a subject's left and right hemisphere(s). At least 24 | one subject id (either a freesurfer subject name, if SUBJECTS_DIR is set 25 | appropriately in the environment, or a path to a subject directory) must be 26 | given. 
In each subject's freesurfer directory, a variety of output data is 27 | deposited: 28 | * surf/lh.benson14_angle surf/rh.benson14_angle 29 | surf/lh.benson14_eccen surf/rh.benson14_eccen 30 | surf/lh.benson14_varea surf/rh.benson14_varea 31 | surf/lh.benson14_sigma surf/rh.benson14_sigma 32 | These files contain predictions of polar angle, eccentricity, visual-area 33 | label, and pRF radius for each surface vertex in each hemisphere of the 34 | subject's hemispheres. The files are, by default, in FreeSurfer's curv 35 | format, but their format can be modified with the --surf-format flag. 36 | * mri/benson14_angle.mgz 37 | mri/benson14_eccen.mgz 38 | mri/benson14_varea.mgz 39 | mri/benson14_sigma.mgz 40 | These contain the data from the above surface data projected into the 41 | subject's 3D volume. Note that the volumes are oriented like Freesurfer's 42 | mri/brain.mgz file; if you want to convert this to the orientation of your 43 | original anatomical scan, use mri_convert: 44 | > mri_convert -rl mri/rawavg.mgz mri/angle_benson14.mgz \\ 45 | mri/scanner.angle_benson14.mgz 46 | The following options are accepted: 47 | * --eccen-tag=|-y 48 | --angle-tag=|-t 49 | --label-tag=|-l 50 | --sigma-tag=|-s 51 | These options specify the output tag to use for the predicted measurement 52 | that results from the registration. By default, these are 53 | 'eccen_benson14', 'angle_benson14', 'varea_benson14', and 'sigma_benson14'. 54 | The output files have the name ..mgz 55 | * --surf-format=|-o 56 | --vol-format=|-v 57 | These flags specify what format the output should be in; note that nii.gz 58 | and nifti are identical; curv is a FreeSurfer curv (AKA morph data) file. 59 | * --no-volume-export|-x 60 | --no-surface-export|-z 61 | These flags indicate that the various data produced and written to the 62 | filesystem under normal execution should be suppressed. 
The volume export 63 | refers to the predicted volume files exported to the subject's mri directory 64 | and the surface export refers to the .eccen_benson14.mgz and similar 65 | files that are written to the subject's surf directory. 66 | * --subjects-dir=|-d 67 | Specifies additional subject directory search locations (in addition to the 68 | SUBJECTS_DIR environment variable and the FREESURFER_HOME/subjects 69 | directories, which are given here in descending search priority) when looking 70 | for subjects by name. This option cannot be specified multiple times, but it 71 | may contain : characters to separate directories, as in PATH. 72 | * --no-overwrite|-n 73 | This flag indicates that, when writing output files, no file should ever be 74 | replaced, should it already exist. 75 | * --template=|-t 76 | Specifies the specific template that should be applied. By default this is 77 | 'Benson14', the 2017 version of the template originally described in the paper 78 | by Benson et al. (2014). The option 'benson14' is also accepted. If the 79 | * --reg=|-R 80 | Specifies the registration to look for the template in. This is, by default, 81 | fsaverage, but for the templates aligned to the fsaverage_sym hemisphere, 82 | this should specify fsaverage_sym. 83 | * -- 84 | This token, by itself, indicates that the arguments that remain should not be 85 | processed as flags or options, even if they begin with a -. 
86 | ''' 87 | _benson14_parser_instructions = [ 88 | # Flags 89 | ('h', 'help', 'help', False), 90 | ('v', 'verbose', 'verbose', False), 91 | ('x', 'no-volume-export', 'no_vol_export', False), 92 | ('z', 'no-surface-export', 'no_surf_export', False), 93 | ('n', 'no-overwrite', 'no_overwrite', False), 94 | # Options 95 | ('e', 'eccen-tag', 'eccen_tag', 'benson14_eccen'), 96 | ('a', 'angle-tag', 'angle_tag', 'benson14_angle'), 97 | ('l', 'label-tag', 'label_tag', 'benson14_varea'), 98 | ('s', 'sigma-tag', 'sigma_tag', 'benson14_sigma'), 99 | ('d', 'subjects-dir', 'subjects_dir', None), 100 | ('t', 'template', 'template', 'benson14'), 101 | ('o', 'surf-format', 'surf_format', 'curv'), 102 | ('v', 'vol-format', 'vol_format', 'mgz'), 103 | ('R', 'reg', 'registration', 'fsaverage')] 104 | _benson14_parser = pimms.argv_parser(_benson14_parser_instructions) 105 | 106 | def main(*args): 107 | ''' 108 | benson14_retinotopy.main(args...) runs the benson14_retinotopy command; see 109 | benson14_retinotopy.info for more information. 110 | ''' 111 | # Parse the arguments... 112 | (args, opts) = _benson14_parser(*args) 113 | # help? 114 | if opts['help']: 115 | print(info, file=sys.stdout) 116 | return 1 117 | # verbose? 118 | if opts['verbose']: 119 | def note(s): 120 | print(s, file=sys.stdout) 121 | return True 122 | else: 123 | def note(s): return False 124 | # based on format, how do we export? 125 | sfmt = opts['surf_format'].lower() 126 | if sfmt in ['curv', 'auto', 'automatic', 'morph']: 127 | sfmt = 'freesurfer_morph' 128 | sext = '' 129 | elif sfmt == 'nifti': 130 | sext = '.nii.gz' 131 | elif sfmt in ['mgh', 'mgz', 'nii', 'nii.gz']: 132 | sext = '.' + sfmt 133 | else: 134 | raise ValueError('Unknown surface format: %s' % opts['surf_format']) 135 | vfmt = opts['vol_format'].lower() 136 | if vfmt == 'nifti': 137 | vext = '.nii.gz' 138 | elif vfmt in ['mgh', 'mgz', 'nii', 'nii.gz']: 139 | vext = '.' 
+ vfmt 140 | else: 141 | raise ValueError('Unknown volume format: %s' % opts['vol_format']) 142 | # Add the subjects directory, if there is one 143 | if 'subjects_dir' in opts and opts['subjects_dir'] is not None: 144 | add_subject_path(opts['subjects_dir']) 145 | ow = not opts['no_overwrite'] 146 | nse = opts['no_surf_export'] 147 | nve = opts['no_vol_export'] 148 | tr = {'angle': opts['angle_tag'], 149 | 'eccen': opts['eccen_tag'], 150 | 'varea': opts['label_tag'], 151 | 'sigma': opts['sigma_tag']} 152 | # okay, now go through the subjects... 153 | for subnm in args: 154 | note('Processing subject %s:' % subnm) 155 | sub = subject(subnm) 156 | note(' - Interpolating template...') 157 | (lhdat, rhdat) = predict_retinotopy(sub, 158 | template=opts['template'], 159 | registration=opts['registration']) 160 | # Export surfaces 161 | if nse: 162 | note(' - Skipping surface export.') 163 | else: 164 | note(' - Exporting surfaces:') 165 | for (t,dat) in six.iteritems(lhdat): 166 | flnm = os.path.join(sub.path, 'surf', 'lh.' + tr[t] + sext) 167 | if ow or not os.path.exists(flnm): 168 | note(' - Exporting LH prediction file: %s' % flnm) 169 | nyio.save(flnm, dat, format=sfmt) 170 | else: 171 | note(' - Not overwriting existing file: %s' % flnm) 172 | for (t,dat) in six.iteritems(rhdat): 173 | flnm = os.path.join(sub.path, 'surf', 'rh.' + tr[t] + sext) 174 | if ow or not os.path.exists(flnm): 175 | note(' - Exporting RH prediction file: %s' % flnm) 176 | nyio.save(flnm, dat, format=sfmt) 177 | else: 178 | note(' - Not overwriting existing file: %s' % flnm) 179 | # Export volumes 180 | if nve: 181 | note(' - Skipping volume export.') 182 | else: 183 | note(' - Exporting Volumes:') 184 | # generate the volumes once... 
185 | im = sub.images['brain'] 186 | note(' - Addressing volume...') 187 | addr = (sub.lh.image_address(im), sub.rh.image_address(im)) 188 | for t in lhdat.keys(): 189 | flnm = os.path.join(sub.path, 'mri', tr[t] + vext) 190 | if ow or not os.path.exists(flnm): 191 | note(' - Preparing volume file: %s' % flnm) 192 | dtyp = (np.int32 if t == 'varea' else np.float32) 193 | vol = sub.cortex_to_image( 194 | (lhdat[t], rhdat[t]), image_clear(im), 195 | method=('nearest' if t == 'varea' else 'linear'), 196 | address=addr, dtype=dtyp) 197 | note(' - Exporting volume file: %s' % flnm) 198 | nyio.save(flnm, vol) 199 | else: 200 | note(' - Not overwriting existing file: %s' % flnm) 201 | note(' Subject %s finished!' % sub.name) 202 | return 0 203 | 204 | -------------------------------------------------------------------------------- /neuropythy/commands/surface_to_image.py: -------------------------------------------------------------------------------- 1 | #################################################################################################### 2 | # main/surface_to_image.py 3 | # The code for the function that handles the registration of retinotopy 4 | # By Noah C. Benson 5 | 6 | from __future__ import print_function 7 | 8 | import numpy as np 9 | import scipy as sp 10 | import nibabel as nib 11 | import nibabel.freesurfer.io as fsio 12 | import nibabel.freesurfer.mghformat as fsmgh 13 | import os, sys, pimms 14 | 15 | from ..freesurfer import (subject, add_subject_path, find_subject_path) 16 | from ..io import (save, load) 17 | from ..mri import (is_image, is_image_spec, image_clear, to_image) 18 | 19 | info = \ 20 | ''' 21 | Syntax: surface_to_image 22 | must be a valid FreeSurfer subject id (in the $SUBJECTS_DIR directory 23 | or configured subject paths) or a full path to a FreeSurfer subject or it can 24 | be a valid HCP subject identifier. 25 | is the output volume file to write. 
info = \
    '''
    Syntax: surface_to_image <subject> <out>
    <subject> must be a valid FreeSurfer subject id (in the $SUBJECTS_DIR directory
    or configured subject paths) or a full path to a FreeSurfer subject or it can
    be a valid HCP subject identifier.
    <out> is the output volume file to write.
    In addition to the subject and the output filename, at least one and at most two
    surface file(s) must be specified. These may be specified using the --lh (or -l)
    and --rh (or -r) options below or without them; if the files are specified
    without the given arguments and the ordering cannot be detected by the number of
    vertices in the file, then the left hemisphere surface is assumed to be
    specified first.
    The command projects the given surface files into the subject's ribbon and
    writes the result out to the given filename. If only one hemisphere's surface
    datafile is given, then only that hemisphere's data is projected to the ribbon.

    The following options may be given:
      * -v|--verbose
        Indicates that detailed output should be printed.
      * -l|--lh=<file>
        -r|--rh=<file>
        These options specify the surface data files that are to be projected to
        the subject's ribbon.
      * -i|--image=<file>
        The given file specifies the image-spec of the output image; all header-data
        are copied from this image and the file's affine and the image shape are used
        to determine the slice prescription of the output image. File may optionally
        be a JSON file containing a mapping with the keys 'affine' and 'image_shape'.
        If no image is given then the subject's 'brain' image is used.
      * -m|--method=<method>
        Specifies the method that should be used. Supported options are linear,
        nearest, and auto (the default). Both linear and nearest methods find the
        position of the voxel-center in the cortical sheet. The linear option linearly
        interpolates between the three vertices that form the triangle faces of the
        prism containing the voxel-center while the nearest option uses the nearest of
        these three vertices. If the auto option is used, then chooses nearest for int
        data and linear for floating-point data.
      * -f|--fill=<value>
        The fill value (default: 0) is inserted in everywhere in the volume that is
        not part of the ribbon.
      * -t|--dtype=<type>
        Specifies that the output data type should be <type>. Currently supported are
        'int' or 'float' (default: 'float').
      * -d|--subjects-dir=<path>
        Specifies that the given path(s) should be added to the subjects directory
        when performing the operation. Note that this may include directories
        separated by colons (:), as with the PATH environment variable.
      * --
        This token, by itself, indicates that the arguments that remain should not
        be processed as flags or options, even if they begin with a -.
    '''
# pimms.argv_parser instruction rows: (short-flag, long-flag, option-key, default-value).
_surface_to_ribbon_parser_instructions = [
    # Flags
    ('h', 'help',    'help',    False),
    ('v', 'verbose', 'verbose', False),
    # Options
    ['l', 'lh',           'lh_file',      None],
    ['r', 'rh',           'rh_file',      None],
    ['i', 'image',        'image',        None],
    ['f', 'fill',         'fill',         0],
    ['m', 'method',       'method',       'auto'],
    ['t', 'type',         'dtype',        None],
    ['d', 'subjects-dir', 'subjects_dir', None]]
_surface_to_ribbon_parser = pimms.argv_parser(_surface_to_ribbon_parser_instructions)

def read_surf_file(flnm):
    '''
    read_surf_file(filename) reads the given surface-data file and returns its contents as a
    numpy array. MGH/MGZ files are loaded as images and flattened; any other file is assumed
    to be in FreeSurfer morph-data (curv) format.
    '''
    if flnm.endswith(".mgh") or flnm.endswith(".mgz"):
        data = np.array(fsmgh.load(flnm).dataobj).flatten()
    else:
        data = fsio.read_morph_data(flnm)
    return data

def main(args):
    '''
    surface_to_image.main(args) can be given a list of arguments, such as sys.argv[1:]; these
    arguments may include any options and must include exactly one subject id and one output
    filename. Additionally one or two surface input filenames must be given. The surface files
    are projected into the ribbon and written to the output filename. For more information see
    the string stored in surface_to_image.info.

    Returns 1 when only help was printed and 0 on success; raises ValueError for malformed or
    incomplete argument lists.
    '''
    # Parse the arguments
    (args, opts) = _surface_to_ribbon_parser(args)
    # First, help?
    if opts['help']:
        print(info, file=sys.stdout)
        return 1
    # and if we are verbose, lets setup a note function
    verbose = opts['verbose']
    def note(s):
        if verbose: print(s, file=sys.stdout)
        return verbose
    # Add the subjects directory, if there is one
    if 'subjects_dir' in opts and opts['subjects_dir'] is not None:
        add_subject_path(opts['subjects_dir'])
    # figure out our arguments: positional args may be <out>, <subject> <out>,
    # <subject> <surf> <out> (in some order), or <subject> <lh-surf> <rh-surf> <out>.
    (lhfl, rhfl) = (opts['lh_file'], opts['rh_file'])
    if len(args) == 0:
        raise ValueError('Not enough arguments provided!')
    elif len(args) == 1:
        # must be that the subject is in the env?
        sub = find_subject_path(os.getenv('SUBJECT'))
        outfl = args[0]
    elif len(args) == 2:
        sbpth = find_subject_path(args[0])
        if sbpth is not None:
            sub = sbpth
        else:
            sub = find_subject_path(os.getenv('SUBJECT'))
            if   lhfl is not None: rhfl = args[0]
            elif rhfl is not None: lhfl = args[0]
            else: raise ValueError('Given arg is not a subject: %s' % args[0])
        outfl = args[1]
    elif len(args) == 3:
        sbpth0 = find_subject_path(args[0])
        sbpth1 = find_subject_path(args[1])
        if sbpth0 is not None:
            sub = sbpth0
            if   lhfl is not None: rhfl = args[1]
            elif rhfl is not None: lhfl = args[1]
            else: raise ValueError('Too many arguments given: %s' % args[1])
        elif sbpth1 is not None:
            sub = sbpth1
            if   lhfl is not None: rhfl = args[0]
            elif rhfl is not None: lhfl = args[0]
            else: raise ValueError('Too many arguments given: %s' % args[0])
        else:
            sub = find_subject_path(os.getenv('SUBJECT'))
            if lhfl is not None or rhfl is not None:
                raise ValueError('Too many arguments and no subject given')
            # BUGFIX: this was `(lhfl, rhfl) = args`, which always raises ValueError here
            # because args has exactly three elements; the surfaces are the first two and
            # the third is the output filename (assigned below).
            (lhfl, rhfl) = args[:2]
        outfl = args[2]
    elif len(args) == 4:
        if lhfl is not None or rhfl is not None:
            raise ValueError('Too many arguments and no subject given')
        # the subject may appear at any position; the remaining three args are lh, rh, out
        subidx = next((i for (i,a) in enumerate(args) if find_subject_path(a) is not None), None)
        if subidx is None: raise ValueError('No subject given')
        sub = find_subject_path(args[subidx])
        del args[subidx]
        (lhfl, rhfl, outfl) = args
    else:
        raise ValueError('Too many arguments provided!')
    if sub is None: raise ValueError('No subject specified or found in $SUBJECT')
    if lhfl is None and rhfl is None: raise ValueError('No surfaces provided')
    sub = subject(sub)
    # check the method
    method = opts['method'].lower()
    if method not in ['linear', 'nearest', 'auto']:
        raise ValueError('Unsupported method: %s' % method)
    # and the datatype
    if   opts['dtype'] is None:            dtyp = None
    elif opts['dtype'].lower() == 'float': dtyp = float
    elif opts['dtype'].lower() == 'int':   dtyp = int
    else: raise ValueError('Type argument must be float or int')
    if method == 'auto':
        # auto resolves to nearest for int data and linear otherwise
        if   dtyp is float: method = 'linear'
        elif dtyp is int:   method = 'nearest'
        else:               method = 'linear'
    # and the input/sample image
    im = opts['image']
    if im is None:
        im = sub.images['brain']
        try: note('Using template image: %s' % im.get_filename())
        except Exception: pass
    else:
        note('Using template image: %s' % im)
        im = load(im)
    im = to_image(image_clear(im, fill=opts['fill']), dtype=dtyp)
    # Now, load the data:
    note('Reading surfaces...')
    (lhdat, rhdat) = (None, None)
    if lhfl is not None:
        note(' - Reading LH file: %s' % lhfl)
        lhdat = read_surf_file(lhfl)
    if rhfl is not None:
        note(' - Reading RH file: %s' % rhfl)
        rhdat = read_surf_file(rhfl)
    # a single hemisphere is passed with its name; both are passed as a (lh, rh) pair
    (dat, hemi) = (rhdat, 'rh')          if lhdat is None else \
                  (lhdat, 'lh')          if rhdat is None else \
                  ((lhdat, rhdat), None)
    # okay, make the volume...
    note('Generating volume...')
    im = sub.cortex_to_image(dat, im, hemi=hemi, method=method, fill=opts['fill'], dtype=dtyp)
    # and write out the file
    note('Exporting volume file: %s' % outfl)
    save(outfl, im)
    note('surface_to_image complete!')
    return 0
# Benson

import os, six, shutil, tempfile, atexit, pimms
import numpy as np

from ..util import (config, to_credentials, ObjectWithMetaData)
from ..freesurfer import subject as freesurfer_subject

# We declare a configuration variable, data_cache_root -- where to put the data that is downloaded.
# If this is None / unset, then we'll use a temporary directory and auto-delete it on exit.
config.declare_dir('data_cache_root')

@pimms.immutable
class Dataset(ObjectWithMetaData):
    '''
    The Dataset class is a simple immutable class that should be implemented by all neuropythy
    datasets. The design is such that neuropythy.data[name] should always (lazily) yield a Dataset
    object specific to the dataset given by name, if it exists and can be loaded.

    One reason to require (by convention) that all datasets are distinct classes is that it should
    thus be easy to evaluate help(ny.data[name]) to see help on the given dataset. If you overload
    this class, be sure to overload the documentation.
    '''
    def __init__(self, name, meta_data=None, custom_directory=None, cache_required=True,
                 create_directories=True, create_mode=0o755):
        ObjectWithMetaData.__init__(self, meta_data)
        self.custom_directory = custom_directory
        self.name = name
        self.create_directories = create_directories
        self.create_mode = create_mode
        self.cache_required = cache_required
    def __repr__(self): return self.repr
    @pimms.value
    def repr(name):
        '''
        dataset.repr is the representation string used for the given dataset.
        '''
        return ("Dataset('%s')" % name) if pimms.is_str(name) else ("Dataset%s" % (name,))
    @staticmethod
    def to_name(nm):
        '''
        Dataset.to_name(name) yields a valid dataset name equivalent to the given name or raises an
        error if name is not valid. In order to be valid, a name must be either a string or a tuple
        of numbers and strings that starts with a string.
        '''
        if pimms.is_str(nm): return nm
        if not pimms.is_vector(nm): raise ValueError('name must be a string or tuple')
        if len(nm) < 1: raise ValueError('names that are tuples must have at least one element')
        # Bug fix: the original tested pimms.is_str(nm) here, which is always False at this point
        # (string names were already returned above), so every tuple name was rejected; the check
        # is meant to apply to the tuple's first element.
        if not pimms.is_str(nm[0]):
            raise ValueError('names that are tuples must begin with a string')
        if not all(pimms.is_str(x) or pimms.is_number(x) for x in nm):
            raise ValueError('dataset names that are tuples must contain only strings and numbers')
        return tuple(nm)
    @pimms.param
    def custom_directory(d):
        '''
        dataset.custom_directory is None if no custom directory was provided for the given dataset;
        otherwise it is the provided custom directory.
        '''
        if d is None: return None
        if not pimms.is_str(d): raise ValueError('custom_directory must be a string')
        else: return d
    @pimms.param
    def create_directories(c):
        '''
        dataset.create_directories is True if the dataset was instructed to create its cache
        directory, should it be found to not exist, and is otherwise False.
        '''
        return bool(c)
    @pimms.param
    def create_mode(c):
        '''
        dataset.create_mode is the octal permission mode used to create the cache directory for the
        given dataset, if the dataset had to create its directory at all.
        '''
        return c
    @pimms.param
    def name(nm):
        '''
        dataset.name is either a string or a tuple of strings and numbers that identifies the given
        dataset. If dataset.name is a tuple, then the first element must be a string.
        '''
        return Dataset.to_name(nm)
    @pimms.param
    def cache_required(cr):
        '''
        dataset.cache_required is True if the dataset requires a cache directory and False
        otherwise.
        '''
        return cr
    @pimms.value
    def cache_root(custom_directory):
        '''
        dataset.cache_root is the root directory in which the given dataset has been cached.
        '''
        if custom_directory is not None: return None
        elif config['data_cache_root'] is None:
            # we create a data-cache in a temporary directory
            path = tempfile.mkdtemp(prefix='npythy_data_cache_')
            if not os.path.isdir(path): raise ValueError('Could not find or create cache directory')
            config['data_cache_root'] = path
            atexit.register(shutil.rmtree, path)
        return config['data_cache_root']
    @pimms.value
    def cache_directory(cache_root, name, custom_directory):
        '''
        dataset.cache_directory is the directory in which the given dataset is cached.
        '''
        if custom_directory is not None: return custom_directory
        return os.path.join(cache_root, (name if pimms.is_str(name) else
                                         name[0] if len(name) == 1 else
                                         '%s_%x' % (name[0], hash(name[1:]))))
    @pimms.require
    def ensure_cache_directory(cache_directory, create_directories, create_mode, cache_required):
        '''
        ensure_cache_directory requires that a dataset's cache directory exists and raises an error
        if it cannot be found.
        '''
        if not cache_required: return True
        if os.path.isdir(cache_directory): return True
        if not create_directories:
            raise ValueError('dataset cache directory not found: %s' % (cache_directory,))
        os.makedirs(os.path.abspath(cache_directory), create_mode)
        return True
# We create the dataset repository: this is just a lazy map; to add a dataset to it, use the
# function add_dataset(), immediately below
data = pimms.lazy_map({})
def add_dataset(dset, fn=None):
    '''
    add_dataset(dset) adds the given dataset to the neuropythy.data map.
    add_dataset(name, fn) adds a dataset with the given name; fn must be a function of zero
    arguments that yields the dataset.

    add_dataset always yields None or raises an error.
    '''
    global data
    if fn is None:
        if not isinstance(dset, Dataset):
            raise ValueError('Cannot add non-Dataset object to neuropythy datasets')
        nm = dset.name
        data = data.set(nm, dset)
    else:
        nm = Dataset.to_name(dset)
        def _load_dset():
            x = fn()
            if not isinstance(x, Dataset):
                raise ValueError('Loader for dataset %s failed to return a dataset' % nm)
            return x
        data = data.set(nm, _load_dset)
    # we want to update neuropythy.data also; this is a bit of a hack, but should work fine
    import neuropythy
    neuropythy.data = data
    return None
####################################################################################################
# neuropythy/freesurfer/__init__.py
# This file defines the FreeSurfer tools that are available as part of neuropythy.
4 | 5 | from .core import (subject, forget_subject, forget_all, tkr_vox2ras, 6 | find_subject_path, subject_paths, add_subject_path, clear_subject_paths, 7 | to_mgh, load_LUT, 8 | freesurfer_subject_filemap_instructions, freesurfer_subject_data_hierarchy, 9 | subject_file_map, subject_dir) 10 | 11 | -------------------------------------------------------------------------------- /neuropythy/geometry/__init__.py: -------------------------------------------------------------------------------- 1 | #################################################################################################### 2 | # neuropythy/geometry/__init__.py 3 | # This file defines common rotation functions that are useful with cortical mesh spheres, such as 4 | # those produced with FreeSurfer. 5 | 6 | ''' 7 | The neuropythy.geometry package contains a number of utility functions for calculating 2D and 3D 8 | geometrical values as well as three classes: Mesh, Tesselation, and Topology. The Tesselation 9 | class stores information about triangle meshes---essentially all the information except the 2D or 3D 10 | coordinates of the vertices and that information which requires those coordinates. The Mesh class 11 | simply reifies the Tesselation class with these coordinates and the relevant values that can be 12 | derived from them. Finally, the Topology class tracks a Tesselation object and a set of Meshes that 13 | share that tesselation in common. 
14 | ''' 15 | 16 | from .util import ( 17 | normalize, 18 | vector_angle_cos, 19 | vector_angle, 20 | spherical_distance, 21 | rotation_matrix_3D, 22 | rotation_matrix_2D, 23 | alignment_matrix_3D, 24 | alignment_matrix_2D, 25 | point_on_line, point_on_segment, point_in_segment, points_close, 26 | lines_colinear, segments_colinear, segments_overlapping, 27 | lines_touch_2D, segments_touch_2D, 28 | line_intersection_2D, 29 | segment_intersection_2D, 30 | line_segment_intersection_2D, 31 | triangle_area, 32 | triangle_normal, 33 | cartesian_to_barycentric_2D, 34 | cartesian_to_barycentric_3D, 35 | barycentric_to_cartesian, 36 | triangle_address, 37 | triangle_unaddress, 38 | point_in_triangle, 39 | point_in_tetrahedron, 40 | point_in_prism, 41 | tetrahedral_barycentric_coordinates, 42 | prism_barycentric_coordinates) 43 | from .mesh import (VertexSet, Tesselation, Mesh, Topology, MapProjection, Path, PathTrace, 44 | mesh, is_mesh, is_flatmap, 45 | tess, is_tess, 46 | topo, is_topo, 47 | is_vset, is_path, deduce_chirality, 48 | map_projection, is_map_projection, 49 | load_map_projection, load_projections_from_path, 50 | projections_path, map_projections, 51 | path_trace, is_path_trace, close_path_traces, paths_to_labels, 52 | to_tess, to_mesh, to_property, to_mask, isolines, smooth_lines, 53 | to_map_projection, to_flatmap) 54 | 55 | -------------------------------------------------------------------------------- /neuropythy/graphics/__init__.py: -------------------------------------------------------------------------------- 1 | #################################################################################################### 2 | # neuropythy/graphics/__init__.py 3 | # Simple tools for making matplotlib/pyplot graphics with neuropythy. 4 | # By Noah C. Benson 5 | 6 | ''' 7 | The neuropythy.graphics package contains definitions of the various tools for making plots with 8 | cortical data. The primary entry point is the function cortex_plot. 
9 | ''' 10 | 11 | from .core import ( 12 | cmap_curvature, 13 | cmap_polar_angle_sym, cmap_polar_angle_lh, cmap_polar_angle_rh, cmap_polar_angle, 14 | cmap_theta_sym, cmap_theta_lh, cmap_theta_rh, cmap_theta, 15 | cmap_eccentricity, cmap_log_eccentricity, cmap_radius, cmap_log_radius, 16 | cmap_cmag, cmap_log_cmag, label_cmap, 17 | vertex_curvature_color, vertex_weight, 18 | vertex_angle, vertex_eccen, vertex_sigma, vertex_varea, 19 | vertex_angle_color, vertex_eccen_color, vertex_sigma_color, vertex_varea_color, 20 | angle_colors, eccen_colors, sigma_colors, radius_colors, varea_colors, to_rgba, 21 | color_overlap, visual_field_legend, curvature_colors, cortex_plot, cortex_plot_colors, 22 | ROIDrawer, trace_roi, scale_for_cmap) 23 | -------------------------------------------------------------------------------- /neuropythy/hcp/__init__.py: -------------------------------------------------------------------------------- 1 | #################################################################################################### 2 | # neuropythy/hcp/__init__.py 3 | # This file defines the Human Connectome Project (HCP) tools that are available as part of 4 | # neuropythy.
5 | 6 | from .files import (subject_paths, clear_subject_paths, add_subject_path, find_subject_path, 7 | gifti_to_array, cifti_split, cifti_extract, is_hcp_subject_path, 8 | subject_file_map, cifti_label_data, cifti_axis_spec) 9 | from .core import (subject, forget_subject, forget_all, download, subject_dir) 10 | 11 | subject_ids = tuple( 12 | [100206, 100307, 100408, 100610, 101006, 101107, 101309, 101410, 101915, 102008, 102109, 102311, 13 | 102513, 102614, 102715, 102816, 103010, 103111, 103212, 103414, 103515, 103818, 104012, 104416, 14 | 104820, 105014, 105115, 105216, 105620, 105923, 106016, 106319, 106521, 106824, 107018, 107220, 15 | 107321, 107422, 107725, 108020, 108121, 108222, 108323, 108525, 108828, 109123, 109325, 109830, 16 | 110007, 110411, 110613, 111009, 111211, 111312, 111413, 111514, 111716, 112112, 112314, 112516, 17 | 112819, 112920, 113215, 113316, 113417, 113619, 113821, 113922, 114116, 114217, 114318, 114419, 18 | 114621, 114823, 114924, 115017, 115219, 115320, 115724, 115825, 116120, 116221, 116423, 116524, 19 | 116726, 117021, 117122, 117324, 117728, 117930, 118023, 118124, 118225, 118528, 118730, 118831, 20 | 118932, 119025, 119126, 119732, 119833, 120010, 120111, 120212, 120414, 120515, 120717, 121315, 21 | 121416, 121618, 121719, 121820, 121921, 122317, 122418, 122620, 122822, 123117, 123420, 123521, 22 | 123723, 123824, 123925, 124220, 124422, 124624, 124826, 125222, 125424, 125525, 126325, 126426, 23 | 126628, 126931, 127226, 127327, 127630, 127731, 127832, 127933, 128026, 128127, 128329, 128632, 24 | 128935, 129028, 129129, 129331, 129432, 129533, 129634, 129937, 130013, 130114, 130316, 130417, 25 | 130518, 130619, 130720, 130821, 130922, 131217, 131419, 131621, 131722, 131823, 131924, 132017, 26 | 132118, 133019, 133625, 133827, 133928, 134021, 134223, 134324, 134425, 134627, 134728, 134829, 27 | 135124, 135225, 135528, 135629, 135730, 135932, 136126, 136227, 136631, 136732, 136833, 137027, 28 | 137128, 137229, 137431, 137532, 
137633, 137936, 138130, 138231, 138332, 138534, 138837, 139233, 29 | 139435, 139637, 139839, 140117, 140319, 140420, 140824, 140925, 141119, 141422, 141826, 142424, 30 | 142828, 143224, 143325, 143426, 143527, 143830, 144125, 144226, 144428, 144731, 144832, 144933, 31 | 145127, 145531, 145632, 145834, 146129, 146331, 146432, 146533, 146634, 146735, 146836, 146937, 32 | 147030, 147636, 147737, 148032, 148133, 148335, 148436, 148840, 148941, 149236, 149337, 149539, 33 | 149741, 149842, 150019, 150423, 150524, 150625, 150726, 150928, 151021, 151223, 151324, 151425, 34 | 151526, 151627, 151728, 151829, 151930, 152225, 152427, 152831, 153025, 153126, 153227, 153429, 35 | 153631, 153732, 153833, 153934, 154229, 154330, 154431, 154532, 154734, 154835, 154936, 155231, 36 | 155635, 155938, 156031, 156233, 156334, 156435, 156536, 156637, 157336, 157437, 157942, 158035, 37 | 158136, 158338, 158540, 158843, 159138, 159239, 159340, 159441, 159744, 159845, 159946, 160123, 38 | 160729, 160830, 160931, 161327, 161630, 161731, 161832, 162026, 162228, 162329, 162733, 162935, 39 | 163129, 163331, 163432, 163836, 164030, 164131, 164636, 164939, 165032, 165234, 165436, 165638, 40 | 165840, 165941, 166438, 166640, 167036, 167238, 167440, 167743, 168038, 168139, 168240, 168341, 41 | 168745, 168947, 169040, 169141, 169343, 169444, 169545, 169747, 169949, 170631, 170934, 171128, 42 | 171330, 171431, 171532, 171633, 171734, 172029, 172130, 172332, 172433, 172534, 172635, 172938, 43 | 173132, 173233, 173334, 173435, 173536, 173637, 173738, 173839, 173940, 174437, 174841, 175035, 44 | 175136, 175237, 175338, 175439, 175540, 175742, 176037, 176239, 176441, 176542, 176744, 176845, 45 | 177140, 177241, 177342, 177645, 177746, 178142, 178243, 178647, 178748, 178849, 178950, 179245, 46 | 179346, 179548, 179952, 180129, 180230, 180432, 180533, 180735, 180836, 180937, 181131, 181232, 47 | 181636, 182032, 182436, 182739, 182840, 183034, 183337, 183741, 185038, 185139, 185341, 185442, 48 | 185846, 
185947, 186040, 186141, 186444, 186545, 186848, 186949, 187143, 187345, 187547, 187850, 49 | 188145, 188347, 188448, 188549, 188751, 189349, 189450, 189652, 190031, 190132, 191033, 191235, 50 | 191336, 191437, 191841, 191942, 192035, 192136, 192237, 192439, 192540, 192641, 192843, 193239, 51 | 193441, 193845, 194140, 194443, 194645, 194746, 194847, 195041, 195445, 195647, 195849, 195950, 52 | 196144, 196346, 196750, 196851, 196952, 197348, 197449, 197550, 197651, 198047, 198249, 198350, 53 | 198451, 198653, 198855, 199150, 199251, 199352, 199453, 199655, 199958, 200008, 200109, 200210, 54 | 200311, 200513, 200614, 200917, 201111, 201414, 201515, 201717, 201818, 202113, 202719, 202820, 55 | 203418, 203721, 203923, 204016, 204218, 204319, 204420, 204521, 204622, 205119, 205220, 205725, 56 | 205826, 206222, 206323, 206525, 206727, 206828, 206929, 207123, 207426, 207628, 208024, 208125, 57 | 208226, 208327, 208428, 208630, 209127, 209228, 209329, 209531, 209834, 209935, 210011, 210112, 58 | 210415, 210617, 211114, 211215, 211316, 211417, 211619, 211720, 211821, 211922, 212015, 212116, 59 | 212217, 212318, 212419, 212823, 213017, 213421, 213522, 214019, 214221, 214423, 214524, 214625, 60 | 214726, 217126, 217429, 219231, 220721, 221218, 221319, 223929, 224022, 227432, 227533, 228434, 61 | 231928, 233326, 236130, 237334, 238033, 239136, 239944, 245333, 246133, 248238, 248339, 249947, 62 | 250427, 250932, 251833, 255639, 255740, 256540, 257542, 257845, 257946, 263436, 268749, 268850, 63 | 270332, 274542, 275645, 280739, 280941, 281135, 283543, 284646, 285345, 285446, 286347, 286650, 64 | 287248, 289555, 290136, 293748, 295146, 297655, 298051, 298455, 299154, 299760, 300618, 300719, 65 | 303119, 303624, 304020, 304727, 305830, 307127, 308129, 308331, 309636, 310621, 311320, 314225, 66 | 316633, 316835, 317332, 318637, 320826, 321323, 322224, 325129, 329440, 329844, 330324, 333330, 67 | 334635, 336841, 339847, 341834, 342129, 346137, 346945, 348545, 349244, 350330, 351938, 
352132, 68 | 352738, 353740, 355239, 355542, 355845, 356948, 358144, 360030, 361234, 361941, 362034, 365343, 69 | 366042, 366446, 368551, 368753, 371843, 376247, 377451, 378756, 378857, 379657, 380036, 381038, 70 | 381543, 382242, 385046, 385450, 386250, 387959, 389357, 390645, 391748, 392447, 392750, 393247, 71 | 393550, 394956, 395251, 395756, 395958, 397154, 397760, 397861, 401422, 406432, 406836, 412528, 72 | 413934, 414229, 415837, 419239, 421226, 422632, 424939, 429040, 432332, 433839, 436239, 436845, 73 | 441939, 445543, 448347, 449753, 453441, 453542, 454140, 456346, 459453, 461743, 462139, 463040, 74 | 465852, 467351, 468050, 469961, 473952, 475855, 479762, 480141, 481042, 481951, 485757, 486759, 75 | 492754, 495255, 497865, 499566, 500222, 506234, 510225, 510326, 512835, 513130, 513736, 516742, 76 | 517239, 518746, 519647, 519950, 520228, 521331, 522434, 523032, 524135, 525541, 529549, 529953, 77 | 530635, 531536, 531940, 536647, 540436, 541640, 541943, 545345, 547046, 548250, 549757, 550439, 78 | 552241, 552544, 553344, 555348, 555651, 555954, 557857, 558657, 558960, 559053, 559457, 561242, 79 | 561444, 561949, 562345, 562446, 565452, 566454, 567052, 567759, 567961, 568963, 569965, 570243, 80 | 571144, 571548, 572045, 573249, 573451, 576255, 578057, 578158, 579665, 579867, 580044, 580347, 81 | 580650, 580751, 581349, 581450, 583858, 584355, 585256, 585862, 586460, 587664, 588565, 589567, 82 | 590047, 592455, 594156, 597869, 598568, 599065, 599469, 599671, 601127, 604537, 609143, 611231, 83 | 611938, 613235, 613538, 614439, 615441, 615744, 616645, 617748, 618952, 620434, 622236, 623137, 84 | 623844, 626648, 627549, 627852, 628248, 633847, 634748, 635245, 638049, 644044, 644246, 645450, 85 | 645551, 647858, 650746, 654350, 654552, 654754, 656253, 656657, 657659, 660951, 662551, 663755, 86 | 664757, 665254, 667056, 668361, 671855, 672756, 673455, 675661, 677766, 677968, 679568, 679770, 87 | 680250, 680452, 680957, 683256, 685058, 686969, 687163, 688569, 
689470, 690152, 692964, 693461, 88 | 693764, 694362, 695768, 698168, 700634, 701535, 702133, 704238, 705341, 706040, 707749, 709551, 89 | 713239, 715041, 715647, 715950, 720337, 723141, 724446, 725751, 727553, 727654, 728454, 729254, 90 | 729557, 731140, 732243, 733548, 734045, 734247, 735148, 737960, 742549, 744553, 745555, 748258, 91 | 748662, 749058, 749361, 751348, 751550, 753150, 753251, 756055, 757764, 759869, 760551, 761957, 92 | 763557, 765056, 765864, 766563, 767464, 769064, 770352, 771354, 773257, 774663, 779370, 782157, 93 | 782561, 783462, 784565, 786569, 788674, 788876, 789373, 792564, 792766, 792867, 793465, 800941, 94 | 802844, 803240, 804646, 809252, 810439, 810843, 812746, 814548, 814649, 815247, 816653, 818455, 95 | 818859, 820745, 822244, 825048, 825553, 825654, 826353, 826454, 827052, 828862, 832651, 833148, 96 | 833249, 835657, 837560, 837964, 841349, 843151, 844961, 845458, 849264, 849971, 852455, 856463, 97 | 856766, 856968, 857263, 859671, 861456, 865363, 867468, 869472, 870861, 871762, 871964, 872158, 98 | 872562, 872764, 873968, 877168, 877269, 878776, 878877, 880157, 882161, 884064, 885975, 886674, 99 | 887373, 888678, 889579, 891667, 894067, 894673, 894774, 896778, 896879, 898176, 899885, 901038, 100 | 901139, 901442, 902242, 904044, 905147, 907656, 908860, 910241, 910443, 911849, 912447, 917255, 101 | 917558, 919966, 922854, 923755, 926862, 927359, 929464, 930449, 932554, 933253, 937160, 942658, 102 | 943862, 947668, 951457, 952863, 953764, 955465, 957974, 958976, 959574, 962058, 965367, 965771, 103 | 966975, 969476, 970764, 971160, 972566, 973770, 978578, 979984, 983773, 984472, 987074, 987983, 104 | 989987, 990366, 991267, 992673, 992774, 993675, 994273, 995174, 996782]) 105 | -------------------------------------------------------------------------------- /neuropythy/io/__init__.py: -------------------------------------------------------------------------------- 1 | 
####################################################################################################
# neuropythy/io/__init__.py

'''
neuropythy.io is a namespace that contains tools for loading and saving data in neuroscientific
formats. It is intended as an extension of the nibabel libraries in that it is good at
auto-detecting many common formats and data-types and yields data in the neuropythy object system.
'''

from .core import (load, save, importer, exporter, forget_importer, forget_exporter,
                   to_nifti, load_json, save_json, load_csv, save_csv, load_tsv, save_tsv)

####################################################################################################
# neuropythy/java/__init__.py
# The code that manages the neuropythy link to the JVM.
# By Noah C. Benson

import numpy as np
import scipy as sp
import numbers as num
import os, sys, gzip

from array import array
from ..util import library_path

# Java start: the gateway object and its port are created lazily on first use.
_java_port = None
_java = None

def _init_registration():
    '''
    _init_registration() launches the py4j JVM gateway (no-op if already launched) and stores
    the gateway object and its port in the module-level _java and _java_port variables.
    '''
    from py4j.java_gateway import (launch_gateway, JavaGateway, GatewayParameters)
    global _java, _java_port
    if _java is not None: return
    # launch the JVM with the standalone nben jar on its classpath
    _java_port = launch_gateway(
        classpath=os.path.join(library_path(), 'nben', 'target', 'nben-standalone.jar'),
        javaopts=['-Xmx4g'],
        die_on_exit=True)
    _java = JavaGateway(gateway_parameters=GatewayParameters(port=_java_port))

def java_link():
    '''
    java_link() yields the module's py4j gateway object, launching the JVM first if needed.
    '''
    if _java is None:
        _init_registration()
    return _java

def serialize_numpy(m, t):
    '''
    serialize_numpy(m, type) converts the numpy array m into a byte stream that can be read by the
    nben.util.Py4j Java class. The function assumes that the type of the array needn't be encoded
    in the bytearray itself. The bytearray will begin with an integer, the number of dimensions,
    followed by that number of integers (the dimension sizes themselves) then the bytes of the
    array, flattened.
    The argument type gives the type of the array to be transferred and must be 'i' for integer or
    'd' for double (or any other string accepted by array.array()).
    '''
    # Header: the rank followed by each dimension size, as C ints.
    hdr = array('i', [len(m.shape)] + list(m.shape))
    # Payload: the flattened array values in the requested element type.
    payload = array(t, m.flatten().tolist())
    # The Java side reads big-endian, so swap on little-endian hosts.
    if sys.byteorder != 'big':
        hdr.byteswap()
        payload.byteswap()
    # array.tostring() was removed in newer Pythons in favor of tobytes().
    try:
        raw = hdr.tostring() + payload.tostring()
    except AttributeError:
        raw = hdr.tobytes() + payload.tobytes()
    return bytearray(raw)

def to_java_doubles(m):
    '''
    to_java_doubles(m) yields a java array object for the vector or matrix m.
    '''
    global _java
    if _java is None: _init_registration()
    arr = np.asarray(m)
    rank = len(arr.shape)
    if rank > 2: raise ValueError('1D and 2D arrays supported only')
    payload = serialize_numpy(arr, 'd')
    numpy_cls = _java.jvm.nben.util.Numpy
    if rank == 2:
        return numpy_cls.double2FromBytes(payload)
    return numpy_cls.double1FromBytes(payload)

def to_java_ints(m):
    '''
    to_java_ints(m) yields a java array object for the vector or matrix m.
    '''
    global _java
    if _java is None: _init_registration()
    arr = np.asarray(m)
    rank = len(arr.shape)
    if rank > 2: raise ValueError('1D and 2D arrays supported only')
    payload = serialize_numpy(arr, 'i')
    numpy_cls = _java.jvm.nben.util.Numpy
    if rank == 2:
        return numpy_cls.int2FromBytes(payload)
    return numpy_cls.int1FromBytes(payload)

def to_java_array(m):
    '''
    to_java_array(m) yields to_java_ints(m) if m is an array of integers and to_java_doubles(m) if
    m is anything else. The numpy array m is tested via numpy.issubdtype(m.dtype, numpy.int64).
    '''
    if not hasattr(m, '__iter__'): return m
    m = np.asarray(m)
    is_integral = (np.issubdtype(m.dtype, np.dtype(int).type) or
                   all(isinstance(x, num.Integral) for x in m))
    return to_java_ints(m) if is_integral else to_java_doubles(m)
eJzt20luJFcQRMG6ma7GowtaaQAIVWVlfo/BFrYSmvzxnKQWzX69fv54/er18/t/u+qvj0lPd38t0Ef6a6+T9FbVpPfgGpvvY/Nd7L2HnXew83w2ns2+c9l2JrvOY9NZ7DmHLWewY3/26833YF+26+nqbrbL+WYzu5337V42O8dOPfiequ+Ojez0HPvUddc29rnXnbvY5h42qeXuPWxy3RNb2ONzdsh7agNbvOfp/jb4nfYZJ7pr/2+nmuv+Ot57e/PTrbf21nlm402t9Z3ZdnJfXec1ndY23XFSz3TDKS3T/Sb0TDfr3jDdq3PDdKeu7dKNOnZL9+nWLd2lU690ky7N0h06dEo3qN4ofX/lTumbq/ZJ31qxS/rOam3St1Xqkb6rSov0TRVapG9JN0jfkbw/fUPq9vT7E/en33z65vR7T96bfuupe9NvPHFn+n1P35l+15P3pd/01H3ptzxxW/oNAAAAAAAAn7jrd2OelG4EAAAAAABADf7uqJd3/h7QpvX4e92e7NaP36Xo5Y697NZjK99n/XayVY+NbNVjHzvV38Y+dXexUe1N7FJvD7vYorLkDraoscH2HdLtN/dPd9/aP917Y/t0523N0403dU933dI63XRD53TP6Z3THSf3TTec2jfdbmLbdLNpTdO9JjVNd5rSMt1oQsd0n+4N0206N0w36dou3aNjt3SLbt3SDTr1St/fpVf6bo36N0rfXLlR+taqbdJ3VuySvrFak/R9lXqkb6vSI31ThQ7pe9IN0rck70/f4e5zd6dvOH1z+v0n702//dSt6XefuDP95qdvTL/XbZ/fln7rE3el33n3Tek33nlP+n133ZJ+26Ybfrsj/aaNb0+/58q702/59M3pd0x7KwAAAM+6+ndBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJz3+mGL9NcaAAAAAAAAAAAA8D6/+1ebferxe7P1+H3mOvyOeR1+578G//4iz7+DqcEGddvrr/00ums+nd51Omt9rnX6jd1pnG2r77N902/sxtesptXpqWVFOmqYpp92utWnmV5a6VSdRvpoo4smemihQ9r2Bpvv33r7xru33bzp3i23brhz+o2T75t627S73rln4k3pN266ZcrXWOcbOm8w/e3pN3Zv3em9Xd7a4Z3V31j5fZ3ftv1N1bar8pYK70i/Ifn5q37uCZ/zVNtTP1tOfJ6nP8eTH/+Jj/3Ox/z04975Md/9WHd/vCff9Mmf/+/H+PTP/vPPX/mz1HfHz17gM77fqcTXx3x+Luxj8138/38PW+9g4/lsPJt957LrTHadx8/hWWw5hy1nsGN/9uvNdn3Zrieb9WOzXuzVh616sFN9NqrNPnXZpia71GOTWmxRhx1qsAMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMzx+slI372d3fdJbW73XVvb29bYGRtTZ1v7zt43ff9E6U1ta1PsuUV6Q1vO2THdoLv0fja031Y268lm/disF1v1YacefE/VZ6Pa7FNXYhsb2aWj5Ca2sUdl
6R3sYYcq0u2375BuvnmDdOut/dONN7ZPt93WPt1zU/N0xy290/2+6d5lh3Sr6V/z6R6TW6fvn9o3fffEtul7T3Y90Td956Sv2fRdU5qmb0l3vKNn+o4K9Puedt/T7br/+3mYfl91el2jFQAAAAAAAAAAAAD87al/gwU8x/f8Tjbfw/f4Lraez/f0fDaezb5z2XYmu85j01nsOYctZ7BjfzbszX592a4nu/Vjs17s1YuterBTDzaqzz512aYuu9Rkk1rsUYct6rBDDTbQfzPt67VPv2s6zfWeTmudJ9H4TOP0GwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABgnz8BHvJ6zQ== 65 | 66 | 67 | -------------------------------------------------------------------------------- /neuropythy/lib/data/fs_LR/lh.atlasroi.32k_fs_LR.shape.gii: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 52 | 53 | 54 | 55 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | eJzt3UluwkAQQFHfLFfj6Fllg2KB3UMNfou3SUTXbyywLSQ4jtfPcep4nf/v0+OqurPfSqKf3+jnPXqfq56j6Db76r2n7nuJbnrqPqrvQbvuO83Zu7Wu78zYmr0xc1/WtozXbtmujzP1ZLmHzdDxqWF1S/bZKxoi5u6eeWXe6Myrs+7OuzPn6qy7M76dk3HtTzNWrDu65vvaM9ebtRbQ2+7zNAA8TdXzZsVzfrVrlUrXV1WuCytcx2a/3s7cl/WeJeP9VLamTD0jLTN7qjfM6BidP9JQde6d2btn7py3a9aOOatnrFx/1doz1n1fO/N6s9bKsAa9jZ7/AQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA4ImO131/j189Z6QP4Emqv/91eA+v3F+1vWJ3teZKvdVaoxsqN2Y/1lnbsnVlPI6ZerK0ZOiIboicHzU7Ym70cQYgztk54P3v759N+IwAAAAAAAAAAAAAAAAAAAAAAAAAoL+K3y939n16lb43r8tvBXX43aPqv+FUuV93/uZqvRHdo627mmd1rmyd2bhiDzv7rjbubvu2Nbrrv87oBgAAetp5H8qYzseu47667anLfjq9jjrso/oeKvdXba/YXa25am90x7et0Q0dGgEAAAAAAAAAAAAAAAAAgK5+AReeB2U= 76 | 77 | 78 | -------------------------------------------------------------------------------- /neuropythy/lib/data/fs_LR/lh.atlasroi.59k_fs_LR.shape.gii: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 17 | 18 | 19 | 20 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 
| 44 | 45 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | MODE_AUTO_SCALE_PERCENTAGE 63 | 98.000000 2.000000 2.000000 98.000000 64 | -100.000000 0.000000 0.000000 100.000000 65 | ROY-BIG-BL 66 | true 67 | true 68 | false 69 | true 70 | THRESHOLD_TEST_SHOW_OUTSIDE 71 | THRESHOLD_TYPE_OFF 72 | false 73 | -1.000000 1.000000 74 | -1.000000 1.000000 75 | -1.000000 1.000000 76 | 77 | PALETTE_THRESHOLD_RANGE_MODE_MAP 78 | false 79 | 80 | ]]> 81 | 82 | 83 | eJzt2tFu2zAMBVD/WX8tnz7sYcBaxE1iSyJFnocDFBhg3kt1XazuOB5fx6njcf5n7/r7jM7u7q+b6POqfO7RO4lmT/ZkR3n2E53RbnKxEzu5s4/obHaRaw/Ruewgtn90Jmevt8766tqvZ/WOlftV/VxdsVe1TpXe91912aVPhw7Zu7ybP2OH6rkz5d8t86d5IzPvkvVKztVZr2ZclTNzvqzZ7uSalS1bprt5RuYaleVungw5Rma4kiNy/ujZ786PmDtj5m+zZ877OXPVrNlzAGAno98VVv7bDgAAkE3nd6Gu74Qde5/dAVTt3+n+o8udT4d7rur3eZX7Vb2TrXjfXO0evVKfUV2iO1XoMbrD6h475981+465Z2SemXunvLtk3SHnrIyjcmbOlzVbxlzZMs3M82kmOfKcS+T8qNnVZj6bW3HWijmzZ+z4bCCnO5+bAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIB+jsc4P595Ncu/r0fnG9ENAKATn4W+76H7LuzBDv7fQXQOZ6+3zvrq+rprdA4d9evUrWKvap0q9onOoMf+HXb+e7F79ugMlXe9Y97oDFV2mj1j5nyZswEAwKdGfL599oyz3zP5P8oAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADDf8fguOk9E97NdPBOdefb5XxXdI9s+Kuxk5C523smMPey2i+470L1fd53rd+70vT3zZ3im3rN6Zjrf2R2je67o96rrrB2s7rbqrKM7zOgXnfvdbu92jM468tyis404MwAAgBGi7lGqzlt577b7jJnPn/Vs7+SQU+TvIqjD95Y92ZH9RLMbe7GTcTuJzmMX8ezBDjr379q9Y+/unaOz6Dq+a3QOHe93jM6gGwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOzuD/R09gI= 84 | 85 | 86 | -------------------------------------------------------------------------------- /neuropythy/lib/data/fs_LR/rh.atlasroi.164k_fs_LR.shape.gii: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 
eJzt29uOG1UUBFD/Gb82n454QCSBCb706dqX9bAkEFJ7V5WdAeI8Hl9/PL71+Pr+n33ir+fSz4n3Aj2k33udpLeqJr0H77H5Pjbfxd572HkHO89n49n8XJ7LrjPZdR6/Ds9izzlsOYMd+7Nfb/bry3Y9+bnXzyeb2e1+n+5ls/vYqgc71Wej2q7Yx05nXLmNfa5jl3qu3sQ2n7FFHac+GzZ5zckdbPGc0xvY4ffu6N8G/+2u7vX/szt717/O9T1bouuNfad63tR1suMNPaf7ndxxutep3aY7ndhtukt96rR6l937THc3oct0Z917THfVucN0R3rb0126m269pTvp1Fm6iy59pTvo0FU6e/WO0rkrd5TOW7WbdNaK3aQzVuslna1SH+lcVfpI56nQQzpLuod0hmT+9P2p7OnbE7nTd9+dOX3znZnTt96VN33jHVnTt53Omb7pZL70PaeypW85kSt9BwAAAAAAwCuu+F7MaemOAAAAAAAAALb49fdpf/z7X/86fSv/3umV34e3Ya/NfJfCXthqEt8rq893AGs7+V1Ne9Xcxmeqxy72qbuJXertYZd6W9jDDpXYYHf/mzdI9761/3TfG7tP97yt83THmzpPd7ul63SvG3pOdzq963SPk/tNdzi123R/E7tN9zat03Rnk/pM9zWlz3RPE3pMd9S9x3Q3nftL99K1v3QfHXtLd9Gtt3QHnfpK5+/SVTp7h67Smat3lM5buZ901qrdpHNW7CWdsVon6XyV+khnq9JFOleFLtJ50h2ksyTzp3OksqczyHw+c/r+O/Omb78rb/pmGT/PmL73ZL70raeype88kSt949WZ0vddmSd921VZ0nfJkL/pk/vT92y6O33Lqzen75h2KwAAAGe983skAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANzr8QWvSb9nAQAAAOAk/w9sFnvOYcsZ7NifDXuzX1+268lu/disF3v14PtGPdipNt/dq88+9fjeay2+j1yPTerwnf06bJHnz7HU7t8G+p9K93rfQuf6nkrXdbpO3zeBjvXblW712oVO9VmVLvVYgf+2Ot9h+r6qvPf0prO69KWrOzvS03M9pe+r3M3mfnSjF53o444+0vdV6EAP8zvY/h7Ymn1j7m2ZN+XdknVDzukZJ+ebmm1irmmZJuWZkmVCju4ZOt/f9faOd3e7udO9XW7tcGf1GyvfV/W2indVu6nSPVVuSd/xf69/+obk6yde+5nXvPp1737Nu17v2de587UqvsYrPb3y/Fef++yzTzz3yme+86zvnnfFs959xo/P+eQZ8DtX/VwBnuczT5L3wj5+PdjHz4Fd/Ozfw9Y72Hg+n+XZbDuXXWey6zw2ncWec9hyBhv2Z8Pe7NeX3XqyWz8268VefdipBzvVZ6Pa7FOXbWqySz22qMUeddiiBjsAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEB/j6+sdP6t7L6PzXex9x623sHG8/ksz5be18azt7XvzF3THUyT3tOu9qTulukOJkhv
aMf+G6bzd2e/vuzWk836sVcv9urDVj3Yqb7ERnayzwSpbexjl26Sm9il3h42qbWFPfL92yLf+/Yd0n1v3iDd89b+0/1u7D3d7bb+011u6j3d35au091t6Dzd1fTu031M7jqdf2rP6bwTe01nndZvOtukPtO50vR4HR1eQ3+f0d1n/u5Db5/T2Xv09TpdAQAAAAAAAAAAAMA/Tv6ZNuBaPvP72Hwfe+/gsz2ff1ffwcZz+QzPZduZ7DqPTeex5xy2nMGO/dmwP/v1Zbu+7NaPzXqxVy+26sFOPdioNvvUZpua7FKPTeqxRx22qMMOeTaoQf91+0/fNpnea3WfvmkyfWc7T98ymZ7v7Tl9x2T61S0AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACX8C9wp1og== 65 | 66 | 67 | -------------------------------------------------------------------------------- /neuropythy/lib/data/fs_LR/rh.atlasroi.32k_fs_LR.shape.gii: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 52 | 53 | 54 | 55 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | eJzt3Utu4zAQRVHtLFvz0nsaBHZbH1L1ijqDMwgCsy6VwKIQIN6218/20fb6/L1vr+vqzH47qb6+Cde8er+zrlN1n33t21N128j9VHc9eS/2UK9zv27Ne3uTm7t2JramNyb3pZ4rEs+iaWf+pJ6Ulm8dd/RUN1TO3zt79PyKuUdmjph7dN6VmWdmnZl3ds6RWbNnzFx/1tpX1v209tU1/649cr1RawFrq7hPA8CTdL1f/r7Xd9lDt3PK/85Waf1dzoQdzrHpjcnPBKltic9Sac94ST0pLVc6RrVUN1ydf6WhanbF3BEzj8ztNmvPvDvmzJ4xc/1Za3dYc+R6KevAN1fv/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPBE2+uYytmVrQBddH//6/we3vke1LW9e3d1x2rXuGNrdUfnxuS+1LbkruqG1J6UloSOhIancc0BuNO7+86786u/EQAAAAAAAAAAAAAAAAAAAAAAAAA8T9f/L9f9/+at8llBK3z2UffPcOrc37Vdc3bv3c1XW+9qHtU5s3Vk49/ed1+n9O1pT2zb83tR3QRAf7PvJ3eeQZLXnXFeTFtr1HNG1evPvvbM65zjzrnzOZRrVv3Zrbiv1fa0yn5Weg9cYR/d99C5f5X26pYnNFd3rNha3bBCIwAAAAAAAAAAAAAAAAAAsKp/+88MUQ== 76 | 77 | 78 | -------------------------------------------------------------------------------- /neuropythy/lib/data/fs_LR/rh.atlasroi.59k_fs_LR.shape.gii: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | 8 | 9 | 10 | 11 | 
12 | 13 | 17 | 18 | 19 | 20 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | MODE_AUTO_SCALE_PERCENTAGE 63 | 98.000000 2.000000 2.000000 98.000000 64 | -100.000000 0.000000 0.000000 100.000000 65 | ROY-BIG-BL 66 | true 67 | true 68 | false 69 | true 70 | THRESHOLD_TEST_SHOW_OUTSIDE 71 | THRESHOLD_TYPE_OFF 72 | false 73 | -1.000000 1.000000 74 | -1.000000 1.000000 75 | -1.000000 1.000000 76 | 77 | PALETTE_THRESHOLD_RANGE_MODE_MAP 78 | false 79 | 80 | ]]> 81 | 82 | 83 | eJzt2lFq40AQBFDdbK/mo4dlWchH7Ej2jKpn+n08CATUVa2ESOMcx+PP8dTxeP69K/5ep6sR++skfb92v+/pvax0T9J5q+8pndOO6rIbuxm9l3Q+O8mzC7uwBzs42z+dS3e9ddZX1149d++48/PGru9c3Tqt2Ou3Pit12qXLmR7V+6zeYeX8V7JXyn81d4Xs72RO5pa1Rs67s76b8a6slfN9mm1WvhG5RmerlqlSnlFZPs0zMse7WUZnuJojOT81OzF3xsxX8++cd9es2XMAYCWj3hESzy0AAADU0fU9sOO7cLczgGd9d+zd5aynwznX7ud5O59Z7nomu+N5827n6Dv1GdUl3Wdkj1SX1TusnH909rvyr5h7RuaZuVfKu0rWFXLOyjgqZ+V8VbNVzFUt08w8VzNVyVIhRzpDcn5q9m4zf5p757yd5syeMfu+AH18+kwPAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPRxPMZZIWPFfgAAlXkW8lxoDz/3t4N8Ht311llfXXv27NYxnUW3vr1267Rrn3SOUV3SGbrei9V/L1bPns6w+65XyrtK1hVyVs9YOV/lbAAAcNWr59tX5yG/ff7gf5QBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACDnePzz/+t0nru7f9/BGenMs+7/J9Id7KPuLnbYy+g9rLiL7jvo3L9rd53379zpZ3tG14p9Z/Ws1Hd2x3TPO/qlOqe6ze6d7jCjXzr32W5nO6azjrh33zsDAEAVVZ5RK+RIZ0ifKXSZe/fMxHnYDjuc3WXm9Ve67uhrjrzeqGuNuE767wO1JT+LYA9+tuzJjuwnzW7s5ZN9dN2JfZzbRzqXPdhBlR2kM+l+f/d0Hp3nd05n0VXPd3qmc+j3fr90Br0AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADo4QuWEPg5 84 | 85 | 86 | -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage/surf/lh.benson14_angle.v4_0.mgz: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage/surf/lh.benson14_angle.v4_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage/surf/lh.benson14_eccen.v4_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage/surf/lh.benson14_eccen.v4_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage/surf/lh.benson14_retinotopy.v4_0.sphere.reg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage/surf/lh.benson14_retinotopy.v4_0.sphere.reg -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage/surf/lh.benson14_sigma.v4_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage/surf/lh.benson14_sigma.v4_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage/surf/lh.benson14_varea.v4_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage/surf/lh.benson14_varea.v4_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage/surf/lh.glasser16_atlas.v1_0.mgz: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage/surf/lh.glasser16_atlas.v1_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage/surf/lh.rosenke18_vcatlas.v1_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage/surf/lh.rosenke18_vcatlas.v1_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage/surf/lh.wang15_fplbl.v1_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage/surf/lh.wang15_fplbl.v1_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage/surf/lh.wang15_mplbl.v1_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage/surf/lh.wang15_mplbl.v1_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage/surf/rh.benson14_angle.v4_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage/surf/rh.benson14_angle.v4_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage/surf/rh.benson14_eccen.v4_0.mgz: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage/surf/rh.benson14_eccen.v4_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage/surf/rh.benson14_retinotopy.v4_0.sphere.reg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage/surf/rh.benson14_retinotopy.v4_0.sphere.reg -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage/surf/rh.benson14_sigma.v4_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage/surf/rh.benson14_sigma.v4_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage/surf/rh.benson14_varea.v4_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage/surf/rh.benson14_varea.v4_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage/surf/rh.glasser16_atlas.v1_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage/surf/rh.glasser16_atlas.v1_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage/surf/rh.rosenke18_vcatlas.v1_0.mgz: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage/surf/rh.rosenke18_vcatlas.v1_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage/surf/rh.wang15_fplbl.v1_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage/surf/rh.wang15_fplbl.v1_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage/surf/rh.wang15_mplbl.v1_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage/surf/rh.wang15_mplbl.v1_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v1_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v1_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v2_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v2_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v2_1.mgz: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v2_1.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v2_5.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v2_5.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v3_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v3_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_anorm.v1_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_anorm.v1_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v1_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v1_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v2_0.mgz: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v2_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v2_1.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v2_1.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v2_5.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v2_5.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v3_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v3_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_enorm.v1_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_enorm.v1_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_retinotopy.v3_0.sphere.reg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_retinotopy.v3_0.sphere.reg -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_sigma.v3_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_sigma.v3_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v1.0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v1.0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v2_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v2_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v2_1.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v2_1.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v2_5.mgz: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v2_5.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v3_0.mgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v3_0.mgz -------------------------------------------------------------------------------- /neuropythy/lib/data/hcp_lines_osftree.json.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/data/hcp_lines_osftree.json.gz -------------------------------------------------------------------------------- /neuropythy/lib/models/lh.benson17.fmm.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/models/lh.benson17.fmm.gz -------------------------------------------------------------------------------- /neuropythy/lib/models/rh.benson17.fmm.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/models/rh.benson17.fmm.gz -------------------------------------------------------------------------------- /neuropythy/lib/models/v123.fmm.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noahbenson/neuropythy/44400c69434cc946feb0d789f7488b36deb3660e/neuropythy/lib/models/v123.fmm.gz 
-------------------------------------------------------------------------------- /neuropythy/lib/projections/lh.occipital_pole.mp.json: -------------------------------------------------------------------------------- 1 | {"version": 1.0, 2 | "registration": "fsaverage", 3 | "hemisphere": "lh", 4 | "center": [-8.19000, -83.55000, -54.34000], 5 | "center_right": [36.55000, -75.07000, -55.03000], 6 | "method": "orthographic"} -------------------------------------------------------------------------------- /neuropythy/lib/projections/rh.occipital_pole.mp.json: -------------------------------------------------------------------------------- 1 | {"version": 1.0, 2 | "registration": "fsaverage", 3 | "hemisphere": "rh", 4 | "center": [-19.10000, -77.49000, -60.25000], 5 | "center_right": [28.41000, -75.75000, -58.78000], 6 | "method": "orthographic"} -------------------------------------------------------------------------------- /neuropythy/math/__init__.py: -------------------------------------------------------------------------------- 1 | #################################################################################################### 2 | # neuropythy/math/__init__.py 3 | # The Neuropythy math core module. 4 | # by Noah C. 
Benson 5 | 6 | from .core import (pi, half_pi, quarter_pi, tau, inf, nan, radperdeg, degperrad, 7 | pytorch, to_torchdtype, torchdtype_to_numpydtype, 8 | isarray, istensor, issparse, isdense, eq, ne, le, lt, ge, gt, 9 | clone, totensor, astensor, toarray, asarray, asdense, promote, reshape, 10 | add, sub, mul, div, mod, safesqrt, sqrt, exp, log, log10, log2, abs, 11 | arcsin, sin, arccos, cos, arctan, tan, lgamma, hypot, hypot2, 12 | triarea, eudist2, eudist, trisides2, trisides, trialtitudes, 13 | rangemod, radmod, degmod, branch, zinv, numel, rows, check_sparsity, unbroadcast, 14 | sum, prod, mean, var, std, median, min, max, argmin, argmax, all, 15 | beta_log_prob, beta_prob, normal_log_prob, normal_prob, 16 | cauchy_log_prob, cauchy_prob, halfcauchy_log_prob, halfcauchy_prob, 17 | laplace_log_prob, laplace_prob, exp_log_prob, exp_prob, 18 | gennorm_log_prob, gennorm_prob, gumbel_log_prob, gumbel_prob) 19 | 20 | pytorch_ok = False 21 | try: 22 | if pytorch() is not None: 23 | pytorch_ok = True 24 | except ImportError: pass 25 | 26 | -------------------------------------------------------------------------------- /neuropythy/mri/__init__.py: -------------------------------------------------------------------------------- 1 | #################################################################################################### 2 | # neuropythy/mri/__init__.py 3 | # Data structures and simple tools for dealing with the cortex and cortical data. 4 | # By Noah C. Benson 5 | 6 | ''' 7 | The neuropythy.mri package contains definitions of the relevant data-structures and various tools 8 | for interacting with cortical data. The primary data include: 9 | 10 | * Cortex, a class based on neuropythy.geometry.Topology, which tracks the various layers of the 11 | cortex in the form of CorticalMesh objects. 12 | * Subject, a class that tracks data connected to an individual subject. 
13 | ''' 14 | 15 | from .core import (Subject, Cortex, is_subject, is_cortex, to_cortex) 16 | from .images import (to_image_spec, to_image, to_image_header, image_interpolate, image_apply, 17 | image_reslice, is_image_spec, image_array_to_spec, image_header_to_spec, 18 | image_to_spec, image_copy, image_clear, is_pimage, is_npimage, 19 | to_image_type) 20 | from ..util import (is_image, is_image_header) 21 | -------------------------------------------------------------------------------- /neuropythy/optimize/__init__.py: -------------------------------------------------------------------------------- 1 | #################################################################################################### 2 | # neuropythy/optimize/__init__.py 3 | # Initialization code for neuropythy's optimization module. 4 | # By Noah C. Benson 5 | 6 | from .core import (fapply, finto, 7 | PotentialFunction, is_potential, to_potential, 8 | is_const_potential, const_potential, const, identity, is_identity_potential, 9 | compose, part, exp, exp2, power, sqrt, log, log2, log10, erf, sum, dot, 10 | cos, sin, tan, sec, csc, cot, asin, acos, atan, atan2, 11 | piecewise, cos_well, cos_edge, abs, sign, gaussian, sigmoid, 12 | row_norms, col_norms, distances, 13 | signed_face_areas, face_areas) 14 | -------------------------------------------------------------------------------- /neuropythy/plans/__init__.py: -------------------------------------------------------------------------------- 1 | #################################################################################################### 2 | # neuropythy/plans/__init__.py 3 | # A neuropythy module that stores PIMMS calculation plans. 4 | # By Noah C. 
def run_prfclean(hemi, **kw):
    '''
    run_prfclean(hemi) runs the calculations associated with
    neuropythy.plans.prfclean_plan and yields a duplicate hemisphere object
    with the cleaned pRF maps included as properties.

    In addition to including the cleaned pRF data as properties of the
    returned hemisphere, this function appends meta-data describing the model
    parameters that were fit by the pRF cleaning.

    The following options may be given:
      * labels: A vector of labels (one per vertex of cortex) or the name of a
        property of cortex. Note that the labels must not use 0 as an ROI:
        0 must always indicate vertices that should not be optimized.
        Default: 'visual_area'.
      * retinotopy: The argument for neuropythy's retinotopy_data function;
        this is typically a string prefix for retinotopy data on the cortex's
        properties ('prf_' for HCP subjects), or a map of the data itself.
        Default: 'prf_'.
      * tag: The prefix to give the output properties and the meta-data on the
        returned cortex object. The meta-data is named params.
        Default: 'prfclean_'.
      * step_fn: A function that gets run every step of the optimization. The
        function must accept 2 arguments: the step number and the imap from
        the prfclean_plan. Default: None.
      * steps: The number of steps to run for minimization. Default: 200.
      * lr: The learning rate of the minimization. Default: 0.1.
      * max_eccen: The maximum eccentricity in the optimized maps. This should
        be the eccentricity of the peripheral boundary of the labeled maps.
        Default: 90.
      * fieldsigns: The fieldsign values for each visual area. This may either
        be a dictionary whose keys are visual area labels and whose values are
        all either 1 or -1, or it can be Ellipsis or None. If the value is
        Ellipsis (the default), then the dictionary visual_area_field_signs
        from the neuropythy.vision package is used. If the value is None, then
        even labels are given fieldsigns of 1 and odd labels are given
        fieldsigns of -1.
      * model_knob: The base-2 log of the constant weight that the
        vmag_likelihood is multiplied by prior to summation with
        meas_likelihood. Default: 12.
      * boundary_knob: The base-2 log of the constant weight that the
        boundary_likelihood is multiplied by prior to summation with
        meas_likelihood. Default: 2.
      * start_coords: May specify the (2 x N) coordinate matrix of
        measurements to use as the starting point in the optimization. The N
        must be the number of vertices in the cortex, though values are only
        needed wherever there are non-zero labels. If None, then the
        measurements are used as the starting coordinates. The first row of
        the start coordinates must be the polar angle theta given in
        counter-clockwise radians starting from the right horizontal meridian,
        and the second row must be the eccentricity of the vertices in visual
        degrees.
      * prior_cmag_tan_params: The initial values of the tangential cortical
        magnification model parameters, whose interpretation depends on the
        value given for cmag_tan_method (see it for more details). Note that
        cmag_tan_method describes parameters such as loga, which are used in
        the model via the transformation a = exp(loga). The value that is
        given for the prior should *not* be the transformed param--i.e.,
        provide a, not loga, as the prior value.
      * prior_uvm_ipsi: The prior value of the upper-vertical-meridian's
        ipsilateral representation. This value is the number of radians into
        the ipsilateral visual field that the pRF representations of all
        visual areas extend for the upper vertical meridian.
      * prior_lvm_ipsi: The prior value of the lower-vertical-meridian's
        ipsilateral representation. This value is the number of radians into
        the ipsilateral visual field that the pRF representations of all
        visual areas extend for the lower vertical meridian.
      * grad_ipsi: Whether the uvm_ipsi and lvm_ipsi parameters, representing
        the ipsilateral representation in each visual area, are part of the
        optimized parameters whose gradients are required.
      * grad_coords: Specifies whether the tensors representing the
        coordinates of the pRF centers should track their gradients
        (default: True).
      * grad_cmag_tan: Specifies whether the tensors representing the
        tangential cortical magnification parameters of the retinotopy model
        should track their gradients (default: True).
      * grad_cmag_rad: Specifies whether the tensors representing the radial
        cortical magnification parameters of the retinotopy model should track
        their gradients (default: True).
      * device: The pytorch device name used for all tensors
        (default: 'cpu').
      * meas_weight: A vector of weights, one per vertex in mesh, that
        specifies how strongly the model should believe that particular
        vertex's measurement. Weights are the divisor for the meas_stddev; see
        calc_meas_stddev for more information about the standard deviation
        formula.
      * prior_cmag_eccen_offsets: The initial value of the offset parameter c2
        from Horton and Hoyt's (1991) equation cmag(ecc) = c1 / (c2 + ecc).
        This should be either a single number, which is used for all visual
        areas, or a dictionary of visual area labels mapped to values.
      * dtype: May optionally specify the dtype for PyTorch to use in the
        optimization. This may be a PyTorch dtype object or it may be a
        string, in which case it is looked up as a member of the torch module
        (e.g., 'float').
      * cmag_tan_method: The method to use for calculating tangential cortical
        magnification. This may be either 'sinusoid' (the default), 'beta', or
        None, in which case tangential cortical magnification is modeled to be
        uniform across polar angle. For the sinusoid, the parameters are
        tan_hva and tan_vma where the actual hva and vma are
        arctan(tan_hva)*2/pi and arctan(tan_vma)*2/pi, limiting them both to
        the +/- 1 range. For the 'beta' method, the params are loga and logb,
        where the beta-distribution parameters that are used are simply
        a = exp(loga) and b = exp(logb).
      * yield_imap: If True, yields the imap object instead of the hemi object
        after the minimization. Default: False.
    '''
    import numpy as np
    # Pop off the options that this function itself handles; everything else
    # is forwarded to prfclean_plan below.
    stepfn = kw.pop('step_fn', None)
    lr = kw.pop('lr', 0.1)
    steps = kw.pop('steps', 200)
    tag = kw.pop('tag', 'prfclean_')
    yield_imap = kw.pop('yield_imap', False)
    # Merge in the defaults for the plan's arguments (explicit kw wins).
    kw = dict(dict(retinotopy='prf_', labels='visual_area', max_eccen=90), **kw)
    # Go ahead and make the imap now:
    imap = prfclean_plan(cortex=hemi, **kw)
    # NOTE(review): llh0 is never read again; presumably this touch forces the
    # lazy imap to run its initial likelihood calculation -- confirm.
    llh0 = imap['likelihood']
    gradients = imap['gradients']
    # The efferents of the gradient tensors are the cached imap values that
    # must be forgotten (recomputed) whenever the gradients change.
    grad_effs = imap_efferents(imap, gradients.keys())
    # Okay, we're going to run it!
    torch = imap['torch']
    opt = torch.optim.LBFGS(list(gradients.values()), lr=lr)
    for step in range(steps):
        if stepfn is not None:
            r = stepfn(step, imap)
            # A step function may return a replacement imap; otherwise the
            # current one is kept.
            if r is not None: imap = r
        # LBFGS requires a closure that re-evaluates the loss (here, the
        # negative log-likelihood) and its gradients.
        def closure():
            opt.zero_grad()
            # Clear cached downstream values so the likelihood is recomputed
            # from the updated gradient tensors.
            imap_forget(imap, grad_effs)
            nll = -imap['likelihood']
            nll.backward()
            return nll
        opt.step(closure)
    # Finished minimizing--if we need to yield the imap we are done.
    if yield_imap: return imap
    # We need to put the data on the hemisphere.
    mesh = imap['mesh']
    ang = np.full(hemi.vertex_count, np.nan)
    ecc = np.full(hemi.vertex_count, np.nan)
    # Convert theta (counter-clockwise radians from the right horizontal
    # meridian) into the neuropythy polar-angle convention (degrees of
    # clockwise rotation from the upper vertical meridian, in (-180, 180]).
    ang[mesh.labels] = np.mod(90 - 180/np.pi*imap['theta'].detach().numpy() + 180, 360) - 180
    ecc[mesh.labels] = imap['eccen'].detach().numpy()
    hemi = hemi.with_prop({(tag + 'polar_angle'): ang, (tag + 'eccentricity'): ecc})
    # Extract the fitted model parameters as plain numpy arrays for meta-data.
    params = {k: np.array(imap[k].detach().numpy())
              for k in ['cmag_eccen_offsets', 'cmag_tan_params', 'uvm_ipsi', 'lvm_ipsi']}
    hemi = hemi.with_meta({(tag + 'params'): params})
    return hemi
The intended use 23 | of these functions is that, during a continuous optimization in which a 24 | parameter should be restricted to a particular range, the parameter over 25 | which the optimization occurs is in fact the unlimited parameeter, which is 26 | allowed to take on any real value, and which gets limited to its range 27 | prior to its use in any calculation. 28 | ''' 29 | return min + (max - min) * (math.arctan(param)/np.pi + 0.5) 30 | def unlimit_param(param, min=-1, max=1): 31 | ''' 32 | unlimit_param(x) yields the unlimited parameter u, which may take on any 33 | real value, from its limited value x, which must be between -1 and 1. 34 | unlimit_param(x, min, max) uses the given min and max values instead of the 35 | default values -1 and 1. 36 | 37 | The opposite of u = unlimit_param(x) is x = limit_param(u). The intended use 38 | of these functions is that, during a continuous optimization in which a 39 | parameter should be restricted to a particular range, the parameter over 40 | which the optimization occurs is in fact the unlimited parameeter, which is 41 | allowed to take on any real value, and which gets limited to its range 42 | prior to its use in any calculation. 43 | ''' 44 | return math.tan(np.pi * ((param - min) / (max - min) - 0.5)) 45 | def imap_forget(imap, ks): 46 | ''' 47 | imap_forget(imap, k) yields imap after clearing the cacne for key k. This 48 | can also be accomplished with: del imap[k]. 49 | imap_forget(imap, [k1, k2, ...]) clears the cache for all the given keys. 50 | ''' 51 | if pimms.is_str(ks): ks = [ks] 52 | for k in ks: 53 | del imap[k] 54 | return imap 55 | def imap_efferents(imap, ks): 56 | ''' 57 | imap_efferents(imap, k) yields the key names of any efferent of the key k 58 | in imap. 59 | imap_efferents(imap, [k1, k2, ...]) yields the key names of the any 60 | efferent of any of the given keys. 
61 | ''' 62 | if pimms.is_str(ks): ks = [ks] 63 | effs = set([]) 64 | for (k,deps) in imap.plan.dependencies.items(): 65 | if k in ks: continue 66 | for d in deps: 67 | if d in ks: 68 | effs.add(k) 69 | break 70 | return list(effs) 71 | -------------------------------------------------------------------------------- /neuropythy/registration/__init__.py: -------------------------------------------------------------------------------- 1 | #################################################################################################### 2 | # registration.py 3 | # Tools for registering the cortical surface to a particular potential function 4 | # By Noah C. Benson 5 | 6 | ''' 7 | The neuropythy.registration package includes tools for performing cortical mesh registration to 2D 8 | models that are projected to the cortical surface. See specifically, the help string for the 9 | mesh_register function. 10 | ''' 11 | 12 | from .core import (mesh_register, java_potential_term) 13 | -------------------------------------------------------------------------------- /neuropythy/registration/core.py: -------------------------------------------------------------------------------- 1 | #################################################################################################### 2 | # registration/core.py 3 | # Core tools for registering the cortical surface to a particular potential function 4 | # By Noah C. Benson 5 | 6 | import numpy as np 7 | from numpy import pi 8 | from ..java import (to_java_doubles, to_java_ints, to_java_array, java_link, serialize_numpy) 9 | from .. 
# These are dictionaries of all the details we have about each of the possible arguments to the
# mesh_register's field argument. Each entry maps a field type name to a list whose first element
# is the name of the Java factory method on nben.mesh.registration.Fields and whose remaining
# elements describe that method's arguments: a string 'F'/'E'/'X' stands for the faces/edges/
# coordinates matrix, an int is a positional index into the user-supplied argument list, and a
# [name, default] pair is a named numeric option. Types with multiple shapes map to a nested
# dictionary keyed by shape name.
_parse_field_data_types = {
    'mesh': ['newStandardMeshPotential', ['edge_scale', 1.0], ['angle_scale', 1.0], 'F', 'X'],
    'edge': {
        'harmonic': ['newHarmonicEdgePotential', ['scale', 1.0], ['order', 2.0], 'F', 'X'],
        'harmonic-log': ['newHarmonicLogEdgePotential', ['scale', 1.0], ['order', 2.0], 'F', 'X'],
        'lennard-jones': ['newLJEdgePotential', ['scale', 1.0], ['order', 2.0], 'F', 'X'],
        'infinite-well': ['newWellEdgePotential', ['scale', 1.0], ['order', 0.5],
                          ['min', 0.5], ['max', 3.0],
                          'E', 'X']},
    'angle': {
        'harmonic': ['newHarmonicAnglePotential', ['scale', 1.0], ['order', 2.0], 'F', 'X'],
        'harmonic-log': ['newHarmonicLogAnglePotential', ['scale', 1.0], ['order', 2.0], 'F', 'X'],
        'lennard-jones': ['newLJAnglePotential', ['scale', 1.0], ['order', 2.0], 'F', 'X'],
        'infinite-well': ['newWellAnglePotential', ['scale', 1.0], ['order', 0.5],
                          ['min', 0.0], ['max', pi],
                          'F', 'X']},
    'anchor': {
        'harmonic': ['newHarmonicAnchorPotential', ['scale', 1.0], ['shape', 2.0], 0,1, 'X'],
        'gaussian': ['newGaussianAnchorPotential', ['scale', 1.0], ['sigma', 2.0],
                     ['shape', 2.0], 0, 1, 'X']},
    'mesh-field': {
        'harmonic': ['newHarmonicMeshPotential', ['scale', 1.0], ['order', 2.0], 0,1,2,3,4,
                     'X'],
        'harmonic-log': ['newHarmonicLogMeshPotential', ['scale', 1.0], ['order', 2.0], 0,1,2,3,4,
                         'X'],
        'gaussian': ['newGaussianMeshPotential', ['scale', 1.0], ['sigma', 0.5],
                     ['order', 2.0], 0,1,2,3,4, 'X']},
    'perimeter': {
        'harmonic': ['newHarmonicPerimeterPotential', ['scale', 1.0], ['shape', 2.0],
                     'F', 'X']}};

def _parse_field_function_argument(argdat, args, faces, edges, coords):
    '''
    Resolve a single argument descriptor (one element of an entry in
    _parse_field_data_types) into the concrete value to pass to the Java
    factory method. `args` is the user-supplied argument list for this field
    instruction; `faces`, `edges`, and `coords` are the Java arrays for the
    mesh.
    '''
    # first, see if this is an easy one...
    if argdat == 'F':
        return faces
    elif argdat == 'X':
        return coords
    elif argdat == 'E':
        return edges
    elif pimms.is_int(argdat):
        # A bare int indexes directly into the user's argument list.
        return to_java_array(args[argdat])
    # okay, none of those; must be a list with a default arg
    argname = argdat[0]
    argdflt = argdat[1]
    # see if we can find such an arg given by name in the user's args...
    # NOTE(review): if the name appears as the *last* element of args with no
    # following value, args[i+1] raises IndexError -- confirm callers always
    # pass name/value pairs.
    for i in range(len(args)):
        if pimms.is_str(args[i]) and args[i].lower() == argname.lower():
            return (args[i+1] if pimms.is_number(args[i+1]) else to_java_array(args[i+1]))
    # did not find the arg; use the default:
    return argdflt

def _parse_field_argument(instruct, faces, edges, coords):
    '''
    Translate one field instruction (a string type name, or a list/tuple of
    [type, shape?, args...]) into a Java potential-field object by looking the
    type up in _parse_field_data_types and invoking the corresponding factory
    method on the nben Fields class.
    '''
    _java = java_link()
    if pimms.is_str(instruct):
        # A bare string is a type name with no arguments (e.g. 'mesh').
        insttype = instruct
        instargs = []
    elif hasattr(instruct, '__iter__'):
        insttype = instruct[0]
        instargs = instruct[1:]
    else:
        raise RuntimeError('potential field instruction must be list/tuple-like or a string')
    # look this type up in the types data:
    insttype = insttype.lower()
    if insttype not in _parse_field_data_types:
        raise RuntimeError('Unrecognized field data type: ' + insttype)
    instdata = _parse_field_data_types[insttype]
    # if the data is a dictionary, we must parse on the next arg (the shape name)
    if pimms.is_map(instdata):
        shape_name = instargs[0].lower()
        instargs = instargs[1:]
        if shape_name not in instdata:
            raise RuntimeError('Shape ' + shape_name + ' not supported for type ' + insttype)
        instdata = instdata[shape_name]
    # okay, we have a list of instructions... find the java method we are going to call...
    java_method = getattr(_java.jvm.nben.mesh.registration.Fields, instdata[0])
    # and parse the arguments into a list...
    java_args = [_parse_field_function_argument(a, instargs, faces, edges, coords)
                 for a in instdata[1:]]
    # and call the function...
    return java_method(*java_args)
# parse a field potential argument and return a java object that represents it
def _parse_field_arguments(arg, faces, edges, coords):
    '''See mesh_register. Parses a list of field instructions into a single
    Java potential object (a sum when more than one instruction is given).'''
    if not hasattr(arg, '__iter__'):
        raise RuntimeError('field argument must be a list-like collection of instructions')
    pot = [_parse_field_argument(instruct, faces, edges, coords) for instruct in arg]
    # make a new Potential sum unless the length is 1
    if len(pot) <= 1:
        return pot[0]
    else:
        sp = java_link().jvm.nben.mesh.registration.Fields.newSum()
        for field in pot: sp.addField(field)
        return sp

def java_potential_term(mesh, instructions):
    '''
    java_potential_term(mesh, instructions) yields a Java object that implements the potential
    field described in the given list of instructions. Generally, this should not be invoked
    directly and should only be called by mesh_register. Note: this expects a single term's
    description, not a series of descriptions.
    '''
    # NOTE(review): this uses mesh.indexed_faces/indexed_edges while mesh_register below uses
    # mesh.tess.indexed_faces/indexed_edges -- confirm which accessor is correct for the Mesh
    # objects passed here.
    faces = to_java_ints(mesh.indexed_faces)
    edges = to_java_ints(mesh.indexed_edges)
    coords = to_java_doubles(mesh.coordinates)
    return _parse_field_arguments([instructions], faces, edges, coords)

# The mesh_register function
def mesh_register(mesh, field, max_steps=2000, max_step_size=0.05, max_pe_change=1,
                  method='random', return_report=False, initial_coordinates=None):
    '''
    mesh_register(mesh, field) yields the coordinates that result from registering the given mesh
    by minimizing the given potential field description over the position of the vertices in the
    mesh. The mesh argument must be a Mesh object (see neuropythy.geometry) such as can be read
    from FreeSurfer using the neuropythy.freesurfer_subject function.

    The field argument must be a list of field instructions; with the exception of 'mesh' (or
    'standard'), each instruction is a list whose first element is the field type name, whose
    second element is the field shape name, and whose remaining elements are shape arguments.

    Valid field type names:
      * 'mesh': the standard mesh potential (edge + angle + perimeter potentials); accepts no
        arguments and must be passed as a single string instead of a list.
      * 'edge': a potential that is a function of the change in each edge length.
      * 'angle': a potential that is a function of the change in each angle measure.
      * 'perimeter': a potential on the distance of each perimeter vertex of a 2D mesh from its
        reference position.
      * 'anchor': a potential on the distance of a set of vertices from fixed points in space;
        after the shape name, an anchor must be followed by a list of vertex ids then a list of
        fixed points: ['anchor', shape_name, vertex_ids, fixed_points, args...].

    Valid shape names and their named options (given as name, value pairs in the args):
      * 'harmonic': (c/q) * abs(x - x0)^q; options 'scale' (c, default 1) and
        'order' (q, default 2).
      * 'Lennard-Jones': c (1 + (r0/r)^q - 2(r0/r)^(q/2)); options 'scale' (default 1) and
        'order' (default 2).
      * 'Gaussian': c (1 - exp(-0.5 abs((x - x0)/s)^q)); options 'scale' (default 1),
        'order' (default 2), and 'sigma' (s, default 1).
      * 'infinite-well': c ( (((x0 - m)/(x - m))^q - 1)^2 + (((M - x0)/(M - x))^q - 1)^2 );
        options 'scale' (default 1), 'order' (default 0.5), 'min' (m, default 0), and
        'max' (M, default pi).

    Options:
      * max_steps (default: 2000) the maximum number of steps to minimize for; may be a list, in
        which case the minimization is run in multiple rounds.
      * max_step_size (default: 0.05) the maximum distance a vertex may move in a single
        minimization step.
      * max_pe_change (default: 1) the maximum fraction of the initial potential value that the
        minimizer should minimize away before returning; 0 means no minimization, 0.9 means
        minimize until the potential is 10% or less of the initial potential.
      * return_report (default: False) return the Java Minimizer.Report object(s) instead of the
        registered coordinates (for debugging).
      * method (default: 'random') the search algorithm: 'random', 'nimble', or 'pure'. 'pure'
        is straight-forward gradient descent; 'nimble' updates high-gradient vertex subsets more
        often (may be given as ('nimble', k) for 2**k partitions; 'nimble' alone means
        ('nimble', 4)); 'random' randomizes each vertex's step length by drawing from an
        exponential distribution centered at its gradient length, which prevents large-gradient
        vertices from dominating and usually works best.
      * initial_coordinates (default: None) the start coordinates of the registration; if None,
        uses those in the given mesh.

    Example:
      registered_coords = mesh_register(
          mesh,
          [['edge', 'harmonic', 'scale', 0.5],  # slightly weak edge potential
           ['angle', 'infinite-well'],          # default infinite-well angle potential
           ['anchor', 'Gaussian', [1, 10, 50], [[0.0, 0.0], [1.1, 1.1], [2.2, 2.2]]]],
          max_step_size=0.05,
          max_steps=10000)
    '''
    # Sanity checking:
    # First, make sure that the arguments are all okay:
    if not isinstance(mesh, geo.Mesh):
        raise RuntimeError('mesh argument must be an instance of neuropythy.geometry.Mesh')
    # max_steps, max_step_size, and method are normalized to per-round lists.
    if not pimms.is_vector(max_steps): max_steps = [max_steps]
    for ms in max_steps:
        if not pimms.is_int(ms) or ms < 0:
            raise RuntimeError('max_steps argument must be a positive integer')
    if not pimms.is_vector(max_step_size): max_step_size = [max_step_size]
    for mss in max_step_size:
        if not pimms.is_number(mss) or mss <= 0:
            raise RuntimeError('max_step_size must be a positive number')
    if not pimms.is_number(max_pe_change) or max_pe_change <= 0 or max_pe_change > 1:
        raise RuntimeError('max_pe_change must be a number x such that 0 < x <= 1')
    if pimms.is_vector(method):
        # A single ('nimble', k) spec gets wrapped into a one-round list.
        if method[0].lower() == 'nimble' and len(method) > 1 and not pimms.is_str(method[1]):
            method = [method]
    else: method = [method]
    if initial_coordinates is None:
        init_coords = mesh.coordinates
    else:
        init_coords = np.asarray(initial_coordinates)
        # Accept either (dims x n) or (n x dims); transpose to match the mesh.
        if init_coords.shape[0] != mesh.coordinates.shape[0]:
            init_coords = init_coords.T
    # If steps is 0, we can skip most of this...
    if np.sum(max_steps) == 0:
        if return_report: return None
        else: return init_coords
    # Otherwise, we run at least some minimization
    max_pe_change = float(max_pe_change)
    nrounds = len(max_steps)
    if nrounds > 1:
        # Broadcast single-valued step-size/method lists across all rounds.
        if len(max_step_size) == 1: max_step_size = [max_step_size[0] for _ in max_steps]
        if len(method) == 1: method = [method[0] for _ in max_steps]
    # Parse the field argument.
    faces = to_java_ints(mesh.tess.indexed_faces)
    edges = to_java_ints(mesh.tess.indexed_edges)
    coords = to_java_doubles(mesh.coordinates)
    init_coords = coords if init_coords is mesh.coordinates else to_java_doubles(init_coords)
    potential = _parse_field_arguments(field, faces, edges, coords)
    # Okay, that's basically all we need to do the minimization...
    rep = []
    for (method,max_step_size,max_steps) in zip(method, max_step_size, max_steps):
        minimizer = java_link().jvm.nben.mesh.registration.Minimizer(potential, init_coords)
        max_step_size = float(max_step_size)
        max_steps = int(max_steps)
        if pimms.is_str(method):
            method = method.lower()
            # Bare 'nimble' defaults to k = 4 partitions (2**4 vertex subsets).
            if method == 'nimble': k = 4
            else: k = 0
        else:
            k = method[1]
            method = method[0].lower()
        if method == 'pure':
            r = minimizer.step(max_pe_change, max_steps, max_step_size)
        elif method == 'random':
            # if k is -1, we do the inverse version where we draw from the 1/mean distribution
            r = minimizer.randomStep(max_pe_change, max_steps, max_step_size, k == -1)
        elif method == 'nimble':
            r = minimizer.nimbleStep(max_pe_change, max_steps, max_step_size, int(k))
        else:
            raise ValueError('Unrecognized method: %s' % method)
        rep.append(r)
        # The next round starts from the coordinates this round produced.
        init_coords = minimizer.getX()
    # Return the report if requested
    if return_report:
        return rep
    else:
        result = init_coords
        # Convert the Java 2D array back into a numpy array.
        return np.asarray([[x for x in row] for row in result])
class JavaTopology:
    '''
    JavaTopology(triangles, registrations) creates a topology object with the given triangle
    mesh, defined by a 3xn matrix of triangle indices, and with the registration coordinate
    matrices given in the dictionary registrations. This class should only be instantiated by
    the neuropythy library and should generally not be constructed directly. See
    Hemisphere.topology objects to access a subject's topologies.
    '''
    def __init__(self, triangles, registrations):
        # First: make a java object for the topology:
        faces = serialize_numpy(triangles.T, 'i')
        topo = java_link().jvm.nben.geometry.spherical.MeshTopology.fromBytes(faces)
        # Okay, make our registration dictionary.
        # (Fixed: the Python-2-only dict.iteritems() is replaced with items(),
        # which works in both Python 2 and 3.)
        d = {k: topo.registerBytes(serialize_numpy(v, 'd'))
             for (k,v) in registrations.items()}
        # That's all really
        self.__dict__['_java_object'] = topo
        self.__dict__['registrations'] = d
    def __getitem__(self, attribute):
        # Registrations are accessed by name via indexing.
        return self.registrations[attribute]
    def __setitem__(self, attribute, dat):
        # New registrations are serialized and registered with the Java topology.
        self.registrations[attribute] = self._java_object.registerBytes(serialize_numpy(dat, 'd'))
    def keys(self):
        return self.registrations.keys()
    def iterkeys(self):
        # Iterating a dict yields its keys in both Python 2 and 3; the
        # Python-2-only dict.iterkeys() call is avoided here.
        return iter(self.registrations)
    def values(self):
        return self.registrations.values()
    def itervalues(self):
        return iter(self.registrations.values())
    def items(self):
        return self.registrations.items()
    def iteritems(self):
        return iter(self.registrations.items())
    def __len__(self):
        return len(self.registrations)

    # These let us interpolate...
    def interpolate(self, fromtopo, data, order=2, fill=None):
        '''
        topo.interpolate(fromtopo, data) interpolates the given per-vertex data from the
        topology fromtopo onto this topology, using the first registration shared by both
        topologies. None entries in data are treated as missing and yield the fill value
        (default: None) in the result; order gives the interpolation order (default: 2).

        (Fixed: this method previously lacked a self parameter, referenced the undefined
        name `registrations`, and swapped the data/mask arrays between the two
        interpolateBytes calls, so the mask result was computed from the data and vice
        versa.)
        '''
        # Find a registration name present in both topologies:
        usable_keys = []
        for k in self.registrations:
            if k in fromtopo.registrations:
                usable_keys.append(k)
        if not usable_keys:
            raise RuntimeError('no registration found that links topologies')
        the_key = usable_keys[0]
        # Prep the data into java arrays; the mask marks which entries are non-None.
        jmask = serialize_numpy(np.asarray([1 if d is not None else 0 for d in data]), 'd')
        jdata = serialize_numpy(np.asarray([d if d is not None else 0 for d in data]), 'd')
        # okay, next step is to call out to the java...
        datares = self._java_object.interpolateBytes(
            fromtopo.registrations[the_key],
            self.registrations[the_key].coordinates,
            order, jdata)
        maskres = self._java_object.interpolateBytes(
            fromtopo.registrations[the_key],
            self.registrations[the_key].coordinates,
            order, jmask)
        # then interpret the results: keep interpolated data only where the mask
        # survived interpolation intact; otherwise use the fill value.
        return [datares[i] if maskres[i] == 1 else fill for i in range(len(maskres))]
21 | ''' 22 | 23 | def test_optimize(self): 24 | ''' 25 | test_optimize tests the neuropythy.optimize package using the data in 26 | neuropythy.test.optimize. 27 | ''' 28 | from neuropythy.geometry import triangle_area 29 | import neuropythy.optimize as opt 30 | import warnings 31 | from . import optimize as opttest 32 | mesh = opttest.mesh 33 | logging.info('neuropythy: Testing optimization package...') 34 | # check that this works in the first place... 35 | def fareas(x, f): 36 | x = np.asarray(x) 37 | (a,b,c) = [np.transpose(x[ii]) for ii in np.transpose(f)] 38 | return triangle_area(a, b, c) 39 | dif = fareas(mesh['coords'], mesh['faces']) 40 | sim = np.isclose(dif, opttest.mesh_face_areas, rtol=0.001) 41 | self.assertTrue(sim.all()) 42 | # minimize the tiny mesh to have all triangle areas equal to 1 43 | m = opttest.mesh 44 | f = opt.sum((1.0 - opt.signed_face_areas(m['faces']))**2) 45 | x = f.argmin(m['coords']) 46 | # see if these are close to 1! 47 | qqq = fareas(x, m['faces']) - 1 48 | sim = np.isclose(qqq, 0, rtol=0, atol=0.0001) 49 | self.assertTrue(sim.all()) 50 | 51 | def test_mesh(self): 52 | ''' 53 | test_mesh() ensures that many general mesh properties and methods are working. 
54 | ''' 55 | import neuropythy.geometry as geo 56 | logging.info('neuropythy: Testing meshes and properties...') 57 | # get a random subject's mesh 58 | sub = ny.data['benson_winawer_2018'].subjects['S1204'] 59 | hem = sub.hemis[('lh','rh')[np.random.randint(2)]] 60 | msh = hem.white_surface 61 | # few simple things 62 | self.assertEqual(msh.coordinates.shape[0], 3) 63 | self.assertEqual(msh.tess.faces.shape[0], 3) 64 | self.assertEqual(msh.tess.edges.shape[0], 2) 65 | self.assertEqual(msh.vertex_count, msh.coordinates.shape[1]) 66 | # face areas and edge lengths should all be non-negative 67 | self.assertGreaterEqual(np.min(msh.face_areas), 0) 68 | self.assertGreaterEqual(np.min(msh.edge_lengths), 0) 69 | # test the properties 70 | self.assertTrue('blerg' in msh.with_prop(blerg=msh.prop('curvature')).properties) 71 | self.assertFalse('curvature' in msh.wout_prop('curvature').properties) 72 | self.assertEqual(msh.properties.row_count, msh.vertex_count) 73 | self.assertLessEqual(np.abs(np.mean(msh.prop('curvature'))), 0.1) 74 | # use the property interface to grab a fancy masked property 75 | v123_areas = msh.property('midgray_surface_area', 76 | mask=('inf-prf_visual_area', (1,2,3)), 77 | null=0) 78 | v123_area = np.sum(v123_areas) 79 | self.assertLessEqual(v123_area, 15000) 80 | self.assertGreaterEqual(v123_area, 500) 81 | (v1_ecc, v1_rad) = msh.property(['prf_eccentricity','prf_radius'], 82 | mask=('inf-prf_visual_area', 1), 83 | weights='prf_variance_explained', 84 | weight_min=0.1, 85 | clipped=0, 86 | null=np.nan) 87 | wh = np.isfinite(v1_ecc) & np.isfinite(v1_rad) 88 | self.assertGreater(np.corrcoef(v1_ecc[wh], v1_rad[wh])[0,0], 0.5) 89 | 90 | def test_cmag(self): 91 | ''' 92 | test_cmag() ensures that the neuropythy.vision cortical magnification function is working. 
93 | ''' 94 | import neuropythy.vision as vis 95 | logging.info('neuropythy: Testing areal cortical magnification...') 96 | dset = ny.data['benson_winawer_2018'] 97 | sub = dset.subjects['S1202'] 98 | hem = [sub.lh, sub.rh][np.random.randint(2)] 99 | cm = vis.areal_cmag(hem.midgray_surface, 'prf_', 100 | mask=('inf-prf_visual_area', 1), 101 | weight='prf_variance_explained') 102 | # cmag should get smaller in general 103 | ths = np.arange(0, 2*np.pi, np.pi/3) 104 | es = [0.5, 1, 2, 4] 105 | x = np.diff([np.mean(cm(e*np.cos(ths), e*np.sin(ths))) for e in es]) 106 | self.assertTrue((x < 0).all()) 107 | 108 | def test_interpolation(self): 109 | ''' 110 | test_interpolation() performs a variety of high-level tests involving interpolation using 111 | neuropythy that should catch major errors to important components. 112 | ''' 113 | logging.info('neuropythy: Testing interpolation...') 114 | def choose(coll, k): return np.random.choice(coll, k, False) 115 | # to do these tests, we use the builtin dataset from Benson and Winawer (2018); see also 116 | # help(ny.data['benson_winawer_2018']) for more information on this dataset. 
117 | dset = ny.data['benson_winawer_2018'] 118 | self.assertTrue(os.path.isdir(dset.cache_directory)) 119 | # pick 1 of the subjects at random 120 | allsubs = [dset.subjects['S12%02d' % (s+1)] for s in range(8)] 121 | subs = choose(allsubs, 1) 122 | fsa = ny.freesurfer_subject('fsaverage') 123 | def check_dtypes(a,b): 124 | for tt in [np.integer, np.floating, np.bool_, np.complexfloating]: 125 | self.assertEqual(np.issubdtype(a.dtype, tt), np.issubdtype(b.dtype, tt)) 126 | def calc_interp(hem, interhem, ps): 127 | for p in ps: self.assertEqual(np.sum(~np.isfinite(hem.prop(p))), 0) 128 | us = hem.interpolate(interhem, ps) 129 | for u in us: self.assertEqual(np.sum(~np.isfinite(u)), 0) 130 | vs = interhem.interpolate(hem, us) 131 | for v in vs: self.assertEqual(np.sum(~np.isfinite(v)), 0) 132 | return vs 133 | def check_interp(hem, ps, vs): 134 | for (p,v) in zip(ps,vs): 135 | logging.info('neuropythy: * %s', p) 136 | p = hem.prop(p) 137 | self.assertEqual(len(p), len(v)) 138 | self.assertLessEqual(np.min(p), np.min(v)) 139 | self.assertGreaterEqual(np.max(p), np.max(v)) 140 | check_dtypes(p, v) 141 | self.assertGreater(np.corrcoef(p, v)[0,0], 0.6) 142 | for sub in subs: 143 | logging.info('neuropythy: - Testing subject %s', sub.name) 144 | # left hemisphere should have a negative mean x-value, right a positive mean x-value 145 | self.assertTrue(np.mean(sub.lh.white_surface.coordinates, axis=1)[0] < 0) 146 | self.assertTrue(np.mean(sub.rh.pial_surface.coordinates, axis=1)[0] > 0) 147 | # some simple ideas: if we interpolate the properties from one subject to another and 148 | # then interpolate back, we should get approximately, if not exactly, the same thing 149 | # for this pick a couple random properties: 150 | ps = ['prf_variance_explained', 'inf-prf10_visual_area'] 151 | intersub = choose(allsubs, 1)[0] 152 | logging.info('neuropythy: - Testing properties %s via subject %s', ps, intersub.name) 153 | logging.info('neuropythy: - Testing LH interpolation') 154 | 
vs = calc_interp(sub.lh, intersub.lh, ps) 155 | check_interp(sub.lh, ps, vs) 156 | logging.info('neuropythy: - Testing RH interpolation') 157 | vs = calc_interp(sub.rh, intersub.rh, ps) 158 | check_interp(sub.rh, ps, vs) 159 | 160 | def test_path(self): 161 | ''' 162 | test_path() ensures that the neuropythy.geometry.path and .path_trace data structures are 163 | working correctly. 164 | ''' 165 | logging.info('neuropythy: Testing Path and PathTrace') 166 | # simple box: should have an area of ~1600 in a flatmap and something close in a sphere 167 | pts = [(-20,-20), (20,-20), (20,20), (-20,20)] 168 | # use a simple map projection 169 | mpj = ny.map_projection('occipital_pole', 'lh', radius=np.pi/3) 170 | ctx = ny.freesurfer_subject('fsaverage').lh 171 | trc = ny.geometry.path_trace(mpj, pts, closed=True) 172 | fmp = mpj(ctx) 173 | pth = trc.to_path(fmp) 174 | self.assertTrue(np.isclose(1600, pth.surface_area)) 175 | 176 | if __name__ == '__main__': 177 | unittest.main() 178 | -------------------------------------------------------------------------------- /neuropythy/test/math.py: -------------------------------------------------------------------------------- 1 | #################################################################################################### 2 | # neuropythy/test/math.py 3 | # Tests for the neuropythy library's util.math module. 4 | # By Noah C. 
Benson 5 | 6 | import unittest, os, sys, six, warnings, logging, pimms, torch 7 | import numpy as np 8 | import scipy.sparse as sps 9 | import pyrsistent as pyr 10 | import neuropythy as ny 11 | 12 | if sys.version_info[0] == 3: from collections import abc as colls 13 | else: import collections as colls 14 | 15 | class TestNeuropythyMath(unittest.TestCase): 16 | 17 | def test_info_fns(self): 18 | from neuropythy.math import (to_torchdtype, torchdtype_to_numpydtype, isarray, istensor, 19 | issparse, isdense) 20 | dts = {'int8': (torch.int8, np.int8), 21 | 'int16': (torch.int16, np.int16), 22 | 'int32': (torch.int32, np.int32), 23 | 'int64': (torch.int64, np.int64), 24 | 'float64': (torch.float64, np.float64), 25 | 'complex128': (torch.complex128, np.complex128), 26 | 'bool': (torch.bool, np.bool_)} 27 | for (k, (vt,vn)) in six.iteritems(dts): 28 | self.assertEqual(to_torchdtype(k), vt) 29 | self.assertEqual(to_torchdtype(vt), vt) 30 | self.assertEqual(to_torchdtype(vn), vt) 31 | self.assertEqual(torchdtype_to_numpydtype(vt), vn) 32 | 33 | x = [1.0, 4.4, 5.1] 34 | y = [6,5,4,3] 35 | xn = np.array(x) 36 | yn = np.array(y) 37 | xt = torch.tensor(x) 38 | yt = torch.tensor(y) 39 | xs = sps.coo_matrix((x, ([0,1,2], [3,2,1])), (4,4)) 40 | ys = sps.coo_matrix((y, ([0,1,2,3], [2,3,2,1])), (4,4)) 41 | xp = torch.sparse_coo_tensor(([0,1,2], [3,2,1]), x, (4,4)) 42 | yp = torch.sparse_coo_tensor(([0,1,2,3], [2,3,2,1]), y, (4,4)) 43 | 44 | self.assertFalse(isarray(x)) 45 | self.assertFalse(isarray(y)) 46 | self.assertTrue(isarray(xn)) 47 | self.assertTrue(isarray(yn)) 48 | self.assertFalse(isarray(xt)) 49 | self.assertFalse(isarray(yt)) 50 | self.assertTrue(isarray(xs)) 51 | self.assertTrue(isarray(ys)) 52 | self.assertFalse(isarray(xp)) 53 | self.assertFalse(isarray(yp)) 54 | 55 | self.assertFalse(istensor(x)) 56 | self.assertFalse(istensor(y)) 57 | self.assertFalse(istensor(xn)) 58 | self.assertFalse(istensor(yn)) 59 | self.assertTrue(istensor(xt)) 60 | 
self.assertTrue(istensor(yt)) 61 | self.assertFalse(istensor(xs)) 62 | self.assertFalse(istensor(ys)) 63 | self.assertTrue(istensor(xp)) 64 | self.assertTrue(istensor(yp)) 65 | 66 | self.assertFalse(issparse(x)) 67 | self.assertFalse(issparse(y)) 68 | self.assertFalse(issparse(xn)) 69 | self.assertFalse(issparse(yn)) 70 | self.assertFalse(issparse(xt)) 71 | self.assertFalse(issparse(yt)) 72 | self.assertTrue(issparse(xs)) 73 | self.assertTrue(issparse(ys)) 74 | self.assertTrue(issparse(xp)) 75 | self.assertTrue(issparse(yp)) 76 | 77 | self.assertTrue(isdense(x)) 78 | self.assertTrue(isdense(y)) 79 | self.assertTrue(isdense(xn)) 80 | self.assertTrue(isdense(yn)) 81 | self.assertTrue(isdense(xt)) 82 | self.assertTrue(isdense(yt)) 83 | self.assertFalse(isdense(xs)) 84 | self.assertFalse(isdense(ys)) 85 | self.assertFalse(isdense(xp)) 86 | self.assertFalse(isdense(yp)) 87 | 88 | def test_create_fns(self): 89 | from neuropythy.math import (clone, eq, all, astensor, totensor, isarray, istensor, 90 | asarray, toarray) 91 | 92 | x = [1.0, 4.4, 5.1] 93 | y = [6,5,4,3] 94 | xn = np.array(x) 95 | yn = np.array(y) 96 | xt = torch.tensor(x) 97 | yt = torch.tensor(y) 98 | xs = sps.coo_matrix((x, ([0,1,2], [3,2,1])), (4,4)) 99 | ys = sps.coo_matrix((y, ([0,1,2,3], [2,3,2,1])), (4,4)) 100 | xp = torch.sparse_coo_tensor(([0,1,2], [3,2,1]), x, (4,4)) 101 | yp = torch.sparse_coo_tensor(([0,1,2,3], [2,3,2,1]), y, (4,4)) 102 | 103 | for xx in [x, xn, xt, xs, xp]: 104 | u = clone(xx) 105 | self.assertFalse(u is xx) 106 | self.assertTrue(all(eq(u, xx))) 107 | for xx in [xp, yp, xt, yt]: 108 | u = astensor(xx) 109 | self.assertTrue(u is xx) 110 | self.assertTrue(torch.is_tensor(u)) 111 | u = astensor(u, dtype='float32') 112 | self.assertTrue(u.dtype == torch.float32) 113 | self.assertTrue(torch.is_tensor(u)) 114 | self.assertTrue(all(eq(u, xx))) 115 | u = totensor(xx) 116 | self.assertFalse(u is xx) 117 | self.assertTrue(torch.is_tensor(u)) 118 | self.assertFalse(isarray(u)) 119 | 
self.assertTrue(istensor(u)) 120 | self.assertTrue(all(eq(u, xx))) 121 | u = asarray(xx) 122 | self.assertFalse(u is xx) 123 | self.assertTrue(isinstance(u, np.ndarray) or sps.issparse(u)) 124 | self.assertTrue(isarray(u)) 125 | self.assertFalse(istensor(u)) 126 | self.assertTrue(all(eq(u, xx))) 127 | u = toarray(xx) 128 | self.assertFalse(u is xx) 129 | self.assertTrue(isinstance(u, np.ndarray) or sps.issparse(u)) 130 | self.assertTrue(isarray(u)) 131 | self.assertFalse(istensor(u)) 132 | self.assertTrue(all(eq(u, xx))) 133 | 134 | -------------------------------------------------------------------------------- /neuropythy/test/optimize.py: -------------------------------------------------------------------------------- 1 | #################################################################################################### 2 | # neuropythy/test/optimize.py 3 | # Code for testing the neuropythy.optimize package. 4 | 5 | import os, gzip, types, six, abc, pimms 6 | import numpy as np 7 | import scipy as sp 8 | import scipy.sparse as sps 9 | import scipy.optimize as spopt 10 | import pyrsistent as pyr 11 | 12 | import neuropythy.optimize as opt 13 | 14 | sqrt2_2 = np.sqrt(2.0)/2.0 15 | tiny_mesh = {'coords': np.array([[0,0], [1,0], [-sqrt2_2, sqrt2_2], [-sqrt2_2, -sqrt2_2]]), 16 | 'faces': np.array([[0,1,2], [0,2,3], [0,3,1]])} 17 | mesh = {'coords': np.array([[ 1.0000, 0.0000], [ 0.9888, 0.1490], [ 0.9556, 0.2948], 18 | [ 0.9010, 0.4339], [ 0.8262, 0.5633], [ 0.7331, 0.6802], 19 | [ 0.6235, 0.7818], [ 0.5000, 0.8660], [ 0.3653, 0.9309], 20 | [ 0.2225, 0.9749], [ 0.0747, 0.9972], [-0.0747, 0.9972], 21 | [-0.2225, 0.9749], [-0.3653, 0.9309], [-0.5000, 0.8660], 22 | [-0.6235, 0.7818], [-0.7331, 0.6802], [-0.8262, 0.5633], 23 | [-0.9010, 0.4339], [-0.9556, 0.2948], [-0.9888, 0.1490], 24 | [-1.0000, 0.0000], [-0.9888, -0.1490], [-0.9556, -0.2948], 25 | [-0.9010, -0.4339], [-0.8262, -0.5633], [-0.7331, -0.6802], 26 | [-0.6235, -0.7818], [-0.5000, -0.8660], [-0.3653, 
-0.9309], 27 | [-0.2225, -0.9749], [-0.0747, -0.9972], [ 0.0747, -0.9972], 28 | [ 0.2225, -0.9749], [ 0.3653, -0.9309], [ 0.5000, -0.8660], 29 | [ 0.6235, -0.7818], [ 0.7331, -0.6802], [ 0.8262, -0.5633], 30 | [ 0.9010, -0.4339], [ 0.9556, -0.2948], [ 0.9888, -0.1490], 31 | [-0.3968, -0.3165], [-0.2859, 0.4194], [-0.5921, 0.0444], 32 | [-0.6746, -0.2648], [-0.4082, -0.5988], [ 0.1496, -0.4850], 33 | [-0.2136, 0.6925], [-0.5666, 0.4519], [ 0.4396, 0.2538], 34 | [-0.1028, -0.6819], [ 0.4082, -0.5988], [ 0.1756, 0.5694], 35 | [ 0.4929, 0.5313], [ 0.5789, -0.1321], [ 0.7065, 0.1613], 36 | [ 0.0000, 0.0000]]), 37 | 'faces': np.array([[ 45, 26, 46], [ 26, 45, 25], [ 24, 45, 23], 38 | [ 24, 25, 45], [ 33, 51, 32], [ 51, 30, 31], 39 | [ 46, 42, 45], [ 28, 46, 27], [ 49, 15, 16], 40 | [ 46, 26, 27], [ 21, 22, 44], [ 20, 44, 19], 41 | [ 19, 44, 49], [ 12, 13, 48], [ 57, 42, 47], 42 | [ 22, 23, 45], [ 8, 53, 7], [ 49, 18, 19], 43 | [ 49, 16, 17], [ 42, 44, 45], [ 43, 48, 49], 44 | [ 48, 14, 49], [ 21, 44, 20], [ 51, 29, 30], 45 | [ 28, 29, 46], [ 14, 15, 49], [ 22, 45, 44], 46 | [ 14, 48, 13], [ 57, 44, 42], [ 47, 51, 33], 47 | [ 52, 33, 34], [ 55, 57, 47], [ 35, 36, 52], 48 | [ 50, 55, 56], [ 55, 52, 38], [ 35, 52, 34], 49 | [ 0, 56, 55], [ 52, 47, 33], [ 40, 41, 55], 50 | [ 53, 50, 54], [ 56, 0, 1], [ 50, 56, 54], 51 | [ 43, 53, 48], [ 41, 0, 55], [ 7, 54, 6], 52 | [ 46, 51, 42], [ 55, 50, 57], [ 1, 2, 56], 53 | [ 18, 49, 17], [ 8, 9, 53], [ 9, 10, 53], 54 | [ 10, 48, 53], [ 57, 53, 43], [ 39, 40, 55], 55 | [ 11, 12, 48], [ 10, 11, 48], [ 5, 6, 54], 56 | [ 56, 4, 54], [ 36, 37, 52], [ 42, 51, 47], 57 | [ 7, 53, 54], [ 54, 4, 5], [ 52, 55, 47], 58 | [ 57, 43, 44], [ 32, 51, 31], [ 56, 3, 4], 59 | [ 56, 2, 3], [ 44, 43, 49], [ 38, 52, 37], 60 | [ 46, 29, 51], [ 53, 57, 50], [ 55, 38, 39]])} 61 | mesh_face_areas = np.array([0.0650852, 0.0227693, 0.0203633, 0.0209682, 0.0252780, 0.0229886, 62 | 0.0395070, 0.0203633, 0.0209682, 0.0209682, 0.0306438, 0.0306438, 63 | 0.0772443, 
0.0203633, 0.1199090, 0.0209682, 0.0304909, 0.0227693, 64 | 0.0203633, 0.0450694, 0.0395070, 0.0650852, 0.0301482, 0.0235642, 65 | 0.0209682, 0.0227693, 0.0533419, 0.0209682, 0.1024960, 0.0689997, 66 | 0.0227693, 0.1305020, 0.0203633, 0.0450694, 0.0945041, 0.0209682, 67 | 0.0533419, 0.0592066, 0.0301482, 0.0450376, 0.0209682, 0.0395070, 68 | 0.0576115, 0.0306438, 0.0209682, 0.0435943, 0.1024960, 0.0203633, 69 | 0.0209682, 0.0299935, 0.0304909, 0.0770522, 0.1182260, 0.0306438, 70 | 0.0209682, 0.0227693, 0.0203633, 0.0650852, 0.0209682, 0.0750569, 71 | 0.0532468, 0.0209682, 0.0700543, 0.1178150, 0.0235642, 0.0227693, 72 | 0.0209682, 0.0575997, 0.0227693, 0.0489386, 0.1028530, 0.0321196]) 73 | -------------------------------------------------------------------------------- /neuropythy/util/__init__.py: -------------------------------------------------------------------------------- 1 | #################################################################################################### 2 | # neuropythy/util/__init__.py 3 | # This file defines the general tools that are available as part of neuropythy. 
4 | 5 | from .core import (ObjectWithMetaData, normalize, denormalize, 6 | to_hemi_str, to_affine, simplex_averaging_matrix, simplex_summation_matrix, 7 | is_dataframe, to_dataframe, dataframe_select, dataframe_except, 8 | is_image, is_image_header, curry, 9 | numel, rows, part, hstack, vstack, repmat, replace_close, chop, 10 | flatter, flattest, is_tuple, is_list, is_set, 11 | plus, cplus, minus, cminus, times, ctimes, 12 | inv, zinv, divide, cdivide, zdivide, czdivide, power, cpower, inner, 13 | sine, cosine, tangent, cotangent, secant, cosecant, 14 | arcsine, arccosine, arctangent, 15 | naneq, nanne, nanlt, nanle, nangt, nange, nanlog, 16 | library_path, address_data, is_address, address_interpolate, 17 | AutoDict, auto_dict, curve_spline, curve_intersection, close_curves, 18 | is_curve_spline, to_curve_spline, CurveSpline, 19 | DataStruct, data_struct, tmpdir, dirpath_to_list, apply_affine, try_until) 20 | from .conf import (config, to_credentials, detect_credentials, load_credentials) 21 | from .filemap import (FileMap, file_map, is_file_map, pseudo_path, is_pseudo_path, to_pseudo_path, 22 | osf_crawl, url_download) 23 | from .labels import (label_colors, is_label_index, label_index, label_indices, to_label_index) 24 | 25 | -------------------------------------------------------------------------------- /neuropythy/util/labels.py: -------------------------------------------------------------------------------- 1 | #################################################################################################### 2 | # neuropythy/util/labels.py 3 | # Simple tools for dealing with neuroscience-related labels for brains. 4 | # By Noah C. 
Benson 5 | 6 | import numpy as np 7 | import pyrsistent as pyr 8 | import collections as colls 9 | import os, sys, types, six, pimms 10 | 11 | from .core import (ObjectWithMetaData, is_tuple, curry, is_dataframe) 12 | 13 | def label_colors(lbls, cmap=None): 14 | ''' 15 | label_colors(labels) yields a dict object whose keys are the unique values in labels and whose 16 | values are the (r,g,b,a) colors that should be assigned to each label. 17 | label_colors(n) is equivalent to label_colors(range(n)). 18 | 19 | Note that this function uses a heuristic and is not guaranteed to be optimal in any way for any 20 | value of n--but it generally works well enough for most common purposes. 21 | 22 | The following optional arguments may be given: 23 | * cmap (default: None) specifies a colormap to use as a base. If this is None, then a varianct 24 | of 'hsv' is used. 25 | ''' 26 | from neuropythy.graphics import label_cmap 27 | if pimms.is_int(lbls): lbls = np.arange(lbls) 28 | lbls0 = np.unique(lbls) 29 | lbls = np.arange(len(lbls0)) 30 | cm = label_cmap(lbls, cmap=cmap) 31 | mx = float(len(lbls) - 1) 32 | m = {k:cm(l/mx) for (k,l) in zip(lbls0, lbls)} 33 | return m 34 | @pimms.immutable 35 | class LabelEntry(ObjectWithMetaData): 36 | ''' 37 | LabelEntry is a class tracked by LabelIndex objects; it stores information about a single 38 | label. 39 | ''' 40 | def __init__(self, ident, name, color=None, meta_data=None): 41 | self.id = ident 42 | self.name = name 43 | self.color = color 44 | self.meta_data = meta_data 45 | @pimms.param 46 | def id(i): 47 | 'le.id is the id of the given label entry object le.' 48 | if not pimms.is_int(i): raise ValueError('label-entry id must be an int') 49 | return int(i) 50 | @pimms.param 51 | def name(nm): 52 | 'le.name is the (string) name of the given label entry object le.' 
53 | if not pimms.is_str(nm): raise ValueError('label-entry name must be a string') 54 | return nm 55 | @pimms.param 56 | def color(c): 57 | 'le.color is the tuple (r,g,b,a) for the given label entry object le.' 58 | if c is None: return c 59 | c = tuple(c) 60 | if len(c) == 3: c = c + (1,) 61 | if len(c) != 4: raise ValueError('Invalid color: %s' % c) 62 | return c 63 | def __repr__(self): 64 | return 'label(<%d: %s>)' % (self.id, self.name) 65 | @pimms.immutable 66 | class LabelIndex(ObjectWithMetaData): 67 | ''' 68 | LabelIndex is an immutable class that tracks label data and can lookup labels by name or integer 69 | value as well as assign colors to them. 70 | ''' 71 | def __init__(self, ids, names, colors=None, entry_meta_data=None, meta_data=None): 72 | ''' 73 | LabelIndex(ids, names) constructs a label index object with the given ids and names. 74 | ''' 75 | self.ids = ids 76 | self.names = names 77 | self.colors = colors 78 | self.entry_meta_data = None 79 | self.meta_data = meta_data 80 | @pimms.param 81 | def ids(ii): 82 | ''' 83 | lblidx.ids is a tuple of the integer identifiers used for the labels in the given label 84 | index object. 85 | ''' 86 | return tuple(ii) 87 | @pimms.param 88 | def names(nms): 89 | ''' 90 | lblidx.names is a tuple of names used for the labels in the given label index object. 91 | ''' 92 | return tuple(nms) 93 | @pimms.param 94 | def colors(cs): 95 | ''' 96 | lblidx.colors is a numpy array of colors for each label or None. 97 | ''' 98 | from neuropythy.graphics import to_rgba 99 | if cs is None: return None 100 | # we want to convert to colors 101 | return pimms.imm_array([to_rgba(c) for c in cs]) 102 | @pimms.param 103 | def entry_meta_data(mds): 104 | ''' 105 | lblidx.entry_meta_data is lists of meta-data maps for each of the labels in the given label 106 | index object. 
107 | ''' 108 | if mds is None: return None 109 | if is_dataframe(mds): 110 | mds = {k:mds[k].values for k in mds.colums} 111 | elif pimms.is_map(mds): 112 | ks = list(mds.keys()) 113 | mds = [{k:v for (k,v) in zip(ks,vs)} for vs in np.transpose(list(mds.values()))] 114 | elif not pimms.is_array(mds) or not all(pimms.is_map(u) for u in mds): 115 | raise ValueError('unbalanced or non-map entry meta-data') 116 | return pimms.persist(mds) 117 | @pimms.require 118 | def check_counts(ids, names, colors, entry_meta_data): 119 | ''' 120 | Checks that ids, names, and colors are the same length and that ids is unique. 121 | ''' 122 | if len(np.unique(ids)) != len(ids): raise ValueError('label index ids must be unique') 123 | if len(ids) != len(names): raise ValueError('label index names and ids must be same length') 124 | if colors is not None and len(colors) != len(ids): 125 | raise ValueError('label index colors and ids must be same length') 126 | if entry_meta_data is not None and len(entry_meta_data) != len(ids): 127 | raise ValueError('label index entry_meta_data and ids must be same length') 128 | return True 129 | @pimms.value 130 | def entries(ids, names, colors, entry_meta_data): 131 | ''' 132 | lblidx.entries is a tuple of the label entry objects for the given label index object. 
133 | ''' 134 | if 0 not in ids: 135 | ids = np.concatenate([[0],ids]) 136 | names = ['none'] + list(names) 137 | if colors is not None: colors = np.vstack([[(0,0,0,0)], colors]) 138 | if entry_meta_data is not None: entry_meta_data = [None] + list(entry_meta_data) 139 | if colors is None: colors = np.asarray([cs[k] for cs in [label_colors(ids)] for k in ids]) 140 | if entry_meta_data is None: entry_meta_data = [None]*len(ids) 141 | # the 0 id is implied if not given: 142 | les = [LabelEntry(ii, name, color=color, meta_data=md).persist() 143 | for (ii,name,color,md) in zip(ids, names, colors, entry_meta_data)] 144 | return tuple(les) 145 | @pimms.value 146 | def by_id(entries): 147 | ''' 148 | lblidx.by_id is a persistent map of the label entries indexed by their identifier. 149 | ''' 150 | return pyr.pmap({e.id:e for e in entries}) 151 | @pimms.value 152 | def by_name(entries): 153 | ''' 154 | lblidx.by_name is a persistent map of the label entries indexed by their names. 155 | ''' 156 | return pyr.pmap({e.name:e for e in entries}) 157 | @pimms.value 158 | def vmin(ids): 159 | ''' 160 | lblidx.vmin is the minimum value of a label identifier in the given label index. 161 | ''' 162 | return np.min(ids) 163 | @pimms.value 164 | def vmax(ids): 165 | ''' 166 | lblidx.vmax is the maximum value of a label identifier in the given label index. 167 | ''' 168 | return np.max(ids) 169 | @pimms.value 170 | def colormap(entries): 171 | ''' 172 | lblidx.colormap is a colormap appropriate for use with data that has been scaled to run from 173 | 0 at lblidx.vmin to 1 at lblidx.vmax. 
        '''
        import matplotlib.colors
        from_list = matplotlib.colors.LinearSegmentedColormap.from_list
        # sort the entries by id so the colormap stops are monotonically increasing
        ids = np.asarray([e.id for e in entries])
        ii = np.argsort(ids)
        ids = ids[ii]
        clrs = np.asarray([e.color for e in entries])[ii]
        (vmin,vmax) = [f(ids) for f in (np.min, np.max)]
        # rescale the ids to [0,1] to serve as the colormap stop positions
        vals = (ids - vmin) / (vmax - vmin)
        return from_list('label%d' % len(vals), list(zip(vals, clrs)))
    def __getitem__(self, k):
        # lblidx[k] looks up a LabelEntry by integer id or by name; a vector argument
        # yields an array of entries, with None for any id/name not in the index
        if pimms.is_int(k): return self.by_id.get(k, None)
        elif pimms.is_str(k): return self.by_name.get(k, None)
        elif pimms.is_vector(k, 'int'): return np.asarray([self.by_id.get(k, None) for k in k])
        else: return np.asarray([(self.by_name if pimms.is_str(k) else self.by_id).get(k, None)
                                 for k in k])
    def name_lookup(self, ii):
        '''
        lblidx.name_lookup(ii) yields the names associated with the labels with the given ids. If
        ii is a list of ids, then yields an array of names.
        '''
        if pimms.is_int(ii): return self.by_id[ii].name if ii in self.by_id else None
        elif pimms.is_str(ii): return self.by_name[ii].name if ii in self.by_name else None
        # NOTE: the comprehension rebinds ii per element and selects by_name or by_id
        # per element, so mixed vectors of ids and names are supported
        else: return np.asarray([tbl[ii].name if ii in tbl else None
                                 for ii in ii
                                 for tbl in [self.by_name if pimms.is_str(ii) else self.by_id]])
    def id_lookup(self, names):
        '''
        lblidx.id_lookup(names) yields the ids associated with the labels with the given names. If
        names is a list of names, then yields an array of ids.
        '''
        if pimms.is_str(names): return self.by_name[names].id if names in self.by_name else None
        elif pimms.is_int(names): return self.by_id[names].id if names in self.by_id else None
        # per-element dispatch, as in name_lookup; mixed id/name vectors are supported
        else: return np.asarray([tbl[ii].id if ii in tbl else None
                                 for ii in names
                                 for tbl in [self.by_name if pimms.is_str(ii) else self.by_id]])
    def color_lookup(self, ii):
        '''
        lblidx.color_lookup(ids) yields the color(s) associated with the labels with the given ids.
        If ids is a list of ids, then yields a matrix of colors.
        lblidx.color_lookup(names) uses the names to lookup the label colors.
        '''
        if pimms.is_int(ii): return self.by_id[ii].color if ii in self.by_id else None
        elif pimms.is_str(ii): return self.by_name[ii].color if ii in self.by_name else None
        else: return np.asarray([tbl[ii].color if ii in tbl else None
                                 for ii in ii
                                 for tbl in [self.by_name if pimms.is_str(ii) else self.by_id]])
    def cmap(self, data=None):
        '''
        lblidx.cmap() yields a colormap for the given label index object that assumes that the data
        being plotted will be rescaled such that label 0 is 0 and the highest label value in the
        label index is equal to 1.
        lblidx.cmap(data) yields a colormap that will correctly color the labels given in data if
        data is scaled such that its minimum and maximum value are 0 and 1.
228 | ''' 229 | import matplotlib.colors 230 | from_list = matplotlib.colors.LinearSegmentedColormap.from_list 231 | if data is None: return self.colormap 232 | data = np.asarray(data).flatten() 233 | (vmin,vmax) = (np.min(data), np.max(data)) 234 | ii = np.argsort(self.ids) 235 | ids = np.asarray(self.ids)[ii] 236 | if vmin == vmax: 237 | (vmin,vmax,ii) = (vmin-0.5, vmax+0.5, vmin) 238 | clr = self.color_lookup(ii) 239 | return from_list('label1', [(0, clr), (1, clr)]) 240 | q = (ids >= vmin) & (ids <= vmax) 241 | ids = ids[q] 242 | clrs = self.color_lookup(ids) 243 | vals = (ids - vmin) / (vmax - vmin) 244 | return from_list('label%d' % len(vals), list(zip(vals, clrs))) 245 | def __repr__(self): 246 | return 'LabelIndex(<%d labels>)' % len(self.ids) 247 | def is_label_index(le): 248 | ''' 249 | is_label_index(le) yields True if the given object le is a label index and False otherwise. 250 | ''' 251 | return isinstance(le, LabelIndex) 252 | def label_index(dat, *args, **kw): 253 | ''' 254 | label_index(idx_map) converts the given map- or dict-like object idx_map into a label index by 255 | assuming that the keys are label ids and the values are label names or tuples of label names 256 | and (r,g,b,a) colors. 257 | label_index(ids, names) uses the given ids and names to make the label index. 258 | label_index(ids, names, colors) additionally uses the given colors. 259 | 260 | Note that if there is not a label with id 0 then such a label is automatically created with the 261 | name 'none', the rgba color [0,0,0,0], and no entry meta-data. As a general rule, the label 0 262 | should be used to indicate that a label is missing. 263 | 264 | The optional arguments meta_data and entry_meta_data may specify both the meta-data for the 265 | label-index object itself as well as the meta-data for the individual entries. 
266 | ''' 267 | md = kw.pop('meta_data', {}) 268 | mds = kw.pop('entry_meta_data', None) 269 | if len(kw) > 0: raise ValueError('unrecognized optional argument(s) given to label_index') 270 | if len(args) == 0: 271 | if pimms.is_map(dat): 272 | (ids,nms,clrs) = ([],[],[]) 273 | for (k,v) in six.iteritems(dat): 274 | if pimms.is_scalar(v): c = None 275 | else: (v,c) = v 276 | if pimms.is_str(k): 277 | ids.append(v) 278 | nms.append(k) 279 | else: 280 | ids.append(k) 281 | nms.append(v) 282 | if c is not None: clrs.append(c) 283 | elif is_dataframe(dat): 284 | if dat.index.name.lower() == 'id': ids = dat.index.values 285 | else: ids = dat['id'].values 286 | nms = dat['name'].values 287 | if 'color' in dat: clrs = np.array(list(map(list, dat['color'].values))) 288 | elif all(k in dat for k in ['r','g','b']): 289 | ks = ['r','g','b'] 290 | if 'a' in dat: ks.append('a') 291 | clrs = np.array([[r[k] for k in ks].values for (ii,r) in dat.iterrows()]) 292 | else: clrs = [] 293 | elif pimms.is_vector(dat, 'int'): 294 | ids = np.unique(dat) 295 | nms = ['label%d'%k for k in ids] 296 | clrs = [] 297 | else: raise ValueError('label_index(idx_map) given non-map argument') 298 | elif len(args) == 1: (ids,nms,clrs) = (dat, args[0], []) 299 | elif len(args) == 2: (ids,nms,clrs) = (dat, args[0], args[1]) 300 | else: raise ValueError('Too many arguments given to label_index()') 301 | if clrs is None or len(clrs) == 0: clrs = None 302 | elif len(clrs) != len(ids): raise ValueError('color-count must match id-count') 303 | # okay, make the label index 304 | return LabelIndex(ids, nms, colors=clrs, meta_data=md, entry_meta_data=mds) 305 | def to_label_index(obj): 306 | ''' 307 | to_label_index(obj) attempts to coerce the given object into a label index object; if obj is 308 | already a label index object, then obj itself is returned. If obj cannot be coerced into a 309 | label index, then an error is raised. 
310 | 311 | The obj argument can be any of the following: 312 | * a label index 313 | * a label list (i.e., an integer vector) 314 | * a tuple of arguments, potentially ending with a kw-options map, that can be passed to the 315 | label_index function successfully. 316 | ''' 317 | if is_label_index(obj): return obj 318 | elif pimms.is_vector(obj, 'int'): return label_index(obj) 319 | elif is_dataframe(obj): return label_index(obj) 320 | elif is_tuple(obj): 321 | if len(obj) > 1 and pimms.is_map(obj[-1]): return label_index(*obj[:-1], **obj[-1]) 322 | else: return label_index(*obj) 323 | else: raise ValueError('could not parse to_label_index parameter: %s' % obj) 324 | label_indices = {} 325 | -------------------------------------------------------------------------------- /neuropythy/vision/__init__.py: -------------------------------------------------------------------------------- 1 | #################################################################################################### 2 | # Models and routines used in visual neuroscience. 3 | # By Noah C. 
Benson 4 | 5 | from .models import (load_fmm_model, visual_area_names, 6 | visual_area_numbers, visual_area_field_signs, 7 | RetinotopyModel, RetinotopyMeshModel, RegisteredRetinotopyModel, 8 | SchiraModel) 9 | from .retinotopy import (empirical_retinotopy_data, predicted_retinotopy_data, retinotopy_data, 10 | extract_retinotopy_argument, retinotopy_comparison, to_logeccen, 11 | register_retinotopy, retinotopy_registration, from_logeccen, 12 | retinotopy_anchors, retinotopy_model, predict_retinotopy, 13 | retinotopy_data, as_retinotopy, retinotopic_field_sign, 14 | predict_pRF_radius, fit_pRF_radius, occipital_flatmap, 15 | clean_retinotopy_potential, clean_retinotopy, visual_isolines, 16 | visual_field_mesh, retinotopic_property_aliases, 17 | sectors_to_labels, labels_to_sectors, sector_bounds, refit_sectors) 18 | from .cmag import (mag_data, is_mag_data, neighborhood_cortical_magnification, face_vmag, 19 | face_rtcmag, cmag, areal_cmag, field_of_view, isoline_vmag, disk_vmag) 20 | 21 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=42", "wheel", "setuptools_scm[toml]>=3.4"] 3 | build-backend = "setuptools.build_meta" -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | numpy >= 1.14.0 2 | scipy >= 1.2.0 3 | six >= 1.13.0 4 | nibabel >= 2.0.0 5 | pyrsistent >= 0.11.0 6 | pint >= 0.7.0 7 | pimms >= 0.3.24 8 | py4j >= 0.10 9 | s3fs >= 0.1.5 10 | h5py >= 2.8.0 11 | matplotlib >= 1.5.3 12 | ipyvolume >= 0.5.1 13 | torch >= 1.6.0 14 | -------------------------------------------------------------------------------- /requirements-dev27.txt: -------------------------------------------------------------------------------- 1 | numpy >= 1.14.0 2 | scipy >= 1.2.0 3 
| six >= 1.13.0 4 | nibabel >= 2.0.0 5 | pyrsistent == 0.14.11 6 | pint >= 0.7.0 7 | pimms >= 0.3.18 8 | py4j >= 0.10 9 | s3fs >= 0.1.5 10 | h5py >= 2.8.0 11 | matplotlib >= 1.5.3 12 | ipyvolume >= 0.5.1 13 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | numpy >= 1.13.0 2 | scipy >= 1.1.0 3 | six >= 1.13.0 4 | nibabel >= 2.0.0 5 | pyrsistent >= 0.11.0 6 | pint >= 0.7.0 7 | pimms >= 0.3.24 8 | py4j >= 0.10 9 | s3fs >= 0.1.5 10 | h5py >= 2.8.0 11 | 12 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | description-file = README.md 3 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | 3 | import os 4 | from setuptools import (setup, Extension) 5 | 6 | # Deduce the version from the __init__.py file: 7 | version = None 8 | with open(os.path.join(os.path.dirname(__file__), 'neuropythy', '__init__.py'), 'r') as fid: 9 | for line in (line.strip() for line in fid): 10 | if line.startswith('__version__'): 11 | version = line.split('=')[1].strip().strip('\'') 12 | break 13 | if version is None: raise ValueError('No version found in neuropythy/__init__.py!') 14 | 15 | setup( 16 | name='neuropythy', 17 | version=version, 18 | description='Toolbox for flexible cortical mesh analysis and registration', 19 | keywords='neuroscience mesh cortex registration', 20 | author='Noah C. 
Benson', 21 | author_email='nben@uw.edu', 22 | maintainer_email='nben@uw.edu', 23 | long_description=''' 24 | See the README.md file at the github repository for this package: 25 | https://github.com/noahbenson/neuropythy 26 | ''', 27 | url='https://github.com/noahbenson/neuropythy', 28 | download_url='https://github.com/noahbenson/neuropythy', 29 | license='AGPL', 30 | classifiers=[ 31 | 'Development Status :: 3 - Alpha', 32 | 'Intended Audience :: Science/Research', 33 | 'Intended Audience :: Developers', 34 | 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)', 35 | # Removing support for Python 2, since it's well past EOL. 36 | #'Programming Language :: Python :: 2', 37 | #'Programming Language :: Python :: 2.7', 38 | 'Programming Language :: Python :: 3', 39 | 'Programming Language :: Python :: 3.6', 40 | 'Programming Language :: Python :: 3.7', 41 | 'Programming Language :: Python :: 3.8', 42 | 'Programming Language :: Python :: 3.9', 43 | 'Programming Language :: Python :: 3.10', 44 | 'Programming Language :: Python :: 3.11', 45 | 'Topic :: Software Development', 46 | 'Topic :: Software Development :: Libraries', 47 | 'Topic :: Software Development :: Libraries :: Python Modules', 48 | 'Topic :: Scientific/Engineering', 49 | 'Topic :: Scientific/Engineering :: Information Analysis', 50 | 'Topic :: Scientific/Engineering :: Medical Science Apps.', 51 | 'Operating System :: Microsoft :: Windows', 52 | 'Operating System :: POSIX', 53 | 'Operating System :: Unix', 54 | 'Operating System :: MacOS'], 55 | packages=['neuropythy', 56 | 'neuropythy.util', 57 | 'neuropythy.math', 58 | 'neuropythy.java', 59 | 'neuropythy.io', 60 | 'neuropythy.geometry', 61 | 'neuropythy.optimize', 62 | 'neuropythy.mri', 63 | 'neuropythy.freesurfer', 64 | 'neuropythy.hcp', 65 | 'neuropythy.registration', 66 | 'neuropythy.vision', 67 | 'neuropythy.graphics', 68 | 'neuropythy.datasets', 69 | 'neuropythy.plans', 70 | 'neuropythy.commands', 71 | 
'neuropythy.test'], 72 | # not part of library; just included as an example of how this would work 73 | #ext_modules=[Extension('neuropythy.c_label', sources=['src/c_label.c'], 74 | # include_dirs=[np.get_include()])], 75 | include_package_data=True, 76 | package_data={ 77 | '': ['LICENSE.txt', 78 | 'neuropythy/lib/nben/target/nben-standalone.jar', 79 | 'neuropythy/lib/models/v123.fmm.gz', 80 | 'neuropythy/lib/models/lh.benson17.fmm.gz', 81 | 'neuropythy/lib/models/rh.benson17.fmm.gz', 82 | 'neuropythy/lib/projections/lh.occipital_pole.mp.json', 83 | 'neuropythy/lib/projections/rh.occipital_pole.mp.json', 84 | 'neuropythy/lib/data/fsaverage/surf/lh.benson14_angle.v4_0.mgz', 85 | 'neuropythy/lib/data/fsaverage/surf/lh.benson14_sigma.v4_0.mgz', 86 | 'neuropythy/lib/data/fsaverage/surf/lh.benson14_varea.v4_0.mgz', 87 | 'neuropythy/lib/data/fsaverage/surf/lh.benson14_eccen.v4_0.mgz', 88 | 'neuropythy/lib/data/fsaverage/surf/rh.benson14_retinotopy.v4_0.sphere.reg', 89 | 'neuropythy/lib/data/fsaverage/surf/lh.wang15_mplbl.v1_0.mgz', 90 | 'neuropythy/lib/data/fsaverage/surf/rh.benson14_varea.v4_0.mgz', 91 | 'neuropythy/lib/data/fsaverage/surf/rh.benson14_eccen.v4_0.mgz', 92 | 'neuropythy/lib/data/fsaverage/surf/rh.wang15_mplbl.v1_0.mgz', 93 | 'neuropythy/lib/data/fsaverage/surf/rh.benson14_angle.v4_0.mgz', 94 | 'neuropythy/lib/data/fsaverage/surf/rh.benson14_sigma.v4_0.mgz', 95 | 'neuropythy/lib/data/fsaverage/surf/lh.benson14_retinotopy.v4_0.sphere.reg', 96 | 'neuropythy/lib/data/fsaverage/surf/lh.rosenke18_vcatlas.v1_0.mgz', 97 | 'neuropythy/lib/data/fsaverage/surf/rh.rosenke18_vcatlas.v1_0.mgz', 98 | 'neuropythy/lib/data/fsaverage/surf/lh.glasser16_atlas.v1_0.mgz', 99 | 'neuropythy/lib/data/fsaverage/surf/rh.glasser16_atlas.v1_0.mgz', 100 | 'neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v2_0.mgz', 101 | 'neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v2_1.mgz', 102 | 'neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v3_0.mgz', 103 | 
'neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_enorm.v1_0.mgz', 104 | 'neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v2_5.mgz', 105 | 'neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v3_0.mgz', 106 | 'neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v1_0.mgz', 107 | 'neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v3_0.mgz', 108 | 'neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v2_5.mgz', 109 | 'neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_sigma.v3_0.mgz', 110 | 'neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v2_5.mgz', 111 | 'neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_angle.v1_0.mgz', 112 | 'neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v2_0.mgz', 113 | 'neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v2_1.mgz', 114 | 'neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v2_0.mgz', 115 | 'neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_eccen.v2_1.mgz', 116 | 'neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_varea.v1.0.mgz', 117 | 'neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_anorm.v1_0.mgz', 118 | 'neuropythy/lib/data/fsaverage_sym/surf/lh.benson14_retinotopy.v3_0.sphere.reg', 119 | 'neuropythy/lib/data/fs_LR/lh.atlasroi.32k_fs_LR.shape.gii', 120 | 'neuropythy/lib/data/fs_LR/rh.atlasroi.32k_fs_LR.shape.gii', 121 | 'neuropythy/lib/data/fs_LR/lh.atlasroi.59k_fs_LR.shape.gii', 122 | 'neuropythy/lib/data/fs_LR/rh.atlasroi.59k_fs_LR.shape.gii', 123 | 'neuropythy/lib/data/fs_LR/lh.atlasroi.164k_fs_LR.shape.gii', 124 | 'neuropythy/lib/data/fs_LR/rh.atlasroi.164k_fs_LR.shape.gii', 125 | 'neuropythy/lib/data/hcp_lines_osftree.json.gz']}, 126 | install_requires=['numpy>=1.13', 127 | 'scipy>=1.1', 128 | 'six >= 1.13', 129 | 'nibabel>=2.0', 130 | 'pyrsistent>=0.11', 131 | 'pint>=0.7', 132 | 'pimms>=0.3.24', 133 | 'py4j>=0.10', 134 | 'h5py>=2.8.0', 135 | 's3fs>=0.1.5'], 136 | extras_require={ 137 | 'graphics2D': ['matplotlib>=1.5.3'], 138 | 'graphics3D': 
['matplotlib>=1.5.3', 'ipyvolume>=0.5.1'], 139 | 'torch': ['torch>=1.6.0'], 140 | 'all': ['matplotlib>=1.5.3', 'ipyvolume>=0.5.1', 'torch>=1.6.0']}) 141 | --------------------------------------------------------------------------------