├── .codecov.yml ├── .github ├── dependabot.yml └── workflows │ ├── build_docs.yml │ ├── codespell.yml │ ├── install_from_wheel.yml │ ├── publish_to_pypi.yml │ └── run_tests.yml ├── .gitignore ├── CITATION.cff ├── LICENSE ├── MANIFEST.in ├── OpenCTM-1.0.3 ├── COMPILING.txt ├── LICENSE.txt ├── Makefile.linux ├── Makefile.macosx ├── Makefile.mingw ├── Makefile.msvc ├── README.txt └── lib │ ├── Makefile.linux │ ├── Makefile.macosx │ ├── Makefile.mingw │ ├── Makefile.msvc │ ├── compressMG1.c │ ├── compressMG2.c │ ├── compressRAW.c │ ├── internal.h │ ├── liblzma │ ├── Alloc.c │ ├── Alloc.h │ ├── LzFind.c │ ├── LzFind.h │ ├── LzHash.h │ ├── LzmaDec.c │ ├── LzmaDec.h │ ├── LzmaEnc.c │ ├── LzmaEnc.h │ ├── LzmaLib.c │ ├── LzmaLib.h │ ├── NameMangle.h │ ├── Types.h │ └── readme.txt │ ├── libopenctm.a │ ├── openctm-mingw1.def │ ├── openctm-mingw2.def │ ├── openctm-msvc.def │ ├── openctm.c │ ├── openctm.h │ ├── openctm.rc │ ├── openctmpp.h │ └── stream.c ├── README.md ├── cortex ├── __init__.py ├── align.py ├── anat.py ├── appdirs.py ├── bbr.sch ├── blender │ ├── __init__.py │ └── blendlib.py ├── brainctm.py ├── database.py ├── dataset │ ├── __init__.py │ ├── braindata.py │ ├── dataset.py │ ├── view2D.py │ ├── viewRGB.py │ └── views.py ├── defaults.cfg ├── export │ ├── __init__.py │ ├── _default_params.py │ ├── panels.py │ └── save_views.py ├── fmriprep.py ├── formats.pyx ├── formats_old.py ├── freesurfer.py ├── mapper │ ├── __init__.py │ ├── line.py │ ├── mapper.py │ ├── patch.py │ ├── point.py │ ├── samplers.py │ ├── utils.py │ └── volume.py ├── mayavi_aligner.py ├── mni.py ├── mp.py ├── openctm.pxd ├── openctm.pyx ├── options.py ├── polyutils │ ├── __init__.py │ ├── distortion.py │ ├── exact_geodesic.py │ ├── misc.py │ ├── subsurface.py │ └── surface.py ├── quickflat │ ├── __init__.py │ ├── composite.py │ ├── utils.py │ └── view.py ├── rois.py ├── segment.py ├── surfinfo.py ├── svgbase.xml ├── svgoverlay.py ├── testing_utils.py ├── tests │ ├── __init__.py │ ├── test_dataset.py │ ├── test_formats.py │ ├── test_freesurfer.py │ ├── test_polyutils.py │ ├── test_quickflat.py │ └── test_utils.py ├── utils.py ├── version.py ├── volume.py ├── webgl │ ├── FallbackLoader.py │ ├── __init__.py │ ├── data.py │ ├── demo.html │ ├── explo_demo.html │ ├── favicon.ico │ ├── htmlembed.py │ ├── mixer.html │ ├── public.html │ ├── resources │ │ ├── css │ │ │ ├── demo.css │ │ │ ├── images │ │ │ │ ├── colors.png │ │ │ │ ├── control-pause.png │ │ │ │ ├── control-play.png │ │ │ │ ├── loading.gif │ │ │ │ ├── magnifying_glass.png │ │ │ │ ├── trigger.png │ │ │ │ ├── ui-bg_diagonals-thick_18_b81900_40x40.png │ │ │ │ ├── ui-bg_diagonals-thick_20_666666_40x40.png │ │ │ │ ├── ui-bg_flat_10_000000_40x100.png │ │ │ │ ├── ui-bg_glass_100_f6f6f6_1x400.png │ │ │ │ ├── ui-bg_glass_100_fdf5ce_1x400.png │ │ │ │ ├── ui-bg_glass_65_ffffff_1x400.png │ │ │ │ ├── ui-bg_gloss-wave_35_f6a828_500x100.png │ │ │ │ ├── ui-bg_highlight-soft_100_eeeeee_1x100.png │ │ │ │ ├── ui-bg_highlight-soft_75_ffe45c_1x100.png │ │ │ │ ├── ui-icons_222222_256x240.png │ │ │ │ ├── ui-icons_228ef1_256x240.png │ │ │ │ ├── ui-icons_ef8c08_256x240.png │ │ │ │ ├── ui-icons_ffd27a_256x240.png │ │ │ │ └── ui-icons_ffffff_256x240.png │ │ │ ├── jquery-ui.min.css │ │ │ ├── jquery.miniColors.css │ │ │ ├── jsplot.css │ │ │ ├── mriview.css │ │ │ ├── select2-4.0.3.min.css │ │ │ ├── w2ui-1.2.min.css │ │ │ ├── w2ui-1.4.2.css │ │ │ └── w2ui-1.4.2.min.css │ │ ├── explo_demo │ │ │ ├── cursor.png │ │ │ ├── demo.js │ │ │ ├── flatten_1.svg │ │ │ ├── flatten_2.svg │ │ │ ├── intro.js │ │ │ 
├── intro.svg │ │ │ ├── rotate_1.svg │ │ │ ├── rotate_2.svg │ │ │ ├── swipe_left.svg │ │ │ ├── swipe_right.svg │ │ │ ├── zoom_1.svg │ │ │ └── zoom_2.svg │ │ ├── js │ │ │ ├── LandscapeControls.js │ │ │ ├── OculusRiftEffect.js │ │ │ ├── axes3d.js │ │ │ ├── canvg.js │ │ │ ├── ctm │ │ │ │ ├── CTMLoader.js │ │ │ │ ├── CTMWorker.js │ │ │ │ ├── ctm.js │ │ │ │ ├── license │ │ │ │ │ ├── OpenCTM.txt │ │ │ │ │ ├── js-lzma.txt │ │ │ │ │ └── js-openctm.txt │ │ │ │ └── lzma.js │ │ │ ├── dat.gui.min.js │ │ │ ├── datamodel.js │ │ │ ├── dataset.js │ │ │ ├── facepick.js │ │ │ ├── facepick_worker.js │ │ │ ├── figure.js │ │ │ ├── graphview.js │ │ │ ├── hoverintent.min.js │ │ │ ├── jgestures.min.js │ │ │ ├── jquery-2.1.1.min.js │ │ │ ├── jquery-ui.min.js │ │ │ ├── jquery.ddslick.min.js │ │ │ ├── jquery.miniColors.js │ │ │ ├── jquery.svg.min.js │ │ │ ├── jquery.svganim.min.js │ │ │ ├── jquery.ui.touch-punch.min.js │ │ │ ├── kdTree-min.js │ │ │ ├── leap-0.6.4.js │ │ │ ├── leap.js │ │ │ ├── menu.js │ │ │ ├── movement.js │ │ │ ├── mriview.js │ │ │ ├── mriview_surface.js │ │ │ ├── mriview_utils.js │ │ │ ├── python_interface.js │ │ │ ├── rgbcolor.js │ │ │ ├── select2-4.0.3.min.js │ │ │ ├── shaderlib.js │ │ │ ├── shadowtex.js │ │ │ ├── sliceplane.js │ │ │ ├── surfgeometry.js │ │ │ ├── surfload.js │ │ │ ├── svg_todataurl.js │ │ │ ├── svgoverlay.js │ │ │ ├── three.js │ │ │ ├── w2ui-1.2.min.js │ │ │ ├── w2ui-1.4.2.js │ │ │ └── w2ui-1.4.2.min.js │ │ └── json │ │ │ ├── wngraph-definitions.json │ │ │ ├── wngraph-nodenames.json │ │ │ ├── wngraph-rgbcolors.json │ │ │ └── wngraph-testvoxwts.json │ ├── serve.py │ ├── simple.html │ ├── static.html │ ├── template.html │ ├── view.py │ ├── wngraph-labeled.svg │ ├── wngraph-minimal.html │ ├── wngraph.html │ └── wngraph.svg └── xfm.py ├── docs ├── 3dhead.png ├── Makefile ├── _templates │ └── class.rst ├── align.rst ├── aligner │ ├── adjring.png │ ├── colormap.png │ ├── contrast.png │ ├── flipcolor.png │ ├── key-controls.png │ ├── lines1.png │ ├── save.png │ ├── snapshot1.png │ ├── snapshot13.png │ ├── snapshot2.png │ ├── snapshot4.png │ └── surface.png ├── api_reference_flat.rst ├── colormap_rst.py ├── conf.py ├── database.rst ├── dataset.rst ├── example_subsurface.gif ├── flatmap_comparison.gif ├── index.rst ├── install.rst ├── make.bat ├── raw.png ├── raw.svg ├── rois.rst ├── segmentation.rst ├── segmentation_guide.rst ├── sphinxext │ └── numpydoc.py ├── transforms.rst ├── userguide │ ├── .gitkeep │ └── webgl.rst ├── webgl │ └── angle_left.png ├── wn_large.png ├── wn_med.png └── wn_small.png ├── examples ├── README.txt ├── datasets │ ├── README.txt │ ├── plot_dataset_arithmetic.py │ ├── plot_vertex.py │ ├── plot_vertex2D.py │ ├── plot_vertexRGB.py │ ├── plot_volume.py │ ├── plot_volume2D.py │ ├── plot_volumeRGB.py │ └── plot_volume_to_vertex.py ├── fsaverage │ ├── README.txt │ └── upsample_to_fsaverage.py ├── import_surface │ ├── README.txt │ └── import_fmriprep.py ├── quickflat │ ├── README.txt │ ├── plot_advanced_compositing.py │ ├── plot_connected_vertices.py │ ├── plot_custom_toppings.py0 │ ├── plot_cutouts.py │ ├── plot_dropout.py │ ├── plot_make_figure.py │ ├── plot_make_gif.py │ ├── plot_make_png.py │ ├── plot_make_svg.py │ ├── plot_rois.py │ ├── plot_sulci.py │ ├── plot_thickness_nanmean.py │ └── plot_zoom_to_roi.py ├── quickstart │ ├── README.txt │ ├── plot_retinotopy_flatmap.py │ ├── retinotopy_webgl.py │ └── show_config.py ├── surface_analyses │ ├── README.txt │ ├── plot_flatmap_distortion.py │ ├── plot_geodesic_distance.py │ ├── plot_geodesic_path.py │ ├── 
plot_interpolate_data.py │ ├── plot_subsurfaces.py │ └── plot_tissots_indicatrix.py ├── utils │ ├── README.txt │ ├── mni_to_subject.py │ ├── multi_panels_plots.py │ ├── plot_get_roi_vertices.py │ ├── plot_mosaic.py │ ├── plot_roi_voxel_index_volume.py │ ├── plot_roi_voxel_mask.py │ ├── plot_voxel_distance_from_surface.py │ └── subject_to_mni.py └── webgl │ ├── README.txt │ ├── dynamic_with_custom_template.py │ ├── multiple_datasets.py │ ├── my_template.html │ ├── single_dataset.py │ ├── static.py │ └── static_with_custom_template.py ├── filestore ├── colormaps │ ├── Accent.png │ ├── Accent_r.png │ ├── BPROG.png │ ├── BROYG.png │ ├── BROYG_2D.png │ ├── Blues.png │ ├── Blues_r.png │ ├── BrBG.png │ ├── BrBG_r.png │ ├── BuBkRd.png │ ├── BuBkRd_alpha_2D.png │ ├── BuGn.png │ ├── BuGn_r.png │ ├── BuOr_2D.png │ ├── BuPu.png │ ├── BuPu_r.png │ ├── BuWtRd.png │ ├── BuWtRd_alpha.png │ ├── BuWtRd_black_2D.png │ ├── CyanBlueGrayRedPink.png │ ├── Dark2.png │ ├── Dark2_r.png │ ├── GnBu.png │ ├── GnBu_r.png │ ├── GreenWhiteBlue.png │ ├── GreenWhiteBlue_2D.png │ ├── GreenWhiteRed.png │ ├── GreenWhiteRed_2D.png │ ├── Greens.png │ ├── Greens_r.png │ ├── Greys.png │ ├── Greys_r.png │ ├── J4.png │ ├── J4R.png │ ├── J4s.png │ ├── J5.png │ ├── J5R.png │ ├── J6.png │ ├── J6R.png │ ├── OrRd.png │ ├── OrRd_r.png │ ├── Oranges.png │ ├── Oranges_r.png │ ├── PRGn.png │ ├── PRGn_r.png │ ├── PU_BuOr_covar.png │ ├── PU_BuOr_covar_alpha.png │ ├── PU_PinkBlue_covar.png │ ├── PU_RdBu_covar.png │ ├── PU_RdBu_covar_alpha.png │ ├── PU_RdGn_covar.png │ ├── Paired.png │ ├── Paired_r.png │ ├── Pastel1.png │ ├── Pastel1_r.png │ ├── Pastel2.png │ ├── Pastel2_r.png │ ├── PiYG.png │ ├── PiYG_r.png │ ├── PuBu.png │ ├── PuBuGn.png │ ├── PuBuGn_r.png │ ├── PuBu_r.png │ ├── PuOr.png │ ├── PuOr_r.png │ ├── PuRd.png │ ├── PuRd_r.png │ ├── Purples.png │ ├── Purples_r.png │ ├── RGrB_tsi.png │ ├── RdBu.png │ ├── RdBu_2D.png │ ├── RdBu_2D_r.png │ ├── RdBu_covar.png │ ├── RdBu_covar2.png │ ├── RdBu_covar_alpha.png │ ├── RdBu_r.png │ ├── RdBu_r_alpha.png │ ├── RdGn_covar.png │ ├── RdGy.png │ ├── RdGy_r.png │ ├── RdPu.png │ ├── RdPu_r.png │ ├── RdYlBu.png │ ├── RdYlBu_r.png │ ├── RdYlGn.png │ ├── RdYlGn_r.png │ ├── Reds.png │ ├── Reds_cov.png │ ├── Reds_r.png │ ├── Retinotopy_RYBCR.png │ ├── Retinotopy_RYBCR_2D.png │ ├── Retinotopy_RYBCR_alpha.png │ ├── Set1.png │ ├── Set1_r.png │ ├── Set2.png │ ├── Set2_r.png │ ├── Set3.png │ ├── Set3_r.png │ ├── Spectral.png │ ├── Spectral_r.png │ ├── YlGn.png │ ├── YlGnBu.png │ ├── YlGnBu_r.png │ ├── YlGn_r.png │ ├── YlOrBr.png │ ├── YlOrBr_r.png │ ├── YlOrRd.png │ ├── YlOrRd_r.png │ ├── afmhot.png │ ├── afmhot_r.png │ ├── autumn.png │ ├── autumn_alpha.png │ ├── autumn_blkmin.png │ ├── autumn_blkmin_alpha_2D.png │ ├── autumn_r.png │ ├── autumnblack.png │ ├── autumnblack_alpha_2D.png │ ├── binary.png │ ├── binary_r.png │ ├── bone.png │ ├── bone_r.png │ ├── brg.png │ ├── brg_r.png │ ├── bwr.png │ ├── bwr_r.png │ ├── cool.png │ ├── cool_r.png │ ├── coolwarm.png │ ├── coolwarm_r.png │ ├── copper.png │ ├── copper_r.png │ ├── cubehelix.png │ ├── cubehelix_r.png │ ├── fire.png │ ├── fire_alpha.png │ ├── flag.png │ ├── flag_r.png │ ├── freesurfer_aseg_256.png │ ├── gist_earth.png │ ├── gist_earth_r.png │ ├── gist_gray.png │ ├── gist_gray_r.png │ ├── gist_heat.png │ ├── gist_heat_r.png │ ├── gist_ncar.png │ ├── gist_ncar_r.png │ ├── gist_rainbow.png │ ├── gist_rainbow_r.png │ ├── gist_stern.png │ ├── gist_stern_r.png │ ├── gist_yarg.png │ ├── gist_yarg_r.png │ ├── gnuplot.png │ ├── gnuplot2.png │ ├── gnuplot2_r.png │ 
├── gnuplot_r.png │ ├── gray.png │ ├── gray_r.png │ ├── hot.png │ ├── hot_alpha.png │ ├── hot_r.png │ ├── hsv.png │ ├── hsv_r.png │ ├── inferno.png │ ├── inferno_r.png │ ├── jet.png │ ├── jet_r.png │ ├── magma.png │ ├── magma_r.png │ ├── nipy_spectral.png │ ├── nipy_spectral_r.png │ ├── ocean.png │ ├── ocean_r.png │ ├── pink.png │ ├── pink_r.png │ ├── plasma.png │ ├── plasma_alpha.png │ ├── plasma_r.png │ ├── prism.png │ ├── prism_r.png │ ├── rainbow.png │ ├── rainbow_r.png │ ├── seismic.png │ ├── seismic_r.png │ ├── spectral.png │ ├── spectral_r.png │ ├── spring.png │ ├── spring_r.png │ ├── summer.png │ ├── summer_r.png │ ├── terrain.png │ ├── terrain_r.png │ ├── viridis.png │ ├── viridis_r.png │ ├── winter.png │ └── winter_r.png └── db │ └── S1 │ ├── anatomicals │ └── raw.nii.gz │ ├── overlays.svg │ ├── surfaces │ ├── flat_lh.gii │ ├── flat_rh.gii │ ├── inflated_lh.gii │ ├── inflated_rh.gii │ ├── pia_lh.gii │ ├── pia_rh.gii │ ├── wm_lh.gii │ └── wm_rh.gii │ └── transforms │ ├── fullhead │ ├── matrices.xfm │ └── reference.nii.gz │ └── retinotopy │ ├── matrices.xfm │ └── reference.nii.gz ├── pyproject.toml ├── pytest.ini ├── requirements.txt └── setup.py /.codecov.yml: -------------------------------------------------------------------------------- 1 | # For more configuration details: 2 | # https://docs.codecov.io/docs/codecov-yaml 3 | 4 | # Check if this file is valid by running in bash: 5 | # curl -X POST --data-binary @.codecov.yml https://codecov.io/validate 6 | 7 | # Coverage configuration 8 | # ---------------------- 9 | coverage: 10 | status: 11 | project: 12 | default: 13 | threshold: 1% # complain if change in codecoverage is greater than 1% 14 | patch: false 15 | range: 70..90 # First number represents red, and second represents green 16 | # (default is 70..100) 17 | round: down # up, down, or nearest 18 | precision: 2 # Number of decimal places, between 0 and 5 19 | 20 | 21 | # Ignoring Paths 22 | # -------------- 23 | # which folders/files to ignore 24 | # ignore: 25 | # - */tests/.* 26 | # - setup.py 27 | 28 | # Pull request comments: 29 | # ---------------------- 30 | # Diff is the Coverage Diff of the pull request. 31 | # Files are the files impacted by the pull request 32 | comment: false 33 | # layout: diff, files # accepted in any order: reach, diff, flags, and/or files 34 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 
3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "github-actions" # See documentation for possible values 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "weekly" 12 | -------------------------------------------------------------------------------- /.github/workflows/build_docs.yml: -------------------------------------------------------------------------------- 1 | name: Build docs 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | tags: 8 | - '*' 9 | pull_request: 10 | branches: 11 | - main 12 | 13 | jobs: 14 | build-docs: 15 | runs-on: ubuntu-latest 16 | steps: 17 | - uses: actions/checkout@v4 18 | 19 | - name: Set up Python 20 | uses: actions/setup-python@v5 21 | with: 22 | python-version: 3.9 23 | 24 | - uses: actions/cache@v4 25 | with: 26 | path: ~/.cache/pip 27 | key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} 28 | restore-keys: | 29 | ${{ runner.os }}-pip- 30 | 31 | - name: Install dependencies 32 | run: | 33 | sudo apt-get update 34 | sudo apt-get install -y inkscape 35 | pip install --upgrade pip 36 | pip install wheel numpy cython 37 | pip install -q ipython Sphinx sphinx-gallery numpydoc 38 | pip install -e . --no-build-isolation 39 | python -c 'import cortex; print(cortex.__full_version__)' 40 | 41 | - name: Build documents 42 | run: | 43 | cd docs && make html && cd .. 44 | touch docs/_build/html/.nojekyll 45 | 46 | - name: Publish to gh-pages if tagged 47 | if: startsWith(github.ref, 'refs/tags') 48 | uses: JamesIves/github-pages-deploy-action@v4.7.3 49 | with: 50 | branch: gh-pages 51 | folder: docs/_build/html 52 | -------------------------------------------------------------------------------- /.github/workflows/codespell.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Codespell 3 | 4 | on: 5 | push: 6 | branches: [main] 7 | pull_request: 8 | branches: [main] 9 | 10 | permissions: 11 | contents: read 12 | 13 | jobs: 14 | codespell: 15 | name: Check for spelling errors 16 | runs-on: ubuntu-latest 17 | 18 | steps: 19 | - name: Checkout 20 | uses: actions/checkout@v4 21 | - name: Codespell 22 | uses: codespell-project/actions-codespell@v2 23 | -------------------------------------------------------------------------------- /.github/workflows/install_from_wheel.yml: -------------------------------------------------------------------------------- 1 | name: Install from wheel 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | branches: 9 | - main 10 | 11 | jobs: 12 | install-from-wheel: 13 | runs-on: ubuntu-latest 14 | strategy: 15 | matrix: 16 | python-version: [3.8, 3.9, "3.10", "3.11", "3.12", "3.13"] 17 | max-parallel: 5 18 | 19 | steps: 20 | - uses: actions/checkout@v4 21 | - name: Set up Python 22 | uses: actions/setup-python@v5 23 | with: 24 | python-version: ${{ matrix.python-version }} 25 | 26 | - uses: actions/cache@v4 27 | with: 28 | path: ~/.cache/pip 29 | key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} 30 | restore-keys: | 31 | ${{ runner.os }}-pip- 32 | 33 | - name: Install dependencies 34 | run: | 35 | sudo apt-get update 36 | sudo apt-get install -y inkscape 37 | pip install --upgrade pip 38 | pip install setuptools build wheel numpy cython 39 | 40 | - name: Create the wheel 41 | run: python setup.py bdist_wheel 42 | 43 | - name: Install from the wheel 44 | 
run: | 45 | pip install $(ls dist/*.whl) --no-build-isolation 46 | 47 | - name: Test installation of the filestore 48 | run: | 49 | # change directory to avoid conflict with cortex directory 50 | cd .. 51 | python -c 'import cortex; print(cortex.db.filestore)' 52 | python -c 'from cortex.webgl.view import cmapdir; print(cmapdir)' 53 | -------------------------------------------------------------------------------- /.github/workflows/publish_to_pypi.yml: -------------------------------------------------------------------------------- 1 | name: Build and publish to PyPI if tagged 2 | on: push 3 | jobs: 4 | build-n-publish: 5 | name: Build and publish to PyPI if tagged 6 | runs-on: ubuntu-latest 7 | steps: 8 | - uses: actions/checkout@master 9 | - name: Set up Python 3.9 10 | uses: actions/setup-python@v5 11 | with: 12 | python-version: 3.9 13 | - name: Install pypa/build 14 | run: >- 15 | python -m 16 | pip install 17 | build 18 | - name: Build a source tarball 19 | run: >- 20 | python -m 21 | build 22 | --sdist 23 | --outdir dist/ 24 | - name: Publish distribution to PyPI 25 | if: startsWith(github.ref, 'refs/tags') 26 | uses: pypa/gh-action-pypi-publish@release/v1 27 | with: 28 | password: ${{ secrets.PYPI_API_TOKEN }} 29 | -------------------------------------------------------------------------------- /.github/workflows/run_tests.yml: -------------------------------------------------------------------------------- 1 | name: Run tests 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | branches: 9 | - main 10 | 11 | jobs: 12 | run-tests: 13 | runs-on: ubuntu-latest 14 | strategy: 15 | matrix: 16 | python-version: [3.8, 3.9, "3.10", "3.11", "3.12", "3.13"] 17 | max-parallel: 5 18 | 19 | steps: 20 | - uses: actions/checkout@v4 21 | - name: Set up Python 22 | uses: actions/setup-python@v5 23 | with: 24 | python-version: ${{ matrix.python-version }} 25 | 26 | - uses: actions/cache@v4 27 | with: 28 | path: ~/.cache/pip 29 | key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }} 30 | restore-keys: | 31 | ${{ runner.os }}-pip- 32 | 33 | - name: Install dependencies 34 | run: | 35 | sudo apt-get update 36 | sudo apt-get install -y inkscape 37 | pip install --upgrade pip 38 | pip install wheel setuptools numpy cython 39 | # force using latest nibabel 40 | pip install -U nibabel 41 | pip install -e . --no-build-isolation 42 | python -c 'import cortex; print(cortex.__full_version__)' 43 | 44 | - name: Test with pytest 45 | run: | 46 | pip install -q pytest pytest-cov 47 | pytest --cov=./ 48 | 49 | - name: Upload coverage to Codecov 50 | uses: codecov/codecov-action@v5 51 | with: 52 | env_vars: OS,PYTHON 53 | fail_ci_if_error: true 54 | token: ${{ secrets.CODECOV_TOKEN }} 55 | verbose: false 56 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | scripts 2 | *.py[cod] 3 | 4 | # C extensions 5 | *.so 6 | cortex/formats.c 7 | cortex/openctm.c 8 | 9 | # Packages 10 | *.egg 11 | *.egg-info 12 | dist 13 | build 14 | eggs 15 | parts 16 | bin 17 | var 18 | sdist 19 | develop-eggs 20 | .installed.cfg 21 | lib 22 | lib64 23 | 24 | # Cached files in filestore 25 | # NOTE: the filestore really should never be in the git repo... Needs work. 
26 | filestore/db/*/cache 27 | filestore/db/*/surface-info 28 | filestore 29 | 30 | # Installer logs 31 | pip-log.txt 32 | 33 | # Unit test / coverage reports 34 | .coverage 35 | .tox 36 | nosetests.xml 37 | 38 | # Translations 39 | *.mo 40 | 41 | # Mr Developer 42 | .mr.developer.cfg 43 | .project 44 | .pydevproject 45 | 46 | # OS X 47 | .DS_store 48 | 49 | # vim temp files 50 | *~ 51 | 52 | # vscode and other IDEs 53 | .vscode 54 | .idea 55 | 56 | *.nfs* 57 | 58 | # docs build 59 | docs/_build 60 | docs/auto_examples 61 | docs/generated 62 | docs/colormaps.rst 63 | -------------------------------------------------------------------------------- /CITATION.cff: -------------------------------------------------------------------------------- 1 | authors: 2 | - family-names: Gao 3 | given-names: James S. 4 | - family-names: Huth 5 | given-names: Alexander G. 6 | - family-names: Lescroart 7 | given-names: Mark D. 8 | - family-names: Gallant 9 | given-names: Jack L. 10 | title: 'Pycortex: an interactive surface visualizer for fMRI' 11 | type: software 12 | cff-version: 1.2.0 13 | message: 'If you use this software, please cite it using the paper from this file.' 14 | repository: 'https://github.com/gallantlab/pycortex' 15 | url: 'https://gallantlab.github.io/pycortex' 16 | 17 | preferred-citation: 18 | authors: 19 | - family-names: Gao 20 | given-names: James S. 21 | - family-names: Huth 22 | given-names: Alexander G. 23 | - family-names: Lescroart 24 | given-names: Mark D. 25 | - family-names: Gallant 26 | given-names: Jack L. 27 | title: 'Pycortex: an interactive surface visualizer for fMRI' 28 | doi: 10.3389/fninf.2015.00023 29 | type: article 30 | journal: Frontiers in Neuroinformatics 31 | volume: 9 32 | pages: 23 33 | year: 2015 34 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2013, Regents of the University of California 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 5 | 6 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 7 | 8 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 9 | 10 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
11 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | recursive-include filestore * 2 | recursive-include OpenCTM-1.0.3 * 3 | recursive-include cortex * 4 | include requirements.txt 5 | -------------------------------------------------------------------------------- /OpenCTM-1.0.3/LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2009-2010 Marcus Geelnard 2 | 3 | This software is provided 'as-is', without any express or implied 4 | warranty. In no event will the authors be held liable for any damages 5 | arising from the use of this software. 6 | 7 | Permission is granted to anyone to use this software for any purpose, 8 | including commercial applications, and to alter it and redistribute it 9 | freely, subject to the following restrictions: 10 | 11 | 1. The origin of this software must not be misrepresented; you must not 12 | claim that you wrote the original software. If you use this software 13 | in a product, an acknowledgment in the product documentation would be 14 | appreciated but is not required. 15 | 16 | 2. Altered source versions must be plainly marked as such, and must not 17 | be misrepresented as being the original software. 18 | 19 | 3. This notice may not be removed or altered from any source 20 | distribution. 21 | -------------------------------------------------------------------------------- /OpenCTM-1.0.3/Makefile.macosx: -------------------------------------------------------------------------------- 1 | ############################################################################### 2 | # Product: OpenCTM 3 | # File: Makefile.macosx 4 | # Description: Top level makefile for Mac OS X. 5 | ############################################################################### 6 | # Copyright (c) 2009 Marcus Geelnard 7 | # 8 | # This software is provided 'as-is', without any express or implied 9 | # warranty. In no event will the authors be held liable for any damages 10 | # arising from the use of this software. 11 | # 12 | # Permission is granted to anyone to use this software for any purpose, 13 | # including commercial applications, and to alter it and redistribute it 14 | # freely, subject to the following restrictions: 15 | # 16 | # 1. The origin of this software must not be misrepresented; you must not 17 | # claim that you wrote the original software. If you use this software 18 | # in a product, an acknowledgment in the product documentation would be 19 | # appreciated but is not required. 20 | # 21 | # 2. Altered source versions must be plainly marked as such, and must not 22 | # be misrepresented as being the original software. 23 | # 24 | # 3. This notice may not be removed or altered from any source 25 | # distribution. 26 | ############################################################################### 27 | 28 | .phony: default all openctm toolset documentation clean 29 | 30 | default: openctm toolset 31 | all: openctm toolset documentation 32 | 33 | clean: 34 | cd lib && $(MAKE) -f Makefile.macosx clean && cd .. 35 | cd tools && $(MAKE) -f Makefile.macosx clean && cd .. 36 | cd doc && $(MAKE) -f Makefile.macosx clean && cd .. 37 | 38 | openctm: 39 | cd lib && $(MAKE) -f Makefile.macosx -j2 && cd .. 40 | 41 | toolset: 42 | cd tools && $(MAKE) -f Makefile.macosx -j2 && cd .. 43 | 44 | documentation: 45 | cd doc && $(MAKE) -f Makefile.macosx -j2 && cd .. 
46 | 47 | 48 | # Installation settings 49 | LIBDIR = /usr/local/lib/ 50 | INCDIR = /usr/local/include/ 51 | BINDIR = /usr/local/bin/ 52 | MAN1DIR = /usr/local/share/man/man1/ 53 | CP = cp 54 | MKDIR = mkdir -p 55 | 56 | install: 57 | $(CP) lib/libopenctm.dylib $(LIBDIR) 58 | $(CP) lib/openctm.h $(INCDIR) 59 | $(CP) lib/openctmpp.h $(INCDIR) 60 | $(CP) tools/ctmconv $(BINDIR) 61 | $(CP) tools/ctmviewer $(BINDIR) 62 | $(MKDIR) $(MAN1DIR) 63 | $(CP) doc/ctmconv.1 $(MAN1DIR) 64 | $(CP) doc/ctmviewer.1 $(MAN1DIR) 65 | -------------------------------------------------------------------------------- /OpenCTM-1.0.3/Makefile.mingw: -------------------------------------------------------------------------------- 1 | ############################################################################### 2 | # Product: OpenCTM 3 | # File: Makefile.mingw 4 | # Description: Top level makefile for Windows / MinGW32. 5 | ############################################################################### 6 | # Copyright (c) 2009 Marcus Geelnard 7 | # 8 | # This software is provided 'as-is', without any express or implied 9 | # warranty. In no event will the authors be held liable for any damages 10 | # arising from the use of this software. 11 | # 12 | # Permission is granted to anyone to use this software for any purpose, 13 | # including commercial applications, and to alter it and redistribute it 14 | # freely, subject to the following restrictions: 15 | # 16 | # 1. The origin of this software must not be misrepresented; you must not 17 | # claim that you wrote the original software. If you use this software 18 | # in a product, an acknowledgment in the product documentation would be 19 | # appreciated but is not required. 20 | # 21 | # 2. Altered source versions must be plainly marked as such, and must not 22 | # be misrepresented as being the original software. 23 | # 24 | # 3. This notice may not be removed or altered from any source 25 | # distribution. 26 | ############################################################################### 27 | 28 | .phony: default all openctm toolset documentation clean 29 | 30 | default: openctm toolset 31 | all: openctm toolset documentation 32 | 33 | clean: 34 | cd lib && $(MAKE) -f Makefile.mingw clean && cd .. 35 | cd tools && $(MAKE) -f Makefile.mingw clean && cd .. 36 | cd doc && $(MAKE) -f Makefile.win clean && cd .. 37 | 38 | openctm: 39 | cd lib && $(MAKE) -f Makefile.mingw -j2 && cd .. 40 | 41 | toolset: 42 | cd tools && $(MAKE) -f Makefile.mingw -j2 && cd .. 43 | 44 | documentation: 45 | cd doc && $(MAKE) -f Makefile.win -j2 && cd .. 46 | -------------------------------------------------------------------------------- /OpenCTM-1.0.3/Makefile.msvc: -------------------------------------------------------------------------------- 1 | ############################################################################### 2 | # Product: OpenCTM 3 | # File: Makefile.msvc 4 | # Description: Top level makefile for Windows / MS Visual Studio 2008. 5 | ############################################################################### 6 | # Copyright (c) 2009 Marcus Geelnard 7 | # 8 | # This software is provided 'as-is', without any express or implied 9 | # warranty. In no event will the authors be held liable for any damages 10 | # arising from the use of this software. 
11 | # 12 | # Permission is granted to anyone to use this software for any purpose, 13 | # including commercial applications, and to alter it and redistribute it 14 | # freely, subject to the following restrictions: 15 | # 16 | # 1. The origin of this software must not be misrepresented; you must not 17 | # claim that you wrote the original software. If you use this software 18 | # in a product, an acknowledgment in the product documentation would be 19 | # appreciated but is not required. 20 | # 21 | # 2. Altered source versions must be plainly marked as such, and must not 22 | # be misrepresented as being the original software. 23 | # 24 | # 3. This notice may not be removed or altered from any source 25 | # distribution. 26 | ############################################################################### 27 | 28 | .PHONY: default all openctm toolset documentation clean 29 | 30 | default: openctm toolset 31 | all: openctm toolset documentation 32 | 33 | clean: 34 | cd lib && $(MAKE) /nologo /f Makefile.msvc clean && cd .. 35 | cd tools && $(MAKE) /nologo /f Makefile.msvc clean && cd .. 36 | cd doc && $(MAKE) /nologo /f Makefile.win clean && cd .. 37 | 38 | openctm: 39 | cd lib && $(MAKE) /nologo /f Makefile.msvc && cd .. 40 | 41 | toolset: 42 | cd tools && $(MAKE) /nologo /f Makefile.msvc && cd .. 43 | 44 | documentation: 45 | cd doc && $(MAKE) /nologo /f Makefile.win && cd .. 46 | -------------------------------------------------------------------------------- /OpenCTM-1.0.3/lib/liblzma/Alloc.h: -------------------------------------------------------------------------------- 1 | /* Alloc.h -- Memory allocation functions 2 | 2008-03-13 3 | Igor Pavlov 4 | Public domain */ 5 | 6 | #ifndef __COMMON_ALLOC_H 7 | #define __COMMON_ALLOC_H 8 | 9 | #include <stddef.h> 10 | 11 | #include "NameMangle.h" 12 | 13 | void *MyAlloc(size_t size); 14 | void MyFree(void *address); 15 | 16 | #ifdef _WIN32 17 | 18 | void SetLargePageSize(); 19 | 20 | void *MidAlloc(size_t size); 21 | void MidFree(void *address); 22 | void *BigAlloc(size_t size); 23 | void BigFree(void *address); 24 | 25 | #else 26 | 27 | #define MidAlloc(size) MyAlloc(size) 28 | #define MidFree(address) MyFree(address) 29 | #define BigAlloc(size) MyAlloc(size) 30 | #define BigFree(address) MyFree(address) 31 | 32 | #endif 33 | 34 | #endif 35 | -------------------------------------------------------------------------------- /OpenCTM-1.0.3/lib/liblzma/LzHash.h: -------------------------------------------------------------------------------- 1 | /* LzHash.h -- HASH functions for LZ algorithms 2 | 2008-10-04 : Igor Pavlov : Public domain */ 3 | 4 | #ifndef __LZHASH_H 5 | #define __LZHASH_H 6 | 7 | #define kHash2Size (1 << 10) 8 | #define kHash3Size (1 << 16) 9 | #define kHash4Size (1 << 20) 10 | 11 | #define kFix3HashSize (kHash2Size) 12 | #define kFix4HashSize (kHash2Size + kHash3Size) 13 | #define kFix5HashSize (kHash2Size + kHash3Size + kHash4Size) 14 | 15 | #define HASH2_CALC hashValue = cur[0] | ((UInt32)cur[1] << 8); 16 | 17 | #define HASH3_CALC { \ 18 | UInt32 temp = p->crc[cur[0]] ^ cur[1]; \ 19 | hash2Value = temp & (kHash2Size - 1); \ 20 | hashValue = (temp ^ ((UInt32)cur[2] << 8)) & p->hashMask; } 21 | 22 | #define HASH4_CALC { \ 23 | UInt32 temp = p->crc[cur[0]] ^ cur[1]; \ 24 | hash2Value = temp & (kHash2Size - 1); \ 25 | hash3Value = (temp ^ ((UInt32)cur[2] << 8)) & (kHash3Size - 1); \ 26 | hashValue = (temp ^ ((UInt32)cur[2] << 8) ^ (p->crc[cur[3]] << 5)) & p->hashMask; } 27 | 28 | #define HASH5_CALC { \ 29 | UInt32 temp = p->crc[cur[0]] ^
cur[1]; \ 30 | hash2Value = temp & (kHash2Size - 1); \ 31 | hash3Value = (temp ^ ((UInt32)cur[2] << 8)) & (kHash3Size - 1); \ 32 | hash4Value = (temp ^ ((UInt32)cur[2] << 8) ^ (p->crc[cur[3]] << 5)); \ 33 | hashValue = (hash4Value ^ (p->crc[cur[4]] << 3)) & p->hashMask; \ 34 | hash4Value &= (kHash4Size - 1); } 35 | 36 | /* #define HASH_ZIP_CALC hashValue = ((cur[0] | ((UInt32)cur[1] << 8)) ^ p->crc[cur[2]]) & 0xFFFF; */ 37 | #define HASH_ZIP_CALC hashValue = ((cur[2] | ((UInt32)cur[0] << 8)) ^ p->crc[cur[1]]) & 0xFFFF; 38 | 39 | 40 | #define MT_HASH2_CALC \ 41 | hash2Value = (p->crc[cur[0]] ^ cur[1]) & (kHash2Size - 1); 42 | 43 | #define MT_HASH3_CALC { \ 44 | UInt32 temp = p->crc[cur[0]] ^ cur[1]; \ 45 | hash2Value = temp & (kHash2Size - 1); \ 46 | hash3Value = (temp ^ ((UInt32)cur[2] << 8)) & (kHash3Size - 1); } 47 | 48 | #define MT_HASH4_CALC { \ 49 | UInt32 temp = p->crc[cur[0]] ^ cur[1]; \ 50 | hash2Value = temp & (kHash2Size - 1); \ 51 | hash3Value = (temp ^ ((UInt32)cur[2] << 8)) & (kHash3Size - 1); \ 52 | hash4Value = (temp ^ ((UInt32)cur[2] << 8) ^ (p->crc[cur[3]] << 5)) & (kHash4Size - 1); } 53 | 54 | #endif 55 | -------------------------------------------------------------------------------- /OpenCTM-1.0.3/lib/liblzma/LzmaLib.c: -------------------------------------------------------------------------------- 1 | /* LzmaLib.c -- LZMA library wrapper 2 | 2008-08-05 3 | Igor Pavlov 4 | Public domain */ 5 | 6 | #include "LzmaEnc.h" 7 | #include "LzmaDec.h" 8 | #include "Alloc.h" 9 | #include "LzmaLib.h" 10 | 11 | static void *SzAlloc(void *p, size_t size) { p = p; return MyAlloc(size); } 12 | static void SzFree(void *p, void *address) { p = p; MyFree(address); } 13 | static ISzAlloc g_Alloc = { SzAlloc, SzFree }; 14 | 15 | MY_STDAPI LzmaCompress(unsigned char *dest, size_t *destLen, const unsigned char *src, size_t srcLen, 16 | unsigned char *outProps, size_t *outPropsSize, 17 | int level, /* 0 <= level <= 9, default = 5 */ 18 | unsigned dictSize, /* use (1 << N) or (3 << N). 4 KB < dictSize <= 128 MB */ 19 | int lc, /* 0 <= lc <= 8, default = 3 */ 20 | int lp, /* 0 <= lp <= 4, default = 0 */ 21 | int pb, /* 0 <= pb <= 4, default = 2 */ 22 | int fb, /* 5 <= fb <= 273, default = 32 */ 23 | int numThreads, /* 1 or 2, default = 2 */ 24 | int algo /* 0 = fast, 1 = normal */ 25 | ) 26 | { 27 | CLzmaEncProps props; 28 | LzmaEncProps_Init(&props); 29 | props.level = level; 30 | props.dictSize = dictSize; 31 | props.lc = lc; 32 | props.lp = lp; 33 | props.pb = pb; 34 | props.fb = fb; 35 | props.numThreads = numThreads; 36 | props.algo = algo; 37 | 38 | return LzmaEncode(dest, destLen, src, srcLen, &props, outProps, outPropsSize, 0, 39 | NULL, &g_Alloc, &g_Alloc); 40 | } 41 | 42 | 43 | MY_STDAPI LzmaUncompress(unsigned char *dest, size_t *destLen, const unsigned char *src, size_t *srcLen, 44 | const unsigned char *props, size_t propsSize) 45 | { 46 | ELzmaStatus status; 47 | return LzmaDecode(dest, destLen, src, srcLen, props, (unsigned)propsSize, LZMA_FINISH_ANY, &status, &g_Alloc); 48 | } 49 | -------------------------------------------------------------------------------- /OpenCTM-1.0.3/lib/liblzma/readme.txt: -------------------------------------------------------------------------------- 1 | This is the C library implementation of LZMA compression/decompression by Igor Pavlov. 2 | 3 | Author: Igor Pavlov 4 | License: Public domain 5 | Version: 4.65 (2009-02-03) 6 | 7 | Some administrative adaptations for integration in OpenCTM were made by Marcus Geelnard. 
8 | -------------------------------------------------------------------------------- /OpenCTM-1.0.3/lib/libopenctm.a: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/OpenCTM-1.0.3/lib/libopenctm.a -------------------------------------------------------------------------------- /OpenCTM-1.0.3/lib/openctm-mingw1.def: -------------------------------------------------------------------------------- 1 | LIBRARY openctm.dll 2 | EXPORTS 3 | ctmAddAttribMap = ctmAddAttribMap@12 @1 4 | ctmAddUVMap = ctmAddUVMap@16 @2 5 | ctmAttribPrecision = ctmAttribPrecision@12 @3 6 | ctmCompressionLevel = ctmCompressionLevel@8 @4 7 | ctmCompressionMethod = ctmCompressionMethod@8 @5 8 | ctmDefineMesh = ctmDefineMesh@24 @6 9 | ctmFileComment = ctmFileComment@8 @7 10 | ctmFreeContext = ctmFreeContext@4 @8 11 | ctmGetAttribMapFloat = ctmGetAttribMapFloat@12 @9 12 | ctmGetAttribMapString = ctmGetAttribMapString@12 @10 13 | ctmGetError = ctmGetError@4 @11 14 | ctmGetFloat = ctmGetFloat@8 @12 15 | ctmGetFloatArray = ctmGetFloatArray@8 @13 16 | ctmGetInteger = ctmGetInteger@8 @14 17 | ctmGetIntegerArray = ctmGetIntegerArray@8 @15 18 | ctmGetNamedAttribMap = ctmGetNamedAttribMap@8 @16 19 | ctmGetNamedUVMap = ctmGetNamedUVMap@8 @17 20 | ctmGetString = ctmGetString@8 @18 21 | ctmGetUVMapFloat = ctmGetUVMapFloat@12 @19 22 | ctmGetUVMapString = ctmGetUVMapString@12 @20 23 | ctmErrorString = ctmErrorString@4 @21 24 | ctmLoad = ctmLoad@8 @22 25 | ctmLoadCustom = ctmLoadCustom@12 @23 26 | ctmNewContext = ctmNewContext@4 @24 27 | ctmNormalPrecision = ctmNormalPrecision@8 @25 28 | ctmSave = ctmSave@8 @26 29 | ctmSaveCustom = ctmSaveCustom@12 @27 30 | ctmUVCoordPrecision = ctmUVCoordPrecision@12 @28 31 | ctmVertexPrecision = ctmVertexPrecision@8 @29 32 | ctmVertexPrecisionRel = ctmVertexPrecisionRel@8 @30 33 | -------------------------------------------------------------------------------- /OpenCTM-1.0.3/lib/openctm-mingw2.def: -------------------------------------------------------------------------------- 1 | LIBRARY openctm.dll 2 | EXPORTS 3 | ctmAddAttribMap@12 @1 4 | ctmAddUVMap@16 @2 5 | ctmAttribPrecision@12 @3 6 | ctmCompressionLevel@8 @4 7 | ctmCompressionMethod@8 @5 8 | ctmDefineMesh@24 @6 9 | ctmFileComment@8 @7 10 | ctmFreeContext@4 @8 11 | ctmGetAttribMapFloat@12 @9 12 | ctmGetAttribMapString@12 @10 13 | ctmGetError@4 @11 14 | ctmGetFloat@8 @12 15 | ctmGetFloatArray@8 @13 16 | ctmGetInteger@8 @14 17 | ctmGetIntegerArray@8 @15 18 | ctmGetNamedAttribMap@8 @16 19 | ctmGetNamedUVMap@8 @17 20 | ctmGetString@8 @18 21 | ctmGetUVMapFloat@12 @19 22 | ctmGetUVMapString@12 @20 23 | ctmErrorString@4 @21 24 | ctmLoad@8 @22 25 | ctmLoadCustom@12 @23 26 | ctmNewContext@4 @24 27 | ctmNormalPrecision@8 @25 28 | ctmSave@8 @26 29 | ctmSaveCustom@12 @27 30 | ctmUVCoordPrecision@12 @28 31 | ctmVertexPrecision@8 @29 32 | ctmVertexPrecisionRel@8 @30 33 | -------------------------------------------------------------------------------- /OpenCTM-1.0.3/lib/openctm-msvc.def: -------------------------------------------------------------------------------- 1 | LIBRARY openctm.dll 2 | EXPORTS 3 | ctmAddAttribMap 4 | ctmAddUVMap 5 | ctmAttribPrecision 6 | ctmCompressionLevel 7 | ctmCompressionMethod 8 | ctmDefineMesh 9 | ctmFileComment 10 | ctmFreeContext 11 | ctmGetAttribMapFloat 12 | ctmGetAttribMapString 13 | ctmGetError 14 | ctmGetFloat 15 | ctmGetFloatArray 16 | ctmGetInteger 17 | ctmGetIntegerArray 18 | 
ctmGetNamedAttribMap 19 | ctmGetNamedUVMap 20 | ctmGetString 21 | ctmGetUVMapFloat 22 | ctmGetUVMapString 23 | ctmErrorString 24 | ctmLoad 25 | ctmLoadCustom 26 | ctmNewContext 27 | ctmNormalPrecision 28 | ctmSave 29 | ctmSaveCustom 30 | ctmUVCoordPrecision 31 | ctmVertexPrecision 32 | ctmVertexPrecisionRel 33 | -------------------------------------------------------------------------------- /OpenCTM-1.0.3/lib/openctm.rc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/OpenCTM-1.0.3/lib/openctm.rc -------------------------------------------------------------------------------- /cortex/__init__.py: -------------------------------------------------------------------------------- 1 | # emacs: -*- coding: utf-8; mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | # vi: set fileencoding=utf-8 ft=python sts=4 ts=4 sw=4 et: 3 | from cortex.dataset import Dataset, Volume, Vertex, VolumeRGB, VertexRGB, Volume2D, Vertex2D, Colors 4 | from cortex import align, volume, quickflat, webgl, segment, options 5 | from cortex.database import db 6 | from cortex.utils import * 7 | from cortex.quickflat import make_figure as quickshow 8 | from cortex.volume import mosaic, unmask 9 | import cortex.export 10 | from cortex.version import __version__, __full_version__ 11 | 12 | try: 13 | from cortex import formats 14 | except ImportError: 15 | raise ImportError("Either are running pycortex from the source directory, or the build is broken. " 16 | "If your current working directory is 'cortex', where pycortex is installed, then change this. " 17 | "If your current working directory is somewhere else, then you may have to rebuild pycortex.") 18 | 19 | load = Dataset.from_file 20 | 21 | try: 22 | from cortex import webgl 23 | from cortex.webgl import show as webshow 24 | except ImportError: 25 | pass 26 | 27 | try: 28 | from cortex import anat 29 | except ImportError: 30 | pass 31 | 32 | # Create deprecated interface for database 33 | class dep(object): 34 | def __getattr__(self, name): 35 | warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning) 36 | return getattr(db, name) 37 | def __dir__(self): 38 | warnings.warn("cortex.surfs is deprecated, use cortex.db instead", Warning) 39 | return db.__dir__() 40 | 41 | surfs = dep() 42 | 43 | import sys 44 | if sys.version_info.major == 2: 45 | stdout = sys.stdout 46 | reload(sys) 47 | sys.setdefaultencoding('utf8') 48 | sys.stdout = stdout 49 | -------------------------------------------------------------------------------- /cortex/bbr.sch: -------------------------------------------------------------------------------- 1 | # 1mm scale 2 | setscale 1 force 3 | setoption costfunction bbr 4 | setoption optimisationtype brent 5 | setoption tolerance 0.0005 0.0005 0.0005 0.02 0.02 0.02 0.002 0.002 0.002 0.001 0.001 0.001 6 | #setoption tolerance 0.005 0.005 0.005 0.2 0.2 0.2 0.02 0.02 0.02 0.01 0.01 0.01 7 | setoption boundguess 1 8 | setoption bbrstep 200 9 | clear UA 10 | clear UU 11 | clear UV 12 | clear U 13 | setrowqsform UU 14 | setrow UU 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 15 | measurecost 6 UU:1-2 0.0 0.0 0.0 0.0 0.0 0.0 0.0 rel 8 16 | gridmeasurecost 6 UU:1-2 -0.07 0.07 0.07 -0.07 0.07 0.07 -0.07 0.07 0.07 -4.0 4.0 4.0 -4.0 4.0 4.0 -4.0 4.0 4.0 0.0 0.0 0.0 abs 8 17 | sort U 18 | copy U UA 19 | clear U 20 | optimise 6 UA:1 0.0 0.0 0.0 0.0 0.0 0.0 0.0 rel 8 21 | setoption optimisationtype powell 22 | optimise 6 U:1 
0.0 0.0 0.0 0.0 0.0 0.0 0.0 rel 8 23 | setoption optimisationtype brent 24 | optimise 6 U:2 0.0 0.0 0.0 0.0 0.0 0.0 0.0 rel 4 25 | sort U 26 | setoption tolerance 0.0002 0.0002 0.0002 0.02 0.02 0.02 0.002 0.002 0.002 0.001 0.001 0.001 27 | setoption bbrstep 2 28 | clear UU 29 | copy U UU 30 | clear U 31 | gridmeasurecost 6 UU:1 -0.0017 0.0017 0.0017 -0.0017 0.0017 0.0017 -0.0017 0.0017 0.0017 -0.1 0.1 0.1 -0.1 0.1 0.1 -0.1 0.1 0.1 0.0 0.0 0.0 abs 8 32 | sort U 33 | clear UB 34 | copy U UB 35 | clear U 36 | setoption optimisationtype brent 37 | optimise 6 UB:1 0.0 0.0 0.0 0.0 0.0 0.0 0.0 rel 8 38 | setoption optimisationtype powell 39 | optimise 12 U:1 0.0 0.0 0.0 0.0 0.0 0.0 0.0 rel 8 40 | setoption optimisationtype brent 41 | optimise 12 U:2 0.0 0.0 0.0 0.0 0.0 0.0 0.0 rel 4 42 | sort U 43 | print U:1 44 | 45 | -------------------------------------------------------------------------------- /cortex/dataset/__init__.py: -------------------------------------------------------------------------------- 1 | """Contains classes for representing brain data in either volumetric or vertex (surface-based) formats for visualization. 2 | """ 3 | 4 | from .views import Volume, Vertex, VolumeRGB, VertexRGB, Volume2D, Vertex2D, Dataview, _from_hdf_data, Colors 5 | from .dataset import Dataset, normalize -------------------------------------------------------------------------------- /cortex/export/__init__.py: -------------------------------------------------------------------------------- 1 | from .save_views import save_3d_views 2 | from .panels import plot_panels 3 | from ._default_params import ( 4 | params_inflatedless_lateral_medial_ventral, 5 | params_flatmap_lateral_medial, 6 | params_occipital_triple_view, 7 | params_inflated_dorsal_lateral_medial_ventral, 8 | params_flatmap_inflated_lateral_medial_ventral, 9 | ) 10 | 11 | __all__ = [ 12 | "save_3d_views", 13 | "plot_panels", 14 | "params_flatmap_lateral_medial", 15 | "params_occipital_triple_view", 16 | "params_inflatedless_lateral_medial_ventral", 17 | "params_inflated_dorsal_lateral_medial_ventral", 18 | "params_flatmap_inflated_lateral_medial_ventral", 19 | ] 20 | -------------------------------------------------------------------------------- /cortex/formats_old.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | def read_vtk(filename): 4 | with open(filename) as vtk: 5 | pts, polys = None, None 6 | line = vtk.readline() 7 | while len(line) > 0 and (pts is None or polys is None): 8 | if line.startswith("POINTS"): 9 | _, n, dtype = line.split() 10 | data = vtk.readline().split() 11 | n = int(n) 12 | nel = n*3 13 | while len(data) < nel: 14 | data += vtk.readline().split() 15 | pts = np.array(data, dtype=float).reshape(n, 3) 16 | elif line.startswith("POLYGONS"): 17 | _, n, nel = line.split() 18 | nel = int(nel) 19 | data = vtk.readline().split() 20 | while len(data) < nel: 21 | data += vtk.readline().split() 22 | polys = np.array(data, dtype=np.uint32).reshape(int(n), 4)[:,1:] 23 | 24 | line = vtk.readline() 25 | return pts, polys 26 | -------------------------------------------------------------------------------- /cortex/mapper/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import numpy as np 4 | 5 | from .. 
import dataset 6 | from .mapper import Mapper, _savecache 7 | from .utils import nanproject, vol2surf 8 | 9 | 10 | def get_mapper(subject, xfmname, type='nearest', recache=False, **kwargs): 11 | from ..database import db 12 | from . import point, patch, line 13 | 14 | mapcls = dict( 15 | nearest=point.PointNN, 16 | trilinear=point.PointTrilin, 17 | gaussian=point.PointGauss, 18 | lanczos=point.PointLanczos, 19 | const_patch_nn=patch.ConstPatchNN, 20 | const_patch_trilin=patch.ConstPatchTrilin, 21 | const_patch_lanczos=patch.ConstPatchLanczos, 22 | line_nearest=line.LineNN, 23 | line_trilinear=line.LineTrilin, 24 | line_lanczos=line.LineLanczos) 25 | Map = mapcls[type] 26 | ptype = Map.__name__.lower() 27 | kwds ='_'.join(['%s%s'%(k,str(v)) for k, v in list(kwargs.items())]) 28 | if len(kwds) > 0: 29 | ptype += '_'+kwds 30 | 31 | fname = "{xfmname}_{projection}.npz".format(xfmname=xfmname, projection=ptype) 32 | 33 | xfmfile = db.get_paths(subject)['xfmdir'].format(xfmname=xfmname) 34 | cachefile = os.path.join(db.get_cache(subject), fname) 35 | 36 | try: 37 | if not recache and (xfmname == "identity" or os.stat(cachefile).st_mtime > os.stat(xfmfile).st_mtime): 38 | return Map.from_cache(cachefile, subject, xfmname) 39 | raise Exception 40 | except Exception: 41 | return Map._cache(cachefile, subject, xfmname, **kwargs) 42 | -------------------------------------------------------------------------------- /cortex/mapper/line.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from scipy import sparse 3 | 4 | from . import Mapper, _savecache 5 | from . import samplers 6 | 7 | class LineMapper(Mapper): 8 | @classmethod 9 | def _cache(cls, filename, subject, xfmname, **kwargs): 10 | from .. import db 11 | masks = [] 12 | xfm = db.get_xfm(subject, xfmname, xfmtype='coord') 13 | pia = db.get_surf(subject, "pia", merge=False, nudge=False) 14 | wm = db.get_surf(subject, "wm", merge=False, nudge=False) 15 | 16 | #iterate over hemispheres 17 | for (wpts, polys), (ppts, _) in zip(pia, wm): 18 | masks.append(cls._getmask(xfm(ppts), xfm(wpts), polys, xfm.shape, **kwargs)) 19 | 20 | _savecache(filename, masks[0], masks[1], xfm.shape) 21 | return cls(masks[0], masks[1], xfm.shape, subject, xfmname) 22 | 23 | @classmethod 24 | def _getmask(cls, pia, wm, polys, shape, npts=64, mp=True, **kwargs): 25 | valid = np.unique(polys) 26 | #vidx = np.nonzero(valid)[0] 27 | mapper = sparse.csr_matrix((len(pia), np.prod(shape))) 28 | for t in np.linspace(0, 1, npts+2)[1:-1]: 29 | i, j, data = cls.sampler(pia*t + wm*(1-t), shape) 30 | mapper = mapper + sparse.csr_matrix((data / npts, (i, j)), shape=mapper.shape) 31 | return mapper 32 | 33 | class LineNN(LineMapper): 34 | sampler = staticmethod(samplers.nearest) 35 | 36 | class LineTrilin(LineMapper): 37 | sampler = staticmethod(samplers.trilinear) 38 | 39 | class LineGauss(LineMapper): 40 | sampler = staticmethod(samplers.gaussian) 41 | 42 | class LineLanczos(LineMapper): 43 | sampler = staticmethod(samplers.lanczos) 44 | -------------------------------------------------------------------------------- /cortex/mapper/patch.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from scipy import sparse 3 | 4 | from . import Mapper 5 | from . import samplers 6 | from .. 
import polyutils 7 | 8 | class PatchMapper(Mapper): 9 | @classmethod 10 | def _getmask(cls, pts, polys, shape, npts=64, mp=True, **kwargs): 11 | rand = np.random.rand(2, npts) 12 | 13 | def func(ipts): 14 | idx, pts = ipts 15 | if pts is not None: 16 | A = np.outer(1-np.sqrt(rand[0]), pts[:,0].ravel()) 17 | B = np.outer(np.sqrt(rand[0]) * (1-rand[1]), pts[:,1].ravel()) 18 | C = np.outer(rand[1] * np.sqrt(rand[0]), pts[:,2].ravel()) 19 | randpts = (A+B+C).reshape(-1, pts.shape[0], pts.shape[2]) 20 | areas = polyutils.face_area(pts) 21 | areas /= areas.sum() 22 | 23 | allj, alldata = [], [] 24 | for tri, area in zip(randpts.swapaxes(0,1), areas): 25 | i, j, data = cls.sampler(tri, shape, renorm=False, mp=False, **kwargs) 26 | alldata.append(data / data.sum() * area) 27 | allj.append(j) 28 | 29 | #print idx 30 | return samplers.collapse(np.hstack(allj), np.hstack(alldata)) 31 | return None, None 32 | 33 | surf = polyutils.Surface(pts, polys) 34 | patches = surf.patches(n=cls.patchsize) 35 | if mp: 36 | from .. import mp 37 | samples = mp.map(func, enumerate(patches)) 38 | else: 39 | #samples = map(func, enumerate(patches)) 40 | samples = [func(x) for x in enumerate(patches)] 41 | 42 | ij, alldata = [], [] 43 | for i, (j, data) in enumerate(samples): 44 | if data is not None: 45 | ij.append(np.vstack(np.broadcast_arrays(i, j)).T) 46 | alldata.append(data) 47 | 48 | data, ij = np.hstack(alldata), np.vstack(ij).T 49 | csrshape = len(pts), np.prod(shape) 50 | return sparse.csr_matrix((data, ij), shape=csrshape) 51 | 52 | class ConstPatch(PatchMapper): 53 | patchsize = 1 54 | 55 | class ConstPatchNN(ConstPatch): 56 | sampler = staticmethod(samplers.nearest) 57 | 58 | class ConstPatchTrilin(ConstPatch): 59 | sampler = staticmethod(samplers.trilinear) 60 | 61 | class ConstPatchLanczos(ConstPatch): 62 | sampler = staticmethod(samplers.lanczos) -------------------------------------------------------------------------------- /cortex/mapper/point.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from scipy import sparse 3 | 4 | from . import Mapper 5 | from . 
import samplers 6 | 7 | class PointMapper(Mapper): 8 | @classmethod 9 | def _getmask(cls, coords, polys, shape, **kwargs): 10 | valid = np.unique(polys) 11 | mcoords = np.nan * np.ones_like(coords) 12 | mcoords[valid] = coords[valid] 13 | i, j, data = cls.sampler(mcoords, shape, **kwargs) 14 | csrshape = len(coords), np.prod(shape) 15 | return sparse.csr_matrix((data, np.array([i, j])), shape=csrshape) 16 | 17 | class PointNN(PointMapper): 18 | sampler = staticmethod(samplers.nearest) 19 | 20 | class PointTrilin(PointMapper): 21 | sampler = staticmethod(samplers.trilinear) 22 | 23 | class PointGauss(PointMapper): 24 | sampler = staticmethod(samplers.gaussian) 25 | 26 | class PointLanczos(PointMapper): 27 | sampler = staticmethod(samplers.lanczos) 28 | -------------------------------------------------------------------------------- /cortex/mp.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import marshal 3 | import multiprocessing as mp 4 | try: 5 | import progressbar as pb 6 | except ImportError: 7 | pass 8 | 9 | def map(func, iterable, procs = mp.cpu_count()): 10 | input, output = mp.Queue(), mp.Queue() 11 | length = mp.Value('i',0) 12 | 13 | def _fill(iterable, procs, input, output): 14 | for data in enumerate(iterable): 15 | input.put(data) 16 | length.value += 1 17 | for _ in range(procs*2): 18 | input.put((-1,-1)) 19 | 20 | def _func(proc, input, output): 21 | idx, data = input.get() 22 | while idx != -1: 23 | output.put((idx, func(data))) 24 | idx, data = input.get() 25 | 26 | filler = mp.Process(target = _fill, args=(iterable, procs, input, output)) 27 | filler.daemon = True 28 | filler.start() 29 | for i in range(procs): 30 | proc = mp.Process(target=_func, args=(i, input, output)) 31 | proc.daemon = True 32 | proc.start() 33 | 34 | try: 35 | iterlen = len(iterable) 36 | except: 37 | filler.join() 38 | iterlen = length.value 39 | 40 | data = [[]]*iterlen 41 | try: 42 | progress = pb.ProgressBar(widgets=[pb.Percentage(), pb.Bar()], maxval=iterlen) 43 | progress.start() 44 | for i in range(iterlen): 45 | idx, result = output.get() 46 | data[idx] = result 47 | progress.update(i+1) 48 | progress.finish() 49 | except NameError: 50 | for _ in range(iterlen): 51 | idx, result = output.get() 52 | data[idx] = result 53 | 54 | return data 55 | 56 | if __name__ == "__main__": 57 | #pool = Pool() 58 | #data = pool.map( 59 | map(lambda x: max(x), zip(*(iter(range(65536)),)*3)) 60 | -------------------------------------------------------------------------------- /cortex/options.py: -------------------------------------------------------------------------------- 1 | import os 2 | try: 3 | import configparser 4 | except ImportError: 5 | import ConfigParser as configparser 6 | from . 
import appdirs 7 | 8 | cwd = os.path.split(os.path.abspath(__file__))[0] 9 | userdir = appdirs.user_data_dir("pycortex", "JamesGao") 10 | usercfg = os.path.join(userdir, "options.cfg") 11 | 12 | # Read defaults from pycortex repo 13 | config = configparser.ConfigParser() 14 | config.read(os.path.join(cwd, 'defaults.cfg')) 15 | 16 | # Update defaults with user-specified values in user config 17 | files_successfully_read = config.read(usercfg) 18 | 19 | # If user config doesn't exist, create it 20 | if len(files_successfully_read) == 0: 21 | if not os.path.exists(userdir): 22 | os.makedirs(userdir) 23 | with open(usercfg, 'w') as fp: 24 | config.write(fp) 25 | 26 | #set default path in case the module is imported from the source code directory 27 | if not config.has_option("basic", "filestore"): 28 | config.set("basic", "filestore", os.path.join(cwd, os.pardir, "filestore/db")) 29 | 30 | if not config.has_option("webgl", "colormaps"): 31 | config.set("webgl", "colormaps", os.path.join(cwd, os.pardir, "filestore/colormaps")) 32 | -------------------------------------------------------------------------------- /cortex/polyutils/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | from .distortion import Distortion 3 | from .misc import ( 4 | _memo, 5 | tetra_vol, 6 | brick_vol, 7 | sort_polys, 8 | face_area, 9 | face_volume, 10 | decimate, 11 | inside_convex_poly, 12 | make_cube, 13 | boundary_edges, 14 | trace_poly, 15 | rasterize, 16 | voxelize, 17 | measure_volume, 18 | marching_cubes, 19 | ) 20 | from .surface import Surface, _ptset, _quadset 21 | -------------------------------------------------------------------------------- /cortex/quickflat/__init__.py: -------------------------------------------------------------------------------- 1 | from .view import make_figure, make_png, make_svg, make_movie, make_gif 2 | from .utils import make_flatmap_image 3 | from .
import composite -------------------------------------------------------------------------------- /cortex/svgbase.xml: -------------------------------------------------------------------------------- 1 | 2 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 24 | 25 | 28 | 31 | 34 | 35 | 36 | -------------------------------------------------------------------------------- /cortex/testing_utils.py: -------------------------------------------------------------------------------- 1 | """Module containing utils for testing""" 2 | import subprocess as sp 3 | from shutil import which 4 | 5 | 6 | def has_installed(name): 7 | return which(name) is not None 8 | 9 | 10 | def inkscape_version(): 11 | if not has_installed('inkscape'): 12 | return None 13 | cmd = 'inkscape --version' 14 | output = sp.check_output(cmd.split(), stderr=sp.PIPE) 15 | # b'Inkscape 1.0 (4035a4f, 2020-05-01)\n' 16 | version = output.split()[1] 17 | if isinstance(version, bytes): 18 | version = version.decode('utf-8') 19 | return version 20 | 21 | 22 | INKSCAPE_VERSION = inkscape_version() 23 | 24 | 25 | -------------------------------------------------------------------------------- /cortex/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/tests/__init__.py -------------------------------------------------------------------------------- /cortex/tests/test_formats.py: -------------------------------------------------------------------------------- 1 | import os 2 | import tempfile 3 | 4 | import numpy as np 5 | 6 | import cortex 7 | from cortex.formats import read_gii, write_gii 8 | 9 | from numpy.testing import assert_array_equal 10 | 11 | def test_write_read_gii(): 12 | wm, polys = cortex.db.get_surf("S1", "wm", "lh") 13 | # make sure they are int32 or nibabel will complain 14 | wm = wm.astype(np.int32) 15 | polys = polys.astype(np.int32) 16 | with tempfile.TemporaryDirectory() as tmpdir: 17 | fnout = os.path.join(tmpdir, "out.gii") 18 | write_gii(fnout, wm, polys) 19 | wm2, polys2 = read_gii(fnout) 20 | assert_array_equal(wm, wm2) 21 | assert_array_equal(polys, polys2) 22 | 23 | -------------------------------------------------------------------------------- /cortex/tests/test_freesurfer.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from cortex.freesurfer import _remove_disconnected_polys 4 | 5 | 6 | def test_remove_disconnected_polys_examples(): 7 | polys = np.array([[0, 1, 2], 8 | [0, 1, 3], 9 | [1, 2, 4], 10 | [5, 6, 7]]) 11 | expected_result = np.array([[0, 1, 2], 12 | [0, 1, 3], 13 | [1, 2, 4]]) 14 | result = _remove_disconnected_polys(polys) 15 | np.testing.assert_array_equal(result, expected_result) 16 | 17 | 18 | def test_remove_disconnected_polys_idempotence(): 19 | rng = np.random.RandomState(0) 20 | for n_polys in [10, 20, 30, 40]: 21 | polys_0 = rng.randint(0, 100, size=3 * n_polys).reshape(-1, 3) 22 | 23 | # make sure this example filters something 24 | polys_1 = _remove_disconnected_polys(polys_0) 25 | assert len(polys_0) != len(polys_1) 26 | 27 | # make sure calling the function does not change anything 28 | polys_2 = _remove_disconnected_polys(polys_1) 29 | np.testing.assert_array_equal(polys_1, polys_2) 30 | -------------------------------------------------------------------------------- /cortex/tests/test_polyutils.py: 
-------------------------------------------------------------------------------- 1 | import numpy as np 2 | from cortex import polyutils 3 | 4 | ## this test is annoying 5 | # def test_cube(): 6 | # from mayavi import mlab 7 | # pts, polys = polyutils.make_cube((.5, .5, .5), 1) 8 | # mlab.triangular_mesh(pts[:,0], pts[:,1], pts[:,2], polys) 9 | # assert True 10 | 11 | def test_surfpatch(): 12 | from cortex import db 13 | wm, polys = db.get_surf("S1", "wm", "lh") 14 | pia, _ = db.get_surf("S1", "pia", "lh") 15 | surf = polyutils.Surface(wm, polys) 16 | subwm, subpia, subpolys = surf.extract_chunk(auxpts=pia) 17 | subsurf = polyutils.Surface(subwm, subpolys) 18 | _ = [patch for patch in subsurf.patches(n=0.5)] 19 | -------------------------------------------------------------------------------- /cortex/tests/test_quickflat.py: -------------------------------------------------------------------------------- 1 | import cortex 2 | import numpy as np 3 | import tempfile 4 | import pytest 5 | 6 | from cortex.testing_utils import has_installed 7 | 8 | no_inkscape = not has_installed('inkscape') 9 | 10 | 11 | @pytest.mark.skipif(no_inkscape, reason='Inkscape required') 12 | def test_quickflat(): 13 | tf = tempfile.NamedTemporaryFile(suffix=".png") 14 | view = cortex.Volume.random("S1", "fullhead", cmap="hot") 15 | cortex.quickflat.make_png(tf.name, view) 16 | 17 | 18 | @pytest.mark.skipif(no_inkscape, reason='Inkscape required') 19 | def test_colorbar_location(): 20 | view = cortex.Volume.random("S1", "fullhead", cmap="hot") 21 | for colorbar_location in ['left', 'center', 'right', (0, 0.2, 0.4, 0.3)]: 22 | cortex.quickflat.make_figure(view, with_colorbar=True, 23 | colorbar_location=colorbar_location) 24 | 25 | with pytest.raises(ValueError): 26 | cortex.quickflat.make_figure(view, with_colorbar=True, 27 | colorbar_location='unknown_location') 28 | 29 | 30 | @pytest.mark.skipif(no_inkscape, reason='Inkscape required') 31 | @pytest.mark.parametrize("type_", ["thick", "thin"]) 32 | @pytest.mark.parametrize("nanmean", [True, False]) 33 | def test_make_flatmap_image_nanmean(type_, nanmean): 34 | mask = cortex.db.get_mask("S1", "fullhead", type=type_) 35 | data = np.ones(mask.sum()) 36 | # set 50% of the values in the dataset to NaN 37 | data[np.random.rand(*data.shape) > 0.5] = np.nan 38 | vol = cortex.Volume(data, "S1", "fullhead", vmin=0, vmax=1) 39 | img, extents = cortex.quickflat.utils.make_flatmap_image( 40 | vol, nanmean=nanmean) 41 | # assert that the nanmean only returns NaNs and 1s 42 | assert np.nanmin(img) == 1 43 | -------------------------------------------------------------------------------- /cortex/tests/test_utils.py: -------------------------------------------------------------------------------- 1 | import cortex 2 | 3 | def test_download_subject(): 4 | # Test that newly downloaded subjects are added to the current database. 5 | 6 | # remove fsaverage from the list of available subjects if present. 
7 | if "fsaverage" in cortex.db.subjects: 8 | cortex.db._subjects.pop("fsaverage") 9 | 10 | assert "fsaverage" not in cortex.db.subjects 11 | cortex.utils.download_subject(subject_id='fsaverage') 12 | assert "fsaverage" in cortex.db.subjects 13 | # test that downloading it again works 14 | cortex.utils.download_subject(subject_id='fsaverage', download_again=True) 15 | -------------------------------------------------------------------------------- /cortex/webgl/FallbackLoader.py: -------------------------------------------------------------------------------- 1 | import os 2 | from tornado import template 3 | 4 | class FallbackLoader(template.BaseLoader): 5 | """Loads templates from one of multiple potential directories, falling back 6 | to next if the template is not found in the first directory. 7 | """ 8 | def __init__(self, root_directories, **kwargs): 9 | super(FallbackLoader, self).__init__(**kwargs) 10 | self.roots = [os.path.abspath(d) for d in root_directories] 11 | 12 | def resolve_path(self, name, parent_path=None): 13 | if parent_path and parent_path[0] not in ["<", "/"] and not name.startswith("/"): 14 | for root in self.roots: 15 | current_path = os.path.join(root, parent_path) 16 | file_dir = os.path.dirname(os.path.abspath(current_path)) 17 | relative_path = os.path.abspath(os.path.join(file_dir, name)) 18 | newname = relative_path[len(root)+1:] 19 | ## Check if path exists 20 | if os.path.exists(relative_path): 21 | return newname 22 | else: 23 | raise Exception("Couldn't find template.") 24 | return name 25 | 26 | def _create_template(self, name): 27 | for root in self.roots: 28 | path = os.path.join(root, name) 29 | if os.path.exists(path): 30 | f = open(path, "rb") 31 | t = template.Template(f.read(), name=name, loader=self) 32 | f.close() 33 | return t 34 | else: 35 | raise Exception("Couldn't find template.") 36 | 37 | -------------------------------------------------------------------------------- /cortex/webgl/__init__.py: -------------------------------------------------------------------------------- 1 | """Makes an interactive viewer for viewing data in a browser 2 | """ 3 | from ..utils import DocLoader 4 | 5 | show = DocLoader("show", ".view", "cortex.webgl") 6 | 7 | #def show(*args, **kwargs): 8 | # from . import view 9 | # return view.show(*args, **kwargs) 10 | 11 | make_static = DocLoader("make_static", ".view", "cortex.webgl") 12 | 13 | #def make_static(*args, **kwargs): 14 | # from . 
import view 15 | # return view.make_static(*args, **kwargs) 16 | -------------------------------------------------------------------------------- /cortex/webgl/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/favicon.ico -------------------------------------------------------------------------------- /cortex/webgl/mixer.html: -------------------------------------------------------------------------------- 1 | {% autoescape None %} 2 | {% extends template.html %} 3 | {% block jsinit %} 4 | var viewer, subjects, datasets, figure, sock, viewopts; 5 | {% end %} 6 | {% block onload %} 7 | viewopts = {{viewopts}}; 8 | subjects = {{subjects}}; 9 | for (var name in subjects) { 10 | subjects[name] = new mriview.Surface(subjects[name]); 11 | } 12 | 13 | figure = new jsplot.W2Figure(); 14 | viewer = figure.add(mriview.Viewer, "main", true); 15 | 16 | dataviews = dataset.fromJSON({{data}}); 17 | viewer.addData(dataviews); 18 | 19 | sock = new Websock(); 20 | {% end %} 21 | -------------------------------------------------------------------------------- /cortex/webgl/public.html: -------------------------------------------------------------------------------- 1 | {% extends simple.html %} 2 | {% block jsinit %} 3 | var viewer, sock; 4 | {% end %} 5 | {% block onload %} 6 | viewer = new MRIview(); 7 | viewer.load("{{ctmfile}}"); 8 | viewer.addData(classify({{data}})); 9 | {% end %} -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/colors.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/colors.png -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/control-pause.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/control-pause.png -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/control-play.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/control-play.png -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/loading.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/loading.gif -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/magnifying_glass.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/magnifying_glass.png -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/trigger.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/trigger.png -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/ui-bg_diagonals-thick_18_b81900_40x40.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/ui-bg_diagonals-thick_18_b81900_40x40.png -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/ui-bg_diagonals-thick_20_666666_40x40.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/ui-bg_diagonals-thick_20_666666_40x40.png -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/ui-bg_flat_10_000000_40x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/ui-bg_flat_10_000000_40x100.png -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/ui-bg_glass_100_f6f6f6_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/ui-bg_glass_100_f6f6f6_1x400.png -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/ui-bg_glass_100_fdf5ce_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/ui-bg_glass_100_fdf5ce_1x400.png -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/ui-bg_glass_65_ffffff_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/ui-bg_glass_65_ffffff_1x400.png -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/ui-bg_gloss-wave_35_f6a828_500x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/ui-bg_gloss-wave_35_f6a828_500x100.png -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/ui-bg_highlight-soft_100_eeeeee_1x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/ui-bg_highlight-soft_100_eeeeee_1x100.png -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/ui-bg_highlight-soft_75_ffe45c_1x100.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/ui-bg_highlight-soft_75_ffe45c_1x100.png -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/ui-icons_222222_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/ui-icons_222222_256x240.png -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/ui-icons_228ef1_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/ui-icons_228ef1_256x240.png -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/ui-icons_ef8c08_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/ui-icons_ef8c08_256x240.png -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/ui-icons_ffd27a_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/ui-icons_ffd27a_256x240.png -------------------------------------------------------------------------------- /cortex/webgl/resources/css/images/ui-icons_ffffff_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/css/images/ui-icons_ffffff_256x240.png -------------------------------------------------------------------------------- /cortex/webgl/resources/css/jsplot.css: -------------------------------------------------------------------------------- 1 | html { height:100%; } 2 | body { 3 | margin:0px; 4 | height:100%; 5 | } 6 | div.jsplot_figure { 7 | height:100%; 8 | } 9 | .jsplot_figure > table { 10 | width:100%; 11 | height:100%; 12 | } 13 | .jsplot_figure > table > tr, .jsplot_figure>table>tr>td { 14 | overflow:hidden; 15 | padding:0px; 16 | } 17 | .jsplot_axes { 18 | width:100%; 19 | height:100%; 20 | overflow:hidden; 21 | position:relative; 22 | } 23 | 24 | .movie_wrapper { 25 | display:table; 26 | height:100%; 27 | width:100%; 28 | } 29 | .movie_cell { 30 | display:table-cell; 31 | vertical-align: middle; 32 | } 33 | .movie { 34 | width:100%; 35 | } 36 | .movie_load { 37 | display:none; 38 | } -------------------------------------------------------------------------------- /cortex/webgl/resources/explo_demo/cursor.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/cortex/webgl/resources/explo_demo/cursor.png -------------------------------------------------------------------------------- /cortex/webgl/resources/js/ctm/CTMWorker.js: 
-------------------------------------------------------------------------------- 1 | importScripts( "lzma.js", "ctm.js" ); 2 | 3 | self.onmessage = function( event ) { 4 | 5 | var files = []; 6 | 7 | for ( var i = 0; i < event.data.offsets.length; i ++ ) { 8 | 9 | var stream = new CTM.Stream( event.data.data ); 10 | stream.offset = event.data.offsets[ i ]; 11 | 12 | self.postMessage( new CTM.File( stream ) ); 13 | 14 | } 15 | 16 | self.close(); 17 | 18 | } 19 | -------------------------------------------------------------------------------- /cortex/webgl/resources/js/ctm/license/OpenCTM.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2009-2010 Marcus Geelnard 2 | 3 | This software is provided 'as-is', without any express or implied 4 | warranty. In no event will the authors be held liable for any damages 5 | arising from the use of this software. 6 | 7 | Permission is granted to anyone to use this software for any purpose, 8 | including commercial applications, and to alter it and redistribute it 9 | freely, subject to the following restrictions: 10 | 11 | 1. The origin of this software must not be misrepresented; you must not 12 | claim that you wrote the original software. If you use this software 13 | in a product, an acknowledgment in the product documentation would be 14 | appreciated but is not required. 15 | 16 | 2. Altered source versions must be plainly marked as such, and must not 17 | be misrepresented as being the original software. 18 | 19 | 3. This notice may not be removed or altered from any source 20 | distribution. 21 | -------------------------------------------------------------------------------- /cortex/webgl/resources/js/ctm/license/js-lzma.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2011 Juan Mellado 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 
20 | -------------------------------------------------------------------------------- /cortex/webgl/resources/js/ctm/license/js-openctm.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2011 Juan Mellado 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 20 | -------------------------------------------------------------------------------- /cortex/webgl/resources/js/facepick_worker.js: -------------------------------------------------------------------------------- 1 | importScripts( "kdTree-min.js" ); 2 | 3 | var num = 0; 4 | self.onmessage = function( event ) { 5 | var pts = [], x, y, z; 6 | if (event.data.wm !== undefined) { 7 | for (var i = 0, il = event.data.pos.length / 3; i < il; i++) { 8 | x = event.data.pos[i*3+0] * 0.5 + event.data.wm[i*4+0]*0.5; 9 | y = event.data.pos[i*3+1] * 0.5 + event.data.wm[i*4+1]*0.5; 10 | z = event.data.pos[i*3+2] * 0.5 + event.data.wm[i*4+2]*0.5; 11 | pts.push([x, y, z, i]); 12 | } 13 | } else { 14 | for (var i = 0, il = event.data.pos.length; i < il; i+= 3) { 15 | pts.push([event.data.pos[i], event.data.pos[i+1], event.data.pos[i+2], i/3]); 16 | } 17 | } 18 | 19 | var dist = function (a, b) { 20 | return (a[0]-b[0])*(a[0]-b[0]) + (a[1]-b[1])*(a[1]-b[1]) + (a[2]-b[2])*(a[2]-b[2]); 21 | } 22 | var kdt = new kdTree(pts, dist, [0, 1, 2]); 23 | 24 | self.postMessage( {kdt:kdt.root, name:event.data.name} ); 25 | if (++num > 1) //close after two hemispheres 26 | self.close(); 27 | } 28 | -------------------------------------------------------------------------------- /cortex/webgl/resources/js/hoverintent.min.js: -------------------------------------------------------------------------------- 1 | /** 2 | * hoverIntent r6 // 2011.02.26 // jQuery 1.5.1+ 3 | * 4 | * 5 | * @param f onMouseOver function || An object with configuration options 6 | * @param g onMouseOut function || Nothing (use configuration options object) 7 | * @author Brian Cherne brian(at)cherne(dot)net 8 | */ 9 | (function($){$.fn.hoverIntent=function(f,g){var cfg={sensitivity:7,interval:100,timeout:0};cfg=$.extend(cfg,g?{over:f,out:g}:f);var cX,cY,pX,pY;var track=function(ev){cX=ev.pageX;cY=ev.pageY};var compare=function(ev,ob){ob.hoverIntent_t=clearTimeout(ob.hoverIntent_t);if((Math.abs(pX-cX)+Math.abs(pY-cY))1){return;}g.preventDefault();var 
i=g.originalEvent.changedTouches[0],f=document.createEvent("MouseEvents");f.initMouseEvent(h,true,true,window,1,i.screenX,i.screenY,i.clientX,i.clientY,false,false,false,false,0,null);g.target.dispatchEvent(f);}c._touchStart=function(g){var f=this;if(a||!f._mouseCapture(g.originalEvent.changedTouches[0])){return;}a=true;f._touchMoved=false;d(g,"mouseover");d(g,"mousemove");d(g,"mousedown");};c._touchMove=function(f){if(!a){return;}this._touchMoved=true;d(f,"mousemove");};c._touchEnd=function(f){if(!a){return;}d(f,"mouseup");d(f,"mouseout");if(!this._touchMoved){d(f,"click");}a=false;};c._mouseInit=function(){var f=this;f.element.bind("touchstart",b.proxy(f,"_touchStart")).bind("touchmove",b.proxy(f,"_touchMove")).bind("touchend",b.proxy(f,"_touchEnd"));e.call(f);};})(jQuery); -------------------------------------------------------------------------------- /cortex/webgl/resources/js/python_interface.js: -------------------------------------------------------------------------------- 1 | function Websock() { 2 | this.ws = new WebSocket("ws://"+location.host+"/wsconnect/"); 3 | this.ws.onopen = function(evt) { 4 | this.ws.send("connect"); 5 | }.bind(this); 6 | this.ws.onmessage = function(evt) { 7 | var jsdat = classify(JSON.parse(evt.data)); 8 | var func = this[jsdat.method]; 9 | var resp = func.apply(this, jsdat.params); 10 | //Don't return jquery objects, 11 | if (resp instanceof $) { 12 | this.ws.send(JSON.stringify(null)); 13 | } else { 14 | this.ws.send(JSON.stringify(resp)) 15 | } 16 | }.bind(this); 17 | } 18 | Websock.prototype.get = function(name) { 19 | var last; 20 | var o = window; 21 | var names = name.split("."); 22 | for (var i = 1; i < names.length; i++) { 23 | last = o; 24 | if (!(o[names[i]] instanceof Object || o[names[i]] instanceof Function)) 25 | o = names[i] 26 | else 27 | o = o[names[i]]; 28 | } 29 | return [last, o]; 30 | } 31 | Websock.prototype.query = function(name) { 32 | var names = {}; 33 | var obj = this.get(name)[1]; 34 | for (var name in obj) { 35 | names[name] = [typeof(obj[name])]; 36 | if (names[name] != "object" && names[name] != "function") 37 | names[name].push(obj[name]) 38 | } 39 | return names; 40 | } 41 | Websock.prototype.set = function(name, value) { 42 | var resp = this.get(name); 43 | var obj = resp[0], val = resp[1]; 44 | try { 45 | obj[val] = value; 46 | resp = null; 47 | } catch (e) { 48 | resp = {error:e.message}; 49 | } 50 | return resp; 51 | } 52 | Websock.prototype.run = function(name, params) { 53 | var resp = this.get(name); 54 | var obj = resp[0], func = resp[1]; 55 | try { 56 | resp = func.apply(obj, params); 57 | } catch (e) { 58 | resp = {error:e.message}; 59 | } 60 | return resp === undefined ? 
null : resp; 61 | } 62 | Websock.prototype.index = function(name, idx) { 63 | 64 | } 65 | 66 | -------------------------------------------------------------------------------- /cortex/webgl/resources/js/surfgeometry.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @author jamesgao / james@jamesgao.com 3 | */ 4 | 5 | THREE.SurfGeometry = function (gl, geoms) { 6 | this.vertexPositionArray = geoms.fiducial.pts; 7 | this.vertexIndexArray = geoms.fiducial.polys; 8 | this.vertexNormalArray = geoms.fiducial.norms; 9 | 10 | this.vertexPositionBuffer = gl.createBuffer(); 11 | gl.bindBuffer( gl.ARRAY_BUFFER, this.vertexPositionBuffer ); 12 | gl.bufferData( gl.ARRAY_BUFFER, this.vertexPositionArray, gl.STATIC_DRAW ); 13 | this.vertexPositionBuffer.itemSize = 3; 14 | this.vertexPositionBuffer.numItems = this.vertexPositionArray.length / 3; 15 | 16 | this.vertexIndexBuffer = gl.createBuffer(); 17 | gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, this.vertexIndexBuffer ); 18 | gl.bufferData( gl.ELEMENT_ARRAY_BUFFER, this.vertexIndexArray, gl.STATIC_DRAW ); 19 | this.vertexIndexBuffer.itemSize = 1; 20 | this.vertexIndexBuffer.numItems = this.vertexIndexArray.length; 21 | 22 | if (this.vertexNormalArray) { 23 | this.vertexNormalBuffer = gl.createBuffer(); 24 | gl.bindBuffer( gl.ARRAY_BUFFER, this.vertexNormalBuffer ); 25 | gl.bufferData( gl.ARRAY_BUFFER, this.vertexNormalArray, gl.STATIC_DRAW ); 26 | this.vertexNormalBuffer.itemSize = 3; 27 | this.vertexNormalBuffer.numItems = this.vertexNormalArray.length / 3; 28 | } 29 | }; 30 | 31 | THREE.SurfGeometry.prototype = new THREE.BufferGeometry(); 32 | THREE.SurfGeometry.prototype.constructor = THREE.SurfGeometry; 33 | -------------------------------------------------------------------------------- /cortex/webgl/static.html: -------------------------------------------------------------------------------- 1 | {% autoescape None %} 2 | {% extends template.html %} 3 | {% block jsinit %} 4 | var viewer, subjects, datasets, figure, sock, viewopts; 5 | {% end %} 6 | {% block onload %} 7 | viewopts = {{viewopts}}; 8 | subjects = {{subjects}}; 9 | for (var name in subjects) { 10 | subjects[name] = new mriview.Surface(subjects[name]); 11 | } 12 | 13 | figure = new jsplot.W2Figure(); 14 | viewer = figure.add(mriview.Viewer, "main", true); 15 | 16 | dataviews = dataset.fromJSON({{data}}); 17 | viewer.addData(dataviews); 18 | {% end %} 19 | -------------------------------------------------------------------------------- /cortex/webgl/wngraph.html: -------------------------------------------------------------------------------- 1 | {% extends template.html %} 2 | {% block javascripts %} 3 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | {% end %} 57 | 58 | {% block jsinit %} 59 | var viewer, sock, gr; 60 | {% end %} 61 | 62 | {% block onload %} 63 | gr = setupGraph(); 64 | gr.setrgbdata(colordata); 65 | 66 | var wngraph = $("#wngraph-plugin").detach(); 67 | wngraph.css("visibility", "visible"); 68 | 69 | viewer = new MRIview(); 70 | viewer.load("{{ctmfile}}", function() { 71 | gr.viewer = viewer; 72 | viewer.picker.callback = function(idx) { 73 | console.log(idx); 74 | gr.showvoxel(idx); 75 | } 76 | }); 77 | 78 | viewer.addData(classify({{data}})); 79 | viewer.addPlugin(wngraph, true); 80 | {% end %} 81 | 82 | {% block extrahtml %} 83 | 89 | {% end %} 90 | -------------------------------------------------------------------------------- /docs/3dhead.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/3dhead.png -------------------------------------------------------------------------------- /docs/_templates/class.rst: -------------------------------------------------------------------------------- 1 | {{ fullname }} 2 | {{ underline }} 3 | 4 | .. currentmodule:: {{ module }} 5 | 6 | .. autoclass:: {{ objname }} 7 | 8 | {% block methods %} 9 | 10 | {% if methods %} 11 | .. rubric:: Methods 12 | 13 | .. autosummary:: 14 | :toctree: {{ objname }} 15 | {% for item in methods %} 16 | ~{{ name }}.{{ item }} 17 | {%- endfor %} 18 | {% endif %} 19 | {% endblock %} 20 | 21 | {% block attributes %} 22 | {% if attributes %} 23 | .. rubric:: Attributes 24 | 25 | .. autosummary:: 26 | :toctree: {{ objname }} 27 | {% for item in attributes %} 28 | ~{{ name }}.{{ item }} 29 | {%- endfor %} 30 | {% endif %} 31 | {% endblock %} -------------------------------------------------------------------------------- /docs/aligner/adjring.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/aligner/adjring.png -------------------------------------------------------------------------------- /docs/aligner/colormap.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/aligner/colormap.png -------------------------------------------------------------------------------- /docs/aligner/contrast.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/aligner/contrast.png -------------------------------------------------------------------------------- /docs/aligner/flipcolor.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/aligner/flipcolor.png -------------------------------------------------------------------------------- /docs/aligner/key-controls.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/aligner/key-controls.png -------------------------------------------------------------------------------- /docs/aligner/lines1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/aligner/lines1.png -------------------------------------------------------------------------------- /docs/aligner/save.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/aligner/save.png -------------------------------------------------------------------------------- /docs/aligner/snapshot1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/aligner/snapshot1.png 
-------------------------------------------------------------------------------- /docs/aligner/snapshot13.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/aligner/snapshot13.png -------------------------------------------------------------------------------- /docs/aligner/snapshot2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/aligner/snapshot2.png -------------------------------------------------------------------------------- /docs/aligner/snapshot4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/aligner/snapshot4.png -------------------------------------------------------------------------------- /docs/aligner/surface.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/aligner/surface.png -------------------------------------------------------------------------------- /docs/colormap_rst.py: -------------------------------------------------------------------------------- 1 | """ 2 | This will make the colormaps.rst file for the docs page. 3 | """ 4 | import os 5 | 6 | path_to_colormaps = "../filestore/colormaps/" 7 | all_colormaps = os.listdir(path_to_colormaps) 8 | all_colormaps.sort() 9 | all_paths = [os.path.join(path_to_colormaps, f) for f in all_colormaps] 10 | all_names = [f[:-4] for f in all_colormaps] 11 | 12 | rst_lines = ["Colormaps\n", 13 | "=========\n", 14 | "\n", 15 | "There are a number of colormaps available in pycortex.\n" 16 | "A full list of those that can be used are below.\n" 17 | "\n"] 18 | 19 | rst_file = open("colormaps.rst", "w") 20 | for l in rst_lines: 21 | rst_file.write(l) 22 | 23 | path_template = ".. image:: {path}\n" 24 | width = " :width: 200px\n" 25 | height_1d = " :height: 25px\n" 26 | height_2d = " :height: 200px\n" 27 | 28 | for path, name in zip(all_paths, all_names): 29 | rst_file.write(name) 30 | rst_file.write("\n\n") 31 | rst_file.write(path_template.format(path=path)) 32 | if ("2D" in name) or ("covar" in name) or ("alpha" in name): 33 | rst_file.write(height_2d) 34 | else: 35 | rst_file.write(height_1d) 36 | rst_file.write(width) 37 | rst_file.write("\n\n") 38 | 39 | rst_file.close() 40 | -------------------------------------------------------------------------------- /docs/example_subsurface.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/example_subsurface.gif -------------------------------------------------------------------------------- /docs/flatmap_comparison.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/flatmap_comparison.gif -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Pycortex Documentation 2 | ====================== 3 | 4 | .. 
image:: 3dhead.png 5 | :width: 85% 6 | :align: center 7 | 8 | Pycortex is a software package for generating beautiful interactive 3D visualizations of fMRI data projected onto cortical surface models. It can also generate high quality 2D flattened cortical visualizations. 9 | 10 | This documentation is still under development. Please report errors or bugs at https://github.com/gallantlab/pycortex/issues. 11 | 12 | For a list of recent changes, please see the `releases on GitHub `_. 13 | 14 | 15 | User Guide 16 | ---------- 17 | .. toctree:: 18 | :maxdepth: 2 19 | 20 | install 21 | segmentation_guide 22 | database 23 | align 24 | .. dataset 25 | rois 26 | transforms 27 | colormaps 28 | 29 | Example Gallery 30 | --------------- 31 | .. toctree:: 32 | :maxdepth: 3 33 | 34 | auto_examples/index 35 | 36 | API Reference 37 | ------------- 38 | .. toctree:: 39 | api_reference_flat 40 | 41 | Citation 42 | -------- 43 | If you use pycortex in published work, please cite the `pycortex paper `_: 44 | 45 | *Gao JS, Huth AG, Lescroart MD and Gallant JL (2015) Pycortex: an interactive surface visualizer for fMRI. Front. Neuroinform. 9:23. doi: 10.3389/fninf.2015.00023* 46 | 47 | 48 | Indices and tables 49 | ------------------ 50 | 51 | * :ref:`genindex` 52 | * :ref:`modindex` 53 | * :ref:`search` 54 | 55 | -------------------------------------------------------------------------------- /docs/raw.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/raw.png -------------------------------------------------------------------------------- /docs/rois.rst: -------------------------------------------------------------------------------- 1 | Surface-defined ROIs 2 | ==================== 3 | 4 | pycortex supports a method of defining surface ROIs using Inkscape. The ROIs are rendered as surface textures in the viewers, and ROI masks can be extracted using helper functions. 5 | 6 | -------------------------------------------------------------------------------- /docs/segmentation.rst: -------------------------------------------------------------------------------- 1 | Segmentation Tutorial 2 | ===================== 3 | In order to plot data, you will need to create surfaces for your particular subject. General surfaces such as fsaverage are NOT recommended, since individual subject anatomy can be highly variable. Averaging across subjects will destroy your signal specificity! The recommended way to generate surfaces is with Freesurfer_. 4 | 5 | This document provides a general guide to creating a Freesurfer surface usable with pycortex. 6 | 7 | Installation 8 | ------------ 9 | Unfortunately, there is no simple unified method for installing Freesurfer on your computer. You will need to download a package, then acquire a (free) registration. For additional instructions, go to http://surfer.nmr.mgh.harvard.edu/fswiki/Download. 10 | 11 | Segmentation 12 | ------------ 13 | Segmentation is the process of identifying the boundary between white matter and gray matter, and between gray matter and dura. With Caret_, only one surface is estimated: the midway point between white matter and pia, also known as the "fiducial" surface. This boundary is converted into a triangular mesh representation using a `marching cubes `_ algorithm. However, segmenting this boundary is nontrivial. 
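Once a subject has been segmented and imported into the pycortex database, the resulting meshes can be loaded directly from Python. A minimal sketch, assuming a subject named "S1" already exists in the database and that the fiducial surface is taken as the vertex-wise midpoint of the white-matter and pial meshes::

    import cortex

    # Load the white-matter and pial meshes for the left hemisphere
    wm, polys = cortex.db.get_surf("S1", "wm", "lh")
    pia, _ = cortex.db.get_surf("S1", "pia", "lh")

    # The fiducial surface lies halfway between the two, vertex for vertex
    fiducial = (wm + pia) / 2  # array of shape (n_vertices, 3)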
14 | 15 | Pycortex wraps many of the segmentation steps from Freesurfer_ for a simpler, more integrated process. Generally, the four functions in cortex.segment are all that's required to go from anatomical image to segmented and flattened surface. 16 | 17 | .. _Freesurfer: http://surfer.nmr.mgh.harvard.edu/ 18 | .. _Caret: http://brainvis.wustl.edu/wiki/index.php/Caret:Download 19 | 20 | Quickstart 21 | ---------- 22 | 23 | #. Collect a T1 MPRAGE image of the subject. `Preferred protocols `_: some variety of multiecho T1. 24 | #. cortex.segment.init_subject -------------------------------------------------------------------------------- /docs/transforms.rst: -------------------------------------------------------------------------------- 1 | Transform formats 2 | ================= 3 | Functional data, usually collected with an EPI sequence, typically does not have the same scan parameters as the anatomical MPRAGE scan used to generate the surfaces. Additionally, fMRI sequences, which are usually optimized for T2*, have drastically different and larger distortions than a typical T1 anatomical sequence. While automatic algorithms exist to align these two scan types, they will sometimes fail spectacularly, especially if a partial volume slice prescription is necessary. 4 | 5 | pycortex includes a tool based on mayavi_ to do manual **affine** alignments. Please see the :mod:`align` module for more information. Alternatively, if an automatic algorithm works well enough, you can also commit your own transform to the database. Transforms in pycortex always go from **fiducial to functional** space. They have four associated variables: 6 | 7 | * **Subject** : name of the subject, must match the surfaces used to create the transform 8 | * **Name** : A unique identifier for this transform 9 | * **type** : The type of transform -- from fiducial to functional **magnet** space, or fiducial to **coord** innate space 10 | * **epifile** : the filename of the functional data that the fiducial is aligned to 11 | 12 | Transforms always store the epifile in order to allow visual validation of alignment using the :mod:`align` module. 13 | 14 | .. 
_mayavi: http://docs.enthought.com/mayavi/mayavi/ -------------------------------------------------------------------------------- /docs/userguide/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/userguide/.gitkeep -------------------------------------------------------------------------------- /docs/webgl/angle_left.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/webgl/angle_left.png -------------------------------------------------------------------------------- /docs/wn_large.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/wn_large.png -------------------------------------------------------------------------------- /docs/wn_med.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/wn_med.png -------------------------------------------------------------------------------- /docs/wn_small.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/docs/wn_small.png -------------------------------------------------------------------------------- /examples/README.txt: -------------------------------------------------------------------------------- 1 | .. _general_examples: 2 | 3 | Example Gallery 4 | ================ 5 | 6 | .. contents:: Contents 7 | :local: 8 | :depth: 2 9 | -------------------------------------------------------------------------------- /examples/datasets/README.txt: -------------------------------------------------------------------------------- 1 | Datasets Examples 2 | ------------------------------ 3 | 4 | Examples demonstrating how to create, manipulate, plot datasets. 5 | 6 | -------------------------------------------------------------------------------- /examples/datasets/plot_dataset_arithmetic.py: -------------------------------------------------------------------------------- 1 | """ 2 | ================== 3 | Dataset Arithmetic 4 | ================== 5 | 6 | This plots example volume data onto an example subject, S1, onto a flatmap 7 | using quickflat. In order for this to run, you have to have a flatmap for 8 | this subject in the pycortex filestore. 
9 | 10 | Once you have created a cortex.Volume object, you can manipulate it with 11 | normal arithmetic operators like +, -, *, /, and ** 12 | """ 13 | 14 | import cortex 15 | import numpy as np 16 | np.random.seed(1234) 17 | import matplotlib.pyplot as plt 18 | 19 | subject = 'S1' 20 | xfm = 'fullhead' 21 | 22 | # Creating a random dataset that is the shape for this transform with one 23 | # entry for each voxel 24 | test_data = np.random.randn(31, 100, 100) 25 | 26 | # This creates a Volume object for our test dataset for the given subject 27 | # and transform 28 | vol_data = cortex.Volume(test_data, subject, xfm, vmin=-2, vmax=2) 29 | cortex.quickshow(vol_data) 30 | plt.show() 31 | 32 | # Now you can do arithmetic with the Volume 33 | vol_plus = vol_data + 1 34 | cortex.quickshow(vol_plus) 35 | plt.show() 36 | 37 | # You can also do multiplication 38 | vol_mult = vol_data * 4 39 | cortex.quickshow(vol_mult) 40 | plt.show() 41 | -------------------------------------------------------------------------------- /examples/datasets/plot_vertex.py: -------------------------------------------------------------------------------- 1 | """ 2 | ================ 3 | Plot Vertex Data 4 | ================ 5 | 6 | This plots example vertex data onto an example subject, S1, onto a flatmap 7 | using quickflat. In order for this to run, you have to have a flatmap for 8 | this subject in the pycortex filestore. 9 | 10 | The cortex.Vertex object is instantiated with a numpy array of the same size 11 | as the total number of vertices in that subject's flatmap. Each pixel is 12 | colored according to the value given for the nearest vertex in the flatmap. 13 | 14 | Instead of the random test data, you can replace this with any array that is 15 | the length of all of the vertices in the subject. 16 | 17 | Additionally, if you create a Vertex object using only the number of vertices 18 | that exists in the left hemisphere of the brain, the right hemisphere is 19 | filled in with zeros. 
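Vertex objects accept the same display options (vmin, vmax, cmap) used for Volume objects elsewhere in these examples. A minimal sketch, reusing the subject "S1" from this example::

    import numpy as np
    import cortex

    # Total number of vertices across both hemispheres for this subject
    num_verts = sum(pts.shape[0] for pts, polys in cortex.db.get_surf("S1", "fiducial"))

    # Threshold random data at +/- 1 and use the "hot" colormap
    vtx = cortex.Vertex(np.random.randn(num_verts), "S1", vmin=-1, vmax=1, cmap="hot")
    cortex.quickshow(vtx)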
20 | """ 21 | 22 | import cortex 23 | import cortex.polyutils 24 | import numpy as np 25 | np.random.seed(1234) 26 | import matplotlib.pyplot as plt 27 | 28 | subject = 'S1' 29 | 30 | # In order to get the number of vertices in this subject's cortical surface 31 | # we have to load in their surfaces and get the number of points in each 32 | surfs = [cortex.polyutils.Surface(*d) 33 | for d in cortex.db.get_surf(subject, "fiducial")] 34 | 35 | # This is the total number of vertices in both hemispheres combined 36 | num_verts = surfs[0].pts.shape[0] + surfs[1].pts.shape[0] 37 | 38 | # Creating a random dataset with one entry for each vertex 39 | test_data = np.random.randn(num_verts) 40 | 41 | # This creates a Vertex object for our subject and test dataset 42 | vertex_data = cortex.Vertex(test_data, subject) 43 | # And now we can display it on a flatmap 44 | cortex.quickshow(vertex_data) 45 | plt.show() 46 | 47 | # We can also plot just the left hemisphere data 48 | numl = surfs[0].pts.shape[0] 49 | # This creates a Vertex object with an array only as long as the number of 50 | # vertices in the left hemisphere, and the right hemisphere will be filled 51 | # in with zeros 52 | vertex_data_left = cortex.Vertex(test_data[:numl], subject) 53 | cortex.quickshow(vertex_data_left) 54 | plt.show() 55 | -------------------------------------------------------------------------------- /examples/datasets/plot_vertex2D.py: -------------------------------------------------------------------------------- 1 | """ 2 | =================== 3 | Plot 2D Vertex Data 4 | =================== 5 | 6 | This plots example 2D vertex data onto an example subject, S1, onto a flatmap 7 | using quickflat. In order for this to run, you have to have a flatmap for this 8 | subject in the pycortex filestore. 9 | 10 | The cortex.Vertex2D object is instantiated with two numpy arrays of the same 11 | size as the total number of vertices in that subject's flatmap. Each pixel is 12 | colored according to both values given for the nearest vertex in the flatmap. 13 | 14 | Instead of random test data, you can replace these with any arrays that are 15 | the length of all the vertices in the subject. 
16 | """ 17 | 18 | import cortex 19 | import cortex.polyutils 20 | import numpy as np 21 | import matplotlib.pyplot as plt 22 | 23 | subject = 'S1' 24 | 25 | # In order to get the number of vertices in this subject's cortical surface 26 | # we have to load in their surfaces and get the number of points in each 27 | surfs = [cortex.polyutils.Surface(*d) 28 | for d in cortex.db.get_surf(subject, "fiducial")] 29 | 30 | # This is the total number of vertices in the left and right hemispheres 31 | num_verts = [s.pts.shape[0] for s in surfs] 32 | 33 | # Creating one random dataset that is basically a gradient across each 34 | # hemisphere based on vertex number 35 | test_data1 = np.hstack((np.arange(num_verts[0]), np.arange(num_verts[1]))) 36 | 37 | # Picking a different vertex in each hemisphere to create another fake 38 | # gradient away from that vertex 39 | second_verts = [n / 4 for n in num_verts] 40 | test_data2 = np.hstack((np.abs(np.arange(num_verts[0]) - second_verts[0]), 41 | np.abs(np.arange(num_verts[1]) - second_verts[1]))) 42 | 43 | # This creates a 2D Vertex object with both of our test datasets for the 44 | # given subject 45 | vertex_data = cortex.Vertex2D(test_data1, test_data2, subject) 46 | cortex.quickshow(vertex_data, with_colorbar=False) 47 | plt.show() 48 | -------------------------------------------------------------------------------- /examples/datasets/plot_volume.py: -------------------------------------------------------------------------------- 1 | """ 2 | ================ 3 | Plot Volume Data 4 | ================ 5 | 6 | This plots example volume data onto an example subject, S1, onto a flatmap 7 | using quickflat. In order for this to run, you have to have a flatmap for 8 | this subject in the pycortex filestore. 9 | 10 | The cortex.Volume object is instantiated with a numpy array of the same size 11 | as the scan for this subject and transform. Instead of the random test data, 12 | you can replace this with any numpy array of the correct dimensionality. 13 | 14 | By changing the parameters vmin and vmax, you get thresholded data, as shown 15 | in the colorbar for the figure. 16 | 17 | If you have NaN values within your array, those voxels show up transparent 18 | on the brain. 
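The data array does not have to be a full 3D volume; a 1D array with one value per voxel inside a cortical mask also works, following the same pattern used in the test suite of this repository. A minimal sketch, assuming the "thick" mask exists for this subject and transform::

    import numpy as np
    import cortex

    # One value per voxel inside the cortical mask, instead of a full 3D array
    mask = cortex.db.get_mask("S1", "fullhead", type="thick")
    data = np.random.randn(mask.sum())
    vol_masked = cortex.Volume(data, "S1", "fullhead", vmin=-2, vmax=2)
    cortex.quickshow(vol_masked)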
19 | """ 20 | 21 | import cortex 22 | import numpy as np 23 | np.random.seed(1234) 24 | import matplotlib.pyplot as plt 25 | 26 | subject = 'S1' 27 | xfm = 'fullhead' 28 | 29 | # Creating a random dataset that is the shape for this transform with one 30 | # entry for each voxel 31 | test_data = np.random.randn(31, 100, 100) 32 | 33 | # This creates a Volume object for our test dataset for the given subject 34 | # and transform 35 | vol_data = cortex.Volume(test_data, subject, xfm) 36 | cortex.quickshow(vol_data) 37 | plt.show() 38 | 39 | # Can also alter the minimum and maximum values shown on the colorbar 40 | vol_data_thresh = cortex.Volume(test_data, subject, xfm, vmin=-1, vmax=1) 41 | cortex.quickshow(vol_data_thresh) 42 | plt.show() 43 | 44 | # If you have NaN values, those voxels show up transparent on the brain 45 | test_data[10:15, :, :] = np.nan 46 | vol_data_nan = cortex.Volume(test_data, subject, xfm) 47 | cortex.quickshow(vol_data_nan) 48 | plt.show() 49 | -------------------------------------------------------------------------------- /examples/datasets/plot_volume_to_vertex.py: -------------------------------------------------------------------------------- 1 | """ 2 | ============================== 3 | Map from Volume to Vertex Data 4 | ============================== 5 | 6 | In order to move from Volume data to Vertex data, you start with data from 7 | voxels and then create a cortex.Volume object. Then, you get a mapper to go 8 | between voxels and vertices for the specific subject and transform you are 9 | working with. Pass the voxel volume through the mapper and you get out a 10 | vertex mapping of that data. You can plot both of these as you normally would. 11 | """ 12 | 13 | import cortex 14 | import cortex.polyutils 15 | import numpy as np 16 | np.random.seed(1234) 17 | import matplotlib.pyplot as plt 18 | 19 | subject = 'S1' 20 | xfm = 'fullhead' 21 | 22 | # First create example voxel data for this subject and transform 23 | voxel_data = np.random.randn(31, 100, 100) 24 | voxel_vol = cortex.Volume(voxel_data, subject, xfm) 25 | 26 | # Then we have to get a mapper from voxels to vertices for this transform 27 | mapper = cortex.get_mapper(subject, xfm, 'line_nearest', recache=True) 28 | 29 | # Just pass the voxel data through the mapper to get vertex data 30 | vertex_map = mapper(voxel_vol) 31 | 32 | # You can plot both as you would normally plot Volume and Vertex data 33 | cortex.quickshow(voxel_vol) 34 | plt.show() 35 | cortex.quickshow(vertex_map) 36 | plt.show() 37 | -------------------------------------------------------------------------------- /examples/fsaverage/README.txt: -------------------------------------------------------------------------------- 1 | Examples with fsaverage 2 | ------------------------------ 3 | 4 | Examples showing how to use PyCortex to visualize data on fsaverage. -------------------------------------------------------------------------------- /examples/fsaverage/upsample_to_fsaverage.py: -------------------------------------------------------------------------------- 1 | """ 2 | =================== 3 | Upsample data from a lower resolution fsaverage template to fsaverage for visualization 4 | =================== 5 | 6 | This example shows how data in a lower resolution fsaverage template 7 | (e.g., fsaverage5 or fsaverage6) can be upsampled to the high resolution fsaverage 8 | template for visualization. 
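The same call should also accept data sampled on fsaverage6 (an assumption based on the description above; the vertex count of 40962 per hemisphere comes from the standard FreeSurfer templates)::

    import numpy as np
    import cortex

    # Hypothetical fsaverage6 variant: 40962 vertices per hemisphere
    n_vertices_fsaverage6 = 40962
    data_fs6 = np.random.randn(2 * n_vertices_fsaverage6)
    data_fs7 = cortex.freesurfer.upsample_to_fsaverage(data_fs6, "fsaverage6")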
9 | """ 10 | 11 | import matplotlib.pyplot as plt 12 | import numpy as np 13 | 14 | import cortex 15 | 16 | subject = "fsaverage" 17 | 18 | # First we check if the fsaverage template is already in the pycortex filestore. If not, 19 | # we download the template from the web and add it to the filestore. 20 | if subject not in cortex.db.subjects: 21 | cortex.download_subject(subject) 22 | 23 | # Next we create some data on fsaverage5. Each hemisphere has 10242 vertices. 24 | n_vertices_fsaverage5 = 10242 25 | data_fs5 = np.arange(1, n_vertices_fsaverage5 + 1) 26 | # We concatenate the data to itself to create a vector of length 20484, corresponding to 27 | # the two hemispheres together. 28 | data_fs5 = np.concatenate((data_fs5, data_fs5)) 29 | # Finally, we upsample the data to fsaverage. 30 | data_fs7 = cortex.freesurfer.upsample_to_fsaverage(data_fs5, "fsaverage5") 31 | 32 | # Now that the data is in the fsaverage template, we can visualize it in PyCortex as any 33 | # other vertex dataset. 34 | vtx = cortex.Vertex(data_fs7, subject, vmin=0, vmax=n_vertices_fsaverage5, cmap="turbo") 35 | cortex.quickshow(vtx, with_curvature=False, with_colorbar=False) 36 | plt.show() 37 | -------------------------------------------------------------------------------- /examples/import_surface/README.txt: -------------------------------------------------------------------------------- 1 | Importing Surfaces Examples 2 | ------------------------------ 3 | 4 | Examples demonstrating how to import surfraces from other software (e.g. 5 | FreeSurfer, fmriprep). 6 | 7 | -------------------------------------------------------------------------------- /examples/import_surface/import_fmriprep.py: -------------------------------------------------------------------------------- 1 | """ 2 | ================== 3 | Import fmriprep output 4 | ================== 5 | 6 | Recently, many people have start to use fmriprep as a complete preprocessing workflow of anatomical and functional data. Pycortex has a convenience function to import 7 | the output of this workflow. 8 | 9 | This example is based on the fmriprep 1.0.15 output of openfmri ds000164 that can be found on openneuro.org: https://openneuro.org/datasets/ds000164/versions/00001 10 | 11 | NB: `cortex.fmriprep` is a work-in-progress and cannot currently handle multiple datasets when the subject IDs are the same (see https://github.com/gallantlab/pycortex/issues/304). 12 | """ 13 | 14 | import cortex 15 | from cortex import fmriprep 16 | from os import path as op 17 | 18 | # Location of the downloaded openfmri dataset 19 | source_directory = '/derivatives/ds000164' 20 | # fmriprep subject name (without "sub-") 21 | subject_id = '001' 22 | 23 | # import subject into pycortex database 24 | fmriprep.import_subj(subject_id, source_directory) 25 | 26 | # We can visualize the imported subject's T1-weighted image 27 | anat_nifti = 'fmriprep/sub-001/anat/sub-001_T1w_preproc.nii.gz' 28 | t1_image_path = op.join(source_directory, anat_nifti) 29 | 30 | # Now we can make a volume using the built-in identity transform 31 | t1w_volume = cortex.Volume(t1_image_path, subject_id, 'identity') 32 | 33 | # And show the result. 
34 | ds = cortex.Dataset(t1w=t1w_volume) 35 | cortex.webgl.show(ds) 36 | -------------------------------------------------------------------------------- /examples/quickflat/README.txt: -------------------------------------------------------------------------------- 1 | Quickflat Examples 2 | ------------------------------ 3 | 4 | Examples demonstrating how to create flatmaps. 5 | 6 | -------------------------------------------------------------------------------- /examples/quickflat/plot_advanced_compositing.py: -------------------------------------------------------------------------------- 1 | """ 2 | ============================== 3 | Plot with advanced compositing 4 | ============================== 5 | 6 | The way flatmap plotting works in pycortex is to create different image 7 | layers (data, ROIs, sulci, etc) and overlay each on top of the other. 8 | Usually, quickflat.make_figure() handles all this for you, but each 9 | layer can be manipulated independently for fancier effects with the 10 | quickflat.composite sub-module. 11 | """ 12 | 13 | import cortex 14 | import matplotlib.pyplot as plt 15 | 16 | # Create a random volume 17 | volume = cortex.Volume.random(subject='S1', xfmname='fullhead') 18 | 19 | # Create basic figure, with rois, labels, sulci all off 20 | fig = cortex.quickflat.make_figure(volume, 21 | with_curvature=True, 22 | with_rois=False, 23 | with_labels=False, 24 | with_sulci=False) 25 | # Add sulci in light yellow 26 | _ = cortex.quickflat.composite.add_sulci(fig, volume, 27 | with_labels=False, 28 | linewidth=2, 29 | linecolor=(0.9, 0.85, 0.5)) 30 | # Add all rois, with a particular color scheme: 31 | _ = cortex.quickflat.composite.add_rois(fig, volume, 32 | with_labels=False, 33 | linewidth=1, 34 | linecolor=(0.8, 0.8, 0.8)) 35 | # Highlight face- and body-selective ROIs: 36 | _ = cortex.quickflat.composite.add_rois(fig, volume, 37 | roi_list=['FFA', 'EBA', 'OFA'], # (This defaults to all rois if not specified) 38 | with_labels=True, 39 | linewidth=5, 40 | linecolor=(0.9, 0.5, 0.5), 41 | labelcolor=(0.9, 0.5, 0.5), 42 | labelsize=20, 43 | roifill=(0.9, 0.5, 0.5), 44 | fillalpha=0.35, 45 | dashes=(5, 3) # Dash length & gap btw dashes 46 | ) 47 | plt.show() 48 | -------------------------------------------------------------------------------- /examples/quickflat/plot_connected_vertices.py: -------------------------------------------------------------------------------- 1 | """ 2 | ============================================== 3 | Plot which vertices are inside the same voxels 4 | ============================================== 5 | 6 | Show lines connecting vertices on the flatmap that are actually within the same 7 | voxels in a given scan. 8 | 9 | Here, we used advanced compositing to be explicit about display options for the 10 | connecting lines. 11 | 12 | """ 13 | import cortex 14 | import numpy as np 15 | import matplotlib.pyplot as plt 16 | 17 | # Create an empty pycortex Volume 18 | volume = cortex.Volume.empty(subject='S1', xfmname='retinotopy', value=np.nan) 19 | 20 | # Plot a flatmap with the data projected onto the surface 21 | fig = cortex.quickflat.make_figure(volume, with_curvature=True, with_colorbar=False) 22 | 23 | # Advanced compositing addition of connected vertices. 24 | # Note that this will not currently resize correctly with a figure. 
25 | lines = cortex.quickflat.composite.add_connected_vertices(fig, volume, 26 | exclude_border_width=None, color=(1.0, 0.5, 0.1, 0.6), linewidth=0.75, 27 | alpha=0.3, recache=True) 28 | plt.show() 29 | -------------------------------------------------------------------------------- /examples/quickflat/plot_custom_toppings.py: -------------------------------------------------------------------------------- 1 | """ 2 | ====================================================== 3 | Custom toppings you can add to `quickflat.make_figure` 4 | ====================================================== 5 | 6 | """ 7 | import cortex 8 | import matplotlib.pyplot as plt 9 | 10 | # Create a random pycortex Volume 11 | volume = cortex.Volume.random(subject='S1', xfmname='retinotopy') 12 | 13 | # Plot a flatmap with the data projected onto the surface 14 | # By default ROIs and their labels will be overlaid on the plot 15 | # Also a colorbar will be added 16 | _ = cortex.quickflat.make_figure(volume, with_curvature=True) 17 | 18 | ax = plt.gcf().axes[0] 19 | ax.set_title("Random data on retinotopic regions") 20 | 21 | plt.show() 22 | -------------------------------------------------------------------------------- /examples/quickflat/plot_cutouts.py: -------------------------------------------------------------------------------- 1 | """ 2 | =========================== 3 | Plot cutouts on the flatmap 4 | =========================== 5 | 6 | Cutouts are manually generated cuts of the cortical surface to highlight 7 | a region of interest. 8 | 9 | Cutouts are defined as sub-layers of the `cutouts` layer 10 | in the `overlays.svg` file in the pycortex filestore for this subject. 11 | 12 | The parameter `cutout` of the `quickflat.make_figure` method should be the 13 | name of the flatmap cutout defined in the `overlays.svg` file. 14 | 15 | """ 16 | import cortex 17 | import numpy as np 18 | np.random.seed(1234) 19 | 20 | # Name of a sub-layer of the 'cutouts' layer in the overlays.svg file 21 | cutout_name = "VisualCortexRight" 22 | 23 | # Create a random pycortex Volume 24 | volume = cortex.Volume.random(subject='S1', xfmname='fullhead') 25 | 26 | # Plot a flatmap with the data projected onto the surface 27 | # Highlight the curvature and select which cutout to display 28 | _ = cortex.quickflat.make_figure(volume, 29 | with_curvature=True, 30 | cutout=cutout_name) 31 | -------------------------------------------------------------------------------- /examples/quickflat/plot_dropout.py: -------------------------------------------------------------------------------- 1 | """ 2 | =================================== 3 | Plot dropout regions on the flatmap 4 | =================================== 5 | 6 | A dropout region is a region with very low EPI signal. In pycortex a 7 | crosshatch is used to display such dropout regions. 8 | 9 | The crosshatches are created using the reference nifti image file 10 | with a threshold. 11 | Setting the `with_dropout=True` parameter in `quickflat.make_figure` 12 | takes the reference nifti image file and computes a thresholded version 13 | of this using the following formula: 14 | 15 | FIXME: 16 | ..
code-block:: python 17 | rawdata[rawdata==0] = np.mean(rawdata[rawdata!=0]) 18 | normdata = (rawdata - rawdata.min()) / (rawdata.max() - rawdata.min()) 19 | normdata = (1 - normdata) ** power 20 | """ 21 | 22 | import cortex 23 | import numpy as np 24 | np.random.seed(1234) 25 | 26 | # Create a random pycortex Volume 27 | volume = cortex.Volume.random(subject='S1', xfmname='fullhead') 28 | 29 | # Plot a flatmap with the data projected onto the surface 30 | # Highlight the curvature and dropout regions 31 | _ = cortex.quickflat.make_figure(volume, 32 | with_curvature=True, 33 | with_dropout=True) 34 | -------------------------------------------------------------------------------- /examples/quickflat/plot_make_figure.py: -------------------------------------------------------------------------------- 1 | """ 2 | ======================== 3 | Plot a 2D static flatmap 4 | ======================== 5 | 6 | quickflat visualizations use matplotlib to generate figure-quality 2D flatmaps. 7 | 8 | Similar to webgl, this tool uses pixel-based mapping to project functional data 9 | onto the cortical surfaces. 10 | 11 | This demo will use randomly generated data and plot a flatmap. Different 12 | options to visualize the data will be demonstrated. 13 | 14 | **Some words on the `recache` parameter before we begin:** 15 | 16 | Setting the `recache=True` parameter recaches the flatmap cache located in 17 | the subject's `cache` folder in the pycortex filestore. By default intermediate steps for a flatmap are 18 | cached after the first generation to speed up the process for the future. If 19 | any of the intermediate steps changes, the flatmap generation may fail. 20 | `recache=True` will regenerate these intermediate steps from scratch. 21 | This can be helpful when `quickflat.make_figure` fails even though you 22 | see no reason why it should. Try it, it's magic!
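As a minimal sketch, the flag is simply passed through to `make_figure`; all other keyword arguments stay the same:

.. code-block:: python

    import cortex

    volume = cortex.Volume.random(subject='S1', xfmname='retinotopy')
    # Throw away the cached intermediate files for this subject/transform
    # and rebuild them while making the figure
    _ = cortex.quickflat.make_figure(volume, recache=True)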
23 | 24 | """ 25 | import cortex 26 | import matplotlib.pyplot as plt 27 | import numpy as np 28 | np.random.seed(1234) 29 | 30 | 31 | # Create a random pycortex Volume 32 | volume = cortex.Volume.random(subject='S1', xfmname='retinotopy') 33 | 34 | # Plot a flatmap with the data projected onto the surface 35 | # By default ROIs and their labels will be overlaid to the plot 36 | # Also a colorbar will be added 37 | _ = cortex.quickflat.make_figure(volume) 38 | plt.show() 39 | 40 | # The cortex.quickshow method is a pointer to quickflat.make_figure 41 | # and will plot exactly the same as the above plot 42 | _ = cortex.quickshow(volume) 43 | plt.show() 44 | 45 | # Highlight the curvature 46 | _ = cortex.quickflat.make_figure(volume, with_curvature=True) 47 | plt.show() 48 | 49 | # Remove ROI labels from the plot 50 | _ = cortex.quickflat.make_figure(volume, 51 | with_curvature=True, 52 | with_labels=False) 53 | plt.show() 54 | 55 | # Remove ROIs from the plot 56 | _ = cortex.quickflat.make_figure(volume, 57 | with_curvature=True, 58 | with_rois=False) 59 | plt.show() 60 | 61 | # Remove the colorbar from the plot 62 | cortex.quickflat.make_figure(volume, 63 | with_curvature=True, 64 | with_colorbar=False) 65 | 66 | plt.show() 67 | -------------------------------------------------------------------------------- /examples/quickflat/plot_make_gif.py: -------------------------------------------------------------------------------- 1 | """ 2 | ==================================== 3 | Animate a series of volumes as a GIF 4 | ==================================== 5 | 6 | A convenient way to compare two flat maps (e.g., prediction performance or 7 | tuning weights) is to flip back and forth between them. This example shows how 8 | to make an animated gif in which each frame is a flatmap. 9 | 10 | """ 11 | import cortex 12 | import matplotlib.pyplot as plt 13 | import numpy as np 14 | np.random.seed(1234) 15 | 16 | ################################################################################ 17 | # Create several pycortex Volumes 18 | # 19 | 20 | volumes = {'first': cortex.Volume.random(subject='S1', xfmname='fullhead', vmin=-2, vmax=2, cmap="RdBu_r"), 21 | 'second': cortex.Volume.random(subject='S1', xfmname='fullhead', vmin=-2, vmax=2, cmap="RdBu_r")} 22 | 23 | ################################################################################ 24 | # Plot flat maps individually 25 | # 26 | 27 | _ = cortex.quickflat.make_figure(volumes['first'], colorbar_location="right") 28 | _ = cortex.quickflat.make_figure(volumes['second'], colorbar_location="right") 29 | _ = plt.show() 30 | 31 | 32 | ################################################################################ 33 | # Generate an animated gif that switches between frames every 1.5 seconds 34 | # 35 | 36 | filename = "./flatmap_comparison.gif" 37 | cortex.quickflat.make_gif(filename, volumes, frame_duration=1.5, colorbar_location="right") 38 | 39 | ################################################################################ 40 | # Display gif inline in an IPython notebook 41 | # 42 | 43 | import io 44 | from IPython.display import Image 45 | 46 | stream = io.BytesIO() 47 | cortex.quickflat.make_gif(stream, volumes, frame_duration=1.5, colorbar_location="right") 48 | 49 | Image(stream.read()) 50 | 51 | 52 | ################################################################################ 53 | # .. 
image:: ../../flatmap_comparison.gif 54 | -------------------------------------------------------------------------------- /examples/quickflat/plot_make_png.py: -------------------------------------------------------------------------------- 1 | """ 2 | =============================== 3 | Save a 2D static flatmap as PNG 4 | =============================== 5 | 6 | Plot a 2D static flatmap and save it as a PNG file. 7 | 8 | **Some words on the `recache` parameter before we begin:** 9 | 10 | Setting the `recache=True` parameter recaches the flatmap cache located in 11 | the subject's `cache` folder in the pycortex filestore. By default intermediate steps for a flatmap are 12 | cached after the first generation to speed up the process for the future. If 13 | any of the intermediate steps changes, the flatmap generation may fail. 14 | `recache=True` will regenerate these intermediate steps from scratch. 15 | This can be helpful when `quickflat.make_figure` fails even though you 16 | see no reason why it should. Try it, it's magic! 17 | 18 | The default background is set to be transparent. If you want to change 19 | that, use the parameter `bgcolor`. 20 | 21 | """ 22 | import cortex 23 | import matplotlib.pyplot as plt 24 | import numpy as np 25 | np.random.seed(1234) 26 | 27 | # Create a random pycortex Volume 28 | volume = cortex.Volume.random(subject='S1', xfmname='fullhead') 29 | 30 | # Plot a flatmap with the data projected onto the surface 31 | _ = cortex.quickflat.make_figure(volume) 32 | plt.show() 33 | 34 | # Save this flatmap 35 | filename = "./my_flatmap.png" 36 | _ = cortex.quickflat.make_png(filename, volume, recache=False) 37 | -------------------------------------------------------------------------------- /examples/quickflat/plot_make_svg.py: -------------------------------------------------------------------------------- 1 | """ 2 | =============================== 3 | Save a 2D static flatmap as SVG 4 | =============================== 5 | 6 | Plot a 2D static flatmap and save it as an SVG file. 7 | 8 | **Some words on the `recache` parameter before we begin:** 9 | 10 | Setting the `recache=True` parameter recaches the flatmap cache located in 11 | the subject's `cache` folder in the pycortex filestore. By default intermediate steps for a flatmap are 12 | cached after the first generation to speed up the process for the future. If 13 | any of the intermediate steps changes, the flatmap generation may fail. 14 | `recache=True` will regenerate these intermediate steps from scratch. 15 | This can be helpful when `quickflat.make_figure` fails even though you 16 | see no reason why it should. Try it, it's magic! 17 | 18 | The default background is set to be transparent. If you want to change 19 | that, use the parameter `bgcolor`.
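A minimal sketch of the `bgcolor` option mentioned above, assuming it accepts a standard matplotlib color specification:

.. code-block:: python

    import cortex

    volume = cortex.Volume.random(subject='S1', xfmname='fullhead')
    # Save the flatmap with an opaque white background instead of the
    # default transparent one
    _ = cortex.quickflat.make_png("./my_flatmap_white.png", volume,
                                  bgcolor="white", recache=False)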
20 | """ 21 | import cortex 22 | import matplotlib.pyplot as plt 23 | import numpy as np 24 | np.random.seed(1234) 25 | 26 | # Create a random pycortex Volume 27 | volume = cortex.Volume.random(subject='S1', xfmname='fullhead') 28 | 29 | # Plot a flatmap with the data projected onto the surface 30 | _ = cortex.quickflat.make_figure(volume) 31 | plt.show() 32 | 33 | # Save this flatmap 34 | filename = "./my_flatmap.svg" 35 | _ = cortex.quickflat.make_png(filename, volume, recache=False) 36 | -------------------------------------------------------------------------------- /examples/quickflat/plot_rois.py: -------------------------------------------------------------------------------- 1 | """ 2 | ======================== 3 | Plot ROIs on the flatmap 4 | ======================== 5 | 6 | ROIs are defined as sub-layers of the `roi` layer in 7 | //overlays.svg 8 | 9 | By default, ROIs and ROI labels are displayed when a flatmap is plotted using 10 | `quickflat.make_figure`. 11 | 12 | `with_labels=False` turns off the ROI labels. 13 | `with_rois=False` turns off the ROI display. 14 | 15 | """ 16 | import cortex 17 | import numpy as np 18 | import matplotlib.pyplot as plt 19 | np.random.seed(1234) 20 | 21 | # Create a random pycortex Volume 22 | volume = cortex.Volume.random(subject='S1', xfmname='fullhead') 23 | 24 | # Plot a flatmap with the data projected onto the surface 25 | # By default the ROIs and their labels will be displayed 26 | _ = cortex.quickflat.make_figure(volume) 27 | plt.show() 28 | 29 | # Turn off the ROI labels 30 | _ = cortex.quickflat.make_figure(volume, with_labels=False) 31 | plt.show() 32 | 33 | # Turn off the ROIs 34 | _ = cortex.quickflat.make_figure(volume, with_rois=False) 35 | plt.show() 36 | -------------------------------------------------------------------------------- /examples/quickflat/plot_sulci.py: -------------------------------------------------------------------------------- 1 | """ 2 | ========================= 3 | Plot sulci on the flatmap 4 | ========================= 5 | 6 | The sulci are defined in a sub-layer of the sulci layer in 7 | //overlays.svg. 8 | 9 | The parameter `with_sulci` in `quickflat.make_figure` controls 10 | displaying the sulci on the surface. 11 | 12 | """ 13 | import cortex 14 | import numpy as np 15 | np.random.seed(1234) 16 | 17 | # Create a random pycortex Volume 18 | volume = cortex.Volume.random(subject='S1', xfmname='fullhead') 19 | 20 | # Plot a flatmap with the data projected onto the surface 21 | # Highlight the curvature and display the sulci 22 | _ = cortex.quickflat.make_figure(volume, 23 | with_curvature=True, 24 | with_sulci=True) 25 | -------------------------------------------------------------------------------- /examples/quickflat/plot_thickness_nanmean.py: -------------------------------------------------------------------------------- 1 | """ 2 | =================================== 3 | Ignore NaN (not-a-number) values in thickness mapping 4 | =================================== 5 | 6 | By default, pycortex quickshow averages across the thickness of the cortex 7 | for each pixel in the resulting flatmap. If any of these layers contain a value 8 | of NaN (not-a-number), then the result of the average will also be Nan. This 9 | behavior might be undesirable. To avoid it, pass the argument `nanmean=True` to 10 | `cortex.quickshow` (or `cortex.quickflat.make_figure`). This will only take the 11 | mean of the non-NaN values when averaging across the thickness of cortex. 
A 12 | pixel will only have the value NaN if every voxel between pia and white matter 13 | has the value NaN. 14 | """ 15 | 16 | import cortex 17 | import numpy as np 18 | from matplotlib import pyplot as plt 19 | 20 | # create dataset with volume of all 1's 21 | vol = cortex.Volume.empty('S1', 'fullhead', vmin=0, vmax=2) + 1 22 | 23 | # set 20% of the values in the dataset to NaN 24 | vol.data[np.random.rand(*vol.data.shape) > 0.8] = np.nan 25 | 26 | 27 | # plot the volume with nanmean=False 28 | # here a nan in ANY layer of the thickness mapping will result in a nan 29 | # in the final image 30 | # so this image should have many, many holes that show curvature 31 | # and all the non-hole points should have value of 1 32 | _ = cortex.quickshow(vol, nanmean=False, with_curvature=True) 33 | 34 | plt.show() 35 | 36 | # plot the volume with nanmean=True 37 | # here there should only be a nan in the final image if EVERY layer of the 38 | # thickness mapping has a nan for the given pixel 39 | # so this image should have many fewer holes that show curvature 40 | # and, again, all the non-hole points should have value of 1 41 | _ = cortex.quickshow(vol, nanmean=True, with_curvature=True) 42 | 43 | plt.show() -------------------------------------------------------------------------------- /examples/quickflat/plot_zoom_to_roi.py: -------------------------------------------------------------------------------- 1 | """ 2 | ================================= 3 | Plotting a zoomed view of one ROI 4 | ================================= 5 | 6 | Sometimes it is useful to create a flatmap that is zoomed in one just a single 7 | ROI. The location of the ROI can be inferred automatically, making it easy to 8 | show just the region around that ROI. 9 | 10 | """ 11 | # sphinx_gallery_thumbnail_number = 2 12 | 13 | import cortex 14 | import numpy as np 15 | np.random.seed(1234) 16 | from matplotlib import pyplot as plt 17 | 18 | def zoom_to_roi(subject, roi, hem, margin=10.0): 19 | roi_verts = cortex.get_roi_verts(subject, roi)[roi] 20 | roi_map = cortex.Vertex.empty(subject) 21 | roi_map.data[roi_verts] = 1 22 | 23 | (lflatpts, lpolys), (rflatpts, rpolys) = cortex.db.get_surf(subject, "flat", 24 | nudge=True) 25 | sel_pts = dict(left=lflatpts, right=rflatpts)[hem] 26 | roi_pts = sel_pts[np.nonzero(getattr(roi_map, hem))[0],:2] 27 | 28 | xmin, ymin = roi_pts.min(0) - margin 29 | xmax, ymax = roi_pts.max(0) + margin 30 | plt.axis([xmin, xmax, ymin, ymax]) 31 | 32 | # Create dataset 33 | data = cortex.Volume.random('S1', 'fullhead') 34 | 35 | # Plot it using quickflat 36 | cortex.quickshow(data) 37 | 38 | # Zoom on just one region 39 | zoom_to_roi('S1', 'AC', 'left') 40 | 41 | # notice that the quality of this figure is now quite poor/grainy 42 | # we can improve this by changing the 'height' argument to quickflat 43 | 44 | cortex.quickshow(data, height=2048) 45 | zoom_to_roi('S1', 'AC', 'left') -------------------------------------------------------------------------------- /examples/quickstart/README.txt: -------------------------------------------------------------------------------- 1 | Quickstart Examples 2 | ------------------------------ 3 | 4 | Examples demonstrating how to get started with pycortex. 
5 | 6 | -------------------------------------------------------------------------------- /examples/quickstart/plot_retinotopy_flatmap.py: -------------------------------------------------------------------------------- 1 | """ 2 | ================================ 3 | Plot Example Retinotopy Flatmaps 4 | ================================ 5 | 6 | This demo shows how to plot example retinotopy data onto a subject's brain 7 | on a flatmap. In order for this demo to work, you need to download this 8 | dataset_, but that can also be done automatically through the `urllib` 9 | command that is included. 10 | 11 | 12 | .. _dataset: https://s3.us-west-1.wasabisys.com/glab-public-datasets/S1_retinotopy.hdf 13 | 14 | S1 is the example subject that comes with pycortex, but if you want to plot 15 | data onto a different subject, you will need to have them in your filestore, 16 | and you will also need a flatmap for them. 17 | """ 18 | import cortex 19 | import matplotlib.pyplot as plt 20 | from urllib.request import urlretrieve 21 | 22 | 23 | # Download the dataset and load it 24 | _ = urlretrieve( 25 | "https://s3.us-west-1.wasabisys.com/glab-public-datasets/S1_retinotopy.hdf", 26 | "S1_retinotopy.hdf" 27 | ) 28 | ret_data = cortex.load("S1_retinotopy.hdf") 29 | 30 | # The retinotopy data has to be divided into left and right hemispheres 31 | left_data = ret_data.angle_left 32 | cortex.quickshow(left_data, with_curvature=True, 33 | curvature_contrast=0.5, 34 | curvature_brightness=0.5, 35 | curvature_threshold=True) 36 | plt.show() 37 | 38 | right_data = ret_data.angle_right 39 | cortex.quickshow(right_data, with_curvature=True, 40 | curvature_contrast=0.5, 41 | curvature_brightness=0.5, 42 | curvature_threshold=True) 43 | plt.show() 44 | -------------------------------------------------------------------------------- /examples/quickstart/retinotopy_webgl.py: -------------------------------------------------------------------------------- 1 | """ 2 | ===================================== 3 | Plot Example Retinotopy in Web Viewer 4 | ===================================== 5 | 6 | This demo shows how to plot example retinotopy data onto a subject's brain 7 | in a web viewer. In order for this demo to work, you need to download this 8 | dataset_, but that can also be done automatically through the `urllib` 9 | command that is included. 10 | 11 | .. _dataset: https://s3.us-west-1.wasabisys.com/glab-public-datasets/S1_retinotopy.hdf 12 | 13 | S1 is the example subject that comes with pycortex, but if you want to plot 14 | data onto a different subject, you will need to have them in your filestore. 15 | 16 | This demo will not actually open the web viewer for you, but if you run it 17 | yourself you will get a viewer showing something like the following. 18 | 19 | .. 
image:: ../../webgl/angle_left.png 20 | 21 | """ 22 | 23 | import cortex 24 | from urllib.request import urlretrieve 25 | 26 | 27 | # Download and load in retinotopy data 28 | _ = urlretrieve( 29 | "https://s3.us-west-1.wasabisys.com/glab-public-datasets/S1_retinotopy.hdf", 30 | "S1_retinotopy.hdf" 31 | ) 32 | ret_data = cortex.load("S1_retinotopy.hdf") 33 | 34 | # Open the webviewer 35 | cortex.webshow(ret_data) 36 | -------------------------------------------------------------------------------- /examples/quickstart/show_config.py: -------------------------------------------------------------------------------- 1 | """ 2 | ===================================================== 3 | Finding out where the config and filestore are 4 | ===================================================== 5 | 6 | Easily locating your config file and filestore locations. 7 | This comes in useful when things don't work because the config file is not set correctly. 8 | """ 9 | from __future__ import print_function 10 | import cortex 11 | from cortex.options import config 12 | 13 | ########################################################## 14 | # Finding where your config file is. 15 | print(cortex.options.usercfg) 16 | 17 | ########################################################## 18 | # Finding where the current filestore is. 19 | # Useful for when your subjects don't show up in cortex.db, and all you have is S1. 20 | print(config.get('basic', 'filestore')) 21 | 22 | ########################################################## 23 | # Finding where pycortex is looking for colormaps. 24 | # Useful for when you get color map not found messages. 25 | print(config.get('webgl', 'colormaps')) 26 | 27 | ########################################################## 28 | # To look at your config file, it is recommended that you open it with a text editor. 29 | # However, you *can* still look at options from within pycortex. 30 | 31 | # sections gets the upper-level sections in the config file 32 | sections = config.sections() 33 | print(sections) 34 | 35 | # items gets the option items within a section as a list of key-value pairs. 36 | basic_config = config.items('paths_default') 37 | print(basic_config) -------------------------------------------------------------------------------- /examples/surface_analyses/README.txt: -------------------------------------------------------------------------------- 1 | Surface Analysis Examples 2 | ------------------------------ 3 | 4 | Examples demonstrating operations and analyses done on brain surfaces. 5 | 6 | -------------------------------------------------------------------------------- /examples/surface_analyses/plot_geodesic_distance.py: -------------------------------------------------------------------------------- 1 | """ 2 | =========================== 3 | Plotting Geodesic Distances 4 | =========================== 5 | 6 | This plots the distances (in mm) between a vertex or set of vertices and 7 | all other vertices on a surface. These two operations take the exact same 8 | amount of time to run. 9 | 10 | To look at the distance to a single point, just supply the index of that 11 | vertex. To look at the distance to a set of points, supply a numpy array of 12 | all of the vertices in the area. In the case of a set of points, the 13 | geodesic distance measure will return the minimum distance to the set of 14 | points as a whole. 15 | 16 | The two hemispheres must be run separately. 
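A common follow-up is to threshold the returned distances into a geodesic disc around the seed vertex; a minimal sketch, assuming `surfs` is the [left, right] list of Surface objects built in the code below:

.. code-block:: python

    import numpy as np

    seed = 10000                                  # a vertex on the left hemisphere
    dist_left = surfs[0].geodesic_distance(seed)  # distance (mm) to every vertex

    disc = np.zeros(surfs[0].pts.shape[0])
    disc[dist_left < 20] = 1                      # 1 inside a ~20 mm disc, 0 elsewhere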
17 | """ 18 | import cortex 19 | import cortex.polyutils 20 | import numpy as np 21 | import matplotlib.pyplot as plt 22 | 23 | subject = "S1" 24 | 25 | # First we need to import the surfaces for this subject 26 | surfs = [cortex.polyutils.Surface(*d) 27 | for d in cortex.db.get_surf(subject, "fiducial")] 28 | 29 | # Then we will pick one vertex in each hemisphere to find distances to 30 | vert = 10000 31 | dists = [s.geodesic_distance(vert) for s in surfs] 32 | 33 | # Now we can plot these distances onto a flatmap 34 | all_dists = np.hstack((dists[0], dists[1])) 35 | dist_map = cortex.Vertex(all_dists, subject, cmap="hot") 36 | cortex.quickshow(dist_map) 37 | plt.show() 38 | 39 | # Alternatively, you can find the minimum distance from a set of points to the 40 | # surface 41 | # Here, we use an example of an ROI 42 | all_eba = cortex.utils.get_roi_verts(subject, "EBA")["EBA"] 43 | 44 | # We have to then separate these vertices by hemisphere 45 | numl = surfs[0].pts.shape[0] 46 | eba_verts = [all_eba[all_eba < numl], all_eba[all_eba >= numl] - numl] 47 | 48 | # Now look at geodesic distances for each hemisphere separately 49 | dists = [s.geodesic_distance(verts) for s, verts in zip(surfs, eba_verts)] 50 | all_dists = np.hstack((dists[0], dists[1])) 51 | 52 | # And now plot these distances onto the cortical surface 53 | dist_map = cortex.Vertex(all_dists, subject, cmap="hot") 54 | cortex.quickshow(dist_map) 55 | plt.show() 56 | -------------------------------------------------------------------------------- /examples/surface_analyses/plot_geodesic_path.py: -------------------------------------------------------------------------------- 1 | """ 2 | ======================= 3 | Plotting Geodesic Paths 4 | ======================= 5 | 6 | This will plot a geodesic path between two vertices on the cortical surface. 7 | This path is based on geodesic distances across the surface. The path starts 8 | at the given endpoint and selects the neighbor of that point in the surface 9 | map that is closest to the other endpoint. This process continues iteratilvely 10 | until the last vertex in the path is the endpoint you gave to it. 11 | 12 | All you need to do is supply a surface object and two vertices on that surface 13 | and you can find the geodesic path. This script additionally makes a plot to 14 | show all of the vertices listed in the path. 
15 | """ 16 | import cortex 17 | import cortex.polyutils 18 | import numpy as np 19 | import matplotlib.pyplot as plt 20 | 21 | subject = "S1" 22 | 23 | # First we need to import the surfaces for this subject 24 | surfs = [cortex.polyutils.Surface(*d) 25 | for d in cortex.db.get_surf(subject, "fiducial")] 26 | numl = surfs[0].pts.shape[0] 27 | numr = surfs[1].pts.shape[0] 28 | num_vertices = numl + numr 29 | 30 | # Now we need to pick the start and end points of the line we will draw 31 | pt_a = 100 32 | pt_b = 50000 33 | 34 | # Then we find the geodesic path between these points 35 | path = surfs[0].geodesic_path(pt_a, pt_b) 36 | 37 | # In order to plot this on the cortical surface, we need an array that is the 38 | # same size as the number of vertices 39 | path_data = np.zeros(num_vertices) * np.nan 40 | for v in path: 41 | path_data[v] = 1 42 | 43 | # And now plot these distances onto the cortical surface 44 | path_verts = cortex.Vertex(path_data, subject, cmap="Reds", vmin=0, vmax=1) 45 | cortex.quickshow(path_verts, with_colorbar=False, with_curvature=True) 46 | plt.show() 47 | -------------------------------------------------------------------------------- /examples/surface_analyses/plot_interpolate_data.py: -------------------------------------------------------------------------------- 1 | """ 2 | =================== 3 | Interpolate Data 4 | =================== 5 | 6 | In this example we show how to interpolate data from a sparse collection of points 7 | to all the points in the cortical surface. 8 | 9 | The method used here is biharmonic interpolation, which finds the solution with 10 | the minimum squared Laplacian (fourth derivative) that still passes through all 11 | the selected points. This is similar to thin plate splines. 12 | 13 | """ 14 | 15 | import cortex 16 | from cortex.polyutils import Surface 17 | import numpy as np 18 | np.random.seed(1234) 19 | from matplotlib import pyplot as plt 20 | 21 | subject = "S1" 22 | 23 | # First we need to import the surfaces for this subject 24 | lsurf, rsurf = [Surface(*d) for d in cortex.db.get_surf(subject, "fiducial")] 25 | 26 | # Let's choose a few points and generate data for them 27 | selected_pts = np.arange(len(lsurf.pts), step=5000) 28 | num_selected_pts = len(selected_pts) 29 | sparse_data = np.random.randn(num_selected_pts) 30 | 31 | # Then interpolate 32 | interp_data = lsurf.interp(selected_pts, sparse_data) 33 | 34 | # Plot the result 35 | # interp_data is only for the left hemisphere, but the Vertex constructor 36 | # infers that and fills the right hemisphere with zeros 37 | interp_vertex = cortex.Vertex(interp_data[:,0], subject, 38 | vmin=-2, vmax=2, cmap='RdBu_r') 39 | cortex.quickshow(interp_vertex, with_labels=False, with_rois=False) 40 | 41 | 42 | # plot the locations of the points we selected originally 43 | 44 | # nudge=True puts both left and right hemispheres in the same space, moving them 45 | # so that they don't overlap. 
These are the coordinates used in quickflat 46 | (lflatpts, lpolys), (rflatpts, rpolys) = cortex.db.get_surf(subject, "flat", 47 | nudge=True) 48 | 49 | ax = plt.gca() 50 | # zorder is set to 10 to make sure points go on top of other quickflat layers 51 | ax.scatter(lflatpts[selected_pts,0], lflatpts[selected_pts,1], s=50, 52 | c=sparse_data, vmin=-2, vmax=2, cmap=plt.cm.RdBu_r, zorder=10) 53 | 54 | 55 | # the interpolate function can also handle multiple dimensions at the same time 56 | # (this takes a while to run for no plotting, and thus is commented out) 57 | #sparse_data_2d = np.random.randn(10, num_selected_pts) 58 | #interp_data_2d = lsurf.interp(selected_pts, sparse_data_2d) 59 | 60 | # > interp_data_2d.shape 61 | # (152893, 10) 62 | plt.show() -------------------------------------------------------------------------------- /examples/surface_analyses/plot_tissots_indicatrix.py: -------------------------------------------------------------------------------- 1 | """ 2 | =================== 3 | Tissot's Indicatrix 4 | =================== 5 | 6 | Creating a flatmap from a folded cortical surface always introduces some 7 | distortion. This is similar to what happens when a map of the globe is flattened 8 | into a 2-D map like a Mercator projection. For the cortical surface the amount 9 | and type of distortion will depend on the curvature of the surface (i.e. whether 10 | it is on a gyrus or a sulcus) and on the distance to the nearest cut. 11 | 12 | In general, we recommend examining data both in flattened and original 3-D space 13 | using the interactive webGL viewer, but it is also informative to visualize the 14 | distortion directly. 15 | 16 | One method to show distortion is to visualize how geodesic discs, which contain 17 | all of the points within some geodesic distance of a central point, appear on the 18 | flattened cortical surface. 19 | 20 | This technique is traditionally used to characterize and visualize distortions 21 | introduced by flattening the globe onto a map: 22 | 23 | .. image::https://upload.wikimedia.org/wikipedia/commons/8/87/Tissot_mercator.png 24 | 25 | """ 26 | 27 | import cortex 28 | import matplotlib.pyplot as plt 29 | 30 | tissot = cortex.db.get_surfinfo("S1", "tissots_indicatrix", radius=10, spacing=30) 31 | tissot.cmap = 'plasma' 32 | 33 | cortex.quickshow(tissot, with_labels=False, with_rois=False, with_colorbar=False) 34 | 35 | plt.show() -------------------------------------------------------------------------------- /examples/utils/README.txt: -------------------------------------------------------------------------------- 1 | Utility Examples 2 | ------------------------------ 3 | 4 | Examples demonstrating how to do all sorts of beautiful things using pycortex. 5 | 6 | -------------------------------------------------------------------------------- /examples/utils/mni_to_subject.py: -------------------------------------------------------------------------------- 1 | """ 2 | =================================== 3 | Transform from MNI to Subject Space 4 | =================================== 5 | 6 | Pycortex has built-in functionality for linearly transforming data to and from 7 | standard atlas spaces (e.g. MNI-152). This functionality is built on top of FSL. 8 | 9 | This example shows how to create a transform from some subject functional space 10 | to MNI space (the same as in subject_to_mni.py), and how to use that to put data 11 | into subject space from MNI space. 
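In practice the MNI-space input is usually a NIfTI image (an atlas mask or a group map) rather than random data; a hedged sketch with a hypothetical file name, following the same steps as the code below:

.. code-block:: python

    import cortex
    import nibabel as nib
    from cortex import mni

    # Hypothetical group-level map already in MNI-152 space
    mni_arr = nib.load("group_zmap_mni.nii.gz").get_fdata().T
    mni_vol = cortex.Volume(mni_arr, "MNI", "identity")

    s1_to_mni = cortex.db.get_mnixfm("S1", "fullhead")
    subject_img = mni.transform_mni_to_subject("S1", "fullhead",
                                               mni_vol.data, s1_to_mni)
    # subject_img is a nibabel image in the 'fullhead' functional space of S1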
12 | 13 | """ 14 | import cortex 15 | 16 | # First let's do this "manually", using cortex.mni 17 | from cortex import mni 18 | 19 | import numpy as np 20 | np.random.seed(1234) 21 | 22 | 23 | # This transform is gonna be from one specific functional space for a subject 24 | # which is defined by the transform (xfm) 25 | s1_to_mni = mni.compute_mni_transform(subject='S1', xfm='fullhead') 26 | # s1_to_mni is a 4x4 array describing the transformation in homogeneous corods 27 | 28 | # Transform data from MNI to subject space 29 | # first we will create a dataset to transform 30 | # this uses the implicitly created "identity" transform, which is used for data 31 | # in the native anatomical space (i.e. same dims as the base anatomical image, 32 | # and in the same space as the surface) 33 | data = cortex.Volume.random('MNI', 'identity') 34 | 35 | # then transform it into the space defined by the 'fullhead' transform for 'S1' 36 | subject_data = mni.transform_mni_to_subject('S1', 'fullhead', 37 | data.data, s1_to_mni) 38 | # subject_data is a nibabel Nifti1Image 39 | 40 | subject_data_vol = mni_data.get_fdata() # the actual array, shape=(100,100,31) 41 | 42 | # That was the manual method. pycortex can also cache these transforms for you 43 | # if you get them using the pycortex database 44 | s1_to_mni_db = cortex.db.get_mnixfm('S1', 'fullhead') 45 | # this is the same as s1_to_mni, but will return instantly on subsequent calls -------------------------------------------------------------------------------- /examples/utils/plot_get_roi_vertices.py: -------------------------------------------------------------------------------- 1 | """ 2 | ======================= 3 | Get Vertices for an ROI 4 | ======================= 5 | 6 | In this example we show how to get the vertices that are inside an ROI that was 7 | defined in the SVG ROI file (see :doc:`/rois.rst`). 8 | 9 | """ 10 | import cortex 11 | 12 | # get vertices for fusiform face area FFA in subject S1 13 | roi_verts = cortex.get_roi_verts('S1', 'FFA') 14 | 15 | # roi_verts is a dictionary (in this case with only one entry) 16 | ffa_verts = roi_verts['FFA'] 17 | 18 | # this includes indices from both hemispheres 19 | # let's create an empty Vertex object and fill FFA 20 | 21 | ffa_map = cortex.Vertex.empty('S1', cmap='plasma') 22 | ffa_map.data[ffa_verts] = 1.0 23 | 24 | cortex.quickshow(ffa_map) 25 | -------------------------------------------------------------------------------- /examples/utils/plot_mosaic.py: -------------------------------------------------------------------------------- 1 | 2 | # -*- coding: utf-8 -*- 3 | """ 4 | =============================== 5 | Mosaic plot 6 | =============================== 7 | 8 | The function `mosaic` plots all of the slices in a volume in a matplotlib figure. Good for quick visualization or testing of volumes. 
9 | 10 | """ 11 | # sphinx_gallery_thumbnail_number = 3 12 | 13 | import cortex 14 | import matplotlib.pyplot as plt 15 | 16 | # load reference functional image for test purposes 17 | volume_arr = cortex.db.get_xfm('S1', 'fullhead').reference.get_fdata().T 18 | # volume_arr is a (31,100,100) ndarray 19 | 20 | 21 | # with no args mosaic slices this volume in the first dimension 22 | plt.figure() 23 | cortex.mosaic(volume_arr) 24 | 25 | # slices along a different dimension can be plotted using the dim param 26 | # here coronal 27 | plt.figure() 28 | cortex.mosaic(volume_arr, dim=1) 29 | 30 | # kwargs are passed through to imshow 31 | plt.figure() 32 | cortex.mosaic(volume_arr, cmap=plt.cm.gray, vmin=0, vmax=1500) 33 | 34 | # mosaic always returns the mosaic image along with info about its shape 35 | # here show=False so we don't generate another plot 36 | mosaic_arr, (nwide, ntall) = cortex.mosaic(volume_arr, show=False) 37 | # mosaic_arr is 607 x 607, with nwide = 6, ntall = 6 -------------------------------------------------------------------------------- /examples/utils/plot_roi_voxel_index_volume.py: -------------------------------------------------------------------------------- 1 | """ 2 | ==================== 3 | Get ROI Index Volume 4 | ==================== 5 | 6 | Create an index volume (similar to the aseg masks in freesurfer) with a different 7 | integer index for each ROI. ROIs in the left hemisphere will have negative values, 8 | ROIs in the right hemisphere will have positive values. 9 | 10 | """ 11 | 12 | import cortex 13 | import numpy as np 14 | import matplotlib.pyplot as plt 15 | 16 | subject = "S1" 17 | xfm = "fullhead" 18 | 19 | # Get the map of which voxels are inside of our ROI 20 | index_volume, index_keys = cortex.utils.get_roi_masks(subject, xfm, 21 | roi_list=None, # Default (None) gives all available ROIs in overlays.svg 22 | gm_sampler='cortical-conservative', # Select only voxels mostly within cortex 23 | split_lr=True, # Separate left/right ROIs (this occurs anyway with index volumes) 24 | threshold=0.9, # convert probability values to boolean mask for each ROI 25 | return_dict=False # return index volume, not dict of masks 26 | ) 27 | 28 | lim = np.max(np.abs(index_volume)) 29 | # Plot the mask for one ROI onto a flatmap 30 | roi_data = cortex.Volume(index_volume, subject, xfm, 31 | vmin=-lim, # This is a probability mask, so only 32 | vmax=lim, # so scale btw zero and one 33 | cmap="RdBu_r", # Shades of blue for L hem, red for R hem ROIs 34 | ) 35 | 36 | cortex.quickflat.make_figure(roi_data, 37 | thick=1, # select a single depth (btw white matter & pia) 38 | sampler='nearest', # no interpolation 39 | with_curvature=True, 40 | with_colorbar=True, 41 | ) 42 | print("Index keys for which ROI is which in `index_volume`:") 43 | print(index_keys) 44 | plt.show() 45 | -------------------------------------------------------------------------------- /examples/utils/plot_roi_voxel_mask.py: -------------------------------------------------------------------------------- 1 | """ 2 | ================== 3 | Get ROI Voxel Mask 4 | ================== 5 | 6 | Get proportion of each voxel that exists within a named ROI (this 7 | constitutes a probability map for the ROI, with values ranging from 8 | 0-1). Plot this probabilistic roi mask onto a flatmap. 9 | 10 | In order for this to work, the specified ROI must exist in the 11 | overlays.svg file in the pycortex filestore for this subject. 
12 | """ 13 | 14 | import cortex 15 | import matplotlib.pyplot as plt 16 | 17 | subject = "S1" 18 | xfm = "fullhead" 19 | roi = "EBA" 20 | 21 | # Get the map of which voxels are inside of our ROI 22 | roi_masks = cortex.utils.get_roi_masks(subject, xfm, 23 | roi_list=[roi], 24 | gm_sampler='cortical-conservative', # Select only voxels mostly within cortex 25 | split_lr=False, # No separate left/right ROIs 26 | threshold=None, # Leave roi mask values as probabilities / fractions 27 | return_dict=True 28 | ) 29 | 30 | # Plot the mask for one ROI onto a flatmap 31 | roi_data = cortex.Volume(roi_masks[roi], subject, xfm, 32 | vmin=0, # This is a probability mask, so only 33 | vmax=1, # so scale btw zero and one 34 | cmap="inferno", # For pretty 35 | ) 36 | 37 | cortex.quickflat.make_figure(roi_data, 38 | thick=1, # select a single depth (btw white matter & pia) 39 | sampler='nearest', # no interpolation 40 | with_curvature=True, 41 | with_colorbar=True, 42 | ) 43 | 44 | plt.show() -------------------------------------------------------------------------------- /examples/utils/plot_voxel_distance_from_surface.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | =============================== 4 | Voxel distance from surface 5 | =============================== 6 | 7 | The utility function `get_vox_dist` returns the distance from each voxel in some space to the nearest vertex on the given surface. This function is used for generating ROI masks, cortical masks, etc. 8 | 9 | """ 10 | 11 | import cortex 12 | import matplotlib.pyplot as plt 13 | 14 | # get distance to nearest point on the mid-cortical (fiducial) surface from each 15 | # voxel in the functional space for subject 'S1' and the transform 'fullhead' 16 | dist, argdist = cortex.get_vox_dist(subject='S1', 17 | xfmname='fullhead', 18 | surface='fiducial') 19 | 20 | # dist contains the distance from each voxel to the nearest vertex 21 | # dist.shape = (31, 100, 100) <-- the same size as the functional volume 22 | 23 | # argdist contains the index of the nearest vertex to each voxel 24 | # argdist.shape = (31, 100, 100) <-- the same size as the functional volume 25 | 26 | # let's visualize the distance field using the mosaic function 27 | cortex.mosaic(dist, cmap=plt.cm.plasma) 28 | plt.colorbar(label='mm from surface') -------------------------------------------------------------------------------- /examples/utils/subject_to_mni.py: -------------------------------------------------------------------------------- 1 | """ 2 | =================================== 3 | Transform from Subject to MNI Space 4 | =================================== 5 | 6 | Pycortex has built-in functionality for linearly transforming data to and from 7 | standard atlas spaces (e.g. MNI-152). This functionality is built on top of FSL. 8 | 9 | This example shows how to create a transform from some subject functional space 10 | to MNI space, and how to apply that transform to a dataset. 
11 | 12 | """ 13 | 14 | import cortex 15 | 16 | # First let's do this "manually", using cortex.mni 17 | from cortex import mni 18 | 19 | import numpy as np 20 | np.random.seed(1234) 21 | 22 | # This transform is gonna be from one specific functional space for a subject 23 | # which is defined by the transform (xfm) 24 | s1_to_mni = mni.compute_mni_transform(subject='S1', xfm='fullhead') 25 | # s1_to_mni is a 4x4 array describing the transformation in homogeneous corods 26 | 27 | # Transform data from subject to MNI space 28 | # first we will create a dataset to transform 29 | data = cortex.Volume.random('S1', 'fullhead') 30 | 31 | # then transform it! 32 | mni_data = mni.transform_to_mni(data, s1_to_mni) 33 | # mni_data is a nibabel Nifti1Image 34 | 35 | mni_data_vol = mni_data.get_fdata() # the actual array, shape=(182,218,182) 36 | 37 | # That was the manual method. pycortex can also cache these transforms for you 38 | # if you get them using the pycortex database 39 | s1_to_mni_db = cortex.db.get_mnixfm('S1', 'fullhead') 40 | # this is the same as s1_to_mni, but will return instantly on subsequent calls -------------------------------------------------------------------------------- /examples/webgl/README.txt: -------------------------------------------------------------------------------- 1 | WebGL Examples 2 | ------------------------------ 3 | 4 | Examples demonstrating how to make beautiful interactive 3-D viewers 5 | 6 | -------------------------------------------------------------------------------- /examples/webgl/dynamic_with_custom_template.py: -------------------------------------------------------------------------------- 1 | """ 2 | ================================= 3 | Dynamic viewer with html template 4 | ================================= 5 | A webgl viewer displays a 3D view of brain data in a web browser 6 | 7 | Pycortex has the functionality of provide a custom html template to specify 8 | the attributes of the viewer. The file 'my_template.html' included in this 9 | directory has css code which changes the color scheme of the controls 10 | window. The background is now white, with black text, and the sliders 11 | are red instead of blue. 
12 | 13 | The templates files included with pycortex () have been modified by adding 14 | the following css code: 15 | 16 | 44 | 45 | 46 | """ 47 | 48 | import cortex 49 | 50 | import numpy as np 51 | np.random.seed(1234) 52 | 53 | # gather data Volume 54 | volume = cortex.Volume.random(subject='S1', xfmname='fullhead') 55 | 56 | # create viewer 57 | cortex.webgl.show(data=volume, template='my_template.html', recache=True) 58 | 59 | # a port number will then be output, for example "Started server on port 39140" 60 | # the viewer can then be accessed in a web browser, in this case at "localhost:39140" 61 | # the viewer will display the modifications specified in the template -------------------------------------------------------------------------------- /examples/webgl/multiple_datasets.py: -------------------------------------------------------------------------------- 1 | """ 2 | =============================================== 3 | Create a 3D WebGL Viewer with Multiple Datasets 4 | =============================================== 5 | 6 | A webgl viewer displays a 3D view of brain data in a web browser 7 | 8 | Multiple datasets can be loaded into the same viewer 9 | 10 | The `priority` kwarg passed to Volume objects determines the display ordering 11 | 12 | Lower values of `priority` are displayed first 13 | 14 | In the browser you can switch between datasets with the + and - keys 15 | 16 | """ 17 | 18 | import cortex 19 | 20 | import numpy as np 21 | np.random.seed(1234) 22 | 23 | # gather multiple datasets 24 | volume1 = cortex.Volume.random(subject='S1', xfmname='fullhead', priority=1) 25 | volume2 = cortex.Volume.random(subject='S1', xfmname='fullhead', priority=2) 26 | volume3 = cortex.Volume.random(subject='S1', xfmname='fullhead', priority=3) 27 | volumes = { 28 | 'First Dataset': volume1, 29 | 'Second Dataset': volume2, 30 | 'Third Dataset': volume3, 31 | } 32 | 33 | # create viewer 34 | cortex.webgl.show(data=volumes) 35 | 36 | # a port number will then be output, for example "Started server on port 39140" 37 | # the viewer can then be accessed in a web browser, in this case at "localhost:39140" 38 | -------------------------------------------------------------------------------- /examples/webgl/single_dataset.py: -------------------------------------------------------------------------------- 1 | """ 2 | ======================== 3 | Create a 3D WebGL Viewer 4 | ======================== 5 | 6 | A webgl viewer displays a 3D view of brain data in a web browser 7 | 8 | """ 9 | 10 | import cortex 11 | 12 | import numpy as np 13 | np.random.seed(1234) 14 | 15 | # gather data Volume 16 | volume = cortex.Volume.random(subject='S1', xfmname='fullhead') 17 | 18 | # create viewer 19 | cortex.webgl.show(data=volume) 20 | 21 | # a port number will then be output, for example "Started server on port 39140" 22 | # the viewer can then be accessed in a web browser, in this case at "localhost:39140" 23 | -------------------------------------------------------------------------------- /examples/webgl/static.py: -------------------------------------------------------------------------------- 1 | """ 2 | ====================== 3 | Create a static viewer 4 | ====================== 5 | 6 | A static viewer is a brain viewer that exists permanently on a filesystem 7 | 8 | The viewer is stored in a directory that stores html, javascript, data, etc 9 | 10 | The viewer directory must be hosted by a server such as nginx 11 | """ 12 | 13 | import cortex 14 | 15 | import numpy as np 16 | np.random.seed(1234) 
17 | 18 | # gather data Volume 19 | volume = cortex.Volume.random(subject='S1', xfmname='fullhead') 20 | 21 | # select path for static viewer on disk 22 | viewer_path = '/path/to/store/viewer' 23 | 24 | # create viewer 25 | cortex.webgl.make_static(outpath=viewer_path, data=volume, recache=True) 26 | 27 | # a webserver such as nginx can then be used to host the static viewer 28 | -------------------------------------------------------------------------------- /examples/webgl/static_with_custom_template.py: -------------------------------------------------------------------------------- 1 | """ 2 | ================================ 3 | Static viewer with html template 4 | ================================ 5 | A static viewer is a brain viewer that exists permanently on a filesystem 6 | The viewer is stored in a directory that stores html, javascript, data, etc 7 | The viewer directory must be hosted by a server such as nginx. 8 | 9 | Pycortex has the functionality of provide a custom html template to specify 10 | the attributes of the viewer. The file 'my_template.html' included in this 11 | directory has css code which changes the color scheme of the controls 12 | window. The background is now white, with black text, and the sliders 13 | are red instead of blue. 14 | 15 | The templates files included with pycortex () have been modified by adding 16 | the following css code: 17 | 18 | 46 | 47 | 48 | """ 49 | 50 | import cortex 51 | 52 | import numpy as np 53 | np.random.seed(1234) 54 | 55 | # gather data Volume 56 | volume = cortex.Volume.random(subject='S1', xfmname='fullhead') 57 | 58 | # select path for static viewer on disk 59 | viewer_path = '/path/to/store/viewer' 60 | 61 | # create viewer using the 'my_template.html' template: 62 | cortex.webgl.make_static(outpath=viewer_path, data=volume, 63 | template = 'my_template.html') 64 | 65 | # a webserver such as nginx can then be used to host the static viewer 66 | # with the new template -------------------------------------------------------------------------------- /filestore/colormaps/Accent.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Accent.png -------------------------------------------------------------------------------- /filestore/colormaps/Accent_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Accent_r.png -------------------------------------------------------------------------------- /filestore/colormaps/BPROG.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/BPROG.png -------------------------------------------------------------------------------- /filestore/colormaps/BROYG.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/BROYG.png -------------------------------------------------------------------------------- /filestore/colormaps/BROYG_2D.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/BROYG_2D.png -------------------------------------------------------------------------------- /filestore/colormaps/Blues.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Blues.png -------------------------------------------------------------------------------- /filestore/colormaps/Blues_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Blues_r.png -------------------------------------------------------------------------------- /filestore/colormaps/BrBG.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/BrBG.png -------------------------------------------------------------------------------- /filestore/colormaps/BrBG_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/BrBG_r.png -------------------------------------------------------------------------------- /filestore/colormaps/BuBkRd.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/BuBkRd.png -------------------------------------------------------------------------------- /filestore/colormaps/BuBkRd_alpha_2D.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/BuBkRd_alpha_2D.png -------------------------------------------------------------------------------- /filestore/colormaps/BuGn.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/BuGn.png -------------------------------------------------------------------------------- /filestore/colormaps/BuGn_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/BuGn_r.png -------------------------------------------------------------------------------- /filestore/colormaps/BuOr_2D.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/BuOr_2D.png -------------------------------------------------------------------------------- /filestore/colormaps/BuPu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/BuPu.png -------------------------------------------------------------------------------- /filestore/colormaps/BuPu_r.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/BuPu_r.png -------------------------------------------------------------------------------- /filestore/colormaps/BuWtRd.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/BuWtRd.png -------------------------------------------------------------------------------- /filestore/colormaps/BuWtRd_alpha.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/BuWtRd_alpha.png -------------------------------------------------------------------------------- /filestore/colormaps/BuWtRd_black_2D.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/BuWtRd_black_2D.png -------------------------------------------------------------------------------- /filestore/colormaps/CyanBlueGrayRedPink.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/CyanBlueGrayRedPink.png -------------------------------------------------------------------------------- /filestore/colormaps/Dark2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Dark2.png -------------------------------------------------------------------------------- /filestore/colormaps/Dark2_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Dark2_r.png -------------------------------------------------------------------------------- /filestore/colormaps/GnBu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/GnBu.png -------------------------------------------------------------------------------- /filestore/colormaps/GnBu_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/GnBu_r.png -------------------------------------------------------------------------------- /filestore/colormaps/GreenWhiteBlue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/GreenWhiteBlue.png -------------------------------------------------------------------------------- /filestore/colormaps/GreenWhiteBlue_2D.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/GreenWhiteBlue_2D.png -------------------------------------------------------------------------------- /filestore/colormaps/GreenWhiteRed.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/GreenWhiteRed.png -------------------------------------------------------------------------------- /filestore/colormaps/GreenWhiteRed_2D.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/GreenWhiteRed_2D.png -------------------------------------------------------------------------------- /filestore/colormaps/Greens.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Greens.png -------------------------------------------------------------------------------- /filestore/colormaps/Greens_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Greens_r.png -------------------------------------------------------------------------------- /filestore/colormaps/Greys.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Greys.png -------------------------------------------------------------------------------- /filestore/colormaps/Greys_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Greys_r.png -------------------------------------------------------------------------------- /filestore/colormaps/J4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/J4.png -------------------------------------------------------------------------------- /filestore/colormaps/J4R.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/J4R.png -------------------------------------------------------------------------------- /filestore/colormaps/J4s.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/J4s.png -------------------------------------------------------------------------------- /filestore/colormaps/J5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/J5.png -------------------------------------------------------------------------------- /filestore/colormaps/J5R.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/J5R.png -------------------------------------------------------------------------------- /filestore/colormaps/J6.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/J6.png -------------------------------------------------------------------------------- /filestore/colormaps/J6R.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/J6R.png -------------------------------------------------------------------------------- /filestore/colormaps/OrRd.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/OrRd.png -------------------------------------------------------------------------------- /filestore/colormaps/OrRd_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/OrRd_r.png -------------------------------------------------------------------------------- /filestore/colormaps/Oranges.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Oranges.png -------------------------------------------------------------------------------- /filestore/colormaps/Oranges_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Oranges_r.png -------------------------------------------------------------------------------- /filestore/colormaps/PRGn.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/PRGn.png -------------------------------------------------------------------------------- /filestore/colormaps/PRGn_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/PRGn_r.png -------------------------------------------------------------------------------- /filestore/colormaps/PU_BuOr_covar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/PU_BuOr_covar.png -------------------------------------------------------------------------------- /filestore/colormaps/PU_BuOr_covar_alpha.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/PU_BuOr_covar_alpha.png -------------------------------------------------------------------------------- /filestore/colormaps/PU_PinkBlue_covar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/PU_PinkBlue_covar.png -------------------------------------------------------------------------------- 
/filestore/colormaps/PU_RdBu_covar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/PU_RdBu_covar.png -------------------------------------------------------------------------------- /filestore/colormaps/PU_RdBu_covar_alpha.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/PU_RdBu_covar_alpha.png -------------------------------------------------------------------------------- /filestore/colormaps/PU_RdGn_covar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/PU_RdGn_covar.png -------------------------------------------------------------------------------- /filestore/colormaps/Paired.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Paired.png -------------------------------------------------------------------------------- /filestore/colormaps/Paired_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Paired_r.png -------------------------------------------------------------------------------- /filestore/colormaps/Pastel1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Pastel1.png -------------------------------------------------------------------------------- /filestore/colormaps/Pastel1_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Pastel1_r.png -------------------------------------------------------------------------------- /filestore/colormaps/Pastel2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Pastel2.png -------------------------------------------------------------------------------- /filestore/colormaps/Pastel2_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Pastel2_r.png -------------------------------------------------------------------------------- /filestore/colormaps/PiYG.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/PiYG.png -------------------------------------------------------------------------------- /filestore/colormaps/PiYG_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/PiYG_r.png 
-------------------------------------------------------------------------------- /filestore/colormaps/PuBu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/PuBu.png -------------------------------------------------------------------------------- /filestore/colormaps/PuBuGn.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/PuBuGn.png -------------------------------------------------------------------------------- /filestore/colormaps/PuBuGn_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/PuBuGn_r.png -------------------------------------------------------------------------------- /filestore/colormaps/PuBu_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/PuBu_r.png -------------------------------------------------------------------------------- /filestore/colormaps/PuOr.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/PuOr.png -------------------------------------------------------------------------------- /filestore/colormaps/PuOr_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/PuOr_r.png -------------------------------------------------------------------------------- /filestore/colormaps/PuRd.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/PuRd.png -------------------------------------------------------------------------------- /filestore/colormaps/PuRd_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/PuRd_r.png -------------------------------------------------------------------------------- /filestore/colormaps/Purples.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Purples.png -------------------------------------------------------------------------------- /filestore/colormaps/Purples_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Purples_r.png -------------------------------------------------------------------------------- /filestore/colormaps/RGrB_tsi.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/RGrB_tsi.png 
-------------------------------------------------------------------------------- /filestore/colormaps/RdBu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/RdBu.png -------------------------------------------------------------------------------- /filestore/colormaps/RdBu_2D.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/RdBu_2D.png -------------------------------------------------------------------------------- /filestore/colormaps/RdBu_2D_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/RdBu_2D_r.png -------------------------------------------------------------------------------- /filestore/colormaps/RdBu_covar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/RdBu_covar.png -------------------------------------------------------------------------------- /filestore/colormaps/RdBu_covar2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/RdBu_covar2.png -------------------------------------------------------------------------------- /filestore/colormaps/RdBu_covar_alpha.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/RdBu_covar_alpha.png -------------------------------------------------------------------------------- /filestore/colormaps/RdBu_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/RdBu_r.png -------------------------------------------------------------------------------- /filestore/colormaps/RdBu_r_alpha.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/RdBu_r_alpha.png -------------------------------------------------------------------------------- /filestore/colormaps/RdGn_covar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/RdGn_covar.png -------------------------------------------------------------------------------- /filestore/colormaps/RdGy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/RdGy.png -------------------------------------------------------------------------------- /filestore/colormaps/RdGy_r.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/RdGy_r.png -------------------------------------------------------------------------------- /filestore/colormaps/RdPu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/RdPu.png -------------------------------------------------------------------------------- /filestore/colormaps/RdPu_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/RdPu_r.png -------------------------------------------------------------------------------- /filestore/colormaps/RdYlBu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/RdYlBu.png -------------------------------------------------------------------------------- /filestore/colormaps/RdYlBu_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/RdYlBu_r.png -------------------------------------------------------------------------------- /filestore/colormaps/RdYlGn.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/RdYlGn.png -------------------------------------------------------------------------------- /filestore/colormaps/RdYlGn_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/RdYlGn_r.png -------------------------------------------------------------------------------- /filestore/colormaps/Reds.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Reds.png -------------------------------------------------------------------------------- /filestore/colormaps/Reds_cov.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Reds_cov.png -------------------------------------------------------------------------------- /filestore/colormaps/Reds_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Reds_r.png -------------------------------------------------------------------------------- /filestore/colormaps/Retinotopy_RYBCR.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Retinotopy_RYBCR.png -------------------------------------------------------------------------------- /filestore/colormaps/Retinotopy_RYBCR_2D.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Retinotopy_RYBCR_2D.png -------------------------------------------------------------------------------- /filestore/colormaps/Retinotopy_RYBCR_alpha.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Retinotopy_RYBCR_alpha.png -------------------------------------------------------------------------------- /filestore/colormaps/Set1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Set1.png -------------------------------------------------------------------------------- /filestore/colormaps/Set1_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Set1_r.png -------------------------------------------------------------------------------- /filestore/colormaps/Set2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Set2.png -------------------------------------------------------------------------------- /filestore/colormaps/Set2_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Set2_r.png -------------------------------------------------------------------------------- /filestore/colormaps/Set3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Set3.png -------------------------------------------------------------------------------- /filestore/colormaps/Set3_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Set3_r.png -------------------------------------------------------------------------------- /filestore/colormaps/Spectral.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Spectral.png -------------------------------------------------------------------------------- /filestore/colormaps/Spectral_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/Spectral_r.png -------------------------------------------------------------------------------- /filestore/colormaps/YlGn.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/YlGn.png -------------------------------------------------------------------------------- 
/filestore/colormaps/YlGnBu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/YlGnBu.png -------------------------------------------------------------------------------- /filestore/colormaps/YlGnBu_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/YlGnBu_r.png -------------------------------------------------------------------------------- /filestore/colormaps/YlGn_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/YlGn_r.png -------------------------------------------------------------------------------- /filestore/colormaps/YlOrBr.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/YlOrBr.png -------------------------------------------------------------------------------- /filestore/colormaps/YlOrBr_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/YlOrBr_r.png -------------------------------------------------------------------------------- /filestore/colormaps/YlOrRd.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/YlOrRd.png -------------------------------------------------------------------------------- /filestore/colormaps/YlOrRd_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/YlOrRd_r.png -------------------------------------------------------------------------------- /filestore/colormaps/afmhot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/afmhot.png -------------------------------------------------------------------------------- /filestore/colormaps/afmhot_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/afmhot_r.png -------------------------------------------------------------------------------- /filestore/colormaps/autumn.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/autumn.png -------------------------------------------------------------------------------- /filestore/colormaps/autumn_alpha.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/autumn_alpha.png 
-------------------------------------------------------------------------------- /filestore/colormaps/autumn_blkmin.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/autumn_blkmin.png -------------------------------------------------------------------------------- /filestore/colormaps/autumn_blkmin_alpha_2D.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/autumn_blkmin_alpha_2D.png -------------------------------------------------------------------------------- /filestore/colormaps/autumn_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/autumn_r.png -------------------------------------------------------------------------------- /filestore/colormaps/autumnblack.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/autumnblack.png -------------------------------------------------------------------------------- /filestore/colormaps/autumnblack_alpha_2D.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/autumnblack_alpha_2D.png -------------------------------------------------------------------------------- /filestore/colormaps/binary.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/binary.png -------------------------------------------------------------------------------- /filestore/colormaps/binary_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/binary_r.png -------------------------------------------------------------------------------- /filestore/colormaps/bone.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/bone.png -------------------------------------------------------------------------------- /filestore/colormaps/bone_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/bone_r.png -------------------------------------------------------------------------------- /filestore/colormaps/brg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/brg.png -------------------------------------------------------------------------------- /filestore/colormaps/brg_r.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/brg_r.png -------------------------------------------------------------------------------- /filestore/colormaps/bwr.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/bwr.png -------------------------------------------------------------------------------- /filestore/colormaps/bwr_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/bwr_r.png -------------------------------------------------------------------------------- /filestore/colormaps/cool.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/cool.png -------------------------------------------------------------------------------- /filestore/colormaps/cool_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/cool_r.png -------------------------------------------------------------------------------- /filestore/colormaps/coolwarm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/coolwarm.png -------------------------------------------------------------------------------- /filestore/colormaps/coolwarm_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/coolwarm_r.png -------------------------------------------------------------------------------- /filestore/colormaps/copper.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/copper.png -------------------------------------------------------------------------------- /filestore/colormaps/copper_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/copper_r.png -------------------------------------------------------------------------------- /filestore/colormaps/cubehelix.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/cubehelix.png -------------------------------------------------------------------------------- /filestore/colormaps/cubehelix_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/cubehelix_r.png -------------------------------------------------------------------------------- /filestore/colormaps/fire.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/fire.png -------------------------------------------------------------------------------- /filestore/colormaps/fire_alpha.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/fire_alpha.png -------------------------------------------------------------------------------- /filestore/colormaps/flag.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/flag.png -------------------------------------------------------------------------------- /filestore/colormaps/flag_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/flag_r.png -------------------------------------------------------------------------------- /filestore/colormaps/freesurfer_aseg_256.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/freesurfer_aseg_256.png -------------------------------------------------------------------------------- /filestore/colormaps/gist_earth.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gist_earth.png -------------------------------------------------------------------------------- /filestore/colormaps/gist_earth_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gist_earth_r.png -------------------------------------------------------------------------------- /filestore/colormaps/gist_gray.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gist_gray.png -------------------------------------------------------------------------------- /filestore/colormaps/gist_gray_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gist_gray_r.png -------------------------------------------------------------------------------- /filestore/colormaps/gist_heat.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gist_heat.png -------------------------------------------------------------------------------- /filestore/colormaps/gist_heat_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gist_heat_r.png -------------------------------------------------------------------------------- /filestore/colormaps/gist_ncar.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gist_ncar.png -------------------------------------------------------------------------------- /filestore/colormaps/gist_ncar_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gist_ncar_r.png -------------------------------------------------------------------------------- /filestore/colormaps/gist_rainbow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gist_rainbow.png -------------------------------------------------------------------------------- /filestore/colormaps/gist_rainbow_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gist_rainbow_r.png -------------------------------------------------------------------------------- /filestore/colormaps/gist_stern.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gist_stern.png -------------------------------------------------------------------------------- /filestore/colormaps/gist_stern_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gist_stern_r.png -------------------------------------------------------------------------------- /filestore/colormaps/gist_yarg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gist_yarg.png -------------------------------------------------------------------------------- /filestore/colormaps/gist_yarg_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gist_yarg_r.png -------------------------------------------------------------------------------- /filestore/colormaps/gnuplot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gnuplot.png -------------------------------------------------------------------------------- /filestore/colormaps/gnuplot2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gnuplot2.png -------------------------------------------------------------------------------- /filestore/colormaps/gnuplot2_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gnuplot2_r.png 
-------------------------------------------------------------------------------- /filestore/colormaps/gnuplot_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gnuplot_r.png -------------------------------------------------------------------------------- /filestore/colormaps/gray.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gray.png -------------------------------------------------------------------------------- /filestore/colormaps/gray_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/gray_r.png -------------------------------------------------------------------------------- /filestore/colormaps/hot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/hot.png -------------------------------------------------------------------------------- /filestore/colormaps/hot_alpha.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/hot_alpha.png -------------------------------------------------------------------------------- /filestore/colormaps/hot_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/hot_r.png -------------------------------------------------------------------------------- /filestore/colormaps/hsv.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/hsv.png -------------------------------------------------------------------------------- /filestore/colormaps/hsv_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/hsv_r.png -------------------------------------------------------------------------------- /filestore/colormaps/inferno.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/inferno.png -------------------------------------------------------------------------------- /filestore/colormaps/inferno_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/inferno_r.png -------------------------------------------------------------------------------- /filestore/colormaps/jet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/jet.png 
-------------------------------------------------------------------------------- /filestore/colormaps/jet_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/jet_r.png -------------------------------------------------------------------------------- /filestore/colormaps/magma.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/magma.png -------------------------------------------------------------------------------- /filestore/colormaps/magma_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/magma_r.png -------------------------------------------------------------------------------- /filestore/colormaps/nipy_spectral.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/nipy_spectral.png -------------------------------------------------------------------------------- /filestore/colormaps/nipy_spectral_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/nipy_spectral_r.png -------------------------------------------------------------------------------- /filestore/colormaps/ocean.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/ocean.png -------------------------------------------------------------------------------- /filestore/colormaps/ocean_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/ocean_r.png -------------------------------------------------------------------------------- /filestore/colormaps/pink.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/pink.png -------------------------------------------------------------------------------- /filestore/colormaps/pink_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/pink_r.png -------------------------------------------------------------------------------- /filestore/colormaps/plasma.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/plasma.png -------------------------------------------------------------------------------- /filestore/colormaps/plasma_alpha.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/plasma_alpha.png -------------------------------------------------------------------------------- /filestore/colormaps/plasma_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/plasma_r.png -------------------------------------------------------------------------------- /filestore/colormaps/prism.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/prism.png -------------------------------------------------------------------------------- /filestore/colormaps/prism_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/prism_r.png -------------------------------------------------------------------------------- /filestore/colormaps/rainbow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/rainbow.png -------------------------------------------------------------------------------- /filestore/colormaps/rainbow_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/rainbow_r.png -------------------------------------------------------------------------------- /filestore/colormaps/seismic.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/seismic.png -------------------------------------------------------------------------------- /filestore/colormaps/seismic_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/seismic_r.png -------------------------------------------------------------------------------- /filestore/colormaps/spectral.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/spectral.png -------------------------------------------------------------------------------- /filestore/colormaps/spectral_r.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/spectral_r.png -------------------------------------------------------------------------------- /filestore/colormaps/spring.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/spring.png -------------------------------------------------------------------------------- /filestore/colormaps/spring_r.png: 
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/spring_r.png
--------------------------------------------------------------------------------
/filestore/colormaps/summer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/summer.png
--------------------------------------------------------------------------------
/filestore/colormaps/summer_r.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/summer_r.png
--------------------------------------------------------------------------------
/filestore/colormaps/terrain.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/terrain.png
--------------------------------------------------------------------------------
/filestore/colormaps/terrain_r.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/terrain_r.png
--------------------------------------------------------------------------------
/filestore/colormaps/viridis.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/viridis.png
--------------------------------------------------------------------------------
/filestore/colormaps/viridis_r.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/viridis_r.png
--------------------------------------------------------------------------------
/filestore/colormaps/winter.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/winter.png
--------------------------------------------------------------------------------
/filestore/colormaps/winter_r.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/colormaps/winter_r.png
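The colormap files listed above are plain PNG images bundled with the pycortex filestore. As an illustrative aid (not code from the repository), the sketch below shows how one of them could be turned into a matplotlib colormap, assuming each PNG is a one-pixel-tall horizontal RGB(A) strip; the helper name and path are hypothetical.

```python
# Illustrative sketch only -- not part of the repository.
# Assumes each colormap PNG is a 1-pixel-tall horizontal RGB(A) strip.
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap

def colormap_from_png(path):
    """Build a matplotlib colormap from a PNG strip (hypothetical helper)."""
    img = plt.imread(path)      # float values in [0, 1] for 8-bit PNGs
    return ListedColormap(img[0], name=path)

# Example, assuming a local checkout of the filestore:
# cmap = colormap_from_png("filestore/colormaps/viridis.png")
```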
--------------------------------------------------------------------------------
/filestore/db/S1/anatomicals/raw.nii.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/db/S1/anatomicals/raw.nii.gz
--------------------------------------------------------------------------------
/filestore/db/S1/transforms/fullhead/matrices.xfm:
--------------------------------------------------------------------------------
 1 | {
 2 |     "coord": [
 3 |         [
 4 |             -0.44486981846094426,
 5 |             -0.0021363672818559996,
 6 |             -0.03721181986487324,
 7 |             46.62686084588364
 8 |         ],
 9 |         [
10 |             0.005235315303737166,
11 |             -0.44485768384714863,
12 |             -0.03704886912935894,
13 |             60.165881316857195
14 |         ],
15 |         [
16 |             -0.02001550497747565,
17 |             -0.020260819840215893,
18 |             0.24044994416882276,
19 |             12.698317611104553
20 |         ],
21 |         [
22 |             0.0,
23 |             0.0,
24 |             0.0,
25 |             1.0
26 |         ]
27 |     ],
28 |     "magnet": [
29 |         [
30 |             0.9965083975951243,
31 |             0.004785462731731425,
32 |             0.08335447685219563,
33 |             7.555831260552253
34 |         ],
35 |         [
36 |             -0.01172710633029911,
37 |             0.9964812160601064,
38 |             0.08298946720308957,
39 |             4.541688215906447
40 |         ],
41 |         [
42 |             -0.08266403784756719,
43 |             -0.08367718825875846,
44 |             0.9930582969345504,
45 |             28.30609847515487
46 |         ],
47 |         [
48 |             0.0,
49 |             0.0,
50 |             0.0,
51 |             1.0
52 |         ]
53 |     ]
54 | }
--------------------------------------------------------------------------------
/filestore/db/S1/transforms/fullhead/reference.nii.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/db/S1/transforms/fullhead/reference.nii.gz
--------------------------------------------------------------------------------
/filestore/db/S1/transforms/retinotopy/matrices.xfm:
--------------------------------------------------------------------------------
 1 | {
 2 |     "coord": [
 3 |         [
 4 |             -0.44433479119238484,
 5 |             -0.008558358562072857,
 6 |             -0.004920572244992459,
 7 |             36.658401885195964
 8 |         ],
 9 |         [
10 |             0.006981029916364745,
11 |             -0.11576549497712417,
12 |             -0.4290460075989473,
13 |             36.49854993538463
14 |         ],
15 |         [
16 |             0.006262112715724827,
17 |             -0.38488404380263075,
18 |             0.10395156498945099,
19 |             -2.8397223542738104
20 |         ],
21 |         [
22 |             0.0,
23 |             0.0,
24 |             0.0,
25 |             1.0
26 |         ]
27 |     ],
28 |     "magnet": [
29 |         [
30 |             0.9997532801828659,
31 |             0.01925630676466393,
32 |             0.011071287551233034,
33 |             -1.5100907651284103
34 |         ],
35 |         [
36 |             -0.01925494110260343,
37 |             0.9998145799870785,
38 |             -0.00022997677388369997,
39 |             -4.601510299509542
40 |         ],
41 |         [
42 |             -0.011073662992737534,
43 |             1.674304765131307e-05,
44 |             0.9999386849762423,
45 |             -0.3801662882046628
46 |         ],
47 |         [
48 |             0.0,
49 |             0.0,
50 |             0.0,
51 |             1.0
52 |         ]
53 |     ]
54 | }
--------------------------------------------------------------------------------
/filestore/db/S1/transforms/retinotopy/reference.nii.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gallantlab/pycortex/262af5126c6597c9fa287bcf814cc3c0033200e6/filestore/db/S1/transforms/retinotopy/reference.nii.gz
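The two matrices.xfm files above store 4x4 affine transforms as JSON under the keys "coord" and "magnet". The sketch below is a minimal, hedged example of reading one matrix and applying it to points in homogeneous coordinates with numpy; it deliberately does not assert which spaces the two matrices map between, since that is defined by pycortex's transform conventions.

```python
# Illustrative sketch only -- not part of the repository.
import json
import numpy as np

with open("filestore/db/S1/transforms/fullhead/matrices.xfm") as f:
    xfm = json.load(f)

coord = np.asarray(xfm["coord"])          # 4x4 affine; last row is [0, 0, 0, 1]

pts = np.array([[0.0, 0.0, 0.0],          # example 3-D points, one per row
                [10.0, 20.0, 30.0]])
hom = np.c_[pts, np.ones(len(pts))]       # append homogeneous coordinate
mapped = (coord @ hom.T).T[:, :3]         # apply the affine, drop the 1s
print(mapped)
```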
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
 1 | [build-system]
 2 | # Minimum requirements for the build system to execute, according to PEP518
 3 | # specification.
 4 | requires = ["setuptools", "build", "numpy", "cython", "wheel"]
 5 | build-backend = "setuptools.build_meta"
 6 | 
 7 | [tool.codespell]
 8 | skip = '.git,*.pdf,*.svg,*.css,*.min.*,*.gii,resources,OpenCTM-1.0.3,filestore,build,_build'
 9 | check-hidden = true
10 | # ignore-regex = ''
11 | ignore-words-list = 'nd,acount,anormal,fpt,coo,transpart,FO,lins'
12 | 
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | testpaths =
3 |     cortex
4 | addopts =
5 |     -r a
6 |     -v
7 | 
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
 1 | setuptools
 2 | future
 3 | numpy
 4 | scipy
 5 | tornado>=4.3
 6 | shapely
 7 | lxml
 8 | html5lib
 9 | h5py
10 | numexpr
11 | cython
12 | matplotlib
13 | pillow
14 | nibabel>=2.1
15 | networkx>=2.1
16 | imageio
17 | looseversion
18 | mda_xdrlib
--------------------------------------------------------------------------------
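As a final illustrative aid (not part of the repository), the sketch below reports which of the requirements above are installed in the current environment, using only the standard library; it assumes the names in requirements.txt match the distribution names known to importlib.metadata.

```python
# Illustrative sketch only -- not part of the repository.
from importlib.metadata import version, PackageNotFoundError
import re

with open("requirements.txt") as f:
    for line in f:
        req = line.strip()
        if not req or req.startswith("#"):
            continue
        name = re.split(r"[<>=!~]", req, maxsplit=1)[0]   # drop any version specifier
        try:
            print(f"{req:20s} installed: {version(name)}")
        except PackageNotFoundError:
            print(f"{req:20s} NOT installed")
```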