├── .gitignore ├── Dockerfile ├── LICENSE ├── MANIFEST.in ├── README.md ├── noxfile.py ├── pyproject.toml ├── src └── hyve │ ├── __init__.py │ ├── const.py │ ├── data │ └── cmap │ │ ├── tpl-fsLR_hemi-L_desc-modal_rgba.gii │ │ ├── tpl-fsLR_hemi-L_desc-network_rgba.gii │ │ ├── tpl-fsLR_hemi-R_desc-modal_rgba.gii │ │ ├── tpl-fsLR_hemi-R_desc-network_rgba.gii │ │ ├── tpl-fsaverage_hemi-L_desc-modal_rgba.gii │ │ ├── tpl-fsaverage_hemi-L_desc-network_rgba.gii │ │ ├── tpl-fsaverage_hemi-R_desc-modal_rgba.gii │ │ └── tpl-fsaverage_hemi-R_desc-network_rgba.gii │ ├── elements.py │ ├── flows.py │ ├── geom │ ├── __init__.py │ ├── base.py │ ├── network.py │ ├── points.py │ ├── prim.py │ ├── surf.py │ └── transforms.py │ ├── layout.py │ ├── plot.py │ ├── prim.py │ ├── transforms.py │ └── util.py └── tests ├── __init__.py ├── test_allview.py ├── test_figbuilder.py ├── test_flat.py ├── test_hbfigbuilder.py ├── test_injection.py ├── test_interactive.py ├── test_layout.py ├── test_netplot.py ├── test_overlay.py ├── test_sparque.py ├── test_surfplot.py ├── test_uniplot.py └── test_volplot.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 
106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
160 | #.idea/ 161 | 162 | # MACOS garbage 163 | .DS_Store 164 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.11 2 | RUN apt-get update && apt-get install ffmpeg libsm6 libxext6 -y 3 | RUN pip install --upgrade pip 4 | RUN wget https://www.vtk.org/files/release/9.3/vtk-9.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl 5 | RUN pip install vtk-9.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl 6 | RUN pip install hyve[test]==0.0.2 7 | RUN pip install hyve-examples 8 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | recursive-include src/hyve/data * 2 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ``hyve`` 2 | Interactive and static 3D visualisation for functional brain mapping 3 | 4 | ``hyve`` (``hypercoil`` visualisation engine) is a Python package for interactive and static 3D visualisation of functional brain mapping data. It was originally designed to be used in conjunction with the [``hypercoil``](https://github.com/hypercoil/) project for differentiable programming in the context of functional brain mapping, but can be used independently. 5 | 6 | This system is currently under development, and the API is accordingly subject to sweeping changes without notice. 
In particular, before using this system, be aware of the *major* limitations that exist, detailed under the *v1.0.0* header here (and soon to be added as issues). Documentation is also extremely sparse, but will be added in the near future. To get a sense of how the package might look and feel when it is more mature, you can take a look at the test cases in the ``tests`` directory. 7 | 8 | ``hyve`` allows for the visualisation of 3D data in a variety of formats, including volumetric data, surface meshes, and 3-dimensional network renderings. It is built using a rudimentary quasi-functional programming paradigm, allowing users to compose new plotting utilities for their data by chaining together functional primitives. The system is designed to be modular and extensible, and can be easily extended to support new data types and visualisation techniques. It is built on top of the ``pyvista`` library and therefore uses VTK as its rendering backend. The system is also capable of combining visualisations as panels of a SVG figure. 9 | 10 | ## Installation 11 | 12 | ``hyve`` can be installed from PyPI using ``pip``: 13 | 14 | ```bash 15 | pip install hyve==0.0.2 16 | ``` 17 | 18 | The below examples also require installation of the ``hyve-examples`` package, which can be installed from PyPI using ``pip``: 19 | 20 | ```bash 21 | pip install hyve-examples 22 | ``` 23 | 24 | ## Contributing 25 | 26 | Suggestions for improvement and contributions are welcome. Please open an issue or submit a pull request if you have any ideas for how to improve the package. ``hyve`` is not feature-complete and is still under active development, so there are many opportunities for improvement. There are also likely to be many bugs, so please open an issue if you encounter any problems. 
27 | 28 | ## Basic usage 29 | 30 | The following example demonstrates how to use ``hyve`` to visualise a 3D surface mesh and create an HTML-based interactive visualisation (built on the ``trame`` library) that can be viewed in a web browser: 31 | 32 | ```python 33 | from hyve_examples import get_null400_cifti 34 | from hyve.flows import plotdef 35 | from hyve.transforms import ( 36 | surf_from_archive, 37 | surf_scalars_from_cifti, 38 | parcellate_colormap, 39 | vertex_to_face, 40 | plot_to_html, 41 | ) 42 | 43 | plot_f = plotdef( 44 | surf_from_archive(), 45 | surf_scalars_from_cifti('parcellation'), 46 | parcellate_colormap('parcellation', 'network'), 47 | vertex_to_face('parcellation'), 48 | plot_to_html( 49 | fname_spec=( 50 | 'scalars-{surfscalars}_hemisphere-{hemisphere}_cmap-network' 51 | ), 52 | ), 53 | ) 54 | plot_f( 55 | template='fsLR', 56 | load_mask=True, 57 | parcellation_cifti=get_null400_cifti(), 58 | surf_projection=['veryinflated'], 59 | hemisphere=['left', 'right'], 60 | window_size=(800, 800), 61 | output_dir='/tmp', 62 | ) 63 | ``` 64 | 65 | The HTML files generated by this example will be written to ``/tmp/scalars-parcellation_hemisphere-left_cmap-network_scene.html`` and ``/tmp/scalars-parcellation_hemisphere-right_cmap-network_scene.html``. These files can be opened in a web browser to view the interactive visualisation. 66 | 67 | Note that, unlike many other plotting libraries, ``hyve`` does not provide a single function that can be used to generate a plot. Instead, it provides a set of functional primitives that can be chained together to create a custom plotting pipeline using the ``plotdef`` function. This allows users to create new plotting utilities by composing primitives. For example, the ``plot_f`` function used in the example above is a composition of the ``surf_from_archive``, ``surf_scalars_from_cifti``, ``parcellate_colormap``, ``vertex_to_face``, and ``plot_to_html`` functions with a unified base plotter. 
The ``plot_f`` function can then be used to generate a plot by passing it a set of keyword arguments that specify the data to be plotted and the visualisation parameters. 68 | 69 | This approach allows users to create new plotting utilities without having to write much new code, but it can be difficult to understand at first. 70 | 71 | It's also possible to use ``hyve`` to create static visualisations. For example, the following code creates glass brain visualisations of the pain network from Xu et al. (2020) (10.1016/j.neubiorev.2020.01.004). 72 | 73 | ```python 74 | from hyve_examples import get_pain_thresh_nifti 75 | from hyve.flows import plotdef 76 | from hyve.transforms import ( 77 | surf_from_archive, 78 | points_scalars_from_nifti, 79 | plot_to_image, 80 | save_snapshots, 81 | ) 82 | 83 | 84 | nii = get_pain_thresh_nifti() 85 | plot_f = plotdef( 86 | surf_from_archive(), 87 | points_scalars_from_nifti('pain'), 88 | plot_to_image(), 89 | save_snapshots( 90 | fname_spec=( 91 | 'scalars-{pointsscalars}_view-{view}' 92 | ), 93 | ), 94 | ) 95 | plot_f( 96 | template='fsaverage', 97 | surf_projection=('pial',), 98 | surf_alpha=0.3, 99 | pain_nifti=nii, 100 | points_scalars_cmap='magma', 101 | views=('dorsal', 'left', 'anterior'), 102 | output_dir='/tmp', 103 | ) 104 | ``` 105 | 106 | And the below code demonstrates how to use ``hyve`` to create a static PNG image of a BrainNetViewer-like scene of a 3D brain network embedded in a surface mesh: 107 | 108 | ```python 109 | import numpy as np 110 | import pandas as pd 111 | 112 | from hyve_examples import ( 113 | get_schaefer400_synthetic_conmat, 114 | get_schaefer400_cifti, 115 | ) 116 | from hyve.flows import plotdef 117 | from hyve.flows import add_network_data 118 | from hyve.transforms import ( 119 | surf_from_archive, 120 | surf_scalars_from_cifti, 121 | 
parcellate_colormap, 122 | add_node_variable, 123 | add_edge_variable, 124 | plot_to_image, 125 | save_snapshots, 126 | node_coor_from_parcels, 127 | build_network, 128 | add_network_overlay, 129 | ) 130 | 131 | # Get a parcellation and the corresponding connectivity matrix 132 | parcellation = get_schaefer400_cifti() 133 | cov = pd.read_csv( 134 | get_schaefer400_synthetic_conmat(), sep='\t', header=None 135 | ).values 136 | 137 | # Select some nodes and edges to be highlighted 138 | vis_nodes_edge_selection = np.zeros(400, dtype=bool) 139 | vis_nodes_edge_selection[0:5] = True 140 | vis_nodes_edge_selection[200:205] = True 141 | 142 | # Define a plotting function 143 | plot_f = plotdef( 144 | surf_from_archive(), 145 | surf_scalars_from_cifti('parcellation', plot=False), 146 | add_network_data( 147 | add_node_variable('vis'), 148 | add_edge_variable( 149 | 'vis_conn', 150 | threshold=10, 151 | topk_threshold_nodewise=True, 152 | absolute=True, 153 | incident_node_selection=vis_nodes_edge_selection, 154 | emit_degree=True, 155 | ), 156 | add_edge_variable( 157 | 'vis_internal_conn', 158 | absolute=True, 159 | connected_node_selection=vis_nodes_edge_selection, 160 | ), 161 | ), 162 | node_coor_from_parcels('parcellation'), 163 | build_network('vis'), 164 | parcellate_colormap('parcellation', 'network', target='node'), 165 | plot_to_image(), 166 | save_snapshots( 167 | fname_spec=( 168 | 'network-schaefer400_view-{view}' 169 | ), 170 | ), 171 | ) 172 | 173 | # Generate a plot 174 | plot_f( 175 | template='fsLR', 176 | surf_projection='inflated', 177 | surf_alpha=0.2, 178 | parcellation_cifti=parcellation, 179 | node_radius='vis_conn_degree', 180 | node_color='index', 181 | edge_color='vis_conn_sgn', 182 | edge_radius='vis_conn_val', 183 | vis_nodal=vis_nodes_edge_selection.astype(int), 184 | vis_conn_adjacency=cov, 185 | vis_internal_conn_adjacency=cov, 186 | views=('dorsal', 'left', 'posterior'), 187 | output_dir='/tmp', 188 | ) 189 | ``` 190 | 191 | Here is 
another, more involved example, this time demonstrating how to use ``hyve`` to create a static SVG figure: 192 | 193 | ```python 194 | import templateflow.api as tflow 195 | from hyve.elements import TextBuilder 196 | from hyve_examples import get_null400_cifti 197 | from hyve.flows import plotdef 198 | from hyve.transforms import ( 199 | surf_from_archive, 200 | surf_scalars_from_nifti, 201 | add_surface_overlay, 202 | save_grid, 203 | plot_to_image, 204 | ) 205 | 206 | # Annotate the panels of the figure so that the figure builder knows 207 | # where to place different elements. Note that we'll need a layout with 208 | # 9 panels, so we'll be creating a 3x3 grid of images when we parameterise 209 | # the plot definition below. 210 | annotations = { 211 | 0: dict( 212 | hemisphere='left', 213 | view='lateral', 214 | ), 215 | 1: dict(view='anterior'), 216 | 2: dict( 217 | hemisphere='right', 218 | view='lateral', 219 | ), 220 | 3: dict(view='dorsal'), 221 | 4: dict(elements=['title', 'scalar_bar']), 222 | 5: dict(view='ventral'), 223 | 6: dict( 224 | hemisphere='left', 225 | view='medial', 226 | ), 227 | 7: dict(view='posterior'), 228 | 8: dict( 229 | hemisphere='right', 230 | view='medial', 231 | ), 232 | } 233 | 234 | # Define a plotting function 235 | plot_f = plotdef( 236 | surf_from_archive(), 237 | add_surface_overlay( 238 | 'GM Density', 239 | surf_scalars_from_nifti( 240 | 'GM Density', template='fsaverage', plot=True 241 | ), 242 | ), 243 | plot_to_image(), 244 | save_grid( 245 | n_cols=3, n_rows=3, padding=10, 246 | canvas_size=(1800, 1500), 247 | canvas_color=(0, 0, 0), 248 | fname_spec='scalars-gmdensity_view-all_page-{page}', 249 | scalar_bar_action='collect', 250 | annotations=annotations, 251 | ), 252 | ) 253 | 254 | # Generate a plot 255 | plot_f( 256 | template='fsaverage', 257 | gm_density_nifti=tflow.get( 258 | template='MNI152NLin2009cAsym', 259 | suffix='probseg', 260 | label='GM', 261 | resolution=2 262 | ), 263 | gm_density_clim=(0.2, 0.9), 
264 | gm_density_below_color=None, 265 | gm_density_scalar_bar_style={ 266 | 'name': None, 267 | 'orientation': 'h', 268 | }, 269 | surf_projection=('pial',), 270 | # This won't be the recommended way to add a title in the future. 271 | elements={ 272 | 'title': ( 273 | TextBuilder( 274 | content='GM density', 275 | bounding_box_height=192, 276 | font_size_multiplier=0.2, 277 | font_color='#cccccc', 278 | priority=-1, 279 | ), 280 | ), 281 | }, 282 | load_mask=True, 283 | hemisphere=['left', 'right', None], 284 | views={ 285 | 'left': ('medial', 'lateral'), 286 | 'right': ('medial', 'lateral'), 287 | 'both': ('dorsal', 'ventral', 'anterior', 'posterior'), 288 | }, 289 | output_dir='/tmp', 290 | window_size=(600, 500), 291 | ) 292 | ``` 293 | 294 | ## Feature roadmap 295 | 296 | ### v1.0.0 297 | 298 | Several critical features are missing from the alpha version of ``hyve``. We will do our best to be responsive to issues that arise as a consequence of the inevitable breaking API changes that arise as this functionality is implemented. In particular, the following features are significant enough that the API cannot be considered stable until they are implemented: 299 | 300 | - [x] Disambiguation and filtering of graphical elements according to metadata. Currently, the layout system does not support matching metadata in any graphical elements other than snapshots. For instance, scalar bars cannot be routed according to grouping specifications. This can be implemented by first making snapshots a graphical raster element and then modifying the code to match metadata for any elements. This is a critical feature that is necessary for creating many publication-ready figures with multiple panels and plot elements: currently, most figures must be edited manually because there is no control over, for instance, what scalar bars are placed in the SVG. 301 | - [x] Uniform backend for all geometric primitives and topo-transforms. 
Currently, the way that different geometric primitives are handled is inconsistent--networks (correctly) have a ``ggplot``-like API that allows mapping different variables to different visual properties, while surfaces and volumes are still missing this functionality. Additionally, the handling and filtering of data by hemisphere should be lifted into a topo-transform, so that it can be applied to any geometric primitive. Finally, the naming of aesthetic mappings (e.g., ``radius``, ``rlim``, and ``rmap``; ``color``, ``clim``, and ``cmap``) should be made more principled. This is a critical feature that is necessary before we extend the library to support new geometric primitives. 302 | - [x] Improving reusability of plot protocols. Currently, plot protocols created by ``hyve.plotdef`` have many parameters fixed at the time of creation, which undermines their reusability. This should be changed by having the parameterisation of primitives spliced into protocols at the time of creation set default values to arguments, while allowing the user to override these defaults at the time of calling the protocol. 303 | - [ ] Reconcile and formalise the relationship between the argument mapper (of the core visualisation loop) and the overlay system. Currently the system cannot handle this situation reasonably, and often not at all. This issue arises only when both are used in conjunction with one another, which we haven't seen much need for yet--but for which the need certainly exists. The user should be provided a way of controlling the desired behaviour when, for instance, vector-valued data are passed to multiple overlays--is an "outer" (product) or "inner" (zip) broadcast desired? 304 | 305 | ### Future releases 306 | 307 | Among others, the following features are planned for future releases: 308 | 309 | - More 3D visual primitives, including contours, reconstructed regional surfaces, and voxel block volumes. 
310 | - Support for grouping semantics in figure parameterisations, allowing users to specify that certain parameters should be applied to multiple elements at once according to shared metadata (e.g., same hemisphere), or that the same layout should be applied groupwise in a single figure (e.g., to visualise multiple parcellations or surface projections). 311 | - Joining the outputs from multiple calls to the core plotting automapper into a single figure. 312 | - Floating plot elements that are not anchored to a specific panel, and whose parameters can be defined in relation to a plot page, plot group, or plot panel. This feature could be used to add inset plots, colorbars, and legends to a figure. 313 | - More interactive ``pyvista`` widgets, for instance to allow users to slice volumes or to select a subset of nodes or edges to highlight in a plot. 314 | - Control over lighting, texture, material, etc. of 3D scenes and objects. 315 | - Transformations for adding panel and plot elements. 316 | - Postprocessing of snapshots (with the option to filter by metadata), e.g., background removal, cropping, and resizing. 317 | - Additional SVG plot elements, including text, drop shadows, and shapes. 318 | - Additional cameras, for instance close-up views of specific regions of interest. 319 | - Injection of vector plots generated by other libraries, such as ``matplotlib``, into ``hyve`` figures. Injection of raster and vector graphic files as elements in ``hyve`` figures. 320 | - Self-documenting parameterisation of plot definitions. 321 | 322 | If there is another feature you would like to see in ``hyve``, please [open an issue](https://github.com/hypercoil/hyve/issues/new/choose) to let us know! We are also interested in hearing which of the above features would be most useful to you, so that we can prioritise our development efforts accordingly. Finally, if you would like to contribute to the development of ``hyve``, please open an issue or pull request on GitHub. 
We will add a more detailed guide for contributions in the near future. 323 | 324 | ## Alternative options 325 | 326 | While ``hyve`` is designed to be modular and extensible, some users might prefer a library with a more stable and intuitive API, or one that uses a backend that is simpler to install. If you are looking for a more user-friendly alternative, you might consider the following more mature libraries: 327 | 328 | - [``netplotbrain``](https://github.com/wiheto/netplotbrain): A Python library that supports many of the same kinds of plots, but is more user-friendly and uses ``matplotlib`` as its visualisation backend. 329 | - [``surfplot``](https://github.com/danjgale/surfplot): A lightweight VTK-based package (using the ``brainspace`` library) that provides a user-friendly interface and produces simple and high-quality figures out of the box. 330 | - [``pysurfer``](https://github.com/nipy/PySurfer): A Python library for visualising and manipulating surface-based neuroimaging data that is built on top of VTK (using the ``mayavi`` library) and provides a ``tksurfer``-like user experience. 331 | - [``brainnetviewer``](https://www.nitrc.org/projects/bnv/): A MATLAB-based package that provides a GUI for visualising brain networks and surface-based neuroimaging data. A top-quality library that is widely used in the community, but it is built on a proprietary platform and is not easily extensible.
332 | -------------------------------------------------------------------------------- /noxfile.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 ts=4 sw=4 et: 4 | """ 5 | Noxfile 6 | """ 7 | import nox 8 | 9 | @nox.session() 10 | def clean(session): 11 | session.install('coverage[toml]') 12 | session.run('coverage', 'erase') 13 | 14 | @nox.session(python=["3.10", "3.11"]) 15 | def tests(session): 16 | session.install('.[test]') 17 | session.run( 18 | 'pytest', 19 | '--cov', 'hyve', 20 | '--cov-append', 21 | 'tests/', 22 | ) 23 | session.run('ruff', 'check', 'src/hyve') 24 | 25 | @nox.session() 26 | def report(session): 27 | session.install('coverage[toml]') 28 | session.run( 29 | 'coverage', 30 | 'report', '--fail-under=85', 31 | "--omit='*test*,*__init__*'", 32 | ) 33 | session.run( 34 | 'coverage', 35 | 'html', 36 | "--omit='*test*,*__init__*'", 37 | ) 38 | session.run( 39 | 'coverage', 40 | 'xml', 41 | "--omit='*test*,*__init__*'", 42 | ) 43 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools >=61"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "hyve" 7 | version = "0.1.0" 8 | description = "Interactive and static 3D visualisation for functional brain mapping" 9 | authors = [ 10 | { name = "Rastko Ciric", email = "rastko@stanford.edu" }, 11 | ] 12 | maintainers = [ 13 | { name = "Rastko Ciric", email = "rastko@stanford.edu" }, 14 | ] 15 | license = { file = "LICENSE" } 16 | readme = "README.md" 17 | classifiers = [ 18 | "Programming Language :: Python :: 3", 19 | "License :: OSI Approved :: Apache Software License", 20 | "Operating System :: POSIX :: Linux", 21 | "Operating System :: MacOS :: 
MacOS X", 22 | ] 23 | requires-python = ">=3.10" 24 | dependencies = [ 25 | "conveyant", 26 | "lytemaps", 27 | "matplotlib", 28 | "nibabel", 29 | "numpy", 30 | "pandas", 31 | "pyvista[all]", 32 | "scipy", 33 | "svg.py", 34 | "templateflow", 35 | ] 36 | 37 | [project.optional-dependencies] 38 | test = [ 39 | "hyve-examples", 40 | "coverage[toml]", 41 | "pytest", 42 | "pytest-cov", 43 | "ruff", 44 | "seaborn", 45 | ] 46 | 47 | [project.urls] 48 | "Homepage" = "https://github.com/hypercoil/hyve" 49 | 50 | [tool.coverage.report] 51 | show_missing = true 52 | skip_covered = true 53 | precision = 2 54 | 55 | [tool.coverage.run] 56 | branch = true 57 | omit = [ 58 | '*test*', 59 | '*__init__*', 60 | ] 61 | 62 | [tool.ruff] 63 | line-length = 79 64 | lint.ignore = ["E731"] 65 | lint.select = [ 66 | "E", 67 | "F", 68 | "W", 69 | "I001", 70 | ] 71 | 72 | [tool.ruff.lint.per-file-ignores] 73 | "__init__.py" = ["F401", "I001"] 74 | -------------------------------------------------------------------------------- /src/hyve/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 ts=4 sw=4 et: 4 | from .elements import ( 5 | ElementBuilder, 6 | RasterBuilder, 7 | ScalarBarBuilder, 8 | TextBuilder, 9 | ) 10 | from .flows import plotdef, add_network_data 11 | from .layout import ( 12 | AnnotatedLayout, 13 | Cell, 14 | CellLayout, 15 | ColGroupSpec, 16 | GroupSpec, 17 | RowGroupSpec, 18 | grid, 19 | hsplit, 20 | vsplit, 21 | ) 22 | from .plot import unified_plotter 23 | from .transforms import ( 24 | surf_from_archive, 25 | surf_from_freesurfer, 26 | surf_from_gifti, 27 | surf_scalars_from_cifti, 28 | surf_scalars_from_gifti, 29 | surf_scalars_from_array, 30 | surf_scalars_from_nifti, 31 | points_scalars_from_nifti, 32 | points_scalars_from_array, 33 | parcellate_colormap, 34 | parcellate_surf_scalars, 35 | 
scatter_into_parcels, 36 | vertex_to_face, 37 | add_surface_overlay, 38 | add_points_overlay, 39 | add_network_overlay, 40 | build_network, 41 | node_coor_from_parcels, 42 | add_node_variable, 43 | add_edge_variable, 44 | scalar_focus_camera, 45 | closest_ortho_camera, 46 | planar_sweep_camera, 47 | auto_camera, 48 | plot_to_image, 49 | plot_final_image, 50 | plot_to_html, 51 | plot_to_display, 52 | save_snapshots, 53 | save_figure, 54 | save_grid, 55 | svg_element, 56 | text_element, 57 | pyplot_element, 58 | ) 59 | -------------------------------------------------------------------------------- /src/hyve/flows.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 ts=4 sw=4 et: 4 | """ 5 | Visualisation control flow 6 | ~~~~~~~~~~~~~~~~~~~~~~~~~~ 7 | Functions for transforming the control flow of visualisation functions. 8 | See also ``transforms.py`` for functions that transform the input and output 9 | flows of visualisation functions. 
10 | """ 11 | import inspect 12 | from typing import Any, Literal, Optional, Sequence 13 | 14 | from conveyant import emulate_assignment, ichain, join, splice_docstring 15 | 16 | from .const import DOCBASE, RETBASE, docbuilder 17 | from .prim import automap_unified_plotter_p 18 | 19 | 20 | def plotdef(*pparams: Sequence[callable]) -> callable: 21 | plot_f = ichain(*pparams)(automap_unified_plotter_p) 22 | # drop variadic parameters 23 | plot_f.__signature__ = inspect.signature(plot_f).replace( 24 | parameters=tuple( 25 | p for p in plot_f.__signature__.parameters.values() 26 | if p.kind != p.VAR_POSITIONAL 27 | ) 28 | ) 29 | plot_f = emulate_assignment()(plot_f) 30 | return splice_docstring( 31 | f=plot_f, 32 | template=docbuilder(), 33 | base_str=DOCBASE, 34 | returns=RETBASE, 35 | ) 36 | 37 | 38 | def _get_unique_parameters_and_make_signature( 39 | joined: callable, 40 | fs: Sequence[callable], 41 | ) -> callable: 42 | seen_params = set() 43 | unique_params = [] 44 | for f in fs: 45 | f_params = [] 46 | for name, param in f.__signature__.parameters.items(): 47 | if name not in seen_params: 48 | seen_params.add(name) 49 | f_params.append(param) 50 | unique_params = f_params + unique_params 51 | joined.__signature__ = inspect.signature(joined).replace( 52 | parameters=unique_params 53 | ) 54 | return joined 55 | 56 | 57 | def joindata( 58 | join_vars: Optional[Sequence[str]] = None, 59 | how: Literal['outer', 'inner'] = 'outer', 60 | fill_value: Any = None, 61 | ) -> callable: 62 | def joining_f(arg): 63 | arg = list(a for a in arg if a is not None) 64 | out = arg[0].join(arg[1:], how=how) 65 | if fill_value is not None: 66 | out = out.fillna(fill_value) 67 | return out 68 | 69 | return join( 70 | joining_f, 71 | join_vars, 72 | postprocess=_get_unique_parameters_and_make_signature, 73 | ) 74 | 75 | 76 | def add_network_data( 77 | *pparams: Sequence[callable], 78 | how: Literal['outer', 'inner'] = 'outer', 79 | fill_value: Any = 0.0, 80 | ) -> callable: 81 | return 
joindata( 82 | join_vars= ('edge_values', 'node_values'), 83 | how=how, 84 | fill_value=fill_value, 85 | )(*pparams) 86 | -------------------------------------------------------------------------------- /src/hyve/geom/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 ts=4 sw=4 et: 4 | from .base import ( 5 | Layer, 6 | ) 7 | from .network import ( 8 | EdgeLayer, 9 | NodeLayer, 10 | ) 11 | from .prim import ( 12 | plot_network_f, 13 | plot_network_p, 14 | plot_points_f, 15 | plot_points_p, 16 | plot_surf_f, 17 | plot_surf_p, 18 | ) 19 | from .transforms import ( 20 | hemisphere_select_fit, 21 | hemisphere_slack_fit, 22 | ) 23 | from .network import ( 24 | NetworkData, 25 | NetworkDataCollection, 26 | ) 27 | from .points import ( 28 | PointData, 29 | PointDataCollection, 30 | ) 31 | from .surf import ( 32 | CortexTriSurface, 33 | ) 34 | -------------------------------------------------------------------------------- /src/hyve/geom/base.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 ts=4 sw=4 et: 4 | """ 5 | Geometric primitives 6 | ~~~~~~~~~~~~~~~~~~~~ 7 | Geometric primitives and geometric transforms. 
8 | """ 9 | import dataclasses 10 | from collections.abc import Mapping as MappingABC 11 | from typing import ( 12 | Any, 13 | Literal, 14 | Mapping, 15 | Optional, 16 | Sequence, 17 | Tuple, 18 | Union, 19 | ) 20 | 21 | import numpy as np 22 | from matplotlib import cm, colors 23 | 24 | from ..const import ( 25 | DEFAULT_CMAP, 26 | LAYER_ALIM_DEFAULT_VALUE, 27 | LAYER_ALIM_NEGATIVE_DEFAULT_VALUE, 28 | LAYER_ALIM_PERCENTILE_DEFAULT_VALUE, 29 | LAYER_ALPHA_DEFAULT_VALUE, 30 | LAYER_ALPHA_NEGATIVE_DEFAULT_VALUE, 31 | LAYER_AMAP_DEFAULT_VALUE, 32 | LAYER_AMAP_NEGATIVE_DEFAULT_VALUE, 33 | LAYER_BELOW_COLOR_DEFAULT_VALUE, 34 | LAYER_BLEND_MODE_DEFAULT_VALUE, 35 | LAYER_CLIM_DEFAULT_VALUE, 36 | LAYER_CLIM_NEGATIVE_DEFAULT_VALUE, 37 | LAYER_CLIM_PERCENTILE_DEFAULT_VALUE, 38 | LAYER_CMAP_NEGATIVE_DEFAULT_VALUE, 39 | LAYER_COLOR_DEFAULT_VALUE, 40 | LAYER_COLOR_NEGATIVE_DEFAULT_VALUE, 41 | LAYER_NAN_OVERRIDE_DEFAULT_VALUE, 42 | Tensor, 43 | ) 44 | from ..elements import ScalarBarBuilder 45 | from ..util import ( 46 | LinearScalarMapper, 47 | premultiply_alpha, 48 | scalar_percentile, 49 | source_over, 50 | unmultiply_alpha, 51 | ) 52 | 53 | BLEND_MODES = { 54 | 'source_over': source_over, 55 | } 56 | 57 | 58 | @dataclasses.dataclass(frozen=True) 59 | class _LayerBase: 60 | """Base class for layers.""" 61 | name: Optional[str] 62 | # long_name: Optional[str] = None 63 | color: Optional[Any] = LAYER_COLOR_DEFAULT_VALUE 64 | #TODO: color_negative and alpha_negative aren't sure what they are 65 | # supposed to be. We need to decide and appropriately refactor 66 | # before we can expose them or remove them. 
67 | color_negative: Optional[Any] = LAYER_COLOR_NEGATIVE_DEFAULT_VALUE 68 | cmap: Optional[Any] = DEFAULT_CMAP 69 | cmap_negative: Optional[Any] = LAYER_CMAP_NEGATIVE_DEFAULT_VALUE 70 | clim: Optional[Tuple[float, float]] = LAYER_CLIM_DEFAULT_VALUE 71 | clim_negative: Optional[Tuple[float, float]] = ( 72 | LAYER_CLIM_NEGATIVE_DEFAULT_VALUE 73 | ) 74 | clim_percentile: bool = LAYER_CLIM_PERCENTILE_DEFAULT_VALUE 75 | below_color: Optional[Any] = LAYER_BELOW_COLOR_DEFAULT_VALUE 76 | alpha: Union[float, str] = LAYER_ALPHA_DEFAULT_VALUE 77 | alpha_negative: Optional[float] = LAYER_ALPHA_NEGATIVE_DEFAULT_VALUE 78 | alim: Optional[Tuple[float, float]] = LAYER_ALIM_DEFAULT_VALUE 79 | alim_negative: Optional[Tuple[float, float]] = ( 80 | LAYER_ALIM_NEGATIVE_DEFAULT_VALUE 81 | ) 82 | alim_percentile: bool = LAYER_ALIM_PERCENTILE_DEFAULT_VALUE 83 | amap: Optional[callable] = LAYER_AMAP_DEFAULT_VALUE 84 | amap_negative: Optional[callable] = LAYER_AMAP_NEGATIVE_DEFAULT_VALUE 85 | nan_override: Optional[Tuple[float, float, float, float]] = ( 86 | LAYER_NAN_OVERRIDE_DEFAULT_VALUE 87 | ) 88 | hide_subthreshold: bool = False 89 | # style: Optional[Mapping[str, Any]] = None 90 | scalar_bar_style: Optional[Mapping[str, Any]] = dataclasses.field( 91 | default_factory=dict, 92 | ) 93 | blend_mode: Literal['source_over'] = LAYER_BLEND_MODE_DEFAULT_VALUE 94 | 95 | 96 | # Right now, it's the same as the base class, but we might want to add 97 | # additional parameters later. 98 | @dataclasses.dataclass(frozen=True) 99 | class Layer(_LayerBase): 100 | """Container for metadata to construct a single layer of a plot.""" 101 | name: str 102 | # hide_subthreshold: bool = True 103 | 104 | 105 | @dataclasses.dataclass 106 | class SubgeometryParameters(MappingABC): 107 | """ 108 | Addressable container for parameters specific to some subgeometry, most 109 | likely a cortical hemisphere. 
110 | """ 111 | params: Mapping[str, Mapping[str, Any]] 112 | 113 | def __init__(self, **params): 114 | self.params = params 115 | 116 | def __len__(self): 117 | return len(self.params) 118 | 119 | def __iter__(self): 120 | return iter(self.params) 121 | 122 | def __getitem__(self, key): 123 | return self.params[key] 124 | 125 | def get(self, geom: str, param: str): 126 | return self.params[geom][param] 127 | 128 | 129 | def _property_vector( 130 | scalar_array: Tensor, 131 | lim: Optional[Tuple[float, float]] = None, 132 | percentile: bool = False, 133 | mapper: Optional[Union[callable, Tuple[float, float]]] = None, 134 | ) -> Tuple[Tensor, Optional[callable]]: 135 | if lim is not None: 136 | if percentile: 137 | lim = scalar_percentile(scalar_array, lim) 138 | vmin, vmax = lim 139 | else: 140 | vmin, vmax = np.nanmin(scalar_array), np.nanmax(scalar_array) 141 | scalar_array = np.clip(scalar_array, vmin, vmax) 142 | if mapper is not None: 143 | if isinstance(mapper, Sequence): 144 | mapper = LinearScalarMapper(norm=mapper) 145 | scalar_array = mapper( 146 | scalar_array, 147 | vmin=vmin, 148 | vmax=vmax, 149 | ) 150 | return scalar_array, mapper 151 | 152 | 153 | def _rgba_impl( 154 | color_scalars: Tensor, 155 | alpha_scalars: Optional[Tensor] = None, 156 | color: Optional[str] = None, 157 | clim: Optional[Tuple[float, float]] = None, 158 | clim_percentile: bool = False, 159 | cmap: Any = 'viridis', 160 | below_color: Optional[str] = None, 161 | alpha: Optional[float] = None, 162 | alim: Optional[Tuple[float, float]] = None, 163 | alim_percentile: bool = False, 164 | amap: Optional[callable] = None, 165 | nan_override: Optional[ 166 | Tuple[float, float, float, float] 167 | ] = (0, 0, 0, 0), 168 | hide_subthreshold: bool = False, 169 | scalar_bar_builder: Optional[ScalarBarBuilder] = None, 170 | ) -> Tuple[Tensor, Optional[ScalarBarBuilder]]: 171 | if color is not None: 172 | rgba = np.tile(colors.to_rgba(color), (len(color_scalars), 1)) 173 | vmin, vmax, mapper 
= None, None, None 174 | else: 175 | if clim_percentile: 176 | clim = scalar_percentile(color_scalars, clim) 177 | if clim is not None: 178 | vmin, vmax = clim 179 | else: 180 | vmin, vmax = np.nanmin(color_scalars), np.nanmax(color_scalars) 181 | hide_subthreshold = False 182 | norm = colors.Normalize(vmin=vmin, vmax=vmax) 183 | mapper = cm.ScalarMappable(norm=norm, cmap=cmap) 184 | rgba = mapper.to_rgba(color_scalars) 185 | if alpha_scalars is not None: 186 | alpha_scalars, _ = _property_vector( 187 | alpha_scalars, 188 | lim=alim, 189 | percentile=alim_percentile, 190 | mapper=amap if amap is not None else (0, 1), 191 | ) 192 | rgba[:, 3] = alpha_scalars 193 | elif alpha is not None: 194 | rgba[:, 3] *= alpha 195 | if nan_override is not None: 196 | rgba = np.where( 197 | np.isnan(color_scalars[..., None]), 198 | np.asarray(nan_override), 199 | rgba, 200 | ) 201 | if vmin is not None: 202 | if below_color is not None: 203 | rgba[color_scalars < vmin] = colors.to_rgba(below_color) 204 | elif hide_subthreshold: 205 | # Set alpha to 0 for sub-threshold values 206 | rgba[color_scalars < vmin, 3] = 0 207 | if mapper is not None: 208 | scalar_bar_builder = ScalarBarBuilder(**{ 209 | **scalar_bar_builder, 210 | **{ 211 | 'mapper': mapper, 212 | 'below_color': below_color, 213 | }, 214 | }) 215 | else: 216 | scalar_bar_builder = None 217 | return rgba, scalar_bar_builder 218 | 219 | 220 | def scalars_to_rgba( 221 | color_scalars: Optional[Tensor] = None, 222 | alpha_scalars: Optional[Tensor] = None, 223 | color: Optional[str] = LAYER_COLOR_DEFAULT_VALUE, 224 | color_negative: Optional[str] = LAYER_COLOR_NEGATIVE_DEFAULT_VALUE, 225 | clim: Optional[Tuple[float, float]] = LAYER_CLIM_DEFAULT_VALUE, 226 | clim_negative: Optional[Tuple[float, float]] = ( 227 | LAYER_CLIM_NEGATIVE_DEFAULT_VALUE 228 | ), 229 | clim_percentile: bool = LAYER_CLIM_PERCENTILE_DEFAULT_VALUE, 230 | cmap: Optional[str] = DEFAULT_CMAP, 231 | cmap_negative: Optional[str] = 
LAYER_CMAP_NEGATIVE_DEFAULT_VALUE, 232 | below_color: Optional[str] = LAYER_BELOW_COLOR_DEFAULT_VALUE, 233 | alpha: Optional[float] = LAYER_ALPHA_DEFAULT_VALUE, 234 | alpha_negative: Optional[float] = LAYER_ALPHA_NEGATIVE_DEFAULT_VALUE, 235 | alim: Optional[Tuple[float, float]] = LAYER_ALIM_DEFAULT_VALUE, 236 | alim_negative: Optional[Tuple[float, float]] = ( 237 | LAYER_ALIM_NEGATIVE_DEFAULT_VALUE 238 | ), 239 | alim_percentile: bool = LAYER_ALIM_PERCENTILE_DEFAULT_VALUE, 240 | amap: Optional[callable] = LAYER_AMAP_DEFAULT_VALUE, 241 | amap_negative: Optional[callable] = LAYER_AMAP_NEGATIVE_DEFAULT_VALUE, 242 | nan_override: Optional[ 243 | Tuple[float, float, float, float] 244 | ] = LAYER_NAN_OVERRIDE_DEFAULT_VALUE, 245 | hide_subthreshold: bool = False, 246 | scalar_bar_builder: Optional[ScalarBarBuilder] = None, 247 | ) -> Tuple[Tensor, Sequence[Optional[ScalarBarBuilder]]]: 248 | """ 249 | Convert scalar values to RGBA colors. 250 | 251 | Converting all scalars to RGBA colors enables us to plot multiple 252 | scalar values on the same surface by leveraging blend operations. 253 | 254 | Parameters 255 | ---------- 256 | scalars : Tensor 257 | Scalar values to convert to RGBA colors. 258 | clim : tuple of float, optional 259 | Color limits. If ``clim_neg`` is also specified, this is the color 260 | limits for positive values. 261 | clim_neg : tuple of float, optional 262 | Color limits for negative values. 263 | cmap : str, optional 264 | Name of colormap to use for positive values. 265 | cmap_neg : str, optional 266 | Name of colormap to use for negative values. 267 | alpha : float, optional 268 | Opacity value to use for all scalar values, or opacity multiplier 269 | for the colormap(s). 270 | color : str, optional 271 | Color to use for all scalar values. 272 | below_color : str, optional 273 | Color to use for values below ``clim``. 
If ``clim_neg`` is also 274 | specified, this is the color to use for small absolute values 275 | between ``-clim_neg[0]`` and ``clim[0]``. 276 | hide_subthreshold : bool, optional 277 | If ``True``, set the alpha value to 0 for values below ``clim``. 278 | scalar_bar_builder : ScalarBarBuilder, optional 279 | Template for building scalar bars. If not specified, no scalar bar 280 | will be built. 281 | """ 282 | scalar_bar_builder_negative = None 283 | if cmap_negative is not None or color_negative is not None: 284 | if color_negative is None: 285 | color_negative = color 286 | if clim_negative is None: 287 | clim_negative = clim 288 | if alpha_negative is None: 289 | alpha_negative = alpha 290 | if amap_negative is None: 291 | amap_negative = amap 292 | if alim_negative is None: 293 | alim_negative = alim 294 | color_scalars_negative = -color_scalars.copy() 295 | color_scalars = color_scalars.copy() 296 | neg_idx = (color_scalars_negative > 0) 297 | color_scalars_negative[~neg_idx] = 0 298 | color_scalars[neg_idx] = 0 299 | if scalar_bar_builder is not None: 300 | scalar_bar_builder_negative = ScalarBarBuilder(**{ 301 | **scalar_bar_builder, 302 | **{'name_suffix': ' (—)'} 303 | }) 304 | scalar_bar_builder = ScalarBarBuilder(**{ 305 | **scalar_bar_builder, 306 | **{'name_suffix': ' (+)'} 307 | }) 308 | rgba_neg, scalar_bar_builder_negative = _rgba_impl( 309 | color_scalars=color_scalars_negative, 310 | alpha_scalars=alpha_scalars, 311 | color=color_negative, 312 | clim=clim_negative, 313 | clim_percentile=clim_percentile, 314 | cmap=cmap_negative, 315 | below_color=below_color, 316 | alpha=alpha_negative, 317 | alim=alim_negative, 318 | alim_percentile=alim_percentile, 319 | amap=amap_negative, 320 | nan_override=nan_override, 321 | hide_subthreshold=hide_subthreshold, 322 | scalar_bar_builder=scalar_bar_builder_negative, 323 | ) 324 | 325 | rgba, scalar_bar_builder = _rgba_impl( 326 | color_scalars=color_scalars, 327 | alpha_scalars=alpha_scalars, 328 | 
color=color, 329 | clim=clim, 330 | clim_percentile=clim_percentile, 331 | cmap=cmap, 332 | below_color=below_color, 333 | alpha=alpha, 334 | alim=alim, 335 | alim_percentile=alim_percentile, 336 | amap=amap, 337 | nan_override=nan_override, 338 | hide_subthreshold=hide_subthreshold, 339 | scalar_bar_builder=scalar_bar_builder, 340 | ) 341 | if cmap_negative is not None: 342 | rgba[neg_idx] = rgba_neg[neg_idx] 343 | return rgba, (scalar_bar_builder, scalar_bar_builder_negative) 344 | 345 | 346 | def layer_rgba( 347 | layer: Layer, 348 | color_scalar_array: Tensor, 349 | alpha_scalar_array: Optional[Tensor] = None, 350 | ) -> Tuple[Tensor, Sequence[ScalarBarBuilder]]: 351 | cmap = layer.cmap or DEFAULT_CMAP 352 | if layer.scalar_bar_style is not None: 353 | scalar_bar_builder = ScalarBarBuilder(mapper=None, name=layer.name) 354 | else: 355 | scalar_bar_builder = None 356 | rgba, scalar_bar_builders = scalars_to_rgba( 357 | color_scalars=color_scalar_array, 358 | alpha_scalars=alpha_scalar_array, 359 | cmap=cmap, 360 | cmap_negative=layer.cmap_negative, 361 | clim=layer.clim, 362 | clim_negative=layer.clim_negative, 363 | clim_percentile=layer.clim_percentile, 364 | color=layer.color, 365 | color_negative=layer.color_negative, 366 | below_color=layer.below_color, 367 | alpha=layer.alpha, 368 | alpha_negative=layer.alpha_negative, 369 | amap=layer.amap, 370 | amap_negative=layer.amap_negative, 371 | alim=layer.alim, 372 | alim_negative=layer.alim_negative, 373 | alim_percentile=layer.alim_percentile, 374 | nan_override=layer.nan_override, 375 | hide_subthreshold=layer.hide_subthreshold, 376 | scalar_bar_builder=scalar_bar_builder, 377 | ) 378 | if layer.scalar_bar_style is not None: 379 | # We should be able to override anything in the scalar bar builder 380 | # with the layer's scalar bar style, including the mapper and name 381 | # if we want to. 
382 | scalar_bar_builders = tuple( 383 | ScalarBarBuilder(**{ 384 | **scalar_bar_builder, 385 | **layer.scalar_bar_style, 386 | }) 387 | for scalar_bar_builder in scalar_bar_builders 388 | if scalar_bar_builder is not None 389 | ) 390 | else: 391 | scalar_bar_builders = () 392 | 393 | return rgba, scalar_bar_builders 394 | 395 | 396 | def compose_layers( 397 | geom: Any, 398 | layers: Sequence[Layer], 399 | **params, 400 | ) -> Tuple[Tensor, str, Sequence[ScalarBarBuilder]]: 401 | """ 402 | Compose layers into a single RGB(A) array. 403 | """ 404 | dst, tensors, params = geom.init_composition(layers, **params) 405 | dst, scalar_bar_builders = layer_rgba(dst, *tensors) 406 | dst = premultiply_alpha(dst) 407 | 408 | for layer in layers[1:]: 409 | src, new_builders = geom.layer_to_tensors(layer, **params) 410 | src = premultiply_alpha(src) 411 | blend_layers = BLEND_MODES[layer.blend_mode] 412 | dst = blend_layers(src, dst) 413 | scalar_bar_builders = scalar_bar_builders + new_builders 414 | dst = unmultiply_alpha(dst) 415 | return dst, scalar_bar_builders, params 416 | -------------------------------------------------------------------------------- /src/hyve/geom/prim.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 ts=4 sw=4 et: 4 | """ 5 | Geometric primitives 6 | ~~~~~~~~~~~~~~~~~~~~ 7 | Geometric primitives containers. 
8 | """ 9 | from typing import Mapping, NamedTuple, Optional, Tuple 10 | 11 | from .network import ( 12 | plot_network_aux_f, 13 | plot_network_f, 14 | ) 15 | from .points import ( 16 | plot_points_aux_f, 17 | plot_points_f, 18 | ) 19 | from .surf import ( 20 | plot_surf_aux_f, 21 | plot_surf_f, 22 | ) 23 | from .transforms import ( 24 | hemisphere_select_fit_network, 25 | hemisphere_select_fit_points, 26 | hemisphere_select_fit_surf, 27 | hemisphere_select_transform_network, 28 | hemisphere_select_transform_points, 29 | hemisphere_select_transform_surf, 30 | hemisphere_slack_fit_network, 31 | hemisphere_slack_fit_points, 32 | hemisphere_slack_fit_surf, 33 | hemisphere_slack_transform_network, 34 | hemisphere_slack_transform_points, 35 | hemisphere_slack_transform_surf, 36 | ) 37 | 38 | 39 | class GeomPrimitive(NamedTuple): 40 | name: str 41 | plot: callable 42 | meta: callable 43 | xfms: Mapping[str, Tuple[Optional[callable], Optional[callable]]] 44 | 45 | 46 | plot_surf_p = GeomPrimitive( 47 | name='surf', 48 | plot=plot_surf_f, 49 | meta=plot_surf_aux_f, 50 | xfms={ 51 | 'hemisphere_select': ( 52 | hemisphere_select_fit_surf, 53 | hemisphere_select_transform_surf, 54 | ), 55 | 'hemisphere_slack': ( 56 | hemisphere_slack_fit_surf, 57 | hemisphere_slack_transform_surf, 58 | ), 59 | }, 60 | ) 61 | 62 | 63 | plot_points_p = GeomPrimitive( 64 | name='points', 65 | plot=plot_points_f, 66 | meta=plot_points_aux_f, 67 | xfms={ 68 | 'hemisphere_select': ( 69 | hemisphere_select_fit_points, 70 | hemisphere_select_transform_points, 71 | ), 72 | 'hemisphere_slack': ( 73 | hemisphere_slack_fit_points, 74 | hemisphere_slack_transform_points, 75 | ), 76 | }, 77 | ) 78 | 79 | 80 | plot_network_p = GeomPrimitive( 81 | name='network', 82 | plot=plot_network_f, 83 | meta=plot_network_aux_f, 84 | xfms={ 85 | 'hemisphere_select': ( 86 | hemisphere_select_fit_network, 87 | hemisphere_select_transform_network, 88 | ), 89 | 'hemisphere_slack': ( 90 | hemisphere_slack_fit_network, 91 | 
hemisphere_slack_transform_network, 92 | ), 93 | }, 94 | ) 95 | -------------------------------------------------------------------------------- /src/hyve/geom/transforms.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 ts=4 sw=4 et: 4 | """ 5 | Geometric transformations 6 | ~~~~~~~~~~~~~~~~~~~~~~~~~ 7 | Geometric transformations containers. 8 | """ 9 | from typing import ( 10 | Any, 11 | Literal, 12 | Mapping, 13 | NamedTuple, 14 | Optional, 15 | Sequence, 16 | Tuple, 17 | Union, 18 | ) 19 | 20 | import numpy as np 21 | 22 | from ..const import Tensor 23 | from .base import SubgeometryParameters 24 | from .network import NetworkDataCollection 25 | from .points import PointDataCollection 26 | from .surf import CortexTriSurface 27 | 28 | 29 | class GeomTransform(NamedTuple): 30 | name: str 31 | fit: callable 32 | transform: callable 33 | meta: Optional[callable] = None 34 | 35 | 36 | def hemisphere_select_fit( 37 | key_geom: Optional[str] = 'surf', 38 | key_scalars: Optional[str] = None, 39 | *, 40 | hemisphere: Optional[str] = None, 41 | **params, 42 | ) -> Mapping[str, Any]: 43 | # by convention, None denotes both hemispheres (no selection) 44 | if hemisphere == 'both': 45 | hemisphere = None 46 | hemispheres = ( 47 | (hemisphere,) if hemisphere is not None else ('left', 'right') 48 | ) 49 | key_geom = params.get(key_geom, None) 50 | if key_geom is not None and key_scalars is not None: 51 | hemispheres = tuple( 52 | hemi for hemi in hemispheres 53 | if key_scalars is None or key_geom.present_in_hemisphere( 54 | hemi, key_scalars 55 | ) 56 | ) 57 | if len(hemispheres) == 1: 58 | hemispheres_str = hemispheres[0] 59 | else: 60 | hemispheres_str = 'both' 61 | return { 62 | 'hemispheres': hemispheres, 63 | 'hemispheres_str': hemispheres_str, 64 | } 65 | 66 | 67 | def hemisphere_select_transform( 68 | 
fit_params: Mapping[str, Any], 69 | **params, 70 | ) -> Mapping[str, Any]: 71 | return { 72 | 'hemispheres': fit_params.get('hemispheres', ('left', 'right')), 73 | 'hemispheres_str': fit_params.get('hemispheres_str', 'both'), 74 | } 75 | 76 | 77 | def hemisphere_select_meta( 78 | hemisphere: Optional[str] = None, 79 | key_geom: Optional[str] = 'surf', 80 | key_scalars: Optional[str] = None, 81 | **params, 82 | ) -> Mapping[str, Any]: 83 | return { 84 | 'hemisphere': [ 85 | hemisphere_select_fit( 86 | hemisphere=hemisphere, 87 | key_geom=key_geom, 88 | key_scalars=key_scalars, 89 | meta_call=True, 90 | **params, 91 | )['hemispheres_str'] 92 | ], 93 | } 94 | 95 | 96 | def hemisphere_select_assign_parameters( 97 | *, 98 | surf_scalars_cmap: Any, 99 | surf_scalars_clim: Any, 100 | surf_scalars_layers: Any, 101 | subgeom_params: Optional[SubgeometryParameters] = None, 102 | ) -> Tuple[Mapping[str, Any], Mapping[str, Any]]: 103 | left = {} 104 | right = {} 105 | 106 | def assign_tuple(arg, name): 107 | left[name], right[name] = arg 108 | 109 | def assign_scalar(arg, name): 110 | left[name] = right[name] = arg 111 | 112 | def conditional_assign(condition, arg, name): 113 | if arg is not None and condition(arg): 114 | assign_tuple(arg, name) 115 | else: 116 | assign_scalar(arg, name) 117 | 118 | conditional_assign( 119 | lambda x: len(x) == 2, 120 | surf_scalars_cmap, 121 | 'surf_scalars_cmap', 122 | ) 123 | conditional_assign( 124 | lambda x: len(x) == 2 and isinstance(x[0], (tuple, list)), 125 | surf_scalars_clim, 126 | 'surf_scalars_clim', 127 | ) 128 | conditional_assign( 129 | lambda x: len(x) == 2 and isinstance(x[0], (tuple, list)), 130 | surf_scalars_layers, 131 | 'surf_scalars_layers', 132 | ) 133 | if subgeom_params is None: 134 | subgeom_params = {} 135 | if subgeom_params.get('left') is not None: 136 | left = {**subgeom_params.params.pop('left'), **left} 137 | if subgeom_params.get('right') is not None: 138 | right = {**subgeom_params.params.pop('right'), 
**right} 139 | return SubgeometryParameters(left=left, right=right, **subgeom_params) 140 | 141 | 142 | def hemisphere_select_fit_surf(f: callable) -> callable: 143 | 144 | def _hemisphere_select_fit_surf( 145 | surf: Optional[CortexTriSurface] = None, 146 | surf_projection: Optional[str] = None, 147 | **params, 148 | ) -> Mapping[str, Any]: 149 | fit_params = f(**params, surf=surf, surf_projection=surf_projection) 150 | 151 | surf_hemi_params = hemisphere_select_assign_parameters( 152 | surf_scalars_cmap=params.get('surf_scalars_cmap'), 153 | surf_scalars_clim=params.get('surf_scalars_clim'), 154 | surf_scalars_layers=params.get('surf_scalars_layers'), 155 | ) 156 | fit_params['hemisphere_parameters'] = surf_hemi_params 157 | 158 | return fit_params 159 | return _hemisphere_select_fit_surf 160 | 161 | 162 | def hemisphere_select_fit_network(f: callable) -> callable: 163 | def _hemisphere_select_fit_network(**params) -> Mapping[str, Any]: 164 | fit_params = f(**params) 165 | return fit_params 166 | return _hemisphere_select_fit_network 167 | 168 | 169 | def hemisphere_select_fit_points(f: callable) -> callable: 170 | def _hemisphere_select_fit_points(**params) -> Mapping[str, Any]: 171 | fit_params = f(**params) 172 | return fit_params 173 | return _hemisphere_select_fit_points 174 | 175 | 176 | def hemisphere_select_transform_surf(f: callable) -> callable: 177 | # This function doesn't actually select anything, but it prepares the 178 | # hemisphere parameters for the surface geometric primitive to perform 179 | # any specified selection later. 
    def _hemisphere_select_transform_surf(
        fit_params: Optional[Mapping[str, Any]] = None,
        **params,
    ) -> Mapping[str, Any]:
        # NOTE(review): ``fit_params`` defaults to None but is dereferenced
        # unconditionally below -- presumably the pipeline always supplies
        # it; confirm against the caller.
        result = f(fit_params=fit_params, **params)
        hemisphere_parameters = fit_params.get('hemisphere_parameters', None)
        if hemisphere_parameters is None:
            return result

        result['hemisphere_parameters'] = hemisphere_parameters
        return result
    return _hemisphere_select_transform_surf


def hemisphere_select_transform_network(f: callable) -> callable:
    """
    Decorator: restrict network data to the selected hemisphere, using the
    explicit ``lh_mask`` when every network provides one and falling back
    to the sign of the x coordinate otherwise.
    """
    def _hemisphere_select_transform_network(
        fit_params: Optional[Mapping[str, Any]] = None,
        **params,
    ) -> Mapping[str, Any]:
        result = f(fit_params=fit_params, **params)
        hemispheres_str = fit_params.get('hemispheres_str', 'both')
        networks = params.get('networks', None)
        if networks is not None:
            condition = None
            if hemispheres_str == 'left':
                if any([ds.lh_mask is None for ds in networks]):
                    # No mask available: fall back to x < 0 (left of midline)
                    condition = lambda coor, _, __: coor[:, 0] < 0
                else:
                    condition = lambda _, __, lh_mask: lh_mask
            elif hemispheres_str == 'right':
                if any([ds.lh_mask is None for ds in networks]):
                    condition = lambda coor, _, __: coor[:, 0] > 0
                else:
                    condition = lambda _, __, lh_mask: ~lh_mask
            if condition is not None:
                networks = networks.__class__(
                    ds.select(condition=condition) for ds in networks
                )
            result['networks'] = networks
        return result
    return _hemisphere_select_transform_network


def hemisphere_select_transform_points(f: callable) -> callable:
    """
    Decorator: restrict point data to the selected hemisphere using the
    sign of the x coordinate (negative x is treated as left).
    """
    def _hemisphere_select_transform_points(
        fit_params: Optional[Mapping[str, Any]] = None,
        **params,
    ) -> Mapping[str, Any]:
        result = f(fit_params=fit_params, **params)
        hemispheres_str = fit_params.get('hemispheres_str', 'both')
        points = params.get('points', None)
        if points is not None:
            condition = None
            if hemispheres_str == 'left':
                condition = lambda coor, _: coor[:, 0] < 0
            elif hemispheres_str == 'right':
                condition = lambda coor, _: coor[:, 0] > 0
            if condition is not None:
                points = points.__class__(
                    ds.select(condition=condition) for ds in points
                )
            result['points'] = points
        return result
    return _hemisphere_select_transform_points


def hemisphere_slack_fit(
    hemispheres: Sequence[str],
    surf_projection: Optional[str] = None,
    *,
    hemisphere_slack: Optional[Union[float, Literal['default']]] = 'default',
    **params,
) -> Mapping[str, Any]:
    """
    Fit step for the ``hemisphere_slack`` transform: resolve the
    ``'default'`` sentinel. Inflated/spherical projections overlap at the
    midline, so they get a 1.1 slack factor; other projections get none.
    """
    if hemisphere_slack == 'default':
        proj_require_slack = {'inflated', 'veryinflated', 'sphere'}
        if surf_projection in proj_require_slack:
            hemisphere_slack = 1.1
        else:
            hemisphere_slack = None
    return {
        'hemispheres': hemispheres,
        'hemisphere_slack': hemisphere_slack,
    }


def hemisphere_slack_transform(
    fit_params: Mapping[str, Any],
    **params,
) -> Mapping[str, Any]:
    # The generic slack transform contributes nothing; geometry-specific
    # decorated variants below perform the actual translation.
    return {}


def hemisphere_slack_get_displacement(
    hemisphere_slack: float,
    hemi_gap: float,
    hw_left: float,
    hw_right: float,
) -> float:
    """
    Compute how far each hemisphere must be shifted along x (in opposite
    directions) so that the gap between hemisphere centres equals the sum
    of their half-widths scaled by ``hemisphere_slack``.
    """
    min_gap = hw_left + hw_right
    target_gap = min_gap * hemisphere_slack
    # Each hemisphere moves half of the required change in gap.
    displacement = (target_gap - hemi_gap) / 2
    return displacement


def hemisphere_slack_get_displacement_from_coor(
    hemisphere_slack: float,
    coor: Tensor,
    left_mask: Tensor,
) -> float:
    """
    As ``hemisphere_slack_get_displacement``, but derive half-widths and
    the centre-to-centre gap directly from a coordinate array and a
    boolean left-hemisphere mask.
    """
    hw_left = (
        coor[left_mask, 0].max()
        - coor[left_mask, 0].min()
    ) / 2
    hw_right = (
        coor[~left_mask, 0].max()
        - coor[~left_mask, 0].min()
    ) / 2
    # Gap between the x midpoints of the two hemispheres.
    hemi_gap = (
        coor[~left_mask, 0].max()
        + coor[~left_mask, 0].min()
    ) / 2 - (
        coor[left_mask, 0].max()
        + coor[left_mask, 0].min()
    ) / 2
    return hemisphere_slack_get_displacement(
        hemisphere_slack=hemisphere_slack,
        hemi_gap=hemi_gap,
        hw_left=hw_left,
        hw_right=hw_right,
    )


def hemisphere_slack_fit_surf(f: callable) -> callable:
    """
    Decorator: compute the slack displacement from surface bounds when
    both hemispheres are plotted and a slack factor is set.
    """
    def _hemisphere_slack_fit_surf(
        surf: Optional[CortexTriSurface] = None,
        surf_projection: Optional[str] = None,
        **params,
    ) -> Mapping[str, Any]:
        fit_params = f(surf_projection=surf_projection, **params)
        displacement = fit_params.get('displacement', None)
        if displacement is not None:
            # Another geometry already computed the displacement.
            return fit_params

        hemispheres = fit_params.get('hemispheres', ())
        hemisphere_slack = fit_params.get('hemisphere_slack', None)
        if len(hemispheres) == 2 and hemisphere_slack is not None:
            if surf is not None:
                # Project first so bounds reflect the plotted projection.
                surf.left.project(surf_projection)
                surf.right.project(surf_projection)
                hw_left = (surf.left.bounds[1] - surf.left.bounds[0]) / 2
                hw_right = (surf.right.bounds[1] - surf.right.bounds[0]) / 2
                hemi_gap = surf.right.center[0] - surf.left.center[0]
                displacement = hemisphere_slack_get_displacement(
                    hemisphere_slack=hemisphere_slack,
                    hemi_gap=hemi_gap,
                    hw_left=hw_left,
                    hw_right=hw_right,
                )
                fit_params['displacement'] = displacement

        return fit_params
    return _hemisphere_slack_fit_surf


def hemisphere_slack_fit_network(f: callable) -> callable:
    """
    Decorator: compute the slack displacement from network node
    coordinates when no other geometry has supplied one.
    """
    def _hemisphere_slack_fit_network(
        networks: Optional[Sequence[NetworkDataCollection]] = None,
        **params,
    ) -> Mapping[str, Any]:
        fit_params = f(**params)
        displacement = fit_params.get('displacement', None)
        if displacement is not None:
            return fit_params

        hemispheres = fit_params.get('hemispheres', ())
        hemisphere_slack = fit_params.get('hemisphere_slack', None)
        if len(hemispheres) == 2 and hemisphere_slack is not None:
            if networks is not None:
                ref_coor = np.concatenate([n.coor for n in networks])
                if any([n.lh_mask is None for n in networks]):
                    # No explicit mask: infer laterality from the sign of x.
                    left_mask = ref_coor[:, 0] < 0
                else:
                    left_mask = np.concatenate([n.lh_mask for n in networks])
                displacement = hemisphere_slack_get_displacement_from_coor(
                    hemisphere_slack=hemisphere_slack,
                    coor=ref_coor,
                    left_mask=left_mask,
                )
                fit_params['displacement'] = displacement

        return fit_params
    return _hemisphere_slack_fit_network


def hemisphere_slack_fit_points(f: callable) -> callable:
    """
    Decorator: compute the slack displacement from point-cloud coordinates
    when no other geometry has supplied one. Laterality is inferred from
    the sign of the x coordinate.
    """
    def _hemisphere_slack_fit_points(
        points: Optional[Sequence[PointDataCollection]] = None,
        **params,
    ) -> Mapping[str, Any]:
        fit_params = f(**params)
        displacement = fit_params.get('displacement', None)
        if displacement is not None:
            return fit_params

        hemispheres = fit_params.get('hemispheres', ())
        hemisphere_slack = fit_params.get('hemisphere_slack', None)
        if len(hemispheres) == 2 and hemisphere_slack is not None:
            if points is not None:
                ref_coor = np.concatenate([p.points.points for p in points])
                left_mask = ref_coor[:, 0] < 0
                displacement = hemisphere_slack_get_displacement_from_coor(
                    hemisphere_slack=hemisphere_slack,
                    coor=ref_coor,
                    left_mask=left_mask,
                )
                fit_params['displacement'] = displacement

        return fit_params
    return _hemisphere_slack_fit_points


def hemisphere_slack_transform_surf(f: callable) -> callable:
    """
    Decorator: apply the fitted slack displacement to the surface, moving
    the left hemisphere by -x and the right by +x.
    """
    def _hemisphere_slack_transform_surf(
        surf: Optional[CortexTriSurface] = None,
        surf_projection: Optional[str] = None,
        fit_params: Optional[Mapping[str, Any]] = None,
        **params,
    ) -> Mapping[str, Any]:
        result = f(fit_params=fit_params, **params)
        displacement = fit_params.get('displacement', None)
        if displacement is None:
            return result

        hemispheres = \
fit_params.get('hemispheres', ())
        hemisphere_slack = fit_params.get('hemisphere_slack', None)
        if len(hemispheres) == 2 and hemisphere_slack is not None:
            if surf is not None:
                left = surf.left.translate((-displacement, 0, 0))
                right = surf.right.translate((displacement, 0, 0))
                surf = CortexTriSurface(left=left, right=right, mask=surf.mask)
                result['surf'] = surf
                # Tag the projection so downstream steps can tell the
                # translated geometry apart from the original.
                result['surf_projection'] = f'{surf_projection}_translated'

        return result
    return _hemisphere_slack_transform_surf


def hemisphere_slack_transform_network(f: callable) -> callable:
    """
    Decorator: apply the fitted slack displacement to network data, using
    the explicit ``lh_mask`` when every network provides one and the sign
    of the x coordinate otherwise.
    """
    def _hemisphere_slack_transform_network(
        networks: Optional[Sequence[NetworkDataCollection]] = None,
        fit_params: Optional[Mapping[str, Any]] = None,
        **params,
    ) -> Mapping[str, Any]:
        result = f(fit_params=fit_params, **params)
        displacement = fit_params.get('displacement', None)
        if displacement is None:
            return result

        hemispheres = fit_params.get('hemispheres', ())
        hemisphere_slack = fit_params.get('hemisphere_slack', None)
        if len(hemispheres) == 2 and hemisphere_slack is not None:
            if networks is not None:
                if any([n.lh_mask is None for n in networks]):
                    # Fallback: classify nodes by the sign of x.
                    def lh_condition(coor, _, __):
                        return coor[:, 0] < 0
                    def rh_condition(coor, _, __):
                        return coor[:, 0] > 0
                else:
                    def lh_condition(_, __, lh_mask):
                        return lh_mask
                    def rh_condition(_, __, lh_mask):
                        return ~lh_mask
                networks = networks.translate(
                    (-displacement, 0, 0),
                    condition=lh_condition,
                )
                networks = networks.translate(
                    (displacement, 0, 0),
                    condition=rh_condition,
                )
                result['networks'] = networks

        return result
    return _hemisphere_slack_transform_network


def hemisphere_slack_transform_points(f: callable) -> callable:
    """
    Decorator: apply the fitted slack displacement to point data. Left
    (x < 0) and right (x > 0) subsets are translated separately and then
    recombined into a single collection.
    """
    def _hemisphere_slack_transform_points(
        points: Optional[Sequence[PointDataCollection]] = None,
        fit_params: Optional[Mapping[str, Any]] = None,
        **params,
    ) -> Mapping[str, Any]:
        result = f(fit_params=fit_params, **params)
        displacement = fit_params.get('displacement', None)
        if displacement is None:
            return result

        hemispheres = fit_params.get('hemispheres', ())
        hemisphere_slack = fit_params.get('hemisphere_slack', None)
        if len(hemispheres) == 2 and hemisphere_slack is not None:
            if points is not None:
                left_points = points.translate(
                    (-displacement, 0, 0),
                    condition=lambda coor, _: coor[:, 0] < 0,
                )
                right_points = points.translate(
                    (displacement, 0, 0),
                    condition=lambda coor, _: coor[:, 0] > 0
                )
                points = PointDataCollection(
                    lp + rp for lp, rp in zip(left_points, right_points)
                )
                result['points'] = points

        return result
    return _hemisphere_slack_transform_points


# Public transform: select a hemisphere (or both) for plotting.
hemisphere_select = GeomTransform(
    name='hemisphere_select',
    fit=hemisphere_select_fit,
    transform=hemisphere_select_transform,
    meta=hemisphere_select_meta,
)


# Public transform: push the hemispheres apart by a slack factor.
hemisphere_slack = GeomTransform(
    name='hemisphere_slack',
    fit=hemisphere_slack_fit,
    transform=hemisphere_slack_transform,
)
--------------------------------------------------------------------------------
/src/hyve/util.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Plot and report utilities
~~~~~~~~~~~~~~~~~~~~~~~~~
Utilities for plotting and reporting.
8 | """ 9 | import dataclasses 10 | import re 11 | from math import floor 12 | from typing import ( 13 | Any, 14 | Dict, 15 | Literal, 16 | Optional, 17 | Sequence, 18 | Tuple, 19 | Union, 20 | ) 21 | 22 | import numpy as np 23 | import pandas as pd 24 | import pyvista as pv 25 | from PIL import Image 26 | from pyvista.plotting.helpers import view_vectors 27 | 28 | from .const import DEFAULT_ROBUST_LIM_PCT, Tensor 29 | 30 | 31 | def sanitise(string: str) -> str: 32 | """ 33 | Sanitise a string for use as a parameter name. 34 | """ 35 | string = string.lower() 36 | return re.sub(r'\W+|^(?=\d)','_', string) 37 | 38 | 39 | def cortex_theme() -> Any: 40 | """ 41 | Return a theme for the pyvista plotter for use with the cortical surface 42 | plotter. In practice, we currently don't use this because PyVista doesn't 43 | always handle transparency the way that we would like, but we keep it 44 | here in case we want to use it in the future. 45 | """ 46 | cortex_theme = pv.themes.DocumentTheme() 47 | cortex_theme.transparent_background = True 48 | return cortex_theme 49 | 50 | 51 | def half_width( 52 | p: pv.Plotter, 53 | slack: float = 1.05, 54 | ) -> Tuple[float, float, float]: 55 | """ 56 | Return the half-width of the plotter's bounds, multiplied by a slack 57 | factor. 58 | 59 | We use this to set the position of the camera when we're using 60 | ``cortex_cameras`` and we receive a string corresponding to an anatomical 61 | direction (e.g. "dorsal", "ventral", etc.) as the ``position`` argument. 62 | 63 | The slack factor is used to ensure that the camera is not placed exactly 64 | on the edge of the plotter bounds, which can cause clipping of the 65 | surface. 
66 | """ 67 | return ( 68 | (p.bounds[1] - p.bounds[0]) / 2 * slack, 69 | (p.bounds[3] - p.bounds[2]) / 2 * slack, 70 | (p.bounds[5] - p.bounds[4]) / 2 * slack, 71 | ) 72 | 73 | 74 | def _relabel_parcels_hemi( 75 | data: np.ndarray, 76 | null_value: int = 0, 77 | ) -> np.ndarray: 78 | data = data.astype(np.int32) 79 | null_mask = (data == null_value) 80 | _, data = np.unique(data, return_inverse=True) 81 | data[null_mask] = null_value - 1 82 | return data + 1 83 | 84 | 85 | def relabel_parcels( 86 | left_data: np.ndarray, 87 | right_data: np.ndarray, 88 | null_value: int = 0, 89 | ) -> Tuple[np.ndarray, np.ndarray]: 90 | """ 91 | Relabel the parcels in the left and right hemisphere data arrays so that 92 | they are contiguous and start at 1. 93 | 94 | Parameters 95 | ---------- 96 | left_data : np.ndarray 97 | Array of parcel values for the left hemisphere. 98 | right_data : np.ndarray 99 | Array of parcel values for the right hemisphere. 100 | null_value : int, optional 101 | The value to use for null (or background) values in the data arrays. 102 | By default, this is 0. 103 | 104 | Returns 105 | ------- 106 | left_data : np.ndarray 107 | Array of parcel values for the left hemisphere, with contiguous parcel 108 | values starting at 1. 109 | right_data : np.ndarray 110 | Array of parcel values for the right hemisphere, with contiguous parcel 111 | values starting at the maximum value in left_data, plus 1. 
112 | """ 113 | if left_data.squeeze().ndim == 2: 114 | return left_data, right_data 115 | # Relabel the parcels in the left hemisphere 116 | left_data = _relabel_parcels_hemi(left_data, null_value=null_value) 117 | offset = np.max(left_data) 118 | 119 | # Relabel the parcels in the right hemisphere 120 | right_data = _relabel_parcels_hemi(right_data, null_value=null_value) 121 | right_data[right_data != null_value] += offset 122 | 123 | return left_data, right_data 124 | 125 | 126 | def auto_focus( 127 | vector: Sequence[float], 128 | plotter: pv.Plotter, 129 | slack: float = 1.05, 130 | focal_point: Optional[Sequence[float]] = None, 131 | ) -> Tuple[float, float, float]: 132 | vector = np.asarray(vector) 133 | if focal_point is None: 134 | focal_point = plotter.center 135 | hw = half_width(plotter, slack=slack) 136 | hw = np.asarray(hw) 137 | hw[hw == 0] = 1 138 | scalar = np.nanmin(hw / np.abs(vector + np.finfo(np.float32).eps)) 139 | vector = vector * scalar + focal_point 140 | return vector, focal_point 141 | 142 | 143 | def set_default_views( 144 | hemisphere: str, 145 | ) -> Sequence[str]: 146 | common = ('dorsal', 'ventral', 'anterior', 'posterior') 147 | if hemisphere == 'both': 148 | views = ('left', 'right') + common 149 | else: 150 | views = ('lateral', 'medial') + common 151 | return views 152 | 153 | 154 | def cortex_view_dict() -> Dict[str, Tuple[Sequence[float], Sequence[float]]]: 155 | """ 156 | Return a dict containing tuples of (vector, viewup) pairs for each 157 | hemisphere. The vector is the position of the camera, and the 158 | viewup is the direction of the "up" vector in the camera frame. 
159 | """ 160 | common = { 161 | 'dorsal': ((0, 0, 1), (1, 0, 0)), 162 | 'ventral': ((0, 0, -1), (-1, 0, 0)), 163 | 'anterior': ((0, 1, 0), (0, 0, 1)), 164 | 'posterior': ((0, -1, 0), (0, 0, 1)), 165 | 'down': ((0, 0, 1), (0, 1, 0)), 166 | } 167 | return { 168 | 'left': { 169 | 'lateral': ((-1, 0, 0), (0, 0, 1)), 170 | 'medial': ((1, 0, 0), (0, 0, 1)), 171 | **common, 172 | }, 173 | 'right': { 174 | 'lateral': ((1, 0, 0), (0, 0, 1)), 175 | 'medial': ((-1, 0, 0), (0, 0, 1)), 176 | **common, 177 | }, 178 | 'both': { 179 | 'left': ((-1, 0, 0), (0, 0, 1)), 180 | 'right': ((1, 0, 0), (0, 0, 1)), 181 | **common, 182 | }, 183 | } 184 | 185 | 186 | def cortex_cameras( 187 | position: Union[str, Sequence[Tuple[float, float, float]]], 188 | plotter: pv.Plotter, 189 | negative: bool = False, 190 | hemisphere: Optional[Literal['left', 'right']] = None, 191 | ) -> Tuple[ 192 | Tuple[float, float, float], 193 | Tuple[float, float, float], 194 | Tuple[float, float, float], 195 | ]: 196 | """ 197 | Return a tuple of (position, focal_point, view_up) for the camera. This 198 | function accepts a string corresponding to an anatomical direction (e.g. 199 | "dorsal", "ventral", etc.) as the ``position`` argument, and returns the 200 | corresponding camera position, focal point, and view up vector. 201 | """ 202 | hemisphere = hemisphere or 'both' 203 | # if not isinstance(hemisphere, str): 204 | # if len(hemisphere) == 1: 205 | # hemisphere = hemisphere[0] 206 | # else: 207 | # hemisphere = 'both' 208 | if isinstance(position, str): 209 | try: 210 | # TODO: I'm a little bit concerned that ``view_vectors`` is not 211 | # part of the public API. We should probably find a better 212 | # way to do this. 
213 | position = view_vectors(view=position, negative=negative) 214 | except ValueError as e: 215 | if isinstance(hemisphere, str): 216 | try: 217 | vector, view_up = cortex_view_dict()[hemisphere][position] 218 | vector, focal_point = auto_focus(vector, plotter) 219 | return (vector, focal_point, view_up) 220 | except KeyError: 221 | raise e 222 | else: 223 | raise e 224 | return position 225 | 226 | 227 | def scale_image_preserve_aspect_ratio( 228 | img: Image.Image, 229 | target_size: Tuple[int, int], 230 | ) -> Image.Image: 231 | width, height = img.size 232 | target_width, target_height = target_size 233 | width_ratio = target_width / width 234 | height_ratio = target_height / height 235 | ratio = min(width_ratio, height_ratio) 236 | new_width = floor(width * ratio) 237 | new_height = floor(height * ratio) 238 | return img.resize((new_width, new_height)) 239 | 240 | 241 | def scalar_percentile( 242 | data: Tensor, 243 | percent: Union[float, Tuple[float, float]] = DEFAULT_ROBUST_LIM_PCT, 244 | bgval: Optional[float] = 0.0, 245 | ) -> Tuple[float, float]: 246 | if isinstance(percent, float): 247 | percent = (percent, 100 - percent) 248 | excl_mask = np.isnan(data) | np.isinf(data) 249 | if bgval is not None: 250 | excl_mask |= np.isclose(data, bgval) 251 | data = data[~excl_mask] 252 | return ( 253 | np.nanpercentile(data, percent[0]), 254 | np.nanpercentile(data, percent[1]), 255 | ) 256 | 257 | 258 | def plot_to_display( 259 | p: pv.Plotter, 260 | cpos: Optional[Sequence[Sequence[float]]] = 'yz', 261 | ) -> None: 262 | p.show(cpos=cpos) 263 | 264 | 265 | def format_position_as_string( 266 | position: Union[str, Sequence[Tuple[float, float, float]]], 267 | precision: int = 2, 268 | delimiter: str = 'x', 269 | ) -> str: 270 | def _fmt_field(field: float) -> str: 271 | return delimiter.join( 272 | str(round(v, precision)) 273 | if v >= 0 274 | else f'neg{abs(round(v, precision))}' 275 | for v in field 276 | ) 277 | 278 | if isinstance(position, str): 279 | 
return f'{position}' 280 | elif isinstance(position[0], float) or isinstance(position[0], int): 281 | return f'vector{_fmt_field(position)}' 282 | else: 283 | return ( 284 | f'vector{_fmt_field(position[0])}AND' 285 | f'focus{_fmt_field(position[1])}AND' 286 | f'viewup{_fmt_field(position[2])}' 287 | ) 288 | 289 | 290 | def filter_node_data( 291 | val: np.ndarray, 292 | name: str = 'node', 293 | threshold: Optional[Union[float, int]] = 0.0, 294 | percent_threshold: bool = False, 295 | topk_threshold: bool = False, 296 | absolute: bool = True, 297 | node_selection: Optional[np.ndarray] = None, 298 | incident_edge_selection: Optional[np.ndarray] = None, 299 | removed_val: Optional[float] = None, 300 | surviving_val: Optional[float] = 1.0, 301 | ) -> pd.DataFrame: 302 | node_incl = np.ones_like(val, dtype=bool) 303 | 304 | sgn = np.sign(val) 305 | if absolute: 306 | val = np.abs(val) 307 | if node_selection is not None: 308 | node_incl[~node_selection] = 0 309 | if incident_edge_selection is not None: 310 | node_incl[~incident_edge_selection.any(axis=-1)] = 0 311 | if topk_threshold: 312 | indices = np.argpartition(-val, int(threshold)) 313 | node_incl[indices[int(threshold) :]] = 0 314 | elif percent_threshold: 315 | node_incl[val < np.percentile(val[node_incl], 100 * threshold)] = 0 316 | elif threshold is not None: 317 | node_incl[val < threshold] = 0 318 | 319 | if removed_val is not None: 320 | val[~node_incl] = removed_val 321 | if surviving_val is not None: 322 | val[node_incl] = surviving_val 323 | index = np.arange(val.shape[0]) 324 | else: 325 | val = val[node_incl] 326 | sgn = sgn[node_incl] 327 | index = np.where(node_incl)[0] 328 | 329 | return pd.DataFrame( 330 | { 331 | f'{name}_val': val, 332 | f'{name}_sgn': sgn, 333 | }, 334 | index=pd.Index(index + 1, name=name), 335 | ) 336 | 337 | 338 | def filter_adjacency_data( 339 | adj: np.ndarray, 340 | name: str = 'edge', 341 | threshold: Union[float, int] = 0.0, 342 | percent_threshold: bool = False, 343 | 
topk_threshold_nodewise: bool = False, 344 | absolute: bool = True, 345 | incident_node_selection: Optional[np.ndarray] = None, 346 | connected_node_selection: Optional[np.ndarray] = None, 347 | edge_selection: Optional[np.ndarray] = None, 348 | removed_val: Optional[float] = None, 349 | surviving_val: Optional[float] = 1.0, 350 | emit_degree: Union[bool, Literal['abs', '+', '-']] = False, 351 | emit_incident_nodes: Union[bool, tuple] = False, 352 | ) -> pd.DataFrame: 353 | adj_incl = np.ones_like(adj, dtype=bool) 354 | 355 | sgn = np.sign(adj) 356 | if absolute: 357 | adj = np.abs(adj) 358 | if incident_node_selection is not None: 359 | adj_incl[~incident_node_selection, :] = 0 360 | if connected_node_selection is not None: 361 | adj_incl[~connected_node_selection, :] = 0 362 | adj_incl[:, ~connected_node_selection] = 0 363 | if edge_selection is not None: 364 | adj_incl[~edge_selection] = 0 365 | if topk_threshold_nodewise: 366 | indices = np.argpartition(-adj, int(threshold), axis=-1) 367 | indices = indices[..., int(threshold) :] 368 | adj_incl[ 369 | np.arange(adj.shape[0], dtype=int).reshape(-1, 1), indices 370 | ] = 0 371 | elif percent_threshold: 372 | adj_incl[adj < np.percentile(adj[adj_incl], 100 * threshold)] = 0 373 | else: 374 | adj_incl[adj < threshold] = 0 375 | 376 | degree = None 377 | if emit_degree == 'abs': 378 | degree = np.abs(adj).sum(axis=0) 379 | elif emit_degree == '+': 380 | degree = np.maximum(adj, 0).sum(axis=0) 381 | elif emit_degree == '-': 382 | degree = -np.minimum(adj, 0).sum(axis=0) 383 | elif emit_degree: 384 | degree = adj.sum(axis=0) 385 | 386 | indices_incl = np.triu_indices(adj.shape[0], k=1) 387 | adj_incl = adj_incl | adj_incl.T 388 | 389 | incidents = None 390 | if emit_incident_nodes: 391 | incidents = adj_incl.any(axis=0) 392 | if isinstance(emit_incident_nodes, tuple): 393 | exc, inc = emit_incident_nodes 394 | incidents = np.where(incidents, inc, exc) 395 | 396 | if removed_val is not None: 397 | adj[~adj_incl] = 
removed_val 398 | if surviving_val is not None: 399 | adj[adj_incl] = surviving_val 400 | else: 401 | adj_incl = adj_incl[indices_incl] 402 | indices_incl = tuple(i[adj_incl] for i in indices_incl) 403 | adj = adj[indices_incl] 404 | sgn = sgn[indices_incl] 405 | 406 | indices_incl = [i + 1 for i in indices_incl] 407 | edge_values = pd.DataFrame( 408 | { 409 | f'{name}_val': adj, 410 | f'{name}_sgn': sgn, 411 | }, 412 | index=pd.MultiIndex.from_arrays(indices_incl, names=['src', 'dst']), 413 | ) 414 | 415 | if degree is not None: 416 | degree = pd.DataFrame( 417 | degree, 418 | index=range(1, degree.shape[0] + 1), 419 | columns=(f'{name}_degree',), 420 | ) 421 | if incidents is None: 422 | return edge_values, degree 423 | if incidents is not None: 424 | incidents = pd.DataFrame( 425 | incidents, 426 | index=range(1, incidents.shape[0] + 1), 427 | columns=(f'{name}_incidents',), 428 | ) 429 | if degree is None: 430 | return edge_values, incidents 431 | df = degree.join(incidents, how='outer') 432 | return edge_values, df 433 | return edge_values 434 | 435 | 436 | def premultiply_alpha( 437 | input: Tensor, 438 | ) -> Tensor: 439 | """Premultiply alpha channel of RGBA image.""" 440 | return np.concatenate( 441 | (input[..., :3] * input[..., 3:], input[..., 3:]), 442 | axis=-1, 443 | ) 444 | 445 | 446 | def unmultiply_alpha( 447 | input: Tensor, 448 | ) -> Tensor: 449 | """Unmultiply alpha channel of RGBA image.""" 450 | out = np.concatenate( 451 | (input[..., :3] / input[..., 3:], input[..., 3:]), 452 | axis=-1, 453 | ) 454 | return np.where(np.isnan(out), 0.0, out) 455 | 456 | 457 | def source_over( 458 | src: Tensor, 459 | dst: Tensor, 460 | ) -> Tensor: 461 | """ 462 | Alpha composite two RGBA images using the source-over blend mode. 463 | Assumes premultiplied alpha. 
464 | """ 465 | return src + dst * (1.0 - src[..., 3:]) 466 | 467 | 468 | @dataclasses.dataclass(frozen=True) 469 | class LinearScalarMapper: 470 | norm: Optional[Tuple[float, float]] = None 471 | 472 | def __call__( 473 | self, 474 | X: Tensor, 475 | vmin: float, 476 | vmax: float, 477 | ) -> Tensor: 478 | X = np.clip(X, vmin, vmax) 479 | if self.norm is None: 480 | return X 481 | return ( 482 | (X - vmin) / 483 | (vmax - vmin) * 484 | (self.norm[1] - self.norm[0]) + 485 | self.norm[0] 486 | ) 487 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hypercoil/hyve/5928f87fc6808cb81d5b59ae4a69327d3e4d9c45/tests/__init__.py -------------------------------------------------------------------------------- /tests/test_allview.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 ts=4 sw=4 et: 4 | """ 5 | Unit tests for all views on a brain surface 6 | """ 7 | import templateflow.api as tflow 8 | 9 | from hyve_examples import get_null400_cifti 10 | from hyve.flows import plotdef 11 | from hyve.transforms import ( 12 | surf_from_archive, 13 | surf_scalars_from_cifti, 14 | surf_scalars_from_nifti, 15 | add_surface_overlay, 16 | parcellate_colormap, 17 | vertex_to_face, 18 | save_grid, 19 | plot_to_image, 20 | text_element, 21 | ) 22 | 23 | from hyve.elements import TextBuilder 24 | 25 | COMMON_PARAMS = dict( 26 | load_mask=True, 27 | hemisphere=['left', 'right', None], 28 | views={ 29 | 'left': ('medial', 'lateral'), 30 | 'right': ('medial', 'lateral'), 31 | 'both': ('dorsal', 'ventral', 'anterior', 'posterior'), 32 | }, 33 | output_dir='/tmp', 34 | window_size=(600, 500), 35 | ) 36 | 37 | 38 | def get_annotations(): 39 | return { 40 | 0: dict( 
            hemisphere='left',
            view='lateral',
        ),
        1: dict(view='anterior'),
        2: dict(
            hemisphere='right',
            view='lateral',
        ),
        3: dict(view='dorsal'),
        4: dict(elements=['title']),
        5: dict(view='ventral'),
        6: dict(
            hemisphere='left',
            view='medial',
        ),
        7: dict(view='posterior'),
        8: dict(
            hemisphere='right',
            view='medial',
        ),
    }


def test_allviews_scalars():
    """Render a full view grid of a scalar (GM density) overlay."""
    annotations = get_annotations()
    # The centre cell also hosts the collected scalar bar.
    annotations[4]['elements'] = ['scalar_bar', 'title']
    plot_f = plotdef(
        surf_from_archive(),
        add_surface_overlay(
            'GM Density',
            surf_scalars_from_nifti(
                'GM Density', template='fsaverage', plot=True
            ),
        ),
        plot_to_image(),
        save_grid(
            n_cols=3, n_rows=3, padding=10,
            canvas_size=(1800, 1500),
            canvas_color=(0, 0, 0),
            fname_spec='scalars-gmdensity_view-all_page-{page}',
            scalar_bar_action='collect',
            annotations=annotations,
        ),
        text_element(
            name='title',
            content='{surfscalars}',
            bounding_box_height=192,
            font_size_multiplier=0.2,
            font_color='#cccccc',
            priority=-1,
        ),
    )
    plot_f(
        template='fsaverage',
        gm_density_nifti=tflow.get(
            template='MNI152NLin2009cAsym',
            suffix='probseg',
            label='GM',
            resolution=2
        ),
        gm_density_clim=(0.2, 0.9),
        gm_density_below_color=None,
        gm_density_scalar_bar_style={
            'name': None,
            'orientation': 'h',
        },
        surf_projection=('pial',),
        title_element_content='Gray Matter Density',
        **COMMON_PARAMS,
    )


def test_allviews_parcellation():
    """Render a full view grid of a parcellation overlay."""
    annotations = get_annotations()
    plot_f = plotdef(
        surf_from_archive(),
        surf_scalars_from_cifti(
            'parcellation',
            allow_multihemisphere=False,
        ),
        parcellate_colormap('parcellation', 'network'),
        vertex_to_face('parcellation'),
        plot_to_image(),
        save_grid(
            n_cols=3, n_rows=3, padding=10,
            canvas_size=(1800, 1500),
            canvas_color=(0, 0, 0),
            fname_spec='scalars-parcellation_view-all_page-{page}',
            scalar_bar_action='collect',
            annotations=annotations,
        ),
    )
    plot_f(
        template='fsLR',
        parcellation_cifti=get_null400_cifti(),
        surf_projection=('veryinflated',),
        elements={
            'title': (
                TextBuilder(
                    content='null parcellation',
                    bounding_box_height=128,
                    font_size_multiplier=0.2,
                    font_color='#cccccc',
                    priority=-1,
                ),
            ),
        },
        **COMMON_PARAMS,
    )
--------------------------------------------------------------------------------
/tests/test_figbuilder.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Unit tests for figure builder mapping over both hemispheres
"""
import pytest
import pyvista as pv
import templateflow.api as tflow

from hyve.flows import plotdef
from hyve.layout import Cell, ColGroupSpec
from hyve.transforms import (
    surf_from_archive,
    surf_scalars_from_nifti,
    closest_ortho_camera,
    scalar_focus_camera,
    planar_sweep_camera,
    auto_camera,
    plot_to_image,
    save_grid,
    save_figure,
)


@pytest.mark.ci_unsupported
def test_panoptic():
    """Build a 2x2x2 figure layout mapping views over both hemispheres."""
    layout = Cell() | Cell() << (1 / 2)
    layout = layout * layout * layout
    annotations = {
        0: dict(
            hemisphere='left',
            view='lateral',
        ),
        1: dict(
            hemisphere='left',
            view='medial',
        ),
        2: dict(
            hemisphere='right',
            view='lateral',
        ),
        3: dict(
            hemisphere='right',
            view='medial',
        ),
        4: dict(
            view='dorsal',
        ),
        5: dict(
view='ventral', 52 | ), 53 | 6: dict( 54 | view='anterior', 55 | ), 56 | 7: dict( 57 | view='posterior', 58 | ), 59 | } 60 | layout = layout.annotate(annotations) 61 | plot_f = plotdef( 62 | surf_from_archive(), 63 | surf_scalars_from_nifti( 64 | 'difumo', 65 | template='fsaverage', 66 | select=list(range(60)), 67 | plot=True, 68 | ), 69 | plot_to_image(), 70 | save_figure( 71 | canvas_size=(3200, 4800), 72 | canvas_color=(0, 0, 0), 73 | sort_by=['surfscalars'], 74 | layout_kernel=layout, 75 | group_spec = [ 76 | ColGroupSpec( 77 | variable='surfscalars', 78 | max_levels=16, 79 | ), 80 | ], 81 | fname_spec=( 82 | 'scalars-difumo_view-all_page-{page}' 83 | ), 84 | ), 85 | ) 86 | nifti = tflow.get( 87 | template='MNI152NLin2009cAsym', 88 | atlas='DiFuMo', 89 | resolution=2, 90 | desc='64dimensions' 91 | ) 92 | plot_f( 93 | template='fsaverage', 94 | load_mask=True, 95 | difumo_nifti=nifti, 96 | surf_projection='pial', 97 | surf_scalars_cmap='viridis', 98 | surf_scalars_clim_percentile=True, 99 | surf_scalars_below_color='#666666', 100 | #theme=pv.themes.DarkTheme(), 101 | window_size=(400, 300), 102 | hemisphere=['left', 'right', None], 103 | views={ 104 | 'left': ('medial', 'lateral'), 105 | 'right': ('medial', 'lateral'), 106 | 'both': ('dorsal', 'ventral', 'anterior', 'posterior'), 107 | }, 108 | output_dir='/tmp', 109 | ) 110 | 111 | 112 | @pytest.mark.ci_unsupported 113 | def test_focused_view_both_hemispheres(): 114 | plot_f = plotdef( 115 | surf_from_archive(), 116 | surf_scalars_from_nifti( 117 | 'difumo', 118 | template='fsaverage', 119 | select=list(range(60)), 120 | plot=True 121 | ), 122 | plot_to_image(), 123 | scalar_focus_camera(kind='centroid'), 124 | save_grid( 125 | n_cols=4, n_rows=8, padding=4, 126 | canvas_size=(1280, 1640), 127 | canvas_color=(0, 0, 0), 128 | sort_by=['surfscalars'], 129 | fname_spec=( 130 | 'scalars-difumo_view-focused_page-{page}' 131 | ) 132 | ), 133 | ) 134 | nifti = tflow.get( 135 | template='MNI152NLin2009cAsym', 136 | 
atlas='DiFuMo', 137 | resolution=2, 138 | desc='64dimensions' 139 | ) 140 | plot_f( 141 | template='fsaverage', 142 | load_mask=True, 143 | difumo_nifti=nifti, 144 | surf_projection='pial', 145 | surf_scalars_cmap='viridis', 146 | surf_scalars_below_color='#333333', 147 | window_size=(400, 250), 148 | output_dir='/tmp', 149 | ) 150 | 151 | @pytest.mark.ci_unsupported 152 | def test_ortho_views_both_hemispheres(): 153 | plot_f = plotdef( 154 | surf_from_archive(), 155 | surf_scalars_from_nifti('difumo', template='fsaverage', plot=True), 156 | plot_to_image(), 157 | closest_ortho_camera(n_ortho=3), 158 | save_grid( 159 | n_cols=3, n_rows=8, padding=4, 160 | canvas_size=(960, 1640), 161 | canvas_color=(0, 0, 0), 162 | sort_by=['surfscalars'], 163 | fname_spec=( 164 | 'scalars-difumo_view-ortho_page-{page}' 165 | ) 166 | ), 167 | ) 168 | nifti = tflow.get( 169 | template='MNI152NLin2009cAsym', 170 | atlas='DiFuMo', 171 | resolution=2, 172 | desc='64dimensions' 173 | ) 174 | plot_f( 175 | template='fsaverage', 176 | load_mask=True, 177 | difumo_nifti=nifti, 178 | surf_projection='pial', 179 | surf_scalars_cmap='Purples', 180 | surf_scalars_below_color='white', 181 | window_size=(400, 250), 182 | output_dir='/tmp', 183 | #hemisphere=['left', 'right'], 184 | ) 185 | 186 | @pytest.mark.ci_unsupported 187 | def test_planar_sweep_both_hemispheres(): 188 | plot_f = plotdef( 189 | surf_from_archive(), 190 | surf_scalars_from_nifti('difumo', template='fsaverage', plot=True), 191 | plot_to_image(), 192 | planar_sweep_camera(initial=(1, 0, 0), n_angles=10), 193 | save_grid( 194 | n_cols=10, n_rows=8, padding=4, 195 | canvas_size=(3200, 1640), 196 | canvas_color=(0, 0, 0), 197 | sort_by=['surfscalars'], 198 | fname_spec=( 199 | 'scalars-difumo_view-planar_page-{page}' 200 | ) 201 | ), 202 | ) 203 | nifti = tflow.get( 204 | template='MNI152NLin2009cAsym', 205 | atlas='DiFuMo', 206 | resolution=2, 207 | desc='64dimensions' 208 | ) 209 | plot_f( 210 | template='fsaverage', 211 | 
load_mask=True, 212 | difumo_nifti=nifti, 213 | surf_projection='pial', 214 | surf_scalars_cmap='Purples', 215 | surf_scalars_below_color='white', 216 | window_size=(400, 250), 217 | output_dir='/tmp', 218 | hemisphere=['left', 'right'], 219 | ) 220 | 221 | @pytest.mark.ci_unsupported 222 | def test_auto_view_both_hemispheres(): 223 | plot_f = plotdef( 224 | surf_from_archive(), 225 | surf_scalars_from_nifti('difumo', template='fsaverage', plot=True), 226 | plot_to_image(), 227 | auto_camera(), 228 | save_grid( 229 | n_cols=7, n_rows=8, padding=4, 230 | canvas_size=(2240, 1640), 231 | canvas_color=(0, 0, 0), 232 | sort_by=['surfscalars'], 233 | fname_spec=( 234 | 'scalars-difumo_view-auto_page-{page}' 235 | ) 236 | ), 237 | ) 238 | nifti = tflow.get( 239 | template='MNI152NLin2009cAsym', 240 | atlas='DiFuMo', 241 | resolution=2, 242 | desc='64dimensions' 243 | ) 244 | plot_f( 245 | template='fsaverage', 246 | load_mask=True, 247 | difumo_nifti=nifti, 248 | surf_projection='pial', 249 | surf_scalars_cmap='Purples', 250 | surf_scalars_below_color='white', 251 | autocam_n_ortho=3, 252 | autocam_focus='peak', 253 | autocam_sweep_n_angles=3, 254 | window_size=(400, 250), 255 | output_dir='/tmp', 256 | hemisphere=['left', 'right'], 257 | ) 258 | -------------------------------------------------------------------------------- /tests/test_flat.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 ts=4 sw=4 et: 4 | """ 5 | Unit tests for flat map visualisations loaded from a GIfTI file 6 | """ 7 | import pytest 8 | 9 | import templateflow.api as tflow 10 | 11 | from hyve_examples import ( 12 | get_null400_cifti, 13 | get_fsLR_flatmap_gifti, 14 | ) 15 | from hyve.flows import plotdef 16 | from hyve.transforms import ( 17 | surf_from_gifti, 18 | surf_scalars_from_cifti, 19 | surf_scalars_from_nifti, 20 | 
    parcellate_colormap,
    vertex_to_face,
    plot_to_image,
    save_snapshots,
)


# Medial-wall exclusion masks for the fsLR 32k mesh, one per hemisphere.
lh_mask = tflow.get(
    template='fsLR',
    hemi='L',
    desc='nomedialwall',
    density='32k',
)
rh_mask = tflow.get(
    template='fsLR',
    hemi='R',
    desc='nomedialwall',
    density='32k',
)
# Mapping with 'left'/'right' keys pointing at flatmap GIfTI files.
surf = get_fsLR_flatmap_gifti()


@pytest.mark.ci_unsupported
def test_scalars():
    """Project a volumetric GM-probability map onto the fsLR flatmap."""
    plot_f = plotdef(
        surf_from_gifti(
            projection='flat',
            left_surf=surf['left'],
            right_surf=surf['right'],
            left_mask=lh_mask,
            right_mask=rh_mask,
        ),
        surf_scalars_from_nifti('GM Density', template='fsLR'),
        plot_to_image(),
        save_snapshots(
            fname_spec=(
                'scalars-{surfscalars}_hemisphere-{hemisphere}_projection-flat'
            ),
        ),
    )
    plot_f(
        gm_density_nifti=tflow.get(
            template='MNI152NLin2009cAsym',
            suffix='probseg',
            label='GM',
            resolution=2,
        ),
        hemisphere=['left', 'right'],
        views=['down'],
        output_dir='/tmp',
    )


@pytest.mark.parametrize(
    'cmap, cmap_name', [
        ('network', 'network'),
        ('modal', 'modal'),
        # 'bone' exercises a plain matplotlib cmap over parcel indices
        ('network', 'bone')
    ])
def test_parcellation(cmap, cmap_name):
    """Render a 400-parcel null parcellation on the flatmap under
    parcellation-derived and standard colormaps."""
    extra_args = {}
    plot_f = plotdef(
        surf_from_gifti(projection='flat'),
        surf_scalars_from_cifti('parcellation', plot=True),
        parcellate_colormap('parcellation', cmap),
        vertex_to_face('parcellation'),
        plot_to_image(),
        save_snapshots(
            fname_spec=(
                'scalars-{surfscalars}_hemisphere-{hemisphere}_'
                f'cmap-{cmap_name}_projection-flat'
            ),
        ),
    )
    if cmap_name == 'bone':
        # Clip to the parcel-index range when using a continuous cmap
        extra_args['surf_scalars_clim'] = (0.1, 400.9)
    plot_f(
        flat_left_surf=surf['left'],
        flat_right_surf=surf['right'],
        flat_left_mask=lh_mask,
        flat_right_mask=rh_mask,
        parcellation_cifti=get_null400_cifti(),
        surf_scalars_cmap=cmap_name,
        hemisphere=['left', 'right'],
        views=['down'],
        output_dir='/tmp',
        **extra_args,
    )
--------------------------------------------------------------------------------
/tests/test_hbfigbuilder.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Unit tests for figure builder where each scalar dataset is hemispherically
bound
"""
import pytest
import numpy as np

from hyve.flows import plotdef
from hyve.layout import Cell, ColGroupSpec, RowGroupSpec
from hyve.transforms import (
    surf_from_archive,
    surf_scalars_from_array,
    # surf_scalars_from_nifti,
    closest_ortho_camera,
    scalar_focus_camera,
    planar_sweep_camera,
    auto_camera,
    plot_to_image,
    save_figure,
    save_grid,
    text_element,
)


@pytest.mark.ci_unsupported
@pytest.mark.parametrize('num_maps', [2])
def test_focused_view_autoselect_hemisphere_groupspec0(
    num_maps,
):
    """
    Hemisphere-bound noise maps (allow_multihemisphere=False) rendered with
    a peak-focused camera; rows grouped first by hemisphere, then by map.
    """
    plot_f = plotdef(
        surf_from_archive(),
        surf_scalars_from_array(
            'gaussiannoise',
            allow_multihemisphere=False,
            is_masked=False,
            apply_mask=True,
        ),
        plot_to_image(),
        scalar_focus_camera(kind='peak'),
        save_figure(
            padding=4,
            # Canvas width scales with the number of maps rendered
            canvas_size=(4 * 200 * num_maps, 200),
            canvas_color=(0, 0, 0),
            sort_by=['surfscalars'],
            group_spec = [
                RowGroupSpec(
                    variable='hemisphere',
                ),
                RowGroupSpec(variable='surfscalars'),
            ],
            fname_spec=(
                'scalars-gaussiannoise_view-focused3_layout-emptyctr_page-{page}'
                #'scalars-gaussiannoise_view-focused2_page-{page}'
            )
        ),
    )

    # 32492 vertices per hemisphere on the fsLR 32k mesh
    array_left = np.random.randn(num_maps, 32492)
    array_right = np.random.randn(num_maps, 32492)
    plot_f(
        template="fsLR",
        load_mask=True,
        gaussiannoise_array_left=array_left,
        gaussiannoise_array_right=array_right,
        surf_projection='veryinflated',
        surf_scalars_cmap='RdYlBu',
        surf_scalars_below_color='#333333',
        window_size=(400, 250),
        output_dir='/tmp',
    )


@pytest.mark.ci_unsupported
def test_focused_view_autoselect_hemisphere_groupspec1():
    """As groupspec0, but hemispheres as rows (max 2) crossed with
    per-map columns."""
    plot_f = plotdef(
        surf_from_archive(),
        surf_scalars_from_array(
            'gaussiannoise',
            allow_multihemisphere=False,
            is_masked=False,
            apply_mask=True,
        ),
        plot_to_image(),
        scalar_focus_camera(kind='peak'),
        save_figure(
            padding=4,
            canvas_size=(200, 1000),
            canvas_color=(0, 0, 0),
            sort_by=['surfscalars'],
            group_spec = [
                RowGroupSpec(
                    variable='hemisphere',
                    max_levels=2,
                ),
                ColGroupSpec(variable='surfscalars'),
            ],
            fname_spec=(
                'scalars-gaussiannoise_view-focused_layout-2col_page-{page}'
            )
        ),
    )

    array_left = np.random.randn(10, 32492)
    array_right = np.random.randn(10, 32492)
    plot_f(
        template="fsLR",
        load_mask=True,
        gaussiannoise_array_left=array_left,
        gaussiannoise_array_right=array_right,
        surf_projection='veryinflated',
        surf_scalars_cmap='RdYlBu',
        surf_scalars_below_color='#333333',
        window_size=(400, 250),
        output_dir='/tmp',
    )


@pytest.mark.ci_unsupported
def test_panoptic_groupspec_nohb():
    """
    Panoptic layout with extra cells for a subtitle text element and a
    collected scalar bar; maps are not hemisphere-bound here.
    """
    layout = Cell() | Cell() << (1 / 2)
    layout = layout * layout * layout
    # Append a narrow scalar-bar column, then prepend a subtitle row
    layout = layout | Cell() << (32 / 33)
    layout = Cell() / layout << (1 / 13)
    annotations = {
        0: dict(elements=['subtitle']),
        1: dict(
            hemisphere='left',
            view='lateral',
        ),
        2: dict(
            hemisphere='left',
            view='medial',
        ),
        3: dict(
            hemisphere='right',
            view='lateral',
        ),
        4: dict(
            hemisphere='right',
            view='medial',
        ),
        5: dict(
            view='dorsal',
        ),
        6: dict(
            view='ventral',
        ),
        7: dict(
            view='anterior',
        ),
        8: dict(
            view='posterior',
        ),
        9: dict(
            elements=['scalar_bar'],
        ),
    }
    layout = layout.annotate(annotations)
    plot_f = plotdef(
        surf_from_archive(),
        surf_scalars_from_array(
            'gaussiannoise',
            is_masked=False,
            apply_mask=True,
        ),
        # '{surfscalars}' is templated with the current map's name
        text_element('subtitle', content='{surfscalars}'),
        plot_to_image(),
        save_figure(
            canvas_size=(3200, 1200),
            canvas_color=(0, 0, 0),
            sort_by=['surfscalars'],
            layout_kernel=layout,
            group_spec = [
                ColGroupSpec(
                    variable='surfscalars',
                    max_levels=3,
                ),
            ],
            fname_spec=(
                'scalars-gaussiannoise_view-all_page-{page}'
            ),
            scalar_bar_action='collect',
        ),
    )

    # array_left = np.random.randn(12, 32492)
    # array_right = np.random.randn(12, 32492)
    array_left = np.random.randn(6, 32492)
    array_right = np.random.randn(6, 32492)
    plot_f(
        template='fsLR',
        load_mask=True,
        gaussiannoise_array_left=array_left,
        gaussiannoise_array_right=array_right,
        surf_projection='veryinflated',
        surf_scalars_cmap='RdYlBu',
        surf_scalars_clim_percentile=True,
        surf_scalars_below_color='#333333',
        window_size=(400, 300),
        hemisphere=['left', 'right', None],
        views={
            'left': ('medial', 'lateral'),
            'right': ('medial', 'lateral'),
            'both': ('dorsal', 'ventral', 'anterior', 'posterior'),
        },
        output_dir='/tmp',
    )


@pytest.mark.ci_unsupported
def test_focused_view_autoselect_hemisphere():
    """Peak-focused camera over hemisphere-bound maps; simple 4x5 grid."""
    plot_f = plotdef(
        surf_from_archive(),
        surf_scalars_from_array(
            'gaussiannoise',
            allow_multihemisphere=False,
            is_masked=False,
            apply_mask=True,
        ),
        plot_to_image(),
        scalar_focus_camera(kind='peak'),
        save_grid(
            n_cols=4, n_rows=5, padding=4,
            canvas_size=(1280, 1024),
            canvas_color=(0, 0, 0),
            sort_by=['surfscalars'],
            fname_spec=(
                'scalars-gaussiannoise_view-focused_page-{page}'
            )
        ),
    )

    array_left = np.random.randn(10, 32492)
    array_right = np.random.randn(10, 32492)
    plot_f(
        template="fsLR",
        load_mask=True,
        gaussiannoise_array_left=array_left,
        gaussiannoise_array_right=array_right,
        surf_projection='veryinflated',
        surf_scalars_cmap='RdYlBu',
        surf_scalars_below_color='#333333',
        window_size=(400, 250),
        output_dir='/tmp',
    )

@pytest.mark.ci_unsupported
def test_ortho_views_autoselect_hemisphere():
    """Closest-orthogonal views with an explicit map subset (index 19
    skipped); 3x10 grid."""
    selected = list(range(19)) + list(range(20, 39))
    plot_f = plotdef(
        surf_from_archive(),
        surf_scalars_from_array(
            'gaussiannoise',
            allow_multihemisphere=False,
            is_masked=False,
            apply_mask=True,
            select=selected,
        ),
        plot_to_image(),
        closest_ortho_camera(n_ortho=3),
        save_grid(
            n_cols=3, n_rows=10, padding=4,
            canvas_size=(960, 2048),
            canvas_color=(0, 0, 0),
            sort_by=['surfscalars'],
            fname_spec=(
                'scalars-gaussiannoise_view-ortho_page-{page}'
            )
        ),
    )

    array_left = np.random.randn(10, 32492)
    array_right = np.random.randn(10, 32492)
    plot_f(
        template="fsLR",
        load_mask=True,
        gaussiannoise_array_left=array_left,
        gaussiannoise_array_right=array_right,
        surf_projection='veryinflated',
        surf_scalars_cmap='RdYlBu',
        surf_scalars_below_color='#333333',
        window_size=(400, 250),
        output_dir='/tmp',
    )

@pytest.mark.ci_unsupported
def test_planar_sweep_autoselect_hemisphere():
    """Planar camera sweep (10 angles about the z-normal plane); 10x5 grid."""
    plot_f = plotdef(
        surf_from_archive(),
        surf_scalars_from_array(
            'gaussiannoise',
            allow_multihemisphere=False,
            is_masked=False,
            apply_mask=True,
        ),
        plot_to_image(),
        planar_sweep_camera(initial=(1, 0, 0), normal=(0, 0, 1), n_angles=10),
        save_grid(
            n_cols=10, n_rows=5, padding=4,
            canvas_size=(3200, 1024),
            canvas_color=(0, 0, 0),
            sort_by=['surfscalars'],
            fname_spec=(
                'scalars-gaussiannoise_view-planar_page-{page}'
            )
        ),
    )

    array_left = np.random.randn(10, 32492)
    array_right = np.random.randn(10, 32492)
    plot_f(
        template="fsLR",
        load_mask=True,
        gaussiannoise_array_left=array_left,
        gaussiannoise_array_right=array_right,
        surf_projection='veryinflated',
        surf_scalars_cmap='RdYlBu',
        surf_scalars_below_color='#333333',
        window_size=(400, 250),
        output_dir='/tmp',
    )

@pytest.mark.ci_unsupported
def test_auto_view_autoselect_hemisphere():
    """auto_camera combining ortho, peak focus, and sweep views; 7x10 grid."""
    plot_f = plotdef(
        surf_from_archive(),
        surf_scalars_from_array(
            'gaussiannoise',
            allow_multihemisphere=False,
            is_masked=False,
            apply_mask=True,
        ),
        plot_to_image(),
        auto_camera(),
        save_grid(
            n_cols=7, n_rows=10, padding=4,
            canvas_size=(2240, 2048),
            canvas_color=(0, 0, 0),
            sort_by=['surfscalars'],
            fname_spec=(
                'scalars-gaussiannoise_view-auto_page-{page}'
            )
        ),
    )

    array_left = np.random.randn(10, 32492)
    array_right = np.random.randn(10, 32492)
    plot_f(
        template="fsLR",
        load_mask=True,
        gaussiannoise_array_left=array_left,
        gaussiannoise_array_right=array_right,
        surf_projection='veryinflated',
        surf_scalars_cmap='RdYlBu',
        surf_scalars_below_color='#333333',
        autocam_n_ortho=3,
        autocam_focus='peak',
        autocam_sweep_n_angles=3,
        window_size=(400, 250),
        output_dir='/tmp',
    )
--------------------------------------------------------------------------------
/tests/test_injection.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Unit tests for all views on a brain surface
"""
import pytest
import matplotlib.pyplot as plt
import numpy as np
import pyvista as pv
import seaborn as sns
import templateflow.api as tflow

from hyve_examples import get_pain_thresh_nifti, get_svg_cuboid, get_svg_blend
from hyve.flows import plotdef
from hyve.layout import Cell
from hyve.transforms import (
    surf_from_archive,
    surf_scalars_from_nifti,
    add_surface_overlay,
    save_figure,
    plot_to_image,
    pyplot_element,
    svg_element,
    text_element,
)

# Call-time arguments shared by every plot_f invocation in this module.
COMMON_PARAMS = dict(
    load_mask=True,
    hemisphere=['left', 'right', None],
    views={
        'left': ('medial', 'lateral'),
        'right': ('medial', 'lateral'),
        'both': ('dorsal', 'ventral', 'anterior', 'posterior'),
    },
    output_dir='/tmp',
    window_size=(600, 500),
)


def get_annotations():
    """Return a fresh panel-index -> hemisphere/view annotation mapping.

    Panels 8 and 9 are left empty so each test can assign its own
    injected elements (title, scalar bar, SVG, pyplot figures).
    """
    return {
        0: dict(
            hemisphere='left',
            view='lateral',
        ),
        1: dict(
            hemisphere='left',
            view='medial',
        ),
        2: dict(view='dorsal'),
        3: dict(view='anterior'),
        4: dict(
            hemisphere='right',
            view='lateral',
        ),
        5: dict(
            hemisphere='right',
            view='medial',
        ),
        6: dict(view='ventral'),
        7: dict(view='posterior'),
        8: {},
        9: {},
    }


#@pytest.mark.parametrize('projection', ['pial', 'veryinflated'])
@pytest.mark.parametrize('insert', ['cube', 'blend'])
def test_svg_injection(insert): #projection):
    """Inject an external SVG element ('cube' default asset or a 'blend'
    override supplied at call time) into a composed brain figure."""
    projection = 'pial'
    layout = Cell() | Cell() << (1 / 2)
    layout = layout * (Cell() / Cell() / Cell() / Cell() << (1 / 4))
    layout = layout | (Cell() / Cell() << (1 / 2)) << (1 / 2)
    annotations = get_annotations()
    annotations[8]['elements'] = ['title', 'scalar_bar']
    annotations[9]['elements'] = ['cuboid']
    layout = layout.annotate(annotations)
    paramargs = {}
    if insert == 'blend':
        # Override the SVG source declared in plotdef at call time
        paramargs = {
            'cuboid_element_src_file': get_svg_blend(),
        }

    if projection == 'pial':
        template = 'fsaverage'
    else:
        template = 'fsLR'
    pain = get_pain_thresh_nifti()
    plot_f = plotdef(
        surf_from_archive(),
        add_surface_overlay(
            'pain',
            surf_scalars_from_nifti(
                'pain', template=template, plot=True
            ),
        ),
        svg_element(
            name='cuboid',
            src_file=get_svg_cuboid(),
            height=262,
            width=223,
        ),
        text_element(
            name='title',
            content='pain',
            bounding_box_height=192,
            font_size_multiplier=0.25,
            font_color='#cccccc',
            priority=-1,
        ),
        plot_to_image(),
        save_figure(
            layout_kernel=layout,
            padding=10,
            canvas_size=(1800, 1500),
            canvas_color=(0, 0, 0),
            # {{page}} escapes to literal '{page}' for later templating
            fname_spec=f'scalars-pain_insert-{insert}_desc-cuboid_page-{{page}}',
            scalar_bar_action='collect',
        ),
    )
    plot_f(
        template=template,
        pain_nifti=pain,
        pain_clim=(2, 8),
        pain_below_color=(0, 0, 0, 0),
        pain_scalar_bar_style={
            'name': 'z-stat',
            'orientation': 'h',
            'lim_fontsize_multiplier': 0.6,
            'name_fontsize_multiplier': 0.5,
        },
        pain_cmap='inferno',
        surf_projection=(projection,),
        **COMMON_PARAMS,
        **paramargs,
    )


def test_pyplot_injection():
    """Inject four seaborn/matplotlib figures as pyplot elements into the
    panoptic brain layout (one floating cell substituted into four)."""
    layout = Cell() | Cell() << (1 / 2)
    layout = layout * (Cell() / Cell() / Cell() / Cell() << (1 / 4))
    layout = layout | (Cell() / Cell() << (1 / 2)) << (1 / 2)
    # Substitute cell 9 with a 2x2 grid to host the four pyplot panels
    layout = layout % ((
        Cell() | Cell() << (1 / 2)) / (
        Cell() | Cell() << (1 / 2)) << (1 / 2)) << 9
    annotations = get_annotations()
    annotations[8]['elements'] = ['title', 'scalar_bar']
    annotations[9]['elements'] = ['pyplot0']
    annotations[10] = {'elements': ['pyplot1']}
    annotations[11] = {'elements': ['pyplot2']}
    annotations[12] = {'elements': ['pyplot3']}
    layout = layout.annotate(annotations)

    # https://gist.github.com/mwaskom/7be0963cc57f6c89f7b2
    plt.style.use("dark_background")
    sns.set(style="ticks", context="talk")
    # Each pyplot*_f receives a figsize and returns a figure-like object.
    def pyplot0_f(figsize):
        fig, ax = plt.subplots(figsize=figsize)
        # https://seaborn.pydata.org/examples/layered_bivariate_plot.html
        n = 10000
        mean = [0, 0]
        cov = [(2, .4), (.4, .2)]
        rng = np.random.RandomState(0)
        x, y = rng.multivariate_normal(mean, cov, n).T
        sns.scatterplot(x=x, y=y, s=5, color=".15")
        sns.histplot(x=x, y=y, bins=50, pthresh=.1, cmap="mako")
        sns.kdeplot(x=x, y=y, levels=5, color="w", linewidths=1)
        return fig
    def pyplot1_f(figsize):
        height = min(figsize)
        aspect = figsize[0] / figsize[1]
        # https://seaborn.pydata.org/examples/multiple_conditional_kde.html
        sns.set_theme(style="white")
        diamonds = sns.load_dataset("diamonds")
        # NOTE(review): displot returns a FacetGrid, not a Figure
        fig = sns.displot(
            data=diamonds,
            x="carat", hue="cut", kind="kde",
            multiple="fill", clip=(0, None),
            palette="ch:rot=-.25,hue=1,light=.75",
            height=height, aspect=aspect,
        )
        return fig
    def pyplot2_f(figsize):
        height = min(figsize)
        # https://seaborn.pydata.org/examples/smooth_bivariate_kde.html
        sns.set_theme()
        df = sns.load_dataset("penguins")
        g = sns.JointGrid(
            data=df, x="body_mass_g", y="bill_depth_mm",
            space=0, height=height,
        )
        g.plot_joint(sns.kdeplot,
            fill=True, clip=((2200, 6800), (10, 25)),
            thresh=0, levels=100, cmap="rocket")
        g.plot_marginals(sns.histplot, color="#03051A", alpha=1, bins=25)
        return g
    def pyplot3_f(figsize):
        height = figsize[1]
        aspect = figsize[0] / figsize[1]
        # https://seaborn.pydata.org/examples/strip_regplot.html
        sns.set_theme()
        mpg = sns.load_dataset("mpg")
        fig = sns.catplot(
            data=mpg, x="cylinders", y="acceleration", hue="weight",
            native_scale=True, zorder=1, height=height, aspect=aspect,
        )
        sns.regplot(
            data=mpg, x="cylinders", y="acceleration",
            scatter=False, truncate=False, order=2, color=".2",
        )
        fig.tight_layout()
        return fig

    pain = get_pain_thresh_nifti()
    plot_f = plotdef(
        surf_from_archive(),
        add_surface_overlay(
            'pain',
            surf_scalars_from_nifti(
                'pain', template='fsLR', plot=True
            ),
        ),
        pyplot_element(
            name='pyplot0',
            plotter=pyplot0_f,
            figsize=(6, 5),
        ),
        pyplot_element(
            name='pyplot1',
            plotter=pyplot1_f,
            figsize=(6, 5),
        ),
        pyplot_element(
            name='pyplot2',
            plotter=pyplot2_f,
            figsize=(6, 5),
        ),
        pyplot_element(
            name='pyplot3',
            plotter=pyplot3_f,
            figsize=(6, 5),
        ),
        text_element(
            name='title',
            content='pain',
            bounding_box_height=192,
            font_size_multiplier=0.25,
            font_color='#cccccc',
            priority=-1,
        ),
        plot_to_image(),
        save_figure(
            layout_kernel=layout,
            padding=10,
            canvas_size=(1800, 1500),
            canvas_color=(0, 0, 0),
            # NOTE(review): the 'f' prefix is redundant here — there are no
            # interpolated fields; f'...{{page}}' == '...{page}'
            fname_spec=f'scalars-pain_desc-pyplot_page-{{page}}',
            scalar_bar_action='collect',
        ),
    )
    plot_f(
        template='fsLR',
        pain_nifti=pain,
        pain_clim=(2, 8),
        pain_below_color=(0, 0, 0, 0),
        pain_scalar_bar_style={
            'name': 'z-stat',
            'orientation': 'h',
            'lim_fontsize_multiplier': 0.6,
            'name_fontsize_multiplier': 0.5,
        },
        pain_cmap='inferno',
        surf_projection=('veryinflated',),
        theme=pv.themes.DarkTheme(),
        **COMMON_PARAMS,
    )
--------------------------------------------------------------------------------
/tests/test_interactive.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Tests that require an interactive plotter display window
"""
import pytest
import templateflow.api as tflow
from hyve_examples import get_pain_thresh_nifti
from hyve.flows import plotdef
from hyve.transforms import (
    plot_to_display,
    plot_final_image,
    save_snapshots,
    surf_from_archive,
    surf_scalars_from_nifti,
    points_scalars_from_nifti,
)


@pytest.mark.ci_unsupported
def test_plotter_flow_syntax():
    """Open an interactive window with surface + point scalars overlaid."""
    plot_f = plotdef(
        surf_from_archive(),
        surf_scalars_from_nifti('gmdensity', template='fsaverage'),
        points_scalars_from_nifti('pain'),
        plot_to_display(),
    )
    plot_f(
        template='fsaverage',
        load_mask=True,
        gmdensity_nifti=tflow.get(
            template='MNI152NLin2009cAsym',
            suffix='probseg',
            label='GM',
            resolution=2
        ),
        pain_nifti=get_pain_thresh_nifti(),
        surf_projection=('pial',),
        surf_alpha=0.5,
    )


@pytest.mark.ci_unsupported
def test_plotter_final_capture():
    """Capture the final interactive scene to snapshot image files."""
    plot_f = plotdef(
        surf_from_archive(),
        surf_scalars_from_nifti('gmdensity', template='fsaverage'),
        points_scalars_from_nifti('pain'),
        plot_final_image(n_scenes=1), # n_scenes > 1 is not supported yet
        save_snapshots(
            fname_spec=(
                'scalars-{surfscalars}_hemisphere-{hemisphere}_view-{view}'
            ),
        ),
    )
    plot_f(
        template='fsaverage',
        load_mask=True,
        gmdensity_nifti=tflow.get(
            template='MNI152NLin2009cAsym',
suffix='probseg', 63 | label='GM', 64 | resolution=2 65 | ), 66 | pain_nifti=get_pain_thresh_nifti(), 67 | surf_projection=('pial',), 68 | surf_alpha=0.5, 69 | output_dir='/tmp', 70 | ) 71 | -------------------------------------------------------------------------------- /tests/test_layout.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 3 | # vi: set ft=python sts=4 ts=4 sw=4 et: 4 | """ 5 | Unit tests for layout representation 6 | """ 7 | from hyve.layout import CellLayout, Cell, hsplit, vsplit, grid, float_layout 8 | 9 | 10 | def test_simple_layout(): 11 | layout = vsplit(1 / 3, 12 | hsplit(1 / 2, 13 | Cell(), 14 | vsplit(1 / 4, 15 | Cell(), 16 | Cell(), 17 | ), 18 | ), 19 | Cell(), 20 | Cell(), 21 | ).partition(120, 120) 22 | cells = list(layout) 23 | assert cells[0].cell_loc == (0, 0) 24 | assert cells[0].cell_dim == (40, 60) 25 | assert cells[1].cell_loc == (0, 60) 26 | assert cells[1].cell_dim == (10, 60) 27 | assert cells[2].cell_loc == (10, 60) 28 | assert cells[2].cell_dim == (30, 60) 29 | assert cells[3].cell_loc == (40, 0) 30 | assert cells[3].cell_dim == (40, 120) 31 | assert cells[4].cell_loc == (80, 0) 32 | assert cells[4].cell_dim == (40, 120) 33 | 34 | 35 | def test_grid_layout(): 36 | gridlayout = grid(n_cols=6, n_rows=5).partition(1200, 900) 37 | cells = list(gridlayout) 38 | assert all([cells[i].cell_dim == (200, 180) for i in range(30)]) 39 | for i in range(5): 40 | for j in range(6): 41 | assert cells[6 * i + j].cell_loc == (200 * j, 180 * i) 42 | 43 | gridlayout = grid(n_cols=6, n_rows=5, order='col').partition(1200, 900) 44 | cells = list(gridlayout) 45 | assert all([cells[i].cell_dim == (200, 180) for i in range(30)]) 46 | for i in range(5): 47 | for j in range(6): 48 | assert cells[5 * j + i].cell_loc == (200 * j, 180 * i) 49 | 50 | 51 | def test_layout_substitute(): 52 | # layout = ( 53 | # (Cell() 
| (1 / 2) | (Cell() / 54 | # (1 / 4) / 55 | # Cell()) 56 | # ) / (1 / 3) / ( 57 | # Cell(), 58 | # Cell(), 59 | # ) 60 | # ) 61 | # layout = ( 62 | # (Cell() | (Cell() / 63 | # Cell() << 64 | # (1 / 4)) << (1 / 2)) / 65 | # Cell() / 66 | # Cell() << 67 | # (1 / 3) 68 | # ) 69 | layout = (( 70 | Cell() / 71 | (Cell() | Cell() << (1 / 4)) << 72 | (1 / 2))| Cell() | Cell() << (1 / 3) 73 | ) 74 | layout_inner = Cell() / Cell() / Cell() << (1 / 4) 75 | layout_sub = (layout % layout_inner << 1).partition(120, 120) 76 | cells = list(layout_sub) 77 | assert cells[0].cell_loc == (0, 0) 78 | assert cells[0].cell_dim == (40, 60) 79 | assert cells[1].cell_loc == (0, 60) 80 | assert cells[1].cell_dim == (10, 15) 81 | assert cells[2].cell_loc == (0, 75) 82 | assert cells[2].cell_dim == (10, 15) 83 | assert cells[3].cell_loc == (0, 90) 84 | assert cells[3].cell_dim == (10, 30) 85 | assert cells[4].cell_loc == (10, 60) 86 | assert cells[4].cell_dim == (30, 60) 87 | assert cells[5].cell_loc == (40, 0) 88 | assert cells[5].cell_dim == (40, 120) 89 | assert cells[6].cell_loc == (80, 0) 90 | assert cells[6].cell_dim == (40, 120) 91 | 92 | def test_layout_product(): 93 | layout0 = Cell() / Cell() << (1 / 3) 94 | layout1 = Cell() | Cell() << (1 / 4) 95 | layout01 = (layout0 * layout1).partition(120, 120) 96 | cells = list(layout01) 97 | assert cells[0].cell_loc == (0, 0) 98 | assert cells[0].cell_dim == (30, 40) 99 | assert cells[1].cell_loc == (30, 0) 100 | assert cells[1].cell_dim == (90, 40) 101 | assert cells[2].cell_loc == (0, 40) 102 | assert cells[2].cell_dim == (30, 80) 103 | assert cells[3].cell_loc == (30, 40) 104 | assert cells[3].cell_dim == (90, 80) 105 | 106 | layout10 = (layout1 * layout0).partition(120, 120) 107 | cells = list(layout10) 108 | assert cells[0].cell_loc == (0, 0) 109 | assert cells[0].cell_dim == (30, 40) 110 | assert cells[1].cell_loc == (0, 40) 111 | assert cells[1].cell_dim == (30, 80) 112 | assert cells[2].cell_loc == (30, 0) 113 | assert 
def test_layout_floating():
    """
    Exercise floating (overlay) layouts attached via the ``+`` operator.

    A floating layout is positioned over its anchor by a fractional
    ``((x, y), (width, height))`` box. The test checks: (1) ``root``
    attribution of anchored vs floating cells; (2) partition geometry of
    nested floats; (3) interaction of floating layouts with annotation,
    the ``%``/``<<`` substitution operator, and the ``*`` product.
    """
    anchor = Cell() | Cell() | Cell() | Cell() << (1 / 5)
    floating = Cell() / Cell() << (1 / 3)
    floating_inner = Cell() | Cell() << (1 / 2)
    # Nested float: floating_inner floats over floating, which in turn
    # floats over the 4-cell anchor.
    layout_inner = floating + floating_inner << ((0.1, 0.4), (0.8, 0.2))
    layout = anchor + layout_inner << ((0.7, 0.1), (0.2, 0.8))
    # Cells 0-3 belong to the anchor; 4-5 to the first float; 6-7 to the
    # inner float.
    for i, cell in enumerate(layout):
        if i < 4:
            assert cell.root is layout
        elif i < 6:
            assert cell.root is layout.floating[0]
        else:
            assert cell.root is layout.floating[1]

    layout.partition(500, 500)
    cells = list(layout)
    assert cells[0].cell_loc == (0, 0)
    assert cells[0].cell_dim == (100, 500)
    assert cells[1].cell_loc == (100, 0)
    assert cells[1].cell_dim == (100, 500)
    assert cells[2].cell_loc == (200, 0)
    assert cells[2].cell_dim == (100, 500)
    assert cells[3].cell_loc == (300, 0)
    assert cells[3].cell_dim == (200, 500)

    # Total area is (100, 400)
    assert cells[4].cell_loc == (350, 50)
    assert cells[4].cell_dim == (100, 133)
    assert cells[5].cell_loc == (350, 183)
    assert cells[5].cell_dim == (100, 267)

    # Total area is (80, 80)
    assert cells[6].cell_loc == (360, 210)
    assert cells[6].cell_dim == (40, 80)
    assert cells[7].cell_loc == (400, 210)
    assert cells[7].cell_dim == (40, 80)

    # A reusable anchored+floating unit, composed 2x2 with | and /.
    anchor = Cell() | Cell() << (1 / 2)
    floating = Cell() / Cell() << (1 / 4)
    base = anchor + floating << ((0.1, 0.1), (0.8, 0.8))
    layout = (
        base | base << (1 / 2)) / (
        base | base << (1 / 2)) << (1 / 2)
    layout.partition(400, 400)
    cells = list(layout)

    def check_cells(cells):
        # Anchored cells of the four quadrants come first (0-7) ...
        assert cells[0].cell_loc == (0, 0)
        assert cells[0].cell_dim == (100, 200)
        assert cells[1].cell_loc == (100, 0)
        assert cells[1].cell_dim == (100, 200)
        assert cells[2].cell_loc == (200, 0)
        assert cells[2].cell_dim == (100, 200)
        assert cells[3].cell_loc == (300, 0)
        assert cells[3].cell_dim == (100, 200)
        assert cells[4].cell_loc == (0, 200)
        assert cells[4].cell_dim == (100, 200)
        assert cells[5].cell_loc == (100, 200)
        assert cells[5].cell_dim == (100, 200)
        assert cells[6].cell_loc == (200, 200)
        assert cells[6].cell_dim == (100, 200)
        assert cells[7].cell_loc == (300, 200)
        assert cells[7].cell_dim == (100, 200)

        # ... followed by the floating cells (8-15). Total area is (160, 160)
        assert cells[8].cell_loc == (20, 20)
        assert cells[8].cell_dim == (160, 40)
        assert cells[9].cell_loc == (20, 60)
        assert cells[9].cell_dim == (160, 120)
        assert cells[10].cell_loc == (220, 20)
        assert cells[10].cell_dim == (160, 40)
        assert cells[11].cell_loc == (220, 60)
        assert cells[11].cell_dim == (160, 120)
        assert cells[12].cell_loc == (20, 220)
        assert cells[12].cell_dim == (160, 40)
        assert cells[13].cell_loc == (20, 260)
        assert cells[13].cell_dim == (160, 120)
        assert cells[14].cell_loc == (220, 220)
        assert cells[14].cell_dim == (160, 40)
        assert cells[15].cell_loc == (220, 260)
        assert cells[15].cell_dim == (160, 120)

    check_cells(cells)

    # Annotation indices chosen so that, after composition, cell i carries
    # annotation {'i': i} in iteration order (anchored first, floats last).
    annotated0 = base.annotate(
        {0: {'i': 0}, 1: {'i': 1}, 2: {'i': 8}, 3: {'i': 9}},
        default_elements=None,
    )
    annotated1 = base.annotate(
        {0: {'i': 2}, 1: {'i': 3}, 2: {'i': 10}, 3: {'i': 11}},
        default_elements=None,
    )
    annotated2 = base.annotate(
        {0: {'i': 4}, 1: {'i': 5}, 2: {'i': 12}, 3: {'i': 13}},
        default_elements=None,
    )
    annotated3 = base.annotate(
        {0: {'i': 6}, 1: {'i': 7}, 2: {'i': 14}, 3: {'i': 15}},
        default_elements=None,
    )
    annotated = (
        annotated0 | annotated1 << (1 / 2)) / (
        annotated2 | annotated3 << (1 / 2)) << (1 / 2)
    annotated.partition(400, 400)
    cells = list(annotated)
    check_cells(cells)
    assert len(annotated) == 16
    for i in range(len(annotated)):
        assert annotated.annotations[i]['i'] == i

    # Substitution: annotated1 replaces the cell of annotated0 whose
    # annotation slot is None ({'i': None} at index 1).
    annotated0 = base.annotate(
        {0: {'i': 0}, 1: {'i': None}, 2: {'i': 3}, 3: {'i': 4}},
        default_elements=None,
    )
    annotated1 = base.annotate(
        {0: {'i': 1}, 1: {'i': 2}, 2: {'i': 5}, 3: {'i': 6}},
        default_elements=None,
    )
    annotated = annotated0 % annotated1 << 1
    annotated.partition(400, 400)
    assert len(annotated) == 7
    for i in range(len(annotated)):
        assert annotated.annotations[i]['i'] == i
        if i > 2:
            assert type(annotated[i].root).__name__ == 'FloatingCellLayout'
    cells = list(annotated)
    assert cells[0].cell_loc == (0, 0)
    assert cells[0].cell_dim == (200, 400)
    assert cells[1].cell_loc == (200, 0)
    assert cells[1].cell_dim == (100, 400)
    assert cells[2].cell_loc == (300, 0)
    assert cells[2].cell_dim == (100, 400)
    assert cells[3].cell_loc == (40, 40)
    assert cells[3].cell_dim == (320, 80)
    assert cells[4].cell_loc == (40, 120)
    assert cells[4].cell_dim == (320, 240)
    assert cells[5].cell_loc == (220, 40)
    assert cells[5].cell_dim == (160, 80)
    assert cells[6].cell_loc == (220, 120)
    assert cells[6].cell_dim == (160, 240)

    # Same 2x2 composition, but anchors and floats are annotated separately
    # before being joined with ``+``.
    annotated_a0 = anchor.annotate(
        {0: {'i': 0}, 1: {'i': 1}},
        default_elements=None,
    )
    annotated_a1 = anchor.annotate(
        {0: {'i': 2}, 1: {'i': 3}},
        default_elements=None,
    )
    annotated_a2 = anchor.annotate(
        {0: {'i': 4}, 1: {'i': 5}},
        default_elements=None,
    )
    annotated_a3 = anchor.annotate(
        {0: {'i': 6}, 1: {'i': 7}},
        default_elements=None,
    )
    annotated_f0 = floating.annotate(
        {0: {'i': 8}, 1: {'i': 9}},
        default_elements=None,
    )
    annotated_f1 = floating.annotate(
        {0: {'i': 10}, 1: {'i': 11}},
        default_elements=None,
    )
    annotated_f2 = floating.annotate(
        {0: {'i': 12}, 1: {'i': 13}},
        default_elements=None,
    )
    annotated_f3 = floating.annotate(
        {0: {'i': 14}, 1: {'i': 15}},
        default_elements=None,
    )
    annotated0 = annotated_a0 + annotated_f0 << ((0.1, 0.1), (0.8, 0.8))
    annotated1 = annotated_a1 + annotated_f1 << ((0.1, 0.1), (0.8, 0.8))
    annotated2 = annotated_a2 + annotated_f2 << ((0.1, 0.1), (0.8, 0.8))
    annotated3 = annotated_a3 + annotated_f3 << ((0.1, 0.1), (0.8, 0.8))
    annotated = (
        (annotated0 | annotated1 << (1 / 2)) /
        (annotated2 | annotated3 << (1 / 2)) << (1 / 2)
    )
    annotated.partition(400, 400)
    cells = list(annotated)
    check_cells(cells)
    assert len(annotated) == 16
    for i in range(len(annotated)):
        assert annotated.annotations[i]['i'] == i

    # Product of an annotated layout with an annotated float-bearing layout:
    # the float is replicated into each product cell of the first factor.
    layout0 = Cell() | Cell() << (1 / 2)
    layout1 = Cell() / Cell() << (1 / 4)
    onecell = Cell()
    annotated0 = layout0.annotate(
        {0: {'x': 0}, 1: {'x': 1}},
        default_elements=None,
    )
    annotated1 = layout1.annotate(
        {0: {'y': 0}, 1: {'y': 1}},
        default_elements=None,
    )
    annotated2 = onecell.annotate(
        {0: {'float0': True}},
        default_elements=None,
    )
    annotated3 = onecell.annotate(
        {0: {'float1': True}},
        default_elements=None,
    )
    annotated12 = annotated1 + annotated2 << ((0.1, 0.6), (0.8, 0.3))
    annotated03 = annotated0 + annotated3 << ((0.6, 0.1), (0.3, 0.8))
    annotated = annotated0 * annotated12
    annotated.partition(400, 400)
    cells = list(annotated)
    annotations = annotated.annotations
    assert cells[0].cell_loc == (0, 0)
    assert cells[0].cell_dim == (200, 100)
    assert annotations[0] == {'x': 0, 'y': 0}
    assert cells[1].cell_loc == (0, 100)
    assert cells[1].cell_dim == (200, 300)
    assert annotations[1] == {'x': 0, 'y': 1}
    assert cells[2].cell_loc == (200, 0)
    assert cells[2].cell_dim == (200, 100)
    assert annotations[2] == {'x': 1, 'y': 0}
    assert cells[3].cell_loc == (200, 100)
    assert cells[3].cell_dim == (200, 300)
    assert annotations[3] == {'x': 1, 'y': 1}
    assert cells[4].cell_loc == (20, 240)
    assert cells[4].cell_dim == (160, 120)
    assert annotations[4] == {'float0': True, 'x': 0}
    assert cells[5].cell_loc == (220, 240)
    assert cells[5].cell_dim == (160, 120)
    assert annotations[5] == {'float0': True, 'x': 1}

    # Really I don't think you should do this in practice, but it's possible
    annotated = annotated03 * annotated12
    annotations = annotated.annotations
    cells = list(annotated.partition(400, 400))
    assert cells[0].cell_loc == (0, 0)
    assert cells[0].cell_dim == (200, 100)
    assert annotations[0] == {'x': 0, 'y': 0}
    assert cells[1].cell_loc == (0, 100)
    assert cells[1].cell_dim == (200, 300)
    assert annotations[1] == {'x': 0, 'y': 1}
    assert cells[2].cell_loc == (200, 0)
    assert cells[2].cell_dim == (200, 100)
    assert annotations[2] == {'x': 1, 'y': 0}
    assert cells[3].cell_loc == (200, 100)
    assert cells[3].cell_dim == (200, 300)
    assert annotations[3] == {'x': 1, 'y': 1}
    assert cells[4].cell_loc == (20, 240)
    assert cells[4].cell_dim == (160, 120)
    assert annotations[4] == {'x': 0, 'float0': True}
    assert cells[5].cell_loc == (220, 240)
    assert cells[5].cell_dim == (160, 120)
    assert annotations[5] == {'x': 1, 'float0': True}
    # TODO: need to double check the below -- something here isn't right
    assert cells[6].cell_loc == (240, 40)
    assert cells[6].cell_dim == (120, 80)
    assert annotations[6] == {'y': 0, 'float1': True}
    assert cells[7].cell_loc == (240, 120)
    assert cells[7].cell_dim == (120, 240)
    assert annotations[7] == {'y': 1, 'float1': True}
    assert cells[8].cell_loc == (252, 232)
    assert cells[8].cell_dim == (96, 96)
    assert annotations[8] == {'float0': True, 'float1': True}
def test_break():
    """
    Exercise layout breakpoints and the ``@`` (break) operator.

    ``layout @ i`` splits a product layout at its i-th breakpoint into a
    ``(left, right)`` pair; annotations must be renumbered on each side.

    Fix: the original asserts were written as
    ``assert len(left), len(right) == (2, 10)`` — that parses as an assert
    on the 2-tuple ``(len(left), len(right) == (2, 10))``, which is always
    truthy, so the comparisons were never actually checked. They now use
    the tuple-comparison form already used (correctly) in the loop below.
    """
    layout0 = Cell() | Cell() | Cell() << (1 / 3)
    layout1 = Cell() | Cell() << (1 / 2)
    layout2 = Cell() / Cell() << (1 / 2)
    layout = layout0 * layout1 * layout2
    # Product has 3 * 2 * 2 = 12 cells; breaking at breakpoint i yields
    # 2 * (i + 1) cells on the left and the remainder on the right.
    left, right = layout @ 0
    assert (len(left), len(right)) == (2, 10)
    left, right = layout @ 1
    assert (len(left), len(right)) == (4, 8)
    left, right = layout @ 2
    assert (len(left), len(right)) == (6, 6)
    left, right = layout @ 3
    assert (len(left), len(right)) == (8, 4)
    left, right = layout @ 4
    assert (len(left), len(right)) == (10, 2)

    assert len(list(layout.breakpoints)) == 5
    assert len(list(layout[0].parent.breakpoints)) == 1
    assert len(list(layout[0].parent.parent.breakpoints)) == 1

    layout = layout1 * layout0 * layout2
    breakpoints = list(layout.breakpoints)
    # not intuitive, because of operator precedence
    assert breakpoints[0] is layout.left
    assert breakpoints[1] is layout.left.right
    assert breakpoints[2] is layout
    assert breakpoints[3] is layout.right
    assert breakpoints[4] is layout.right.right
    left, right = layout @ 0
    assert (len(left), len(right)) == (2, 10)
    left, right = layout @ 1
    assert (len(left), len(right)) == (4, 8)
    left, right = layout @ 2
    assert (len(left), len(right)) == (6, 6)
    left, right = layout @ 3
    assert (len(left), len(right)) == (8, 4)
    left, right = layout @ 4
    assert (len(left), len(right)) == (10, 2)

    layout = (
        layout1 * layout1 * layout1 * layout0 * layout1 * layout2 * layout1
    )
    total = len(layout)
    k = 4
    # NOTE(review): retained from the original; presumably a smoke-check
    # that breaking deep in the chain does not raise -- confirm it has no
    # side effects on `layout`.
    layout @ 35
    for i in range(47):
        left, right = layout @ i
        try:
            len(right)
        except Exception:
            # NOTE(review): retained silent skip from the original; a
            # failure here would be better surfaced with pytest.fail.
            print('very bad', i)
            continue
        assert (len(left), len(right)) == (k * (i + 1), total - k * (i + 1))

    annotated = layout.annotate(
        {i: {'i': i} for i in range(len(layout))},
        default_elements=None,
    )
    for i in range(47):
        left, right = annotated @ i
        assert (len(left), len(right)) == (k * (i + 1), total - k * (i + 1))
        # Left side keeps 0-based annotations; right side is renumbered
        # from 0 but retains the original 'i' payloads.
        assert left.annotations == {j: {'i': j} for j in range(k * (i + 1))}
        assert right.annotations == {
            idx: {'i': j}
            for idx, j in enumerate(range(k * (i + 1), total))
        }
def test_net():
    """
    Smoke test: render a brain network (nodes + edges from a synthetic
    Schaefer-400 connectivity matrix) over a right-hemisphere inflated
    surface and save per-view snapshots to /tmp.
    """
    parcellation = get_schaefer400_cifti()
    # Synthetic connectome: tab-separated square matrix, no header.
    cov = pd.read_csv(
        get_schaefer400_synthetic_conmat(), sep='\t', header=None
    ).values

    # Select a handful of nodes in each hemisphere block of the 400-node
    # parcellation to restrict the visualised edges.
    vis_nodes_edge_selection = np.zeros(400, dtype=bool)
    vis_nodes_edge_selection[0:5] = True
    vis_nodes_edge_selection[200:205] = True

    plot_f = plotdef(
        surf_from_archive(),
        surf_scalars_from_cifti('parcellation', plot=False),
        add_network_data(
            add_node_variable('vis'),
            add_edge_variable(
                "vis_conn",
                threshold=10,
                topk_threshold_nodewise=True,
                absolute=True,
                incident_node_selection=vis_nodes_edge_selection,
                emit_degree=True,
            ),
            add_edge_variable(
                "vis_internal_conn",
                absolute=True,
                connected_node_selection=vis_nodes_edge_selection,
            ),
        ),
        node_coor_from_parcels('parcellation'),
        build_network('vis'),
        parcellate_colormap('parcellation', 'network', target='node'),
        plot_to_image(),
        save_snapshots(
            fname_spec=(
                'network-schaefer400_view-{view}'
            ),
        ),
    )
    plot_f(
        template='fsLR',
        hemisphere='right',
        surf_projection='inflated',
        surf_alpha=0.2,
        parcellation_cifti=parcellation,
        node_radius='vis_conn_degree',
        node_color='index',
        # Physically-based rendering styling for glyphs.
        node_style={
            'pbr': True, 'metallic': 0.3, 'roughness': 0.1,
            'specular': 0.5, 'specular_power': 15,
        },
        edge_color='vis_conn_sgn',
        edge_radius='vis_conn_val',
        edge_style={
            'pbr': True, 'metallic': 0.3, 'roughness': 0.1,
            'specular': 0.5, 'specular_power': 15,
        },
        vis_nodal=vis_nodes_edge_selection.astype(int),
        vis_conn_adjacency=cov,
        vis_internal_conn_adjacency=cov,
        views=('dorsal', 'lateral', 'posterior'),
        output_dir='/tmp',
    )
def test_net_highlight_nooverlay():
    """
    Smoke test: highlight a small visual-cortex node subset without the
    ``add_network_overlay`` wrapper, dimming non-selected elements via
    edge/node alpha variables; snapshots are written to /tmp.
    """
    parcellation = get_schaefer400_cifti()
    cov = pd.read_csv(
        get_schaefer400_synthetic_conmat(), sep='\t', header=None
    ).values

    # Two nodes per hemisphere block are selected for highlighting.
    vis_nodes_edge_selection = np.zeros(400, dtype=bool)
    vis_nodes_edge_selection[0:2] = True
    vis_nodes_edge_selection[200:202] = True

    plot_f = plotdef(
        surf_from_archive(),
        surf_scalars_from_cifti('parcellation', plot=False),
        #build_network('vis'),
        add_network_data(
            add_edge_variable(
                "vis_conn",
                absolute=True,
                incident_node_selection=vis_nodes_edge_selection,
            ),
            add_edge_variable(
                "vis_internal_conn",
                absolute=True,
                connected_node_selection=vis_nodes_edge_selection,
                emit_degree=True,
                # Non-surviving edges are retained at low alpha (0.03)
                # rather than removed, producing the dimming effect.
                emit_incident_nodes=(0.2, 1),
                removed_val=0.03,
                surviving_val=1.0,
            ),
            how='left',
        ),
        node_coor_from_parcels('parcellation'),
        build_network('vis'),
        parcellate_colormap('parcellation', 'modal', target='node'),
        plot_to_image(),
        save_snapshots(
            fname_spec=(
                'network-schaefer400_desc-visual_view-{view}'
            ),
        ),
    )
    plot_f(
        template="fsLR",
        surf_projection='veryinflated',
        surf_alpha=0.2,
        parcellation_cifti=parcellation,
        vis_conn_adjacency=cov,
        vis_internal_conn_adjacency=cov,
        node_radius='vis_internal_conn_degree',
        node_color='index',
        node_alpha='vis_internal_conn_incidents',
        edge_color='vis_conn_sgn',
        edge_radius='vis_conn_val',
        edge_alpha='vis_internal_conn_val',
        views=("dorsal", "left", "posterior"),
        output_dir='/tmp',
    )
def test_net_fig():
    """
    Smoke test: same network visualisation as ``test_net`` but composited
    into a 3x1 grid figure (``save_grid``) instead of per-view snapshots;
    output is written to /tmp.
    """
    parcellation = get_schaefer400_cifti()
    cov = pd.read_csv(
        get_schaefer400_synthetic_conmat(), sep='\t', header=None
    ).values

    vis_nodes_edge_selection = np.zeros(400, dtype=bool)
    vis_nodes_edge_selection[0:5] = True
    vis_nodes_edge_selection[200:205] = True

    plot_f = plotdef(
        surf_from_archive(),
        surf_scalars_from_cifti('parcellation', plot=False),
        add_network_data(
            add_node_variable('vis'),
            add_edge_variable(
                "vis_conn",
                threshold=10,
                topk_threshold_nodewise=True,
                absolute=True,
                incident_node_selection=vis_nodes_edge_selection,
                emit_degree=True,
            ),
            add_edge_variable(
                "vis_internal_conn",
                absolute=True,
                connected_node_selection=vis_nodes_edge_selection,
            ),
        ),
        node_coor_from_parcels('parcellation'),
        build_network('vis'),
        parcellate_colormap('parcellation', 'network', target='node'),
        plot_to_image(),
        save_grid(
            n_cols=3, n_rows=1,
            canvas_size=(1800, 500),
            canvas_color=(1, 1, 1),
            scalar_bar_action='collect',
        ),
    )
    plot_f(
        template='fsLR',
        surf_projection='inflated',
        surf_alpha=0.2,
        parcellation_cifti=parcellation,
        node_radius='vis_conn_degree',
        node_color='index',
        edge_color='vis_conn_sgn',
        edge_radius='vis_conn_val',
        vis_nodal=vis_nodes_edge_selection.astype(int),
        vis_conn_adjacency=cov,
        vis_internal_conn_adjacency=cov,
        views=('dorsal', 'left', 'posterior'),
        window_size=(1200, 1000),
        output_dir='/tmp',
        # NOTE(review): the f-prefix is redundant here ({{page}} escapes to
        # a literal {page} placeholder) -- harmless, left as-is.
        fname_spec=f'network-schaefer400_view-all_page-{{page}}',
    )
@pytest.mark.parametrize('cmap', ['network', 'modal'])
def test_net_highlight(cmap):
    """
    Smoke test: highlight a node subset using the ``add_network_overlay``
    wrapper, parametrised over two parcellation colormaps; snapshots are
    written to /tmp with the cmap name baked into the filename.
    """
    parcellation = get_schaefer400_cifti()
    cov = pd.read_csv(
        get_schaefer400_synthetic_conmat(), sep='\t', header=None
    ).values

    vis_nodes_edge_selection = np.zeros(400, dtype=bool)
    vis_nodes_edge_selection[0:2] = True
    vis_nodes_edge_selection[200:202] = True

    plot_f = plotdef(
        surf_from_archive(),
        surf_scalars_from_cifti('parcellation', plot=False),
        #build_network('vis'),
        # Overlay scope: network params below are namespaced as vis_*.
        add_network_overlay(
            'vis',
            add_network_data(
                add_edge_variable(
                    "vis_conn",
                    absolute=True,
                    incident_node_selection=vis_nodes_edge_selection,
                ),
                add_edge_variable(
                    "vis_internal_conn",
                    absolute=True,
                    connected_node_selection=vis_nodes_edge_selection,
                    emit_degree=True,
                    emit_incident_nodes=(0.2, 1),
                    removed_val=0.03,
                    surviving_val=1.0,
                ),
                how='left',
            ),
            parcellate_colormap('parcellation', 'modal', target='node'),
            node_coor_from_parcels('parcellation'),
        ),
        plot_to_image(),
        save_snapshots(
            fname_spec=(
                f'network-schaefer400_desc-visual_cmap-{cmap}_view-{{view}}'
            ),
        ),
    )
    plot_f(
        template="fsLR",
        surf_projection='veryinflated',
        surf_alpha=0.2,
        parcellation_cifti=parcellation,
        surf_scalars_cmap=cmap,
        vis_conn_adjacency=cov,
        vis_internal_conn_adjacency=cov,
        vis_node_radius='vis_internal_conn_degree',
        vis_node_color='index',
        vis_node_alpha='vis_internal_conn_incidents',
        vis_edge_color='vis_conn_sgn',
        vis_edge_radius='vis_conn_val',
        vis_edge_alpha='vis_internal_conn_val',
        views=("dorsal", "left", "posterior"),
        output_dir='/tmp',
    )
def test_net_highlight_fig():
    """
    Smoke test: same highlight pipeline as ``test_net_highlight`` but
    composited into a 3x1 grid figure via ``save_grid``; output to /tmp.
    """
    parcellation = get_schaefer400_cifti()
    cov = pd.read_csv(
        get_schaefer400_synthetic_conmat(), sep='\t', header=None
    ).values

    vis_nodes_edge_selection = np.zeros(400, dtype=bool)
    vis_nodes_edge_selection[0:2] = True
    vis_nodes_edge_selection[200:202] = True

    plot_f = plotdef(
        surf_from_archive(),
        surf_scalars_from_cifti('parcellation', plot=False),
        #build_network('vis'),
        add_network_overlay(
            'vis',
            add_network_data(
                add_edge_variable(
                    "vis_conn",
                    absolute=True,
                    incident_node_selection=vis_nodes_edge_selection,
                ),
                add_edge_variable(
                    "vis_internal_conn",
                    absolute=True,
                    connected_node_selection=vis_nodes_edge_selection,
                    emit_degree=True,
                    emit_incident_nodes=(0.2, 1),
                    removed_val=0.03,
                    surviving_val=1.0,
                ),
                how='left',
            ),
            parcellate_colormap('parcellation', 'modal', target='node'),
            node_coor_from_parcels('parcellation'),
        ),
        plot_to_image(),
        save_grid(
            n_cols=3, n_rows=1,
            canvas_size=(1800, 500),
            canvas_color=(1, 1, 1),
            fname_spec=f'network-schaefer400_desc-visual_view-all_page-{{page}}',
            scalar_bar_action='collect',
        ),
    )
    plot_f(
        template="fsLR",
        surf_projection='veryinflated',
        surf_alpha=0.2,
        parcellation_cifti=parcellation,
        vis_conn_adjacency=cov,
        vis_internal_conn_adjacency=cov,
        vis_node_radius='vis_internal_conn_degree',
        vis_node_color='index',
        vis_node_alpha='vis_internal_conn_incidents',
        vis_edge_color='vis_conn_sgn',
        vis_edge_radius='vis_conn_val',
        vis_edge_alpha='vis_internal_conn_val',
        views=("dorsal", "left", "posterior"),
        window_size=(1200, 1000),
        output_dir='/tmp',
    )
def create_sphere(radius=30, inner_radius=25, inmost_radius=20):
    """
    Build a spherical test point cloud on a coarse integer grid.

    Points are sampled on a 3-unit lattice and kept where their distance
    from the origin is below ``radius``. Each point carries a scalar:

    * outer shell (``inner_radius <= r < radius``): ramps from 1 up
      toward 2 with distance;
    * middle shell (``inmost_radius <= r < inner_radius``): ramps from
      -1 down toward -2 with distance;
    * core (``r < inmost_radius``): standard-normal noise drawn from the
      global NumPy RNG.

    Returns a ``(coordinates, values)`` pair, shapes (N, 3) and (N,).
    """
    axis = np.arange(-radius, radius, 3)
    # Stack the three meshgrid components into an (N, 3) coordinate array.
    points = np.concatenate(
        [component.reshape(1, -1) for component in np.meshgrid(axis, axis, axis)]
    ).T
    dist = np.sqrt((points ** 2).sum(-1))
    inside = dist < radius
    dist = dist[inside]
    points = points[inside]
    # Start with the outer-shell ramp everywhere, then overwrite inward.
    values = 1 + ((dist - inner_radius) / (radius - inner_radius))
    middle = dist < inner_radius
    values[middle] = -(
        1 + ((dist[middle] - inmost_radius) / (inner_radius - inmost_radius))
    )
    core = dist < inmost_radius
    values[core] = np.random.randn(core.sum())
    return points, values
@pytest.mark.parametrize('output', ['image', 'html'])
@pytest.mark.parametrize('v2f', [True, False])
def test_parcellation_modal_cmap(output, v2f):
    """
    Smoke test: parcellation + pain-map surface overlays plus a synthetic
    sphere point cloud, rendered either to snapshots or to HTML, with and
    without vertex-to-face scalar conversion. Output goes to /tmp.
    """
    fname_spec = 'scalars-{surfscalars}_hemisphere-{hemisphere}'
    if v2f:
        # One vertex_to_face transform per overlay, applied in order.
        v2f_transform_sequence = (
            [vertex_to_face('parcellation')],
            [vertex_to_face('pain')],
        )
        fname_spec += '_mode-face'
    else:
        v2f_transform_sequence = ([], [])
        fname_spec += '_mode-vertex'
    if output == 'image':
        # Snapshots get one file per view; HTML is a single scene.
        fname_spec += '_view-{view}'
        out_transform_sequence = [
            plot_to_image(),
            save_snapshots(fname_spec=fname_spec),
        ]
    elif output == 'html':
        out_transform_sequence = [
            plot_to_html(fname_spec=fname_spec),
        ]
    sphere_coor, sphere_data = create_sphere()

    plot_f = plotdef(
        surf_from_archive(),
        add_surface_overlay(
            'parcellation',
            surf_scalars_from_cifti('parcellation'),
            parcellate_colormap('parcellation', 'modal'),
            *v2f_transform_sequence[0],
        ),
        add_surface_overlay(
            'pain',
            surf_scalars_from_nifti('pain', template='fsLR'),
            *v2f_transform_sequence[1],
        ),
        add_points_overlay(
            'sphere',
            points_scalars_from_array('sphere', point_size=8),
        ),
        *out_transform_sequence,
    )

    plot_f(
        template="fsLR",
        load_mask=True,
        parcellation_cifti=get_null400_cifti(),
        pain_nifti=nb.load(get_pain_thresh_nifti()),
        pain_cmap='inferno',
        pain_clim_percentile=True,
        pain_alpha=0.5,
        pain_below_color=(0, 0, 0, 0),
        sphere_coor=sphere_coor,
        sphere_values=sphere_data,
        # Diverging sphere scalars: positive shell in Reds, negative in
        # Blues; sub-threshold values rendered fully transparent.
        sphere_cmap='Reds',
        sphere_cmap_negative='Blues',
        sphere_clim=(1.0, 2.0),
        sphere_below_color=(0, 0, 0, 0),
        surf_projection=('veryinflated',),
        # surf_scalars_boundary_color='black',
        # surf_scalars_boundary_width=5,
        hemisphere=['left', 'right'],
        output_dir='/tmp',
    )
def test_overlay_allview():
    """
    Smoke test: compose an all-views figure (3x3 view grid plus a left
    column holding an inserted SVG) from three surface overlays, using an
    annotated ``Cell`` layout with ``save_figure``. Output goes to /tmp.
    """
    # Per-cell view/hemisphere assignments for the 3x3 grid; cell 4
    # (centre) instead hosts the title and scalar-bar elements.
    annotations = {
        0: dict(
            hemisphere='left',
            view='lateral',
        ),
        1: dict(view='anterior'),
        2: dict(
            hemisphere='right',
            view='lateral',
        ),
        3: dict(view='dorsal'),
        4: dict(elements=[
            'title',
            {'scalar_bar': ('salience', 'zstat')},
        ]),
        5: dict(view='ventral'),
        6: dict(
            hemisphere='left',
            view='medial',
        ),
        7: dict(view='posterior'),
        8: dict(
            hemisphere='right',
            view='medial',
        ),
    }
    # Shift indices up by one: cell 0 becomes the prepended SVG column.
    annotations = {k + 1: v for k, v in annotations.items()}
    layout_r = Cell() | Cell() | Cell() << (1 / 3)
    layout_c = Cell() / Cell() / Cell() << (1 / 3)
    layout = layout_c * layout_r
    #layout = layout % (Cell() | Cell() << (1 / 2)) << 4
    layout = Cell() | layout << (3 / 8)
    annotations[0] = dict(elements=['blend_insert'])
    layout = layout.annotate(annotations)
    fname_spec = 'scalars-{surfscalars}_hemisphere-{hemisphere}_mode-face'
    v2f_transform_sequence = (
        [vertex_to_face('parcellation')],
        [vertex_to_face('salience')],
        [vertex_to_face('pain')],
    )
    fname_spec += '_view-{view}'
    out_transform_sequence = [
        plot_to_image(),
        # save_grid(
        #     n_cols=3, n_rows=3, padding=10,
        #     canvas_size=(1800, 1500),
        #     canvas_color=(0, 0, 0),
        #     fname_spec=f'scalars-painoverlay_view-all_page-{{page}}',
        #     scalar_bar_action='collect',
        #     annotations=annotations,
        # ),
        save_figure(
            layout_kernel=layout,
            #padding=10,
            canvas_size=(2880, 1500),
            canvas_color=(0, 0, 0),
            fname_spec=f'scalars-painoverlay_view-all_page-{{page}}',
            scalar_bar_action='collect',
        ),
    ]

    plot_f = plotdef(
        surf_from_archive(),
        add_surface_overlay(
            'parcellation',
            surf_scalars_from_cifti('parcellation'),
            parcellate_colormap('parcellation', 'modal'),
            *v2f_transform_sequence[0],
        ),
        add_surface_overlay(
            'salience',
            surf_scalars_from_nifti('salience', template='fsLR'),
            *v2f_transform_sequence[1],
        ),
        add_surface_overlay(
            'pain',
            surf_scalars_from_nifti('pain', template='fsLR'),
            *v2f_transform_sequence[2],
        ),
        svg_element(
            name='blend_insert',
            src_file=get_svg_blend(),
            height=375,
            width=300,
        ),
        *out_transform_sequence,
    )
    plot_f(
        template="fsLR",
        load_mask=True,
        parcellation_cifti=get_schaefer400_cifti(),
        parcellation_alpha=0.5,
        parcellation_scalar_bar_style={
            'name': 'parcel',
            'orientation': 'h',
        },
        salience_nifti=get_salience_ic_nifti(),
        salience_cmap='magma',
        salience_clim=(3, 8),
        salience_cmap_negative='cool',
        salience_alpha=0.7,
        salience_scalar_bar_style={
            'name': 'salience',
            'orientation': 'h',
        },
        salience_below_color=(0, 0, 0, 0),
        pain_nifti=nb.load(get_pain_thresh_nifti()),
        pain_cmap='Reds',
        pain_clim=(0.3, 8),
        pain_alpha=0.7,
        pain_below_color=(0, 0, 0, 0),
        pain_scalar_bar_style={
            'name': 'zstat',
            'orientation': 'h',
        },
        surf_projection=('veryinflated',),
        # None hemisphere requests a both-hemisphere scene for the
        # whole-brain views below.
        hemisphere=['left', 'right', None],
        views={
            'left': ('medial', 'lateral'),
            'right': ('medial', 'lateral'),
            'both': ('dorsal', 'ventral', 'anterior', 'posterior'),
        },
        elements={
            'title': (
                TextBuilder(
                    content='pain',
                    bounding_box_height=192,
                    font_size_multiplier=0.25,
                    font_color='#cccccc',
                    priority=-1,
                ),
            ),
        },
        window_size=(600, 500),
        theme=pv.themes.DarkTheme(),
        output_dir='/tmp',
    )
def test_autocams():
    """
    Smoke test: automatic camera placement (``auto_camera``) with a
    focused view, 3 orthogonal views, and 8 planar angles, composited
    via ``save_figure`` into an annotated layout. Output goes to /tmp.
    """
    # Cells: 0 focused, 1-3 ortho, 4-11 planar, 12 scalar bar.
    layout = Cell() | Cell() | Cell() << (1 / 3)
    layout = layout / layout << (1 / 2)
    layout = Cell() | layout << (2 / 5)
    bottom = Cell() | Cell() | Cell() | Cell() | Cell() << (1 / 5)
    layout = layout / bottom << (2 / 3)
    layout = layout | Cell() << (15 / 16)
    annotations = {
        **{0: {'view': 'focused'}},
        **{k: {'view': 'ortho'} for k in range(1, 4)},
        **{k: {'view': 'planar'} for k in range(4, 12)},
        # NOTE(review): ('pain') is just the string 'pain', not a 1-tuple;
        # presumably ('pain',) was intended -- confirm against the
        # scalar_bar element contract.
        **{12: {'elements': [{'scalar_bar': ('pain')}]}}
    }
    layout = layout.annotate(annotations)

    # NOTE(review): only used by the commented-out gifti pathway below.
    parcellation_gifti = get_schaefer400_gifti(tpl='fsaverage')
    plot_f = plotdef(
        surf_from_archive(),
        add_surface_overlay(
            'parcellation',
            #surf_scalars_from_gifti('parcellation', is_masked=False),
            surf_scalars_from_cifti('parcellation'),
            parcellate_colormap('parcellation', 'network', template='fsLR'),
            vertex_to_face('parcellation'),
        ),
        add_surface_overlay(
            'pain',
            surf_scalars_from_nifti('pain', template='fsLR'),
            vertex_to_face('pain'),
        ),
        plot_to_image(),
        auto_camera(n_ortho=3, focus='centroid', n_angles=8, surf_scalars='pain:points'),
        save_figure(
            layout_kernel=layout,
            #padding=10,
            canvas_size=(3200, 1500),
            canvas_color=(0, 0, 0),
            scalar_bar_action='collect',
        ),
    )
    plot_f(
        #template='fsaverage',
        template='fsLR',
        load_mask=True,
        parcellation_cifti=get_schaefer400_cifti(),
        # parcellation_gifti_left=parcellation_gifti['left'],
        # parcellation_gifti_right=parcellation_gifti['right'],
        parcellation_alpha=0.4,
        pain_nifti=nb.load(get_pain_thresh_nifti()),
        pain_cmap='inferno',
        pain_clim=(0.3, 8),
        pain_alpha=0.9,
        pain_below_color=(0, 0, 0, 0),
        surf_projection=('veryinflated',),
        surf_style={
            'pbr': True,
            'metallic': 0.05,
            'roughness': 0.1,
            'specular': 0.5,
            'specular_power': 15,
            # 'diffuse': 1,
        },
        hemisphere='left',
        window_size=(600, 500),
        theme=pv.themes.DarkTheme(),
        output_dir='/tmp',
        fname_spec=f'scalars-painoverlay_view-auto_page-{{page}}',
    )
    theme=pv.themes.DarkTheme(),
    output_dir='/tmp',
    # NOTE(review): f-string has no replacement fields at format time — the
    # doubled braces only emit a literal '{page}' for downstream templating;
    # a plain string would do the same.
    fname_spec=f'scalars-painoverlay_view-auto_page-{{page}}',
)
--------------------------------------------------------------------------------
/tests/test_sparque.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Unit tests using the parcellations in the OHBM ``sparque`` poster
"""
import pytest

from hyve_examples import (
    get_schaefer400_cifti,
    get_myconnectomeWard400_nifti,
    get_mscWard400_nifti,
    get_glasser360_gifti,
    get_gordon333_gifti,
)
from hyve.flows import plotdef
from hyve.transforms import (
    surf_from_archive,
    surf_scalars_from_cifti,
    surf_scalars_from_gifti,
    surf_scalars_from_array,
    surf_scalars_from_nifti,
    parcellate_colormap,
    plot_to_image,
    save_snapshots,
)
from hyve.util import sanitise

@pytest.mark.parametrize('cmap', ['network', 'modal'])
@pytest.mark.parametrize('parcellation_name, parcellation_path', [
    ('Schaefer400', get_schaefer400_cifti()),
    ('MyConnectomeWard400', get_myconnectomeWard400_nifti()),
    ('MSCWard400', get_mscWard400_nifti()),
    ('Glasser360', get_glasser360_gifti()),
    ('Gordon333', get_gordon333_gifti()),
])
def test_sparque(parcellation_name, parcellation_path, cmap):
    """
    Snapshot each sparque parcellation to /tmp, dispatching on the input
    format (GIfTI pair, NIfTI volume, or CIfTI) to pick the right loader.
    """

    # import lytemaps
    # from nilearn import datasets, surface
    # fsaverage = lytemaps.datasets.fetch_fsaverage()
    # print(fsaverage['pial'].L)
    # surf_data_L = surface.vol_to_surf(
    #     parcellation_path,
    #     surf_mesh=str(fsaverage["pial"].L),
    #     #inner_mesh=fsaverage["white_left"],
    #     interpolation="nearest",
    # )
    # surf_data_R = surface.vol_to_surf(
    #     parcellation_path,
    #     surf_mesh=str(fsaverage["pial"].R),
    #     #inner_mesh=fsaverage["white_left"],
    #     interpolation="nearest",
    # )
    # print(surf_data_L.shape, surf_data_R.shape)

    paramstr = sanitise(parcellation_name)
    # A dict marks a left/right GIfTI pair; otherwise dispatch on the file
    # suffix. Order matters: '.nii.gz' must be tested before '.nii'.
    if isinstance(parcellation_path, dict):
        parcellation_path_L, parcellation_path_R = (
            parcellation_path['left'], parcellation_path['right']
        )
        filearg = {
            f'{paramstr}_gifti_left': parcellation_path_L,
            f'{paramstr}_gifti_right': parcellation_path_R,
        }
        transform = surf_scalars_from_gifti(
            parcellation_name,
            is_masked=False,
            apply_mask=True,
            allow_multihemisphere=False,
        )
    elif parcellation_path.endswith('.nii.gz'):
        filearg = {f'{paramstr}_nifti': parcellation_path}
        transform = surf_scalars_from_nifti(
            parcellation_name,
            template='fsLR',
            method='nearest',
            plot=True,
            threshold=0,
            allow_multihemisphere=False,
        )
    elif parcellation_path.endswith('.nii'):  # Not always, but here yes
        filearg = {f'{paramstr}_cifti': parcellation_path}
        transform = surf_scalars_from_cifti(
            parcellation_name,
            plot=True,
            allow_multihemisphere=False,
        )


    plot_f = plotdef(
        surf_from_archive(),
        transform,
        parcellate_colormap(parcellation_name, cmap),
        #vertex_to_face(parcellation_name),
        plot_to_image(),
        save_snapshots(
            fname_spec=(
                'project-sparque_scalars-{surfscalars}_hemisphere-{hemisphere}_view-{view}_'
                f'cmap-{cmap}'
            ),
        ),
    )
    plot_f(
        template='fsLR',
        load_mask=True,
        surf_projection=('veryinflated',),
        hemisphere=['left', 'right'],
        views=[
            'lateral', 'medial', 'ventral', 'dorsal', 'anterior', 'posterior',
        ],
        output_dir='/tmp',
        surf_scalars_boundary_color='black',
        # can't do this because it's autofilled by parcellate_colormap
        # surf_scalars_below_color='#555555',
        surf_scalars_boundary_width=3,
        **filearg,
        # We need a test for array inputs
        # **{f'{parcellation_name}_array_left': surf_data_L},
        # **{f'{parcellation_name}_array_right': surf_data_R},
    )
--------------------------------------------------------------------------------
/tests/test_surfplot.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Unit tests for elementary brain surface visualisations
"""
import pytest

import numpy as np
import pyvista as pv
import templateflow.api as tflow

from hyve_examples import (
    get_null400_cifti,
    get_null400_gifti,
    get_poldrack_freesurfer,
    get_pain_thresh_nifti,
)
from hyve.flows import plotdef
from hyve.transforms import (
    surf_from_archive,
    surf_from_freesurfer,
    surf_scalars_from_array,
    surf_scalars_from_cifti,
    surf_scalars_from_freesurfer,
    surf_scalars_from_gifti,
    surf_scalars_from_nifti,
    add_surface_overlay,
    parcellate_colormap,
    parcellate_surf_scalars,
    scatter_into_parcels,
    vertex_to_face,
    draw_surface_boundary,
    select_active_parcels,
    plot_to_html,
    plot_to_image,
    save_snapshots,
)

@pytest.mark.parametrize('hemi', ['left', 'right'])
@pytest.mark.parametrize('overlay', [True, False])
@pytest.mark.parametrize('paramindef', [True, False])
@pytest.mark.parametrize('scalars_cmap', [
    ('parcellation', 'network'),
    ('parcellation', 'modal'),
    ('parcellation', 'bone'),
    ('gmdensity', 'bone'),
    ('gmscatter', 'bone'),
    ('pain', 'magma'),
])
def test_surface_snap_battery(hemi, overlay, paramindef, scalars_cmap):
    """
    Battery of surface snapshots crossing scalar source, colormap, overlay
    wrapping, and whether options are passed in the plot definition
    (``paramindef``) or at call time.
    """
    scalars, cmap = scalars_cmap
    inargs = {}
    fname_spec = (
        'scalars-{surfscalars}_hemisphere-{hemisphere}_view-{view}_'
        f'cmap-{cmap}_overlay-{overlay}_paramindef-{paramindef}'
    )
    scatter = (scalars == 'gmscatter')
    if scatter:
        fname_spec = f'{fname_spec}_mode-scatter'
    # Select the input primitives for the requested scalar source.
    if scalars == 'parcellation':
        inprims = [
            surf_scalars_from_cifti(scalars),
            parcellate_colormap(
                scalars,
                cmap_name=(
                    cmap if (paramindef and cmap != 'bone') else 'network'
                ),
            ),
            vertex_to_face(scalars),
        ]
        inargs = {**inargs, 'parcellation_cifti': get_null400_cifti()}
        if not paramindef or cmap == 'bone':
            inargs = {**inargs, **{(
                'parcellation_cmap' if overlay else 'surf_scalars_cmap'
            ): cmap}}
    elif scalars == 'gmscatter':
        defargs = {}
        if paramindef:
            defargs = {'plot': False}
        else:
            inargs = {
                **inargs,
                'gmdensity_plot': False,
                'parcellation_plot': False,
            }
        inprims = [
            surf_scalars_from_nifti('gmdensity', **defargs),
            surf_scalars_from_cifti('parcellation', **defargs),
            parcellate_surf_scalars('gmdensity', 'parcellation'),
            vertex_to_face('gmdensity', interpolation='mode'),
        ]
        # Rebound so the gmdensity input arguments below also cover the
        # scatter case.
        scalars = 'gmdensity'
    else:
        defargs = {}
        if paramindef:
            defargs = {'interpolation': 'mean'}
        else:
            inargs = {**inargs, f'{scalars}_v2f_interpolation': 'mean'}
        inprims = [
            surf_scalars_from_nifti(scalars),
            vertex_to_face(scalars, **defargs),
        ]
    if scalars == 'gmdensity':
        inargs = {
            **inargs,
            # NOTE(review): f-string with no placeholders; a plain
            # 'gmdensity_nifti' key would be equivalent.
            f'gmdensity_nifti': tflow.get(
                template='MNI152NLin2009cAsym',
                suffix='probseg',
                label='GM',
                resolution=2
            ),
            **{(
                f'{scalars}_cmap' if overlay else 'surf_scalars_cmap'
            ): cmap},
        }
        if scatter:
            inargs = {**inargs, **{
                'parcellation_cifti': get_null400_cifti(),
            }}
    elif scalars == 'pain':
        inargs = {
            **inargs,
            'pain_nifti': get_pain_thresh_nifti(),
            (
                f'{scalars}_cmap' if overlay else 'surf_scalars_cmap'
            ): cmap,
            (
                f'{scalars}_below_color'
                if overlay
                else 'surf_scalars_below_color'
            ): (0.3, 0.3, 0.3, 0.3),
        }
    if overlay:
        inprims = [
            add_surface_overlay(scalars, *inprims)
        ]
    # fname_spec goes either into the definition or into the call,
    # depending on the paramindef axis.
    if paramindef:
        outprims = [save_snapshots(fname_spec=fname_spec)]
        outargs = {}
    else:
        outprims = [save_snapshots()]
        outargs = {'fname_spec': fname_spec}
    plot_f = plotdef(
        surf_from_archive(),
        *inprims,
        plot_to_image(),
        *outprims,
    )
    plot_f(
        template="fsLR",
        load_mask=True,
        **inargs,
        surf_projection=['veryinflated'],
        # surf_scalars_boundary_color='black',
        # surf_scalars_boundary_width=5,
        hemisphere=hemi,
        window_size=(600, 400),
        output_dir='/tmp',
        views=('lateral',),
        **outargs,
    )


def test_scalars():
    """
    Snapshot fsaverage surfaces that use one dataset (gmdensity) for colour
    and another (random noise arrays) for per-vertex alpha.
    """
    plot_f = plotdef(
        surf_from_archive(),
        surf_scalars_from_nifti('gmdensity', template='fsaverage', plot=True),
        surf_scalars_from_array('noise', plot=False),
        plot_to_image(),
        save_snapshots(
            fname_spec=(
                'scalars-{surfscalars}_hemisphere-{hemisphere}_view-{view}'
            ),
        ),
    )
    plot_f(
        template='fsaverage',
        load_mask=True,
        gmdensity_nifti=tflow.get(
            template='MNI152NLin2009cAsym',
            suffix='probseg',
            label='GM',
            resolution=2
        ),
        # Per-hemisphere fsaverage vertex counts differ slightly here.
        noise_array_left=np.random.randn(37476),
        noise_array_right=np.random.randn(37471),
        surf_style={
            'pbr': True,
            'metallic': 0.05,
            'roughness': 0.1,
            'specular': 0.5,
            'specular_power': 15,
            # 'diffuse': 1,
        },
        surf_scalars_color='gmdensity',
        surf_scalars_alpha='noise',
        surf_projection=('pial',),
        hemisphere=['left', 'right'],
        output_dir='/tmp',
    )


@pytest.mark.ci_unsupported
@pytest.mark.parametrize('cmap', ['network', 'modal'])
@pytest.mark.parametrize('v2f,imgtype', [(True, 'cifti'), (False, 'gifti')])
def test_parcellation(
    cmap: str,
    v2f: bool,
    imgtype: str,
):
    """
    Snapshot a null-400 parcellation from CIfTI or GIfTI input, optionally
    resampled from vertex to face scalars.
    """
    additional = []
    mode = 'vertex'
    if v2f:
        additional.append(vertex_to_face('parcellation'))
        mode = 'face'
    match imgtype:
        case 'cifti':
            inprim = [surf_scalars_from_cifti('parcellation', plot=True)]
            imgargs = {'parcellation_cifti': get_null400_cifti()}
        case 'gifti':
            inprim = [surf_scalars_from_gifti('parcellation', plot=True)]
            gifti = get_null400_gifti()
            imgargs = {
                'parcellation_gifti_left': gifti['left'],
                'parcellation_gifti_right': gifti['right'],
            }
    plot_f = plotdef(
        surf_from_archive(),
        *inprim,
        parcellate_colormap('parcellation', cmap),
        *additional,
        plot_to_image(),
        save_snapshots(
            fname_spec=(
                'scalars-{surfscalars}_hemisphere-{hemisphere}_view-{view}_'
                f'cmap-{cmap}_mode-{mode}'
            ),
        ),
    )
    plot_f(
        template="fsLR",
        load_mask=True,
        **imgargs,
        surf_projection=('veryinflated',),
        surf_color='black',
        surf_style={'lighting': False},
        parallel_projection=True,
        hemisphere=['left', 'right'],
        # Named views plus a vector view and a full (position, focus, up)
        # camera specification.
        views=[
            'lateral', 'medial', 'ventral', 'dorsal', 'anterior', 'posterior',
            (-20, 0, 0), ((65, 65, 0), (0, 0, 0), (0, 0, 1))
        ],
        output_dir='/tmp',
    )


@pytest.mark.ci_unsupported
@pytest.mark.parametrize('cmap', ['network', 'modal'])
@pytest.mark.parametrize('v2f,imgtype', [(True, 'cifti'), (False, 'gifti')])
def test_boundary_maps(
    cmap: str,
    v2f: bool,
    imgtype: str,
):
    """
    Overlay a zstat map on a parcellation whose boundaries are drawn onto
    the parcellation layer itself (``copy_values_to_boundary=True``).
    """
    additional = []
    drawbound_args = {}
    mode = 'vertex'
    if v2f:
        additional.append(vertex_to_face('zstat', interpolation='mean'))
        mode = 'face'
        drawbound_args = {
            'target_domain': 'face',
            'num_steps': 1,  # 0,
            'v2f_interpolation': 'mode',
        }
    match imgtype:
        case 'cifti':
            inprim = [surf_scalars_from_cifti('parcellation', plot=False)]
            imgargs = {'parcellation_cifti': get_null400_cifti()}
        case 'gifti':
            inprim = [surf_scalars_from_gifti('parcellation', plot=False)]
            gifti = get_null400_gifti()
            imgargs = {
                'parcellation_gifti_left': gifti['left'],
                'parcellation_gifti_right': gifti['right'],
            }
    plot_f = plotdef(
        surf_from_archive(),
        add_surface_overlay(
            'zstat',
            surf_scalars_from_nifti('zstat', plot=True),
            *additional,
        ),
        add_surface_overlay(
            'parcellation',
            *inprim,
            parcellate_colormap('parcellation'),
            draw_surface_boundary(
                'parcellation',
                'parcellation',
                copy_values_to_boundary=True,
                **drawbound_args,
            ),
        ),
        plot_to_image(),
        save_snapshots(
            fname_spec=(
                'scalars-{surfscalars}_hemisphere-{hemisphere}_view-{view}_'
                f'cmap-{cmap}_mode-{mode}'
            ),
        ),
    )
    plot_f(
        template="fsLR",
        load_mask=True,
        **imgargs,
        parcellation_cmap=cmap,
        parcellation_alpha=0.5,
        zstat_nifti=get_pain_thresh_nifti(),
        zstat_cmap='magma',
        zstat_below_color=(0, 0, 0, 0),
        surf_projection=('veryinflated',),
        surf_color=(0.3, 0.3, 0.3),
        surf_style={'lighting': False},
        parallel_projection=True,
        hemisphere=['left', 'right'],
        views=[
            'lateral', 'medial',
        ],
        output_dir='/tmp',
        empty_builders=True,
        window_size=(600, 400),
        theme=pv.themes.DarkTheme(),
    )


@pytest.mark.ci_unsupported
@pytest.mark.parametrize('cmap', ['network', 'modal'])
@pytest.mark.parametrize('v2f,imgtype', [(True, 'cifti'), (False, 'gifti')])
def test_boundary_maps2(
    cmap: str,
    v2f: bool,
    imgtype: str,
):
    """
    Variant of the boundary-map test that draws parcel boundaries into a
    separate 'parcellation_boundary' overlay instead of copying values onto
    the parcellation layer.
    """
    additional_zstat = []
    additional = []
    drawbound_args = {}
    mode = 'vertex'
    if v2f:
        additional_zstat.append(vertex_to_face('zstat', interpolation='mean'))
        additional.append(vertex_to_face('parcellation', interpolation='mode'))
        mode = 'face'
        drawbound_args = {
            'target_domain': 'face',
            'num_steps': 0,
            'v2f_interpolation': 'mode',
        }
    match imgtype:
        case 'cifti':
            inprim = [surf_scalars_from_cifti('parcellation', plot=True)]
            imgargs = {'parcellation_cifti': get_null400_cifti()}
        case 'gifti':
            inprim = [surf_scalars_from_gifti('parcellation', plot=True)]
            gifti = get_null400_gifti()
            imgargs = {
                'parcellation_gifti_left': gifti['left'],
                'parcellation_gifti_right': gifti['right'],
            }
    plot_f = plotdef(
        surf_from_archive(),
        add_surface_overlay(
            'parcellation',
            *inprim,
            parcellate_colormap('parcellation'),
        ),
        add_surface_overlay(
            'parcellation_boundary',
            draw_surface_boundary(
                'parcellation',
                'parcellation_boundary',
                **drawbound_args,
            ),
        ),
        add_surface_overlay(
            'zstat',
            surf_scalars_from_nifti('zstat', plot=True),
            *additional_zstat,
        ),
        *additional,
        plot_to_image(),
        save_snapshots(
            fname_spec=(
                'scalars-{surfscalars}_hemisphere-{hemisphere}_view-{view}_'
                f'cmap-{cmap}_mode-{mode}'
            ),
        ),
    )
    plot_f(
        template="fsLR",
        load_mask=True,
        **imgargs,
        parcellation_cmap=cmap,
        parcellation_boundary_color='black',
        zstat_nifti=get_pain_thresh_nifti(),
        zstat_cmap='magma',
        zstat_alpha=0.8,
        zstat_below_color=(0, 0, 0, 0),
        surf_projection=('veryinflated',),
        surf_color=(0.3, 0.3, 0.3),
        surf_style={'lighting': False},
        parallel_projection=True,
        hemisphere=['left', 'right'],
        views=[
            'lateral', 'medial',
        ],
        output_dir='/tmp',
        empty_builders=True,
        window_size=(600, 400),
    )


@pytest.mark.ci_unsupported
#@pytest.mark.parametrize('cmap', ['network', 'modal'])
@pytest.mark.parametrize('v2f,imgtype', [(True, 'cifti'), (False, 'gifti')])
def test_active_selection(
    #cmap: str,
    v2f: bool,
    imgtype: str,
):
    """
    Keep only parcels sufficiently covered by the zstat map
    (``select_active_parcels``) and outline them over the statistic.
    """
    additional = []
    drawbound_args = {}
    mode = 'vertex'
    if v2f:
        additional.append(vertex_to_face('zstat', interpolation='mean'))
        mode = 'face'
        drawbound_args = {
            'target_domain': 'face',
            'num_steps': 1,  # 0,
            'v2f_interpolation': 'mode',
        }
    match imgtype:
        case 'cifti':
            inprim = [surf_scalars_from_cifti('parcellation', plot=False)]
            imgargs = {'parcellation_cifti': get_null400_cifti()}
        case 'gifti':
            inprim = [surf_scalars_from_gifti('parcellation', plot=False)]
            gifti = get_null400_gifti()
            imgargs = {
                'parcellation_gifti_left': gifti['left'],
                'parcellation_gifti_right': gifti['right'],
            }
    plot_f = plotdef(
        surf_from_archive(),
        add_surface_overlay(
            'zstat',
            surf_scalars_from_nifti('zstat', plot=True),
        ),
        add_surface_overlay(
            'parcellation',
            *inprim,
            #parcellate_colormap('parcellation'),
            select_active_parcels('parcellation', 'zstat', parcel_coverage_threshold=0.1),
            draw_surface_boundary(
                'parcellation',
                'parcellation',
                copy_values_to_boundary=True,
                **drawbound_args,
            ),
        ),
        *additional,
        plot_to_image(),
        save_snapshots(
            fname_spec=(
                'scalars-{surfscalars}_hemisphere-{hemisphere}_view-{view}_'
                f'mode-{mode}'
            ),
        ),
    )
    plot_f(
        template="fsLR",
        load_mask=True,
        **imgargs,
        parcellation_color='aqua',
        parcellation_alpha=0.8,
        zstat_nifti=get_pain_thresh_nifti(),
        zstat_cmap='magma',
        zstat_below_color=(0, 0, 0, 0),
        surf_projection=('veryinflated',),
        surf_color=(0.3, 0.3, 0.3),
        surf_style={'lighting': False},
        parallel_projection=True,
        hemisphere=['left', 'right'],
        views=[
            'lateral', 'medial',
        ],
        output_dir='/tmp',
        empty_builders=True,
        window_size=(600, 400),
        theme=pv.themes.DarkTheme(),
    )


@pytest.mark.ci_unsupported
def test_parcellation_html():
    """Export an interactive HTML view of the null-400 parcellation."""
    plot_f = plotdef(
        surf_from_archive(),
        surf_scalars_from_cifti('parcellation', plot=True),
        parcellate_colormap('parcellation', 'network'),
        vertex_to_face('parcellation'),
        plot_to_html(),
    )
    plot_f(
        template="fsLR",
        load_mask=True,
        parcellation_cifti=get_null400_cifti(),
        surf_projection=['veryinflated'],
        # surf_scalars_boundary_color='black',
        # surf_scalars_boundary_width=5,
        hemisphere=['left', 'right'],
        window_size=(800, 800),
        output_dir='/tmp',
        fname_spec=(
            'scalars-{surfscalars}_hemisphere-{hemisphere}_cmap-network'
        ),
    )

@pytest.mark.ci_unsupported
def test_parcellated_scalars():
    """
    Two round trips through the parcellation: average gmdensity into
    parcels, then scatter a random parcel-wise vector back onto vertices.
    """
    plot_f = plotdef(
        surf_from_archive(),
        surf_scalars_from_nifti('gmdensity', template='fsLR', plot=False),
        surf_scalars_from_cifti('parcellation', plot=False),
        parcellate_surf_scalars('gmdensity', 'parcellation'),
        vertex_to_face('gmdensity', interpolation='mode'),
        plot_to_image(),
        save_snapshots(),
    )
    plot_f(
        template="fsLR",
        load_mask=True,
        parcellation_cifti=get_null400_cifti(),
        gmdensity_nifti=tflow.get(
            template='MNI152NLin2009cAsym',
            suffix='probseg',
            label="GM",
            resolution=2
        ),
        surf_projection=['inflated'],
        surf_scalars_clim=(0.1, 0.9),
        surf_scalars_below_color=(0, 0, 0, 0),
        hemisphere=['left', 'right'],
        output_dir='/tmp',
        fname_spec=(
            'scalars-{surfscalars}_hemisphere-{hemisphere}_view-{view}_parcellation-null'
        ),
    )

    parcellated = np.random.rand(400)
    plot_f = plotdef(
        surf_from_archive(),
        surf_scalars_from_cifti('parcellation', plot=False),
        scatter_into_parcels('noise', 'parcellation'),
        vertex_to_face('noise', interpolation='mode'),
        plot_to_image(),
        save_snapshots(
            fname_spec=(
                'scalars-{surfscalars}_hemisphere-{hemisphere}_view-{view}_parcellation-null'
            ),
        ),
    )
    plot_f(
        template="fsLR",
        load_mask=True,
        parcellation_cifti=get_null400_cifti(),
        noise_parcellated=parcellated,
        surf_projection=['inflated'],
        surf_scalars_clim=(0, 1),
        surf_scalars_cmap='inferno',
        surf_scalars_below_color=(0, 0, 0, 0),
        hemisphere=['left', 'right'],
        output_dir='/tmp',
    )


def test_freesurfer():
    """Render curvature from FreeSurfer geometry/morphometry inputs."""
    fs = get_poldrack_freesurfer()
    geom_left, morph_left = fs['left']
    geom_right, morph_right = fs['right']
    plot_f = plotdef(
        surf_from_freesurfer(projection='inflated'),
        surf_scalars_from_freesurfer('curv'),
        plot_to_image(),
        save_snapshots(
            fname_spec=(
                'scalars-{surfscalars}_hemisphere-{hemisphere}_view-{view}'
            ),
        ),
    )
    plot_f(
        inflated_left_surf=geom_left,
        inflated_right_surf=geom_right,
        curv_morph_left=morph_left,
        curv_morph_right=morph_right,
        surf_scalars_cmap='RdYlBu_r',
        surf_scalars_clim=(-0.35, 0.35),
        hemisphere=['left', 'right'],
        surf_style={'lighting': False},
        parallel_projection=True,
        output_dir='/tmp',
    )
--------------------------------------------------------------------------------
/tests/test_uniplot.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Unit tests for elementary surfplot-based visualisations
"""
import pytest

import nibabel as nb
import numpy as np
import pandas as pd
import pyvista as pv

from hyve_examples import (
    get_pain_thresh_nifti,
    get_schaefer400_cifti,
    get_schaefer400_synthetic_conmat,
)
from hyve.geom import (
    CortexTriSurface,
    Layer,
    PointDataCollection,
    PointData,
    NetworkDataCollection,
    NetworkData,
)
from hyve.plot import unified_plotter
from hyve.util import (
    filter_adjacency_data,
    filter_node_data,
)


@pytest.mark.ci_unsupported
def test_unified_plotter():
    """
    Drive ``unified_plotter`` directly through surfaces, scalar layers,
    point clouds, and network data, showing each result interactively.
    """
    surf = CortexTriSurface.from_nmaps(projections=('pial', 'inflated'))
    unified_plotter(
        surf=surf,
        surf_alpha=0.2,
        off_screen=False,
    )[0].show()

    unified_plotter(
        surf=surf,
        surf_alpha=0.2,
        off_screen=False,
        hemisphere='left',
        hemisphere_slack=1.2,
        surf_projection='inflated',
    )[0].show()

    surf.add_vertex_dataset(
        'data',
        data=np.random.randn(40962 * 2),
        apply_mask=False,
    )
    unified_plotter(
        surf=surf,
        surf_scalars='data',
        surf_scalars_cmap='magma',
        surf_alpha=0.2,
        hemisphere_slack=1.2,
        off_screen=False,
    )[0].show()

    surf_layer = Layer(
        name='data',
        cmap='hot',
        cmap_negative='cool',
        below_color='white',
        clim=(1.5, 3.0),
        alpha=0.8,
    )
    unified_plotter(
        surf=surf,
        surf_scalars='data',
        surf_scalars_cmap='gray',
        surf_scalars_layers=(surf_layer,),
        surf_alpha=0.2,
        hemisphere_slack=1.2,
        off_screen=False,
    )[0].show()

    # Build a point cloud from the suprathreshold voxels of the pain map,
    # mapping voxel indices to world coordinates via the affine.
    vol = nb.load(get_pain_thresh_nifti())
    vol_data = vol.get_fdata()
    vol_loc = np.where(vol_data > 0)
    vol_scalars = vol_data[vol_data > 0]
    vol_coor = np.stack(vol_loc)
    vol_coor = (vol.affine @ np.concatenate(
        (vol_coor, np.ones((1, vol_coor.shape[-1])))
    ))[:3].T
    vol_voxdim = vol.header.get_zooms()
    points_pain = PointData(
        pv.PointSet(vol_coor),
        data={'pain': vol_scalars},
        point_size=np.min(vol_voxdim[:3]),
    )
    points = PointDataCollection([points_pain])
    points_layer_pain = Layer(
        name='pain',
        cmap='viridis',
        clim=(5, 95),
        clim_percentile=True,
        alpha=0.8,
    )
    unified_plotter(
        points=points,
        points_scalars='pain',
        hemisphere_slack=1.2,
        off_screen=False,
    )[0].show()

    # Synthetic spherical shell of points with signed and random scalars.
    sphere_bounds = np.arange(-30, 30, 3)
    sphere_coor = np.concatenate([
        c.reshape(1, -1) for c in
        np.meshgrid(sphere_bounds, sphere_bounds, sphere_bounds)
    ]).T
    radius = np.sqrt((sphere_coor ** 2).sum(-1))
    sphere_index = radius < 30
    radius = radius[sphere_index]
    sphere_coor = sphere_coor[sphere_index]
    sphere_inner_index = radius < 25
    sphere_data = 1 + ((radius - 20) / 10)
    sphere_data[sphere_inner_index] = -(
        1 + ((radius[sphere_inner_index] - 10) / 10))
    sphere_inmost_index = radius < 20
    sphere_data[sphere_inmost_index] = np.random.randn(
        sphere_inmost_index.sum())
    points = PointDataCollection([
        points_pain,
        PointData(
            pv.PointSet(sphere_coor),
            data={'sphere': sphere_data},
            point_size=6,
        )
    ])
    points_layer_sphere = Layer(
        name='sphere',
        cmap='Reds',
        cmap_negative='Blues',
        below_color=(0, 0, 0, 0),
        clim=(1.0, 2.0),
        alpha=0.8,
    )
    unified_plotter(
        surf=surf,
        surf_alpha=0.2,
        points=points,
        points_scalars_layers=(points_layer_pain, points_layer_sphere),
        hemisphere_slack=1.2,
        off_screen=False,
    )[0].show()

    # Network view: node/edge attributes derived from a synthetic
    # Schaefer-400 connectivity matrix.
    parcellation = get_schaefer400_cifti()
    surf_lr = CortexTriSurface.from_tflow(load_mask=True, projections=('inflated',))
    surf_lr.add_vertex_dataset(
        'parcellation',
        data=nb.load(parcellation).get_fdata().ravel(),
        is_masked=True,
    )
    surf_lr.add_vertex_dataset(
        'data',
        data=np.random.rand(32492 * 2),
        apply_mask=False,
    )
    node_coor = surf_lr.parcel_centres_of_mass('parcellation', 'inflated')
    cov = pd.read_csv(
        get_schaefer400_synthetic_conmat(), sep='\t', header=None
    ).values
    vis_nodes_edge_selection = np.zeros(400, dtype=bool)
    vis_nodes_edge_selection[0:2] = True
    vis_nodes_edge_selection[200:202] = True
    node_data = filter_node_data(cov.sum(axis=0))
    node_data['radius'] = np.random.rand(400)
    node_data['opacity'] = np.random.rand(400)
    edge_data = filter_adjacency_data(
        cov, connected_node_selection=vis_nodes_edge_selection)
    node_clim = (node_data['node_val'].min(), node_data['node_val'].max())
    edge_clim = (-1, 1)
    node_lh = np.zeros(400, dtype=bool)
    node_lh[:200] = True
    network_data = NetworkDataCollection([
        NetworkData(
            'vis_conn',
            coor=node_coor,
            nodes=node_data,
            edges=edge_data,
            lh_mask=node_lh,
        ),
    ])
    unified_plotter(
        networks=network_data,
        node_clim=node_clim,
        node_color='node_val',
        node_radius='radius',
        node_rmap=(1, 10),
        node_alpha='opacity',
        edge_clim=edge_clim,
        hemisphere_slack=1.2,
        off_screen=False,
    )[0].show()

    unified_plotter(
        surf=surf_lr,
        surf_projection='inflated',
        surf_alpha=0.2,
        networks=network_data,
        node_clim=node_clim,
        node_color='node_val',
        edge_clim=edge_clim,
        hemisphere_slack=1.2,
        off_screen=False,
    )[0].show()

    # Everything together: surface scalars, point layers, and network.
    points = PointDataCollection([points_pain])
    unified_plotter(
        surf=surf_lr,
        surf_projection='inflated',
        surf_scalars='data',
        surf_scalars_cmap='magma',
        surf_alpha=0.2,
        points=points,
        points_scalars_layers=(points_layer_pain,),
        networks=network_data,
        node_clim=node_clim,
        node_color='node_val',
        edge_clim=edge_clim,
        hemisphere_slack=1.2,
        off_screen=False,
    )[0].show()
--------------------------------------------------------------------------------
/tests/test_volplot.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Unit tests for elementary brain volume visualisations
"""
import pytest
import pyvista as pv
import templateflow.api as tflow
from hyve_examples import get_pain_thresh_nifti
from hyve.flows import plotdef
from hyve.transforms import (
    surf_from_archive,
    points_scalars_from_nifti,
    select_internal_points,
    plot_to_image,
    save_snapshots,
    save_grid,
    add_points_overlay,
)


@pytest.mark.parametrize('overlay', [True, False])
@pytest.mark.parametrize('paramindef', [True, False])
@pytest.mark.parametrize('select_internal', [True, False])
@pytest.mark.parametrize('scalars_cmap', [
    ('parcellation', 'tab20b'),
    ('gmdensity', 'cividis'),
    ('wmdensity', 'cividis'),
    ('pain', 'magma'),
])
def test_points_snap_battery(
    overlay, paramindef, select_internal, scalars_cmap
):
    """
    Battery of volumetric point-cloud snapshots crossing scalar source,
    overlay wrapping, internal-point selection, and whether options live in
    the plot definition or the call.
    """
    scalars, cmap = scalars_cmap
    inargs = {
        (
            f'{scalars}_cmap' if overlay else 'points_scalars_cmap'
        ): cmap,
    }
    fname_spec = (
        'scalars-{pointsscalars}_hemisphere-{hemisphere}_view-{view}_'
        f'cmap-{cmap}_internal-{select_internal}_'
        f'overlay-{overlay}_paramindef-{paramindef}'
    )
    inprims = [
        points_scalars_from_nifti(scalars, geom_name='MNIres2grid'),
    ]
    extra_inprims = []
    # Pick the volume and clim for the requested scalar source.
    if scalars == 'parcellation':
        #extra_in = [points_scalars_from_nifti('gmdensity', plot=False)]
        inargs = {
            **inargs,
            'parcellation_nifti': tflow.get(
                'MNI152NLin2009cAsym',
                suffix='dseg',
                resolution=2,
                atlas='Schaefer2018',
                desc='400Parcels7Networks',
            ),
            (
                f'{scalars}_clim' if overlay else 'points_scalars_clim'
            ): (0.1, 400.9),
            # 'gmdensity_nifti': tflow.get(
            #     'MNI152NLin2009cAsym',
            #     suffix='probseg',
            #     label='GM',
            #     resolution=2,
            # ),
        }
        # extra_inprims += [
        #     select_internal_points(),
        #     points_scalars_from_nifti(
        #         'gmdensity',
        #         geom_name='MNIres2grid',
        #         plot=False,
        #     ),
        # ]
    elif scalars == 'pain':
        inargs = {
            **inargs,
            'pain_nifti': get_pain_thresh_nifti(),
        }
    else:
        label_key = 'GM' if scalars == 'gmdensity' else 'WM'
        inargs = {
            **inargs,
            f'{scalars}_nifti': tflow.get(
                'MNI152NLin2009cAsym',
                suffix='probseg',
                label=label_key,
                resolution=2,
            ),
            (
                f'{scalars}_clim' if overlay else 'points_scalars_clim'
            ): (0.3, 1.0),
        }
    if overlay:
        inprims = [
            add_points_overlay(scalars, *inprims)
        ]
    # fname_spec goes either into the definition or into the call,
    # depending on the paramindef axis.
    if paramindef:
        outprims = [save_snapshots(fname_spec=fname_spec)]
        outargs = {}
    else:
        outprims = [save_snapshots()]
        outargs = {'fname_spec': fname_spec}
    if select_internal:
        extra_inprims += [select_internal_points()]
    plot_f = plotdef(
        surf_from_archive(),
        *inprims,
        *extra_inprims,
        plot_to_image(),
        *outprims,
    )
    plot_f(
        template='fsaverage',
        surf_projection=('pial',),
        hemisphere=['left', 'right', 'both'],
        surf_alpha=0.3,
        **inargs,
        parallel_projection=True,
        window_size=(600, 400),
        output_dir='/tmp',
        views={
            'left': ('lateral',),
            'right': ('lateral',),
            'both': ('dorsal',),
        },
        **outargs,
        theme=pv.themes.DarkTheme(),
    )


@pytest.mark.parametrize('outprim', ['snapshots', 'grid'])
def test_points_scalars(outprim):
    """Save pain point-cloud scalars either as snapshots or as one grid page."""
    nii = get_pain_thresh_nifti()
    additional = []
    match outprim:
        case 'snapshots':
            additional += [
                save_snapshots(
                    fname_spec=(
                        'scalars-{pointsscalars}_view-{view}'
                    ),
                )
            ]
            add_args = {}
        case 'grid':
            additional += [
                save_grid(
                    n_cols=3, n_rows=1,
                    canvas_size=(1800, 500),
                    canvas_color=(1, 1, 1),
                    # NOTE(review): f-string has no replacement fields; the
                    # doubled braces only emit a literal '{page}'.
                    fname_spec=f'scalars-pain_view-all_page-{{page}}',
                    scalar_bar_action='collect',
                )
            ]
            add_args = {'window_size': (1200, 1000)}
    plot_f = plotdef(
        surf_from_archive(),
        #points_scalars_from_nifti('pain'),
        add_points_overlay(
            'pain',
            points_scalars_from_nifti('pain'),
        ),
        plot_to_image(),
        *additional,
    )
    plot_f(
        template='fsaverage',
        surf_projection=('pial',),
        hemisphere='right',
        surf_alpha=0.3,
        pain_nifti=nii,
        #points_alpha=0.8,
        pain_cmap='magma',
        pain_alpha=0.8,
        parallel_projection=True,
        #points_style={'style': 'points', 'point_size': 10.0},
        views=('dorsal', 'lateral', 'anterior'),
        **add_args,
        output_dir='/tmp',
    )
--------------------------------------------------------------------------------