├── .github
└── workflows
│ ├── build_docs.yml
│ └── run_tests.yml
├── .gitignore
├── .pre-commit-config.yaml
├── CITATION.cff
├── LICENSE.md
├── README.md
├── cedalion.def
├── data
└── ninja_cap_probe
│ ├── fullhead_56x144_System2_landmarks.tsv
│ ├── fullhead_56x144_System2_measlist.pkl
│ └── fullhead_56x144_System2_optodes.tsv
├── docs
├── CHANGELOG.md
├── LICENSE.md
├── Makefile
├── _static
│ ├── IBS_Logo_sm.png
│ └── css
│ │ └── rtd_fixes.css
├── _templates
│ ├── custom-class-template.rst
│ └── custom-module-template.rst
├── community
│ ├── contributing_code.md
│ ├── contributors.md
│ ├── index.md
│ └── toolboxes.md
├── conf.py
├── data_io
│ └── index.rst
├── environments.md
├── examples.rst
├── examples
│ └── Makefile
├── getting_started
│ ├── contributing_code
│ │ ├── contributing_code.md
│ │ ├── dirs_parent.png
│ │ └── dirs_src.png
│ ├── index.md
│ └── installation.md
├── image_reco
│ └── index.rst
├── img
│ ├── IBS_clr_small.png
│ ├── cedalion_frontpage.png
│ ├── recording
│ │ ├── dataarray_indexing_overview.png
│ │ ├── ndarray.png
│ │ └── rec_container_overview.png
│ ├── special_thanks.png
│ └── time_series_guiexample.png
├── index.md
├── machine_learning
│ └── index.rst
├── make.bat
├── plot_vis
│ └── index.rst
├── references.bib
├── references.rst
├── sigproc
│ └── index.rst
└── synth
│ └── index.rst
├── environment_dev.yml
├── environment_doc.yml
├── examples
├── 14_xarray_indexing.ipynb
├── PCArecurse_motion_correct.ipynb
├── align_ninja_to_colin.ipynb
├── augmentation
│ └── 61_synthetic_artifacts_example.ipynb
├── finger_tapping_full_pipeline.ipynb
├── getting_started_io
│ ├── 00_test_installation.ipynb
│ ├── 10_xarray_datastructs_fnirs.ipynb
│ ├── 11_recording_container.ipynb
│ ├── 13_data_structures_intro.ipynb
│ └── 34_store_hrfs_in_snirf_file.ipynb
├── head_models.ipynb
├── head_models
│ ├── 40_image_reconstruction.ipynb
│ ├── 41_photogrammetric_optode_coregistration.ipynb
│ ├── 42_1010_system.ipynb
│ └── 43_crs_and_headmodel.ipynb
├── machine_learning
│ └── 50_finger_tapping_lda_classification.ipynb
├── modeling
│ ├── 31_glm_basis_functions.ipynb
│ ├── 32_glm_fingertapping_example.ipynb
│ └── 33_glm_illustrative_example.ipynb
├── plots_visualization
│ └── 12_plots_example.ipynb
├── precompute_fluence.ipynb
├── registration.ipynb
├── signal_quality
│ ├── 20_scalp_coupling_index.ipynb
│ ├── 21_data_quality_and_pruning.ipynb
│ └── 22_motion_artefacts_and_correction.ipynb
├── sketch.ipynb
└── splineSG_motion_correct.ipynb
├── install_nirfaster.sh
├── pyproject.toml
├── scripts
├── build_docs.sh
└── run_tasks.py
├── src
└── cedalion
│ ├── __init__.py
│ ├── data
│ ├── 10-5-System_Mastoids_EGI129.tsv
│ ├── __init__.py
│ ├── measfunc_table.npy
│ └── prahl_absorption_spectrum.tsv
│ ├── dataclasses
│ ├── __init__.py
│ ├── accessors.py
│ ├── geometry.py
│ ├── recording.py
│ └── schemas.py
│ ├── datasets.py
│ ├── errors.py
│ ├── geometry
│ ├── __init__.py
│ ├── landmarks.py
│ ├── photogrammetry
│ │ ├── __init__.py
│ │ └── processors.py
│ ├── registration.py
│ ├── segmentation.py
│ └── utils.py
│ ├── imagereco
│ ├── __init__.py
│ ├── forward_model.py
│ ├── solver.py
│ ├── tissue_properties.py
│ └── utils.py
│ ├── io
│ ├── __init__.py
│ ├── anatomy.py
│ ├── bids.py
│ ├── forward_model.py
│ ├── photogrammetry.py
│ ├── probe_geometry.py
│ └── snirf.py
│ ├── models
│ ├── __init__.py
│ └── glm
│ │ ├── __init__.py
│ │ ├── basis_functions.py
│ │ ├── design_matrix.py
│ │ └── solve.py
│ ├── nirs.py
│ ├── physunits.py
│ ├── plots.py
│ ├── sigdecomp
│ ├── ERBM.py
│ ├── ICA_EBM.py
│ ├── __init__.py
│ ├── dummy.py
│ └── measfunc_table.npy
│ ├── sigproc
│ ├── __init__.py
│ ├── epochs.py
│ ├── frequency.py
│ ├── motion_correct.py
│ ├── quality.py
│ └── tasks.py
│ ├── sim
│ ├── __init__.py
│ ├── synthetic_artifact.py
│ ├── synthetic_hrf.py
│ └── synthetic_utils.py
│ ├── tasks.py
│ ├── typing.py
│ ├── validators.py
│ ├── vis
│ ├── __init__.py
│ ├── plot_probe.py
│ ├── plot_sensitivity_matrix.py
│ └── time_series.py
│ ├── vtktutils.py
│ └── xrutils.py
└── tests
├── test_bids.py
├── test_dataclasses_geometry.py
├── test_fwmodel.py
├── test_geodesics.py
├── test_imagereco_forward_model.py
├── test_io_forward_model.py
├── test_io_photogrammetry.py
├── test_io_probe_geometry.py
├── test_io_snirf.py
├── test_labeled_points.py
├── test_model_glm_design_matrix.py
├── test_nirs.py
├── test_points_accessors.py
├── test_sigdecomp.py
├── test_sigdecomp_dummy.py
├── test_sigproc_epochs.py
├── test_sigproc_frequency.py
├── test_sigproc_motion_correct.py
├── test_sigproc_quality.py
├── test_synthetic.py
├── test_vtkutils.py
└── test_xrutils.py
/.github/workflows/build_docs.yml:
--------------------------------------------------------------------------------
1 | name: build_docs
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | - dev
8 |
9 | # cancel job if a new push to main occurs while running
10 | concurrency:
11 | group: build_docs
12 | cancel-in-progress: true
13 |
14 | jobs:
15 | build_docs:
16 | runs-on: ubuntu-latest
17 | defaults:
18 | run:
19 | shell: bash -el {0}
20 |
21 | steps:
22 | - name: Checkout git repository
23 | uses: actions/checkout@v4
24 |
25 | - name: Setup headless display
26 | uses: pyvista/setup-headless-display-action@v3
27 |
28 | - name: Setup Miniforge
29 | uses: conda-incubator/setup-miniconda@v3
30 | with:
31 | miniforge-version: latest
32 | activate-environment: cedalion
33 |
34 | - name: Get Date
35 | id: get-date
36 | run: echo "today=$(/bin/date -u '+%Y%m%d')" >> $GITHUB_OUTPUT
37 | shell: bash
38 |
39 | - name: Cache Conda env
40 | uses: actions/cache@v4
41 | with:
42 | path: ${{ env.CONDA }}/envs
43 | key:
44 | conda-${{ runner.os }}--${{ runner.arch }}--${{
45 | steps.get-date.outputs.today }}-${{
46 | hashFiles('environment_dev.yml') }}-${{ env.CACHE_NUMBER }}
47 | env:
48 | # Increase this value to reset cache if environment_dev.yml has not changed
49 | CACHE_NUMBER: 0
50 | id: cache
51 |
52 | - name: Update environment
53 | run: conda env update -n cedalion -f environment_dev.yml
54 | if: steps.cache.outputs.cache-hit != 'true'
55 |
56 | - name: Install cedalion
57 | run: python -m pip install -e . --no-deps --force-reinstall
58 |
59 | - name: Install nirfaster
60 | run: bash install_nirfaster.sh CPU
61 | shell: bash
62 |
63 | - name: Building docs
64 | run: ./scripts/build_docs.sh docs
65 |
66 | - name: Save website as an artifact
67 | uses: actions/upload-artifact@v4
68 | with:
69 | name: docs-${{ github.ref_name }}
70 | path: docs/_build/html
71 | retention-days: 2
72 |
--------------------------------------------------------------------------------
/.github/workflows/run_tests.yml:
--------------------------------------------------------------------------------
1 | name: run_tests
2 |
3 | on: [push, pull_request]
4 |
5 | jobs:
6 | run_pytest:
7 | strategy:
8 | matrix:
9 | os: [ubuntu-latest, macos-13, macos-latest, windows-latest]
10 | runs-on: ${{ matrix.os }}
11 | defaults:
12 | run:
13 | shell: bash -l {0}
14 |
15 | steps:
16 | - name: Checkout git repository
17 | uses: actions/checkout@v4
18 |
19 | - name: Setup Miniforge
20 | uses: conda-incubator/setup-miniconda@v3
21 | with:
22 | miniforge-version: latest
23 | activate-environment: cedalion
24 |
25 | - name: Get Date
26 | id: get-date
27 | run: echo "today=$(/bin/date -u '+%Y%m%d')" >> $GITHUB_OUTPUT
28 | shell: bash
29 |
30 | - name: Cache Conda env
31 | uses: actions/cache@v4
32 | with:
33 | path: ${{ env.CONDA }}/envs
34 | key:
35 | conda-${{ runner.os }}--${{ runner.arch }}--${{
36 | steps.get-date.outputs.today }}-${{
37 | hashFiles('environment_dev.yml') }}-${{ env.CACHE_NUMBER }}
38 | env:
39 | # Increase this value to reset cache if environment_dev.yml has not changed
40 | CACHE_NUMBER: 0
41 | id: cache
42 |
43 | - name: Update environment
44 | run: conda env update -n cedalion -f environment_dev.yml
45 | if: steps.cache.outputs.cache-hit != 'true'
46 |
47 | - name: Install cedalion
48 | run: python -m pip install -e . --no-deps --force-reinstall
49 |
50 | - name: Install nirfaster
51 | run: bash install_nirfaster.sh CPU
52 | shell: bash
53 |
54 | - name: Running Tests
55 | run: python -m pytest --verbose
56 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # from https://github.com/github/gitignore/blob/main/Python.gitignore
2 | # Byte-compiled / optimized / DLL files
3 | __pycache__/
4 | *.py[cod]
5 | *$py.class
6 |
7 | # C extensions
8 | *.so
9 |
10 | # Distribution / packaging
11 | .Python
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | .DS_store
29 | MANIFEST
30 |
31 | # PyInstaller
32 | # Usually these files are written by a python script from a template
33 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
34 | *.manifest
35 | *.spec
36 |
37 | # Installer logs
38 | pip-log.txt
39 | pip-delete-this-directory.txt
40 |
41 | # Unit test / coverage reports
42 | htmlcov/
43 | .tox/
44 | .nox/
45 | .coverage
46 | .coverage.*
47 | .cache
48 | nosetests.xml
49 | coverage.xml
50 | *.cover
51 | *.py,cover
52 | .hypothesis/
53 | .pytest_cache/
54 | cover/
55 |
56 | # Translations
57 | *.mo
58 | *.pot
59 |
60 | # Django stuff:
61 | *.log
62 | local_settings.py
63 | db.sqlite3
64 | db.sqlite3-journal
65 |
66 | # Flask stuff:
67 | instance/
68 | .webassets-cache
69 |
70 | # Scrapy stuff:
71 | .scrapy
72 |
73 | # Sphinx documentation
74 | docs/_build/
75 |
76 | # PyBuilder
77 | .pybuilder/
78 | target/
79 |
80 | # Jupyter Notebook
81 | .ipynb_checkpoints
82 |
83 | # IPython
84 | profile_default/
85 | ipython_config.py
86 |
87 | # pyenv
88 | # For a library or package, you might want to ignore these files since the code is
89 | # intended to run in multiple environments; otherwise, check them in:
90 | # .python-version
91 |
92 | # pipenv
93 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
94 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
95 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
96 | # install all needed dependencies.
97 | #Pipfile.lock
98 |
99 | # poetry
100 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
101 | # This is especially recommended for binary packages to ensure reproducibility, and is more
102 | # commonly ignored for libraries.
103 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
104 | #poetry.lock
105 |
106 | # pdm
107 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
108 | #pdm.lock
109 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
110 | # in version control.
111 | # https://pdm.fming.dev/#use-with-ide
112 | .pdm.toml
113 |
114 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
115 | __pypackages__/
116 |
117 | # Celery stuff
118 | celerybeat-schedule
119 | celerybeat.pid
120 |
121 | # SageMath parsed files
122 | *.sage.py
123 |
124 | # Environments
125 | .env
126 | .venv
127 | env/
128 | venv/
129 | ENV/
130 | env.bak/
131 | venv.bak/
132 |
133 | # Spyder project settings
134 | .spyderproject
135 | .spyproject
136 |
137 | # Rope project settings
138 | .ropeproject
139 |
140 | # mkdocs documentation
141 | /site
142 |
143 | # mypy
144 | .mypy_cache/
145 | .dmypy.json
146 | dmypy.json
147 |
148 | # Pyre type checker
149 | .pyre/
150 |
151 | # pytype static type analyzer
152 | .pytype/
153 |
154 | # Cython debug symbols
155 | cython_debug/
156 |
157 | # PyCharm
158 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
159 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
160 | # and can be added to the global gitignore or merged into this file. For a more nuclear
161 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
162 | #.idea/
163 |
164 |
165 | # automatically created by setuptools_scm
166 | src/cedalion/_version.py
167 |
168 | # rendered example notebooks
169 | docs/examples/*.ipynb
170 | docs/examples/*/*.ipynb
171 |
172 | # API documentation from sphinx-apidoc
173 | docs/api/*
174 | _autosummary*/
175 |
176 | # vscode settings
177 | .vscode/
178 |
179 | scratch/
180 |
181 | # plugins
182 | plugins/
183 |
184 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | # See https://pre-commit.com for more information
2 | # See https://pre-commit.com/hooks.html for more hooks
3 | repos:
4 | # Ruff
5 | - repo: https://github.com/astral-sh/ruff-pre-commit
6 | # Ruff version.
7 | rev: v0.7.0
8 | hooks:
9 | - id: ruff
10 | files: ^src/cedalion/
11 | - repo: https://github.com/kynan/nbstripout
12 | rev: 0.7.1
13 | hooks:
14 | - id: nbstripout
15 | files: ^examples/
16 |
17 |
--------------------------------------------------------------------------------
/CITATION.cff:
--------------------------------------------------------------------------------
1 | cff-version: 1.2.0
2 | message: "If you use this software, please cite it as below."
3 | authors:
4 | - family-names: "Intelligent Biomedical Sensing (IBS) Lab"
5 | - family-names: "Cedalion Developers"
6 | title: "Cedalion Python Toolbox"
7 | version: 1.0.0
8 | date-released: 2024-09-08
9 | url: "https://github.com/ibs-lab/cedalion"
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | Copyright 2024 The Cedalion Developers
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this
4 | software and associated documentation files (the “Software”), to deal in the Software
5 | without restriction, including without limitation the rights to use, copy, modify,
6 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
7 | permit persons to whom the Software is furnished to do so, subject to the following
8 | conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in all copies or
11 | substantial portions of the Software.
12 |
13 | THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
14 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
15 | PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
16 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT
17 | OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
18 | OTHER DEALINGS IN THE SOFTWARE.
19 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | # cedalion - fNIRS analysis toolbox
6 |
7 | To avoid misinterpretations and to facilitate studies in naturalistic environments, fNIRS measurements will increasingly be combined with recordings from physiological sensors and other neuroimaging modalities.
8 | The aim of this toolbox is to facilitate these kinds of analyses, i.e. it should allow the easy integration of machine learning techniques and provide unsupervised decomposition techniques for
9 | multimodal fNIRS signals.
10 |
11 | ## Documentation
12 |
13 | The [documentation](https://doc.ibs.tu-berlin.de/cedalion/doc/dev) contains
14 | [installation instructions](https://doc.ibs.tu-berlin.de/cedalion/doc/dev/getting_started/installation.html) as
15 | well as several [example notebooks](https://doc.ibs.tu-berlin.de/cedalion/doc/dev/examples.html)
16 | that illustrate the functionality of the toolbox.
17 | For discussions and help you can visit the [cedalion forum on openfnirs.org](https://openfnirs.org/community/cedalion/)
18 |
19 |
20 | ## Development environment
21 |
22 | To create a conda environment with the necessary dependencies run:
23 |
24 | ```
25 | $ conda env create -n cedalion -f environment_dev.yml
26 | ```
27 |
28 | Afterwards activate the environment and add an editable install of `cedalion` to it:
29 | ```
30 | $ conda activate cedalion
31 | $ pip install -e .
32 | $ bash install_nirfaster.sh CPU # or GPU
33 | ```
34 |
35 | This will also install Jupyter Notebook to run the example notebooks.
36 |
37 | If conda is too slow consider using the faster drop-in replacement [mamba](https://mamba.readthedocs.io/en/latest/).
38 | If you have Miniconda or Anaconda you can install mamba with:
39 | ```
40 | $ conda install mamba -c conda-forge
41 | ```
42 | and then create the environment with
43 | ```
44 | $ mamba env create -n cedalion -f environment_dev.yml
45 | ```
46 | Please note: If this does not succeed there is another route to go:
47 | Install the libmamba solver
48 | ```
49 | $ conda install -n base conda-libmamba-solver
50 | ```
51 | and then build the environment with the --solver=libmamba
52 | ```
53 | $ conda env create -n cedalion -f environment_dev.yml --solver=libmamba
54 | ```
55 |
56 | ## How to cite Cedalion
57 | A paper for the toolbox is currently in the making. If you use this toolbox for a publication in the meantime, please cite us using GitHub's "Cite this repository" feature in the "About" section. If you want to contact us or learn more about the IBS-Lab please go to https://www.ibs-lab.com/
58 |
59 |
--------------------------------------------------------------------------------
/cedalion.def:
--------------------------------------------------------------------------------
1 | Bootstrap: docker
2 | From: continuumio/miniconda3
3 |
4 | %setup
5 | mkdir $APPTAINER_ROOTFS/app
6 |
7 | %post
8 | export DEBIAN_FRONTEND=noninteractive
9 | chmod 1777 /tmp
10 | apt-get update
11 | apt-get install -y build-essential
12 | apt-get install -y xvfb
13 | apt-get install -y libgl1-mesa-dev
14 |
15 | conda update -n base -c defaults conda -y
16 | conda env create -n cedalion -f /app/environment_dev.yml
17 | conda run --no-capture-output -n cedalion pip install -e /app
18 |
19 | %environment
20 | export DISPLAY=:99.0
21 | export PYVISTA_OFF_SCREEN=true
22 |
23 | . /opt/conda/etc/profile.d/conda.sh
24 | conda activate cedalion
25 | Xvfb :99 -screen 0 1024x768x24 > /dev/null 2>&1 &
26 | sleep 3
27 |
28 |
--------------------------------------------------------------------------------
/data/ninja_cap_probe/fullhead_56x144_System2_measlist.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ibs-lab/cedalion/ec81bcf0f3b219186f0ac0de95af2fa9f832845b/data/ninja_cap_probe/fullhead_56x144_System2_measlist.pkl
--------------------------------------------------------------------------------
/docs/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | ## Version 25.0.0 (2025-01-21)
4 |
5 | - First named release with contributions from:
6 | - [Sung Ahn](https://github.com/ahns97)
7 | - [Jacqueline Behrendt](https://github.com/jackybehrendt12)
8 | - [David Boas](https://github.com/dboas)
9 | - [Laura Carlton](https://github.com/lauracarlton)
10 | - [Tomás Codina](https://github.com/TCodina)
11 | - [Josef Cutler](https://github.com/jccutler)
12 | - [Qianqian Fang](https://github.com/fangq)
13 | - [Thomas Fischer](https://github.com/thomasfischer11)
14 | - [Nils Harmening](https://github.com/harmening)
15 | - [Mariia Iudina](https://github.com/mashayu)
16 | - [Filip Jenko](https://github.com/FilipJenko)
17 | - [Eike Middell](https://github.com/emiddell)
18 | - [Shakiba Moradi](https://github.com/shakiba93)
19 | - [Alexander von Lühmann](https://github.com/avolu)
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
--------------------------------------------------------------------------------
/docs/LICENSE.md:
--------------------------------------------------------------------------------
1 | # License
2 |
3 | Copyright 2024 The Cedalion Developers
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this
6 | software and associated documentation files (the “Software”), to deal in the Software
7 | without restriction, including without limitation the rights to use, copy, modify,
8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
9 | permit persons to whom the Software is furnished to do so, subject to the following
10 | conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all copies or
13 | substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
16 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
17 | PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT
19 | OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 | OTHER DEALINGS IN THE SOFTWARE.
21 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile examples
16 |
17 | examples:
18 | make -C examples notebooks
19 |
20 | clean_examples:
21 | make -C examples clean
22 |
23 |
24 | # Catch-all target: route all unknown targets to Sphinx using the new
25 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
26 | %: Makefile
27 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
28 |
--------------------------------------------------------------------------------
/docs/_static/IBS_Logo_sm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ibs-lab/cedalion/ec81bcf0f3b219186f0ac0de95af2fa9f832845b/docs/_static/IBS_Logo_sm.png
--------------------------------------------------------------------------------
/docs/_static/css/rtd_fixes.css:
--------------------------------------------------------------------------------
1 | ul.xr-var-list li.xr-var-item div {
2 | margin-top: 2px !important;
3 | margin-bottom: 2px !important;
4 | }
5 |
6 | ul.xr-var-list li.xr-var-item label {
7 | margin-top: 2px !important;
8 | margin-bottom: 2px !important;
9 | }
10 |
--------------------------------------------------------------------------------
/docs/_templates/custom-class-template.rst:
--------------------------------------------------------------------------------
1 | {{ fullname | escape | underline}}
2 |
3 | .. currentmodule:: {{ module }}
4 |
5 | .. autoclass:: {{ objname }}
6 | :members:
7 | :show-inheritance:
8 | :inherited-members:
9 |
10 | {% block methods %}
11 | .. automethod:: __init__
12 |
13 | {% if methods %}
14 | .. rubric:: {{ _('Methods') }}
15 |
16 | .. autosummary::
17 | {% for item in methods %}
18 | ~{{ name }}.{{ item }}
19 | {%- endfor %}
20 | {% endif %}
21 | {% endblock %}
22 |
23 | {% block attributes %}
24 | {% if attributes %}
25 | .. rubric:: {{ _('Attributes') }}
26 |
27 | .. autosummary::
28 | {% for item in attributes %}
29 | ~{{ name }}.{{ item }}
30 | {%- endfor %}
31 | {% endif %}
32 | {% endblock %}
33 |
--------------------------------------------------------------------------------
/docs/_templates/custom-module-template.rst:
--------------------------------------------------------------------------------
1 | {{ fullname | escape | underline}}
2 |
3 | .. automodule:: {{ fullname }}
4 |
5 | {% block attributes %}
6 | {%- if attributes %}
7 | .. rubric:: {{ _('Module Attributes') }}
8 |
9 | .. autosummary::
10 | :nosignatures:
11 | {% for item in attributes %}
12 | {{ item }}
13 | {%- endfor %}
14 | {% endif %}
15 | {%- endblock %}
16 |
17 | {%- block functions %}
18 | {%- if functions %}
19 | .. rubric:: {{ _('Functions') }}
20 |
21 | .. autosummary::
22 | {% for item in functions %}
23 | {{ item }}
24 | {%- endfor %}
25 |
26 | {% for item in functions %}
27 | .. autofunction:: {{ item }}
28 | {%- endfor %}
29 | {% endif %}
30 | {%- endblock %}
31 |
32 | {%- block classes %}
33 | {%- if classes %}
34 | .. rubric:: {{ _('Classes') }}
35 |
36 | .. autosummary::
37 | :nosignatures:
38 | {% for item in classes %}
39 | {{ item }}
40 | {%- endfor %}
41 |
42 | {% for item in classes %}
43 | .. autoclass:: {{ item }}
44 | :members:
45 | {%- endfor %}
46 | {% endif %}
47 | {%- endblock %}
48 |
49 | {%- block exceptions %}
50 | {%- if exceptions %}
51 | .. rubric:: {{ _('Exceptions') }}
52 |
53 | .. autosummary::
54 | {% for item in exceptions %}
55 | {{ item }}
56 | {%- endfor %}
57 |
58 | {% for item in exceptions %}
59 | .. autoexception:: {{ item }}
60 | {%- endfor %}
61 | {% endif %}
62 | {%- endblock %}
63 |
64 | {%- block modules %}
65 | {%- if modules %}
66 | .. rubric:: Modules
67 |
68 | .. autosummary::
69 | :toctree:
70 | :template: custom-module-template.rst
71 | :recursive:
72 | :nosignatures:
73 | {% for item in modules %}
74 | {{ item }}
75 | {%- endfor %}
76 | {% endif %}
77 | {%- endblock %}
--------------------------------------------------------------------------------
/docs/community/contributing_code.md:
--------------------------------------------------------------------------------
1 | # Contributing Code
2 |
3 | This section is WIP.
4 | Here you will find information on how to interface your own developments with cedalion to facilitate contributing it to the toolbox to make it available to the broader community.
5 | In the meantime, you can find a small introduction to how to contribute code to cedalion below:
6 |
7 | [Getting Started with Contributing Code](https://doc.ibs.tu-berlin.de/cedalion/doc/dev/getting_started/contributing_code/contributing_code.html)
--------------------------------------------------------------------------------
/docs/community/contributors.md:
--------------------------------------------------------------------------------
1 | # Contributors
2 |
3 | This section is WIP. You will find more information and detailed stats here soon.
4 |
5 | Cedalion is an open source project driven by the [IBS-Lab](https://ibs-lab.com/) with the aim to encourage continuous use, contribution and improvement from the whole community to make it last. Therefore, it is important to us to establish a transparent system of crediting contributors.
6 |
7 |
8 | ## Code Credit
9 | Here you will find a list of code contributors soon. Until then, please see:
10 | https://github.com/ibs-lab/cedalion/graphs/contributors
11 |
12 | ## Scientific Credit
13 | This documentation contains a dedicated [section that lets you search and find references to scientific papers](https://doc.ibs.tu-berlin.de/cedalion/doc/dev/references.html
14 | ). Papers that introduce methods implemented in Cedalion show up here. If you contribute code to the toolbox that is based on a scientific method, please add the bibtex entries to our references.bib file and mention the paper in the function's docstring to give its inventors the visibility they deserve.
15 |
16 | ## Special Mentions
17 | - Special thanks to members of the Bio Optical & Acoustic Spectroscopy (BOAS) Lab at Boston University's Neurophotonics Center: Laura Carlton, Sung Ahn, Meryem Yücel and David Boas.
18 | - Thanks to Jiaming Cao from the University of Birmingham for actively supporting us in the adoption of [NIRFASTer](https://github.com/nirfaster/NIRFASTer) into cedalion.
--------------------------------------------------------------------------------
/docs/community/index.md:
--------------------------------------------------------------------------------
1 | # Community
2 |
3 | This section is WIP.
4 |
5 | ```{toctree}
6 | toolboxes.md
7 | contributing_code.md
8 | contributors.md
9 | ```
10 |
11 | ## Discussion Forum
12 | To discuss with the community or get help please visit the [Cedalion Forum on openfnirs.org](https://openfnirs.org/community/cedalion/)
13 |
--------------------------------------------------------------------------------
/docs/community/toolboxes.md:
--------------------------------------------------------------------------------
1 | # Toolboxes
2 |
3 | This section is WIP.
4 |
5 | [Cedalion's name is based on a Greek myth](https://en.wikipedia.org/wiki/Cedalion): Cedalion stands on the shoulders of the giant Orion to guide him to the east, where the rays of Helios restored his sight. This toolbox stands on the shoulders of many giants, and we aim to complement existing toolboxes and packages in the community by interfacing with them wherever possible.
6 |
7 | A part of the core set of Cedalion's scientific fNIRS processing functionality is based on the matlab toolboxes [Homer2/3](https://github.com/BUNPC/Homer3) and [AtlasViewer](https://github.com/BUNPC/AtlasViewer) from which we have adopted methods into the python ecosystem.
8 |
9 | ## Current toolboxes cedalion interfaces with
10 | - [Monte Carlo Xtreme](http://mcx.space/), for Monte-Carlo-based photon simulation
11 | - [NIRFASTer](https://github.com/nirfaster/NIRFASTer), for FEM-based photon simulation
12 |
13 | ## Planned future toolboxes
14 | - [MNE](https://mne.tools/stable/index.html), for state of the art EEG/MEG processing functionality
15 | - [NeuroKit](https://github.com/neuropsychology/NeuroKit), for Neurophysiological Signal Processing
16 | - [...?](https://openfnirs.org/software/)
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
# Configuration file for the Sphinx documentation builder.
#
# For the full list of built-in configuration values, see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

import subprocess
from urllib.parse import quote

# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information

project = "cedalion"
copyright = "2024, the cedalion developers"
author = "the cedalion developers"

# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration

extensions = [
    "myst_parser",
    "nbsphinx",
    "sphinx.ext.autosummary",
    "sphinx.ext.autodoc",
    "sphinx.ext.napoleon",
    "sphinxcontrib.bibtex",
    "sphinx.ext.linkcode"
    #"autoapi.extension"
]

templates_path = ["_templates"]
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]


# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output

html_theme = "sphinx_rtd_theme"
html_static_path = ["_static"]

# fix a margin problem with the rendering of xarray representations in notebooks when
# using the RTD theme
html_css_files = [
    "css/rtd_fixes.css",
]

# -- Configure MyST -----------------------------------------------------------

myst_enable_extensions = [
    "substitution",
]

# generate anchors for headings down to level 2 so they can be deep-linked
myst_heading_anchors = 2

# -- Configure sphinxcontrib-bibtex -------------------------------------------

bibtex_bibfiles = ['references.bib']


# -- Substitutions ------------------------------------------------------------

# Short hash of the currently checked-out commit. Exposed below as a MyST
# substitution so pages can state which commit the docs were built from.
commit_hash = (
    subprocess.check_output(["git", "rev-parse", "--short", "HEAD"])
    .strip()
    .decode("ascii")
)

# Name of the currently checked-out branch, used by linkcode_resolve below to
# build GitHub source links.
# NOTE(review): `git branch --show-current` prints nothing on a detached HEAD
# (common in CI checkouts), which would yield broken links — confirm the build
# environment always checks out a named branch.
branch = (
    subprocess.check_output(["git", "branch", "--show-current"])
    .strip()
    .decode("ascii")
)

myst_substitutions = {
    "docs_url": "https://doc.ibs.tu-berlin.de/cedalion/doc/dev",
    "commit_hash": commit_hash,
}

# -- sphinx_autodoc_typehints -------------------------------------------------
always_use_bars_union = True
# specifying a maximum line length will create line breaks in function signatures
# and make them easier to read
maximum_signature_line_length = 88

# map type aliases used in annotations to short display names in the rendered docs
autodoc_type_aliases = {
    "NDTimeSeries" : "cdt.NDTimeSeries",
    "cdt.NDTimeSeries" : "cdt.NDTimeSeries",
    "LabeledPointCloud" : "cdt.LabeledPointCloud",
    "cdt.LabeledPointCloud" : "cdt.LabeledPointCloud",
    "cedalion.Quantity" : "Quantity",
    "pint.Quantity" : "Quantity",
    "Quantity" : "Quantity",
    "ArrayLike" : "ArrayLike",
    "collections.OrderedDict" : "OrderedDict",
}



# -- sphinx_autoapi -----------------------------------------------------------
# using autosummary with customized templates as described in
# https://github.com/sphinx-doc/sphinx/issues/7912

# disabled autoapi configuration, kept for reference
"""autoapi_dirs = ["../src"]

autoapi_options = [
    "members",
    "undoc-members",
    "show-inheritance",
    "show-module-summary",
    "special-members",
    "inherited-members",
    "no-signatures"
]
autoapi_add_toctree_entry = False"""

autosummary_generate = True
autodoc_default_options = {
    "members": True,
    "undoc-members": True,
    "show-inheritance": True,
}
autodoc_member_order = "bysource" # Keep member order as in the source code


# -- Nbsphinx gallery ----------------------------------------------------------------
# default thumbnail for notebooks that do not provide their own
nbsphinx_thumbnails = {
    'examples/*/*': '_static/IBS_Logo_sm.png',
}


# -- linkcode ------- ----------------------------------------------------------------
# adopted from: https://stackoverflow.com/a/75279988
# maybe also incorporate for direct links to line numbers:
# https://github.com/sphinx-doc/sphinx/issues/1556#issuecomment-101027317

def linkcode_resolve(domain, info):
    """Return the GitHub URL of the source file for a documented object.

    Called by sphinx.ext.linkcode to attach "[source]" links to API docs.

    Args:
        domain: The documentation domain of the object (only "py" is handled).
        info: Mapping provided by Sphinx with at least the key "module"
            (dotted module path) and optionally "fullname" (dotted object name
            within the module).

    Returns:
        A URL to the module file on the current branch of the cedalion GitHub
        repository, or None when no link can be constructed.
    """
    if domain != 'py':
        return None
    if not info['module']:
        return None

    # Map the dotted module path to a repository file path. Everything except
    # the test modules lives under src/.
    filename = quote(info['module'].replace('.', '/'))
    if not filename.startswith("tests"):
        filename = "src/" + filename

    # Use a URL text fragment to scroll close to the object's definition.
    if "fullname" in info:
        anchor = info["fullname"]
        anchor = "#:~:text=" + quote(anchor.split(".")[-1])
    else:
        anchor = ""

    # github
    # Bug fix: `filename` was computed above but never interpolated, so every
    # generated source link pointed at a literal placeholder path.
    result = f"https://github.com/ibs-lab/cedalion/blob/{branch}/{filename}.py{anchor}"
    # print(result)
    return result

--------------------------------------------------------------------------------
/docs/data_io/index.rst:
--------------------------------------------------------------------------------
1 | Data structures and I/O
2 | =======================
3 |
4 | Cedalion builds on existing python packages for handling scientific
5 | data (such as numpy and xarray), but it also defines several data structures
6 | for working with fNIRS data in particular. It provides I/O functions for
7 | reading and writing data in various formats.
8 |
9 | Data structures
10 | ---------------
11 |
12 | .. autosummary::
13 | :toctree: _autosummary_data_structures
14 | :recursive:
15 | :nosignatures:
16 |
17 | cedalion.dataclasses
18 | cedalion.typing
19 | cedalion.validators
20 | cedalion.physunits
21 |
22 | Utilities
23 | ---------
24 |
25 | .. autosummary::
26 | :toctree: _autosummary_utils
27 | :recursive:
28 | :nosignatures:
29 |
30 | cedalion.xrutils
31 |
32 | I/O
33 | ---
34 |
35 | .. autosummary::
36 | :toctree: _autosummary_io
37 | :recursive:
38 | :nosignatures:
39 |
40 | cedalion.io.snirf
41 | cedalion.io.anatomy
42 | cedalion.io.bids
43 | cedalion.io.forward_model
44 | cedalion.io.photogrammetry
45 | cedalion.io.probe_geometry
46 | cedalion.datasets
47 |
48 |
49 | Examples
50 | --------
51 |
52 | .. nbgallery::
53 | :glob:
54 |
55 | ../examples/getting_started_io/10_xarray_datastructs_fnirs.ipynb
56 | ../examples/getting_started_io/11_recording_container.ipynb
57 | ../examples/getting_started_io/13_data_structures_intro.ipynb
58 | ../examples/getting_started_io/34_store_hrfs_in_snirf_file.ipynb
--------------------------------------------------------------------------------
/docs/environments.md:
--------------------------------------------------------------------------------
1 | # Environments
2 |
3 | Currently, we provide one environment specified in the file `environment_dev.yml`. This
environment contains all dependencies to run and develop cedalion.
5 |
--------------------------------------------------------------------------------
/docs/examples.rst:
--------------------------------------------------------------------------------
1 | Examples
2 | ========
3 |
4 | .. contents::
5 | :local:
6 |
7 | Getting Started and IO
8 | ----------------------
9 |
10 | .. nbgallery::
11 | :glob:
12 |
13 | examples/getting_started_io/*
14 |
15 | Signal Quality
16 | -----------------------
17 |
18 | .. nbgallery::
19 | :glob:
20 |
21 | examples/signal_quality/*
22 |
23 | Modeling
24 | -----------------------
25 |
26 | .. nbgallery::
27 | :glob:
28 |
29 | examples/modeling/*
30 |
31 | Head Models and Image Reconstruction
32 | -------------------------------------
33 |
34 | .. nbgallery::
35 | :glob:
36 |
37 | examples/head_models/*
38 |
39 | Machine Learning and Data Driven Methods
40 | ----------------------------------------
41 |
42 | .. nbgallery::
43 | :glob:
44 |
45 | examples/machine_learning/*
46 |
47 | Data Augmentation
48 | -----------------------
49 |
50 | .. nbgallery::
51 | :caption: Coming soon
52 | :glob:
53 |
54 | examples/data_augmentation/*
55 |
56 | Plots & Visualization
57 | -----------------------
58 |
59 | .. nbgallery::
60 | :glob:
61 |
62 | examples/plots_visualization/*
63 |
--------------------------------------------------------------------------------
/docs/examples/Makefile:
--------------------------------------------------------------------------------
# Example notebooks that are executed during the docs build. Paths mirror
# their location under ../../examples; executed copies land in this tree.
EXAMPLE_NOTEBOOKS = getting_started_io/00_test_installation.ipynb \
                    getting_started_io/10_xarray_datastructs_fnirs.ipynb \
                    getting_started_io/11_recording_container.ipynb \
                    getting_started_io/13_data_structures_intro.ipynb \
                    getting_started_io/34_store_hrfs_in_snirf_file.ipynb \
                    plots_visualization/12_plots_example.ipynb \
                    machine_learning/50_finger_tapping_lda_classification.ipynb \
                    modeling/31_glm_basis_functions.ipynb \
                    modeling/32_glm_fingertapping_example.ipynb \
                    modeling/33_glm_illustrative_example.ipynb \
                    signal_quality/20_scalp_coupling_index.ipynb \
                    signal_quality/21_data_quality_and_pruning.ipynb \
                    signal_quality/22_motion_artefacts_and_correction.ipynb \
                    head_models/40_image_reconstruction.ipynb \
                    head_models/41_photogrammetric_optode_coregistration.ipynb \
                    head_models/42_1010_system.ipynb \
                    head_models/43_crs_and_headmodel.ipynb

.PHONY: notebooks clean

# Execute all example notebooks listed above.
notebooks: $(EXAMPLE_NOTEBOOKS)

# Remove the executed notebook copies.
clean:
	rm -f $(EXAMPLE_NOTEBOOKS)

# Static pattern rule: each executed notebook is produced from the matching
# source notebook under ../../examples via nbconvert (widget state preserved).
$(EXAMPLE_NOTEBOOKS): %.ipynb : ../../examples/%.ipynb
	jupyter nbconvert --ExecutePreprocessor.store_widget_state=True --to notebook \
		--execute --output-dir=$(dir $@) $<

--------------------------------------------------------------------------------
/docs/getting_started/contributing_code/dirs_parent.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ibs-lab/cedalion/ec81bcf0f3b219186f0ac0de95af2fa9f832845b/docs/getting_started/contributing_code/dirs_parent.png
--------------------------------------------------------------------------------
/docs/getting_started/contributing_code/dirs_src.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ibs-lab/cedalion/ec81bcf0f3b219186f0ac0de95af2fa9f832845b/docs/getting_started/contributing_code/dirs_src.png
--------------------------------------------------------------------------------
/docs/getting_started/index.md:
--------------------------------------------------------------------------------
1 | # Getting started
2 |
3 | ```{toctree}
4 | installation.md
5 | contributing_code/contributing_code.md
6 | ```
7 |
--------------------------------------------------------------------------------
/docs/getting_started/installation.md:
--------------------------------------------------------------------------------
1 | # Installation
2 |
3 | Get the latest release of the cedalion toolbox from our public
4 | [github repository](https://github.com/ibs-lab/cedalion). Releases can be found in
the `main` branch of the repository whereas development happens in the `dev` branch.
6 |
7 | Cedalion depends on many third-party python libraries. To uniformly provide environments
8 | containing these dependencies across different platforms (Linux, Windows, MacOS) we rely
9 | on the [conda](https://docs.anaconda.com/working-with-conda/packages/install-packages/)
10 | package manager and the [conda-forge](https://conda-forge.org/docs/) package
11 | repository.
12 |
13 | ## Development
14 |
15 | Follow the installation instructions to install the [Miniconda](https://docs.anaconda.com/miniconda/install/) or [Anaconda](https://docs.anaconda.com/anaconda/install/) distribution.
16 |
17 | Clone the git repository to a directory on your machine:
18 |
19 | ```
20 | $ git clone git@github.com:ibs-lab/cedalion.git
21 | ```
22 |
23 | This creates a new directory `cedalion` in your current folder. It checks out the latest
24 | stable release from the `main` branch. If you intend to contribute to cedalion please
25 | check out the `dev` branch.
26 |
To create a conda environment named `cedalion` with the necessary dependencies enter
28 | this checked-out directory and run:
29 |
30 | ```
31 | $ conda env create -n cedalion -f environment_dev.yml
32 | ```
33 |
34 | Afterwards activate the environment and add an editable install of `cedalion` to it:
35 | ```
36 | $ conda activate cedalion
37 | $ pip install -e .
38 | ```
39 |
40 | This will also install Jupyter Notebook to run the example notebooks.
41 |
42 |
43 | ```{admonition} If it's slow...
44 | :class: tip
45 |
46 | To create the environment conda needs to find a set of packages that fulfills all
47 | requirements. If conda needs too much time to find a solution, there are two ways to
48 | speed it up.
49 |
50 | 1. Make sure that you are using a recent version of conda (> 23.10) that uses
   [libmamba-solver](https://conda.github.io/conda-libmamba-solver/user-guide/) to resolve dependencies. (***recommended***)
52 |
53 | 2. Install [mamba](https://mamba.readthedocs.io/en/latest/installation/mamba-installation.html),
54 | a drop-in replacement for conda.
55 | ```
56 |
57 | ## Development using Hatch
58 |
59 | Alternatively, there is preliminary support for using the Python project manager [Hatch](https://hatch.pypa.io/latest/). Hatch helps with the handling of the python environments and
60 | offers a simple interface to perform common tasks.
61 |
62 | As a tool to manage cedalion's environment, hatch and its dependencies must be [installed](https://hatch.pypa.io/1.13/install/) in a separate environment, like for example the `base` environment of a Miniconda/Anaconda installation or using [pipx](https://pipx.pypa.io/latest/):
63 |
64 | ```
65 | $ pipx install hatch
66 | $ pipx inject hatch hatch-vcs hatch-conda hatchling
67 | ```
68 |
69 | Then clone cedalion's git repository and change to the checked-out directory:
70 |
71 | ```
72 | $ git clone git@github.com:ibs-lab/cedalion.git
73 | $ cd cedalion
74 | ```
75 |
To create the environment and install `cedalion` in editable mode, run:
77 | ```
78 | $ hatch env create
79 | ```
80 |
81 | To run the tests call:
82 | ```
83 | $ hatch test
84 | ```
85 |
To locally build the documentation run:
87 | ```
88 | $ hatch run build_docs
89 | ```
90 |
91 | ## Production
92 |
93 | The same procedure as above applies. However, make sure to use a released version
94 | from the main branch.
95 |
96 |
97 | ## Container Environments
98 |
99 | ### Apptainer
100 |
101 | For running cedalion in a headless environment we provide an [Apptainer](https://apptainer.org/)
102 | definition file. This container provides a populated conda environment
103 | and runs the X Window virtual Framebuffer (Xvfb) needed for the 3D plotting functionality
104 | of pyvista. The directory with the cedalion source code is expected to be mounted under
105 | `/app`. It is added as an editable install to the conda environment, i.e. changes
106 | made in the host directory propagate into the container.
107 |
108 | XVfb needs write access under `/var/lib/xkb` which is not available from inside the
container. As a workaround, we bind a writable directory from the host to this folder. Probably a more elegant solution exists.
110 |
111 | #### Building the container:
112 |
113 | Clone the cedalion repository to `./cedalion`
114 | ```
115 | $ git clone https://github.com/ibs-lab/cedalion.git
116 | ```
117 |
118 | Build the container. The cedalion source code needs to be mounted under `/app`.
119 | ```
120 | $ apptainer build --bind `pwd`/cedalion:/app cedalion.sif cedalion/cedalion.def
121 | ```
122 |
123 | #### Run jupyter notebook in the container
124 |
125 | ```
126 | $ mkdir -p xkb
127 | $ apptainer run --nv --bind `pwd`/xkb:/var/lib/xkb,`pwd`/cedalion:/app cedalion.sif jupyter notebook --ip 0.0.0.0 --no-browser
128 | ```
129 |
130 | ### Docker
131 |
132 | - WIP: see [Nils' branch](https://github.com/ibs-lab/cedalion/tree/docker)
133 |
--------------------------------------------------------------------------------
/docs/image_reco/index.rst:
--------------------------------------------------------------------------------
1 | Head models and image reconstruction
2 | ====================================
3 |
4 | .. py:currentmodule:: cedalion.imagereco
5 |
6 |
7 | .. autosummary::
8 | :toctree: _autosummary_imagereco
9 | :recursive:
10 |
11 | cedalion.geometry
12 |
13 | cedalion.imagereco.forward_model
14 | cedalion.imagereco.solver
15 | cedalion.imagereco.tissue_properties
16 | cedalion.imagereco.utils
17 |
18 |
19 | Examples
20 | --------
21 |
22 | .. nbgallery::
23 | :glob:
24 |
25 | ../examples/head_models/*
--------------------------------------------------------------------------------
/docs/img/IBS_clr_small.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ibs-lab/cedalion/ec81bcf0f3b219186f0ac0de95af2fa9f832845b/docs/img/IBS_clr_small.png
--------------------------------------------------------------------------------
/docs/img/cedalion_frontpage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ibs-lab/cedalion/ec81bcf0f3b219186f0ac0de95af2fa9f832845b/docs/img/cedalion_frontpage.png
--------------------------------------------------------------------------------
/docs/img/recording/dataarray_indexing_overview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ibs-lab/cedalion/ec81bcf0f3b219186f0ac0de95af2fa9f832845b/docs/img/recording/dataarray_indexing_overview.png
--------------------------------------------------------------------------------
/docs/img/recording/ndarray.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ibs-lab/cedalion/ec81bcf0f3b219186f0ac0de95af2fa9f832845b/docs/img/recording/ndarray.png
--------------------------------------------------------------------------------
/docs/img/recording/rec_container_overview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ibs-lab/cedalion/ec81bcf0f3b219186f0ac0de95af2fa9f832845b/docs/img/recording/rec_container_overview.png
--------------------------------------------------------------------------------
/docs/img/special_thanks.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ibs-lab/cedalion/ec81bcf0f3b219186f0ac0de95af2fa9f832845b/docs/img/special_thanks.png
--------------------------------------------------------------------------------
/docs/img/time_series_guiexample.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ibs-lab/cedalion/ec81bcf0f3b219186f0ac0de95af2fa9f832845b/docs/img/time_series_guiexample.png
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | # Cedalion
2 |
3 | A python-based framework for the data driven analysis of multimodal fNIRS and DOT in naturalistic environments. Developed by the Intelligent Biomedical Sensing (IBS) Lab with and for the community.
4 |
5 | 
6 |
7 | You can find the [github repository for the cedalion toolbox here](https://github.com/ibs-lab/cedalion).
8 |
9 | ```{toctree}
10 | :maxdepth: 1
11 | :caption: General Info
12 |
13 | rationale.md
14 | getting_started/index.md
15 | data_structures/index.md
16 |
17 | community/index.md
18 | LICENSE.md
19 | ```
20 |
21 | ```{toctree}
22 | :maxdepth: 1
23 | :caption: Package Features
24 |
25 | data_io/index
26 | sigproc/index
27 | machine_learning/index
28 | image_reco/index
29 | plot_vis/index
30 | synth/index
31 | ```
32 |
33 |
34 | ```{toctree}
35 | :maxdepth: 1
36 | :caption: Reference
37 |
38 | API reference
39 | Bibliography
40 | All examples
41 | ```
42 |
43 | ```{toctree}
44 | :maxdepth: 1
45 | :caption: Project
46 |
47 | Source code
48 | Issues
49 | Documentation
50 | Changelog
51 | ```
52 |
53 | ## Special Thanks
54 | We cordially thank our friends and long-term collaborators at the BOAS Lab for their contributions and support in starting this project.
55 | 
56 |
57 | ## Version
58 | This documentation was built from commit {{commit_hash}}.
59 |
--------------------------------------------------------------------------------
/docs/machine_learning/index.rst:
--------------------------------------------------------------------------------
1 | Modeling and Machine Learning
2 | =============================
3 |
4 |
5 | Models
6 | ---------------
7 |
8 | .. autosummary::
9 | :toctree: _autosummary_models
10 | :nosignatures:
11 | :recursive:
12 |
13 | cedalion.models.glm
14 |
15 | Decomposition Methods
16 | ---------------------
17 |
18 | .. autosummary::
19 | :toctree: _autosummary_decomp
20 | :recursive:
21 | :nosignatures:
22 |
23 | cedalion.sigdecomp.ERBM
24 | cedalion.sigdecomp.ICA_EBM
25 |
26 | Examples
27 | --------
28 |
29 | .. nbgallery::
30 | :glob:
31 |
32 | ../examples/machine_learning/*
33 | ../examples/modeling/*
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
@ECHO OFF

pushd %~dp0

REM Command file for Sphinx documentation

REM Fall back to the sphinx-build found on PATH when SPHINXBUILD is not set.
if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build

REM Probe that sphinx-build exists; errorlevel 9009 means "command not found".
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.https://www.sphinx-doc.org/
	exit /b 1
)

REM No build target given: show Sphinx's help instead.
if "%1" == "" goto help

%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%

:end
popd

--------------------------------------------------------------------------------
/docs/plot_vis/index.rst:
--------------------------------------------------------------------------------
1 | Plotting and Visualization
2 | ==========================
3 |
4 | These modules provide specialized tools for plotting data related to fNIRS
5 | analysis, including functions for visualizing scalp plots, sensitivity matrices,
6 | and optode montages.
7 |
8 | .. autosummary::
9 | :toctree: _autosummary_models
10 | :recursive:
11 |
12 | cedalion.plots
13 | cedalion.vis.plot_probe
14 | cedalion.vis.plot_sensitivity_matrix
15 | cedalion.vis.time_series
16 |
17 | Examples
18 | --------
19 |
20 | .. nbgallery::
21 | :glob:
22 |
23 | ../examples/plots_visualization/*
--------------------------------------------------------------------------------
/docs/references.rst:
--------------------------------------------------------------------------------
1 | References
2 | ==========
3 |
4 | .. bibliography::
5 | :all:
--------------------------------------------------------------------------------
/docs/sigproc/index.rst:
--------------------------------------------------------------------------------
1 | Signal processing
2 | =================
3 |
4 | Methods for processing and analyzing fNIRS signals, including
5 | preprocessing, quality control, and feature extraction.
6 |
7 | .. autosummary::
8 | :toctree: _autosummary_sigproc
9 | :nosignatures:
10 | :recursive:
11 |
12 | cedalion.nirs
13 | cedalion.sigproc.epochs
14 | cedalion.sigproc.frequency
15 | cedalion.sigproc.motion_correct
16 | cedalion.sigproc.quality
17 |
18 |
19 | Examples
20 | --------
21 |
22 | .. nbgallery::
23 | :glob:
24 |
25 | ../examples/signal_quality/*
--------------------------------------------------------------------------------
/docs/synth/index.rst:
--------------------------------------------------------------------------------
1 | Synthetic Data
2 | ==============
3 |
4 | Cedalion provides tools for generating synthetic data for testing and
5 | development purposes. This includes functions for generating synthetic
6 | hemodynamic response functions (HRFs) and synthetic motion artifacts.
7 |
8 | .. autosummary::
9 | :toctree: _autosummary_synth
10 | :recursive:
11 |
12 | cedalion.sim.synthetic_artifact
13 | cedalion.sim.synthetic_hrf
14 |
15 |
16 | Examples
17 | --------
18 |
19 | .. nbgallery::
20 | :glob:
21 |
22 | ../examples/augmentation/*
--------------------------------------------------------------------------------
/environment_dev.yml:
--------------------------------------------------------------------------------
1 | name: cedalion
2 | channels:
3 | - conda-forge
4 | - defaults
5 | dependencies:
6 | - click=8.1
7 | - h5py=3.11
8 | - ipython=8.13.2
9 | - ipywidgets=8.1.2
10 | - jupyter
11 | - jupyter_client=7.4.9
12 | - matplotlib=3.9
13 | - nibabel=5.2
14 | - nilearn=0.10
15 | - notebook=6.5.4
16 | - numpy=1.26
17 | - opencv
18 | - pandas=2.2
19 | - pint-xarray=0.3
20 | - pip
21 | - pooch=1.8
22 | - pre_commit
23 | - pybids=0.16
24 | - pytest
25 | - pytest-cov
26 | - python=3.11
27 | - pywavelets=1.6
28 | - ruff
29 | - scikit-image=0.24
30 | - scikit-learn=1.5
31 | - scipy=1.14
32 | - seaborn=0.13
33 | - statsmodels=0.14
34 | - strenum=0.4
35 | - xarray=2024.6
36 | - trimesh=4.4
37 | - pyvista=0.44
38 | - trame=3.6
39 | - trame-vtk=2.8
40 | - trame-vuetify=2.6
41 | - trame-components=2.3
42 | - vtk=9.2.6
43 |
44 | - nbsphinx
45 | - nbstripout
46 | - sphinx
47 | - myst-parser
48 | - sphinx-autodoc-typehints
49 | - sphinx_rtd_theme=2.0.0
50 | - sphinx-autoapi
51 |
52 | - hatchling
53 | - hatch-vcs
54 |
55 | - PyQt
56 | - ipympl
57 |
58 | - pip:
59 | - mne==1.7
60 | - mne-bids==0.15
61 | - mne-nirs==0.6
62 | - pywavefront==1.3
63 | - setuptools-scm
64 | - snirf==0.8
65 | - pmcx==0.3.3
66 | - pmcxcl==0.2.0
67 | - sphinxcontrib-bibtex
68 | - hatch-conda
69 |
70 |
--------------------------------------------------------------------------------
/environment_doc.yml:
--------------------------------------------------------------------------------
1 | name: cedalion
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - black=23.7.0
6 | - click=8.1.3
7 | - flake8=6.0.0
8 | - h5py=3.8.0
9 | - ipython=8.13.2
10 | - jupyter
11 | - matplotlib=3.7.1
12 | - nibabel=5.1.0
13 | - nilearn=0.10.0
14 | - nipype=1.8.6
15 | - notebook=6.5.4
16 | - numpy=1.23.5
17 | - opencv=4.6.0
18 | - pandas=2.0.1
19 | - pint-xarray=0.3
20 | - pip=23.1.2
21 | - pooch=1.7.0
22 | - pre_commit=3.3.3
23 | - pybids=0.16.1
24 | - pylint=2.17.5
25 | - pytest=7.3.1
26 | - pytest-cov=4.0.0
27 | - python=3.10
28 | - pywavelets=1.4.1
29 | - ruff=0.0.285
30 | - scikit-image=0.20.0
31 | - scikit-learn=1.2.2
32 | - scipy=1.10.1
33 | - seaborn=0.12.2
34 | - statsmodels=0.13.5
35 | - strenum=0.4.15
36 | - xarray=2024.2.0
37 | - cudatoolkit
38 | - trimesh
39 | - pyvista
40 | - trame
41 | - vtk=9.2.2
42 |
43 | - nbsphinx=0.9.3
44 | - nbstripout=0.7.1
45 | - sphinx
46 | - myst-parser
47 | - sphinx-autodoc-typehints
48 | - sphinx_rtd_theme
49 |
50 | - pip:
51 | - mne==1.4.0
52 | - mne-bids==0.12
53 | - mne-nirs==0.5.0
54 | - pywavefront==1.3.3
55 | - setuptools-scm==7.1.0
56 | - snirf==0.7.4
57 | - pmcx
58 | - open3d==0.16
59 |
--------------------------------------------------------------------------------
/examples/PCArecurse_motion_correct.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "id": "0",
7 | "metadata": {},
8 | "outputs": [],
9 | "source": [
10 | "import cedalion\n",
11 | "import cedalion.nirs\n",
12 | "import cedalion.sigproc.quality as quality\n",
13 | "from cedalion.sigproc.artifact import id_motion, id_motion_refine\n",
14 | "from cedalion.sigproc.motion_correct import motion_correct_PCA, motion_correct_PCA_recurse\n",
15 | "import cedalion.xrutils as xrutils\n",
16 | "import cedalion.datasets as datasets\n",
17 | "import xarray as xr\n",
18 | "import matplotlib.pyplot as p\n",
19 | "from functools import reduce\n",
20 | "import numpy as np\n",
21 | "\n",
22 | "from cedalion import Quantity, units"
23 | ]
24 | },
25 | {
26 | "cell_type": "code",
27 | "execution_count": null,
28 | "id": "1",
29 | "metadata": {},
30 | "outputs": [],
31 | "source": [
32 | "# get example finger tapping dataset\n",
33 | "snirf_element = datasets.get_fingertapping()\n",
34 | "amp = snirf_element[0].data[0]\n",
35 | "geo = snirf_element[0].geo3d\n",
36 | "od = cedalion.nirs.int2od(amp)\n",
37 | "\n",
38 | "data = xr.Dataset(\n",
39 | " data_vars = {\n",
40 | " \"amp\" : amp,\n",
41 | " \"od\" : od,\n",
42 | " \"geo3d\": geo\n",
43 | " })\n",
44 | "\n",
45 | "\n",
46 | "# Plot some data for visual validation\n",
47 | "f,ax = p.subplots(1,1, figsize=(12,4))\n",
48 | "ax.plot( data.amp.time, data.amp.sel(channel=\"S1D1\", wavelength=\"850\"), \"r-\", label=\"850nm\")\n",
49 | "ax.plot( data.amp.time, data.amp.sel(channel=\"S1D1\", wavelength=\"760\"), \"b-\", label=\"760nm\")\n",
50 | "p.legend()\n",
51 | "ax.set_xlabel(\"time / s\")\n",
52 | "ax.set_ylabel(\"Signal intensity / a.u.\")"
53 | ]
54 | },
55 | {
56 | "cell_type": "markdown",
57 | "id": "2",
58 | "metadata": {},
59 | "source": [
60 | "# Detect motion and perform PCA filtering \n",
61 | "\n",
    "The motion_correct_PCA_recurse algorithm first detects motion in the OD data. It then iteratively calls motion_correct_PCA which performs PCA filtering on all time points labelled as motion. \n"
63 | ]
64 | },
65 | {
66 | "cell_type": "code",
67 | "execution_count": null,
68 | "id": "3",
69 | "metadata": {},
70 | "outputs": [],
71 | "source": [
72 | "# typical motion id parameters\n",
73 | "t_motion = 0.5\n",
74 | "t_mask = 1\n",
75 | "stdev_thresh = 20\n",
76 | "amp_thresh = 5\n",
77 | "\n",
78 | "# motion identification \n",
79 | "tIncCh = id_motion(fNIRSdata=data.od, t_motion=t_motion, t_mask=t_mask, \n",
80 | " stdev_thresh=stdev_thresh, amp_thresh=amp_thresh) \n",
81 | "tInc = id_motion_refine(tIncCh, 'all')[0]\n",
82 | "tInc.values = np.hstack([False, tInc.values[:-1]]) # manual shift to account for indexing differences\n",
83 | "\n",
84 | "# call motion_correct_PCA\n",
85 | "nSV=0.97 # discard n components up to 97% of variance \n",
86 | "od_cleaned, svs, nSV = motion_correct_PCA(fNIRSdata=data.od, tInc=tInc, nSV=nSV)\n"
87 | ]
88 | },
89 | {
90 | "cell_type": "code",
91 | "execution_count": null,
92 | "id": "4",
93 | "metadata": {},
94 | "outputs": [],
95 | "source": [
96 | "# plot difference between uncorrected OD and after PCA filter correction\n",
97 | "f,ax = p.subplots(1,1, figsize=(12,4))\n",
98 | "ax.plot( data.od.time, data.od.sel(channel=\"S1D1\", wavelength=\"760\"), \"b-\", label=\"850nm OD\")\n",
99 | "ax.plot( od_cleaned.time, od_cleaned.sel(channel=\"S1D1\", wavelength=\"760\"), \"g-\", label=\"850nm OD post PCA filtering\")\n",
100 | "p.legend()\n",
101 | "ax.set_xlabel(\"time / s\")\n",
102 | "ax.set_ylabel(\"Optical density / a.u.\")"
103 | ]
104 | },
105 | {
106 | "cell_type": "markdown",
107 | "id": "5",
108 | "metadata": {},
109 | "source": [
110 | "# Iterative PCA filtering \n",
111 | "\n",
112 | "Above, the PCA filtering was performed once. motion_correct_PCA_recurse iteratively calls the motion detection and motion_correct_PCA until either it reaches the maximum number of iterations specified or until there is no longer any motion detected. "
113 | ]
114 | },
115 | {
116 | "cell_type": "code",
117 | "execution_count": null,
118 | "id": "6",
119 | "metadata": {},
120 | "outputs": [],
121 | "source": [
122 | "maxIter = 5\n",
123 | "\n",
124 | "od_cleaned_fully, svs, nSV, tInc = motion_correct_PCA_recurse(fNIRSdata=data.od, t_motion=t_motion, t_mask=t_mask, \n",
125 | " stdev_thresh=stdev_thresh, amp_thresh=amp_thresh, nSV=nSV, maxIter=maxIter)\n",
126 | "\n"
127 | ]
128 | },
129 | {
130 | "cell_type": "code",
131 | "execution_count": null,
132 | "id": "7",
133 | "metadata": {},
134 | "outputs": [],
135 | "source": [
136 | "# plot difference between uncorrected OD and after iterative PCA filter correction\n",
137 | "f,ax = p.subplots(1,1, figsize=(12,4))\n",
138 | "ax.plot( data.od.time, data.od.sel(channel=\"S1D1\", wavelength=\"760\"), \"b-\", label=\"850nm OD\")\n",
139 | "ax.plot( od_cleaned_fully.time, od_cleaned_fully.sel(channel=\"S1D1\", wavelength=\"760\"), \"g-\", label=\"850nm OD post PCA filtering\")\n",
140 | "p.legend()\n",
141 | "ax.set_xlabel(\"time / s\")\n",
142 | "ax.set_ylabel(\"Optical density / a.u.\")"
143 | ]
144 | },
145 | {
146 | "cell_type": "code",
147 | "execution_count": null,
148 | "id": "8",
149 | "metadata": {},
150 | "outputs": [],
151 | "source": []
152 | }
153 | ],
154 | "metadata": {
155 | "kernelspec": {
156 | "display_name": "Python 3 (ipykernel)",
157 | "language": "python",
158 | "name": "python3"
159 | },
160 | "language_info": {
161 | "codemirror_mode": {
162 | "name": "ipython",
163 | "version": 3
164 | },
165 | "file_extension": ".py",
166 | "mimetype": "text/x-python",
167 | "name": "python",
168 | "nbconvert_exporter": "python",
169 | "pygments_lexer": "ipython3",
170 | "version": "3.10.13"
171 | }
172 | },
173 | "nbformat": 4,
174 | "nbformat_minor": 5
175 | }
176 |
--------------------------------------------------------------------------------
/examples/getting_started_io/00_test_installation.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# This Notebook tests whether your cedalion installation is working\n",
8 | "\n",
9 | "Everything that is specific to the installation of Cedalion can be found on our documentation page: https://doc.ibs.tu-berlin.de/cedalion/doc/dev\n",
10 | "\n",
11 | "It is assumed that you already followed the steps below: \n",
12 | "- Have the conda package manager installed, either by installing [Anaconda or Miniconda](https://docs.anaconda.com/distro-or-miniconda/). \n",
13 | "- Clone the Cedalion repository from github to your local disk. The main branch is the release branch.\n",
14 | "- In your IDE, switch to the Cedalion folder as your local working folder or “add to path”.\n",
15 | "- In its Python terminal create a conda environment with the necessary dependencies by running: \n",
16 | "```\n",
17 | "$ conda env create -n cedalion -f environment_dev.yml\n",
18 | "```\n",
19 | "- This installs all relevant dependencies and creates an environment in which you can run the toolbox. It also includes Jupyter Notebook to run the example notebooks.\n",
20 | "- Afterwards activate the environment and add an editable install of Cedalion to it:\n",
21 | "\n",
22 | "```\n",
23 | "$ conda activate cedalion\n",
24 | "$ pip install -e .\n",
25 | "```\n",
26 | "\n",
27 | " \n",
28 | "If creating the environment is slow:\n",
29 | "- To create the environment conda needs to find a set of packages that fulfills all requirements. If conda needs too much time to find a solution, there are two ways to speed it up.\n",
30 | "- Install [libmamba-solver](https://conda.github.io/conda-libmamba-solver/user-guide/) and configure conda to use it. (recommended)\n",
31 | "- If you have a recent conda version (23.10 or later), libmamba is already preconfigured. Otherwise update:\n",
32 | "```\n",
33 | "$ conda update -n base conda\n",
34 | "```"
35 | ]
36 | },
37 | {
38 | "cell_type": "markdown",
39 | "metadata": {},
40 | "source": [
41 | "## Import Cedalion Dependencies\n",
42 | "If you get an error here, revisit the setup above."
43 | ]
44 | },
45 | {
46 | "cell_type": "code",
47 | "execution_count": null,
48 | "metadata": {},
49 | "outputs": [],
50 | "source": [
51 | "import cedalion\n",
52 | "import cedalion.datasets\n",
53 | "import cedalion.plots"
54 | ]
55 | },
56 | {
57 | "cell_type": "markdown",
58 | "metadata": {},
59 | "source": [
60 | "## Download datasets\n",
61 | "These will be cached and are used, amongst others, by the example notebooks.\n",
62 | "\n",
63 | "- [Image Reconstruction](https://github.com/ibs-lab/cedalion/blob/main/examples/image_reconstruction.ipynb)\n",
64 | "- [Photogrammetric Coregistration](https://github.com/ibs-lab/cedalion/blob/main/examples/photogrammetric_optode_coregistration.ipynb)\n",
65 | "\n",
66 | "Depending on your operating system the user cache directory is one of these:\n",
67 | "- Mac: `~/Library/Caches/cedalion`\n",
68 | "- Unix: `~/.cache/cedalion`\n",
69 | "- Windows: `C:\Users\<username>\AppData\Local\cedalion\cedalion\Cache`\n",
70 | "\n",
71 | "It is safe to delete the downloaded files, if they are not needed anymore. During a new run of the example notebooks they are downloaded again.\n"
72 | ]
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": null,
77 | "metadata": {},
78 | "outputs": [],
79 | "source": [
80 | "# loads finger tapping data with sparse probe \n",
81 | "rec = cedalion.datasets.get_fingertapping()\n",
82 | "display(rec)"
83 | ]
84 | },
85 | {
86 | "cell_type": "code",
87 | "execution_count": null,
88 | "metadata": {},
89 | "outputs": [],
90 | "source": [
91 | "# loads finger tapping data with high density probe\n",
92 | "rec = cedalion.datasets.get_fingertappingDOT()\n",
93 | "display(rec)"
94 | ]
95 | },
96 | {
97 | "cell_type": "code",
98 | "execution_count": null,
99 | "metadata": {},
100 | "outputs": [],
101 | "source": [
102 | "cedalion.datasets.get_colin27_segmentation()\n"
103 | ]
104 | },
105 | {
106 | "cell_type": "code",
107 | "execution_count": null,
108 | "metadata": {},
109 | "outputs": [],
110 | "source": [
111 | "fluence = cedalion.datasets.get_precomputed_fluence(\"fingertappingDOT\", \"colin27\")"
112 | ]
113 | },
114 | {
115 | "cell_type": "code",
116 | "execution_count": null,
117 | "metadata": {},
118 | "outputs": [],
119 | "source": [
120 | "# loads photogrammetry scan example \n",
121 | "fname_scan, fname_snirf,fname_montage = cedalion.datasets.get_photogrammetry_example_scan()\n",
122 | "s = cedalion.io.read_einstar_obj(fname_scan)"
123 | ]
124 | },
125 | {
126 | "cell_type": "markdown",
127 | "metadata": {},
128 | "source": [
129 | "## Optional: Plot a 3D scan with Pyvista\n",
130 | "Note: If you run into problems here this will not be due to a faulty Cedalion installation, but your Pyvista setup"
131 | ]
132 | },
133 | {
134 | "cell_type": "code",
135 | "execution_count": null,
136 | "metadata": {},
137 | "outputs": [],
138 | "source": [
139 | "# plot the scan\n",
140 | "import pyvista as pv\n",
141 | "pv.set_jupyter_backend(\"static\") # use \"client\" for interactive rendering\n",
142 | "plt = pv.Plotter()\n",
143 | "cedalion.plots.plot_surface(plt, s, opacity=1.0)\n",
144 | "plt.show()"
145 | ]
146 | },
147 | {
148 | "cell_type": "markdown",
149 | "metadata": {},
150 | "source": [
151 | "## If you run into trouble you can get help by the community\n",
152 | "Visit the Cedalion forum on openfnirs.org/community/cedalion and particularly the [thread for installing cedalion](https://openfnirs.org/community/cedalion/installing-cedalion/#post-2165)"
153 | ]
154 | }
155 | ],
156 | "metadata": {
157 | "kernelspec": {
158 | "display_name": "cedalion",
159 | "language": "python",
160 | "name": "python3"
161 | },
162 | "language_info": {
163 | "codemirror_mode": {
164 | "name": "ipython",
165 | "version": 3
166 | },
167 | "file_extension": ".py",
168 | "mimetype": "text/x-python",
169 | "name": "python",
170 | "nbconvert_exporter": "python",
171 | "pygments_lexer": "ipython3",
172 | "version": "3.11.8"
173 | }
174 | },
175 | "nbformat": 4,
176 | "nbformat_minor": 2
177 | }
178 |
--------------------------------------------------------------------------------
/examples/getting_started_io/10_xarray_datastructs_fnirs.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Xarray Data Structures - an fNIRS example\n",
8 | "\n",
9 | "This example illustrates the usage of xarray-based data structures for calculating the Beer-Lambert transformation."
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": null,
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "import cedalion\n",
19 | "import cedalion.nirs\n",
20 | "import cedalion.xrutils\n",
21 | "import cedalion.xrutils as xrutils\n",
22 | "from cedalion.datasets import get_fingertapping_snirf_path\n",
23 | "import numpy as np\n",
24 | "import xarray as xr\n",
25 | "import pint\n",
26 | "import matplotlib.pyplot as p\n",
27 | "import scipy.signal\n",
28 | "import os.path\n",
29 | "xr.set_options(display_max_rows=3, display_values_threshold=50)\n",
30 | "np.set_printoptions(precision=4)"
31 | ]
32 | },
33 | {
34 | "cell_type": "markdown",
35 | "metadata": {},
36 | "source": [
37 | "### Loading raw CW-NIRS data from a SNIRF file\n",
38 | "This notebook uses a finger-tapping dataset in BIDS layout provided by [Rob Luke](https://github.com/rob-luke/BIDS-NIRS-Tapping). It can be downloaded via `cedalion.datasets`."
39 | ]
40 | },
41 | {
42 | "cell_type": "markdown",
43 | "metadata": {},
44 | "source": [
45 | "Load amplitude data from the snirf file."
46 | ]
47 | },
48 | {
49 | "cell_type": "code",
50 | "execution_count": null,
51 | "metadata": {},
52 | "outputs": [],
53 | "source": [
54 | "path_to_snirf_file = get_fingertapping_snirf_path()\n",
55 | "\n",
56 | "recordings = cedalion.io.read_snirf(path_to_snirf_file)\n",
57 | "rec = recordings[0] # there is only one NirsElement in this snirf file...\n",
58 | "amp = rec[\"amp\"] # ... which holds amplitude data\n",
59 | "\n",
60 | "# restrict to first 60 seconds and fill in missing units\n",
61 | "amp = amp.sel(time=amp.time < 60)\n",
62 | "amp = amp.pint.dequantify().pint.quantify(\"V\")\n",
63 | "geo3d = rec.geo3d\n"
64 | ]
65 | },
66 | {
67 | "cell_type": "code",
68 | "execution_count": null,
69 | "metadata": {},
70 | "outputs": [],
71 | "source": [
72 | "recordings"
73 | ]
74 | },
75 | {
76 | "cell_type": "markdown",
77 | "metadata": {},
78 | "source": [
79 | "### Amplitude data"
80 | ]
81 | },
82 | {
83 | "cell_type": "code",
84 | "execution_count": null,
85 | "metadata": {},
86 | "outputs": [],
87 | "source": [
88 | "display(amp.round(4))"
89 | ]
90 | },
91 | {
92 | "cell_type": "markdown",
93 | "metadata": {},
94 | "source": [
95 | "### Montage information"
96 | ]
97 | },
98 | {
99 | "cell_type": "markdown",
100 | "metadata": {},
101 | "source": [
102 | "The `geo3d` DataArray maps labels to 3D positions, thus storing the location of optodes and landmarks."
103 | ]
104 | },
105 | {
106 | "cell_type": "code",
107 | "execution_count": null,
108 | "metadata": {},
109 | "outputs": [],
110 | "source": [
111 | "display_labels = [\"S1\", \"S2\", \"D1\", \"D2\", \"NASION\"] # for brevity show only these\n",
112 | "geo3d.round(5).sel(label=display_labels)"
113 | ]
114 | },
115 | {
116 | "cell_type": "markdown",
117 | "metadata": {},
118 | "source": [
119 | "To obtain channel distances, we can lookup `amp`'s source and detector coordinates in `geo3d`,\n",
120 | "subtract these and calculate the vector norm."
121 | ]
122 | },
123 | {
124 | "cell_type": "code",
125 | "execution_count": null,
126 | "metadata": {},
127 | "outputs": [],
128 | "source": [
129 | "dists = xrutils.norm(geo3d.loc[amp.source] - geo3d.loc[amp.detector], dim=\"pos\")\n",
130 | "display(dists.round(3))"
131 | ]
132 | },
133 | {
134 | "cell_type": "markdown",
135 | "metadata": {},
136 | "source": [
137 | "### Beer-Lambert transformation"
138 | ]
139 | },
140 | {
141 | "cell_type": "markdown",
142 | "metadata": {},
143 | "source": [
144 | "Specify differential path length factors (DPF). Obtain a matrix of tabulated extinction coefficients for the wavelengths of our dataset and calculate the inverse. Cedalion offers dedicated functions for mBLL conversion (nirs.int2od(), nirs.od2conc(), and nirs.beer_lambert() functions from the nirs subpackage) - but we do not use them here to better showcase how Xarrays work. "
145 | ]
146 | },
147 | {
148 | "cell_type": "code",
149 | "execution_count": null,
150 | "metadata": {},
151 | "outputs": [],
152 | "source": [
153 | "dpf = xr.DataArray([6., 6.], dims=\"wavelength\", coords={\"wavelength\" : [760., 850.]})\n",
154 | "\n",
155 | "E = cedalion.nirs.get_extinction_coefficients(\"prahl\", amp.wavelength)\n",
156 | "Einv = cedalion.xrutils.pinv(E)\n",
157 | "display(Einv.round(4))"
158 | ]
159 | },
160 | {
161 | "cell_type": "code",
162 | "execution_count": null,
163 | "metadata": {},
164 | "outputs": [],
165 | "source": [
166 | "optical_density = -np.log( amp / amp.mean(\"time\"))\n",
167 | "\n",
168 | "conc = Einv @ (optical_density / ( dists * dpf))\n",
169 | "\n",
170 | "display(conc.pint.to(\"micromolar\").round(4))"
171 | ]
172 | },
173 | {
174 | "cell_type": "code",
175 | "execution_count": null,
176 | "metadata": {
177 | "tags": [
178 | "nbsphinx-thumbnail"
179 | ]
180 | },
181 | "outputs": [],
182 | "source": [
183 | "f,ax = p.subplots(1,1, figsize=(12,4))\n",
184 | "ax.plot( conc.time, conc.sel(channel=\"S1D1\", chromo=\"HbO\").pint.to(\"micromolar\"), \"r-\", label=\"HbO\")\n",
185 | "ax.plot( conc.time, conc.sel(channel=\"S1D1\", chromo=\"HbR\").pint.to(\"micromolar\"), \"b-\", label=\"HbR\")\n",
186 | "p.legend()\n",
187 | "ax.set_xlabel(\"time / s\")\n",
188 | "ax.set_ylabel(\"$\\Delta c$ / $\\mu M$\");"
189 | ]
190 | }
191 | ],
192 | "metadata": {
193 | "kernelspec": {
194 | "display_name": "Python 3 (ipykernel)",
195 | "language": "python",
196 | "name": "python3"
197 | },
198 | "language_info": {
199 | "codemirror_mode": {
200 | "name": "ipython",
201 | "version": 3
202 | },
203 | "file_extension": ".py",
204 | "mimetype": "text/x-python",
205 | "name": "python",
206 | "nbconvert_exporter": "python",
207 | "pygments_lexer": "ipython3",
208 | "version": "3.11.6"
209 | }
210 | },
211 | "nbformat": 4,
212 | "nbformat_minor": 2
213 | }
214 |
--------------------------------------------------------------------------------
/examples/head_models.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Headmodels in Cedalion\n",
8 | "This notebook displays the different ways of loading headmodels into cedalion - either Atlases ( Colin27 / ICBM152 ) or individual anatomies."
9 | ]
10 | },
11 | {
12 | "cell_type": "code",
13 | "execution_count": null,
14 | "metadata": {},
15 | "outputs": [],
16 | "source": [
17 | "# load dependencies\n",
18 | "\n",
19 | "import pyvista as pv\n",
20 | "pv.set_jupyter_backend('server')\n",
21 | "#pv.set_jupyter_backend('static')\n",
22 | "\n",
23 | "import os\n",
24 | "\n",
25 | "import cedalion\n",
26 | "import cedalion.io\n",
27 | "import cedalion.plots\n",
28 | "import cedalion.datasets\n",
29 | "import cedalion.imagereco.forward_model as fw"
30 | ]
31 | },
32 | {
33 | "cell_type": "markdown",
34 | "metadata": {},
35 | "source": [
36 | "## Colin 27\n",
37 | "An average of 27 scans of the same person"
38 | ]
39 | },
40 | {
41 | "cell_type": "code",
42 | "execution_count": null,
43 | "metadata": {},
44 | "outputs": [],
45 | "source": [
46 | "# load segmentation data from the colin27 atlas\n",
47 | "SEG_DATADIR_cl27, mask_files_cl27, landmarks_file_cl27 = cedalion.datasets.get_colin27_segmentation()\n",
48 | "\n",
49 | "# create forward model class for colin 27 atlas\n",
50 | "head_colin27 = fw.TwoSurfaceHeadModel.from_surfaces(\n",
51 | " segmentation_dir=SEG_DATADIR_cl27,\n",
52 | " mask_files = mask_files_cl27,\n",
53 | " brain_surface_file= os.path.join(SEG_DATADIR_cl27, \"mask_brain.obj\"),\n",
54 | " landmarks_ras_file=landmarks_file_cl27,\n",
55 | " brain_face_count=None,\n",
56 | " scalp_face_count=None\n",
57 | ")\n"
58 | ]
59 | },
60 | {
61 | "cell_type": "code",
62 | "execution_count": null,
63 | "metadata": {},
64 | "outputs": [],
65 | "source": [
66 | "# plot Colin headmodel\n",
67 | "plt = pv.Plotter()\n",
68 | "cedalion.plots.plot_surface(plt, head_colin27.brain, color=\"w\")\n",
69 | "cedalion.plots.plot_surface(plt, head_colin27.scalp, opacity=.1)\n",
70 | "plt.show()"
71 | ]
72 | },
73 | {
74 | "cell_type": "markdown",
75 | "metadata": {},
76 | "source": [
77 | "## ICBM 152\n",
78 | "An average of 152 scans from different persons"
79 | ]
80 | },
81 | {
82 | "cell_type": "code",
83 | "execution_count": null,
84 | "metadata": {},
85 | "outputs": [],
86 | "source": [
87 | "# load segmentation data from the icbm152 atlas\n",
88 | "SEG_DATADIR_ic152, mask_files_ic152, landmarks_file_ic152 = cedalion.datasets.get_icbm152_segmentation()\n",
89 | "\n",
90 | "# create forward model class for icbm152 atlas\n",
91 | "head_icbm152 = fw.TwoSurfaceHeadModel.from_surfaces(\n",
92 | " segmentation_dir=SEG_DATADIR_ic152,\n",
93 | " mask_files = mask_files_ic152,\n",
94 | " brain_surface_file= os.path.join(SEG_DATADIR_ic152, \"mask_brain.obj\"),\n",
95 | " landmarks_ras_file=landmarks_file_ic152,\n",
96 | " brain_face_count=None,\n",
97 | " scalp_face_count=None\n",
98 | ")\n"
99 | ]
100 | },
101 | {
102 | "cell_type": "code",
103 | "execution_count": null,
104 | "metadata": {},
105 | "outputs": [],
106 | "source": [
107 | "# plot ICBM headmodel\n",
108 | "plt = pv.Plotter()\n",
109 | "cedalion.plots.plot_surface(plt, head_icbm152.brain, color=\"w\")\n",
110 | "cedalion.plots.plot_surface(plt, head_icbm152.scalp, opacity=.1)\n",
111 | "plt.show()"
112 | ]
113 | }
114 | ],
115 | "metadata": {
116 | "kernelspec": {
117 | "display_name": "cedalion",
118 | "language": "python",
119 | "name": "python3"
120 | },
121 | "language_info": {
122 | "codemirror_mode": {
123 | "name": "ipython",
124 | "version": 3
125 | },
126 | "file_extension": ".py",
127 | "mimetype": "text/x-python",
128 | "name": "python",
129 | "nbconvert_exporter": "python",
130 | "pygments_lexer": "ipython3",
131 | "version": "3.11.9"
132 | }
133 | },
134 | "nbformat": 4,
135 | "nbformat_minor": 2
136 | }
137 |
--------------------------------------------------------------------------------
/examples/head_models/42_1010_system.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "0",
6 | "metadata": {},
7 | "source": [
8 | "# Constructing 10-10 coordinates on segmented MRI scans"
9 | ]
10 | },
11 | {
12 | "cell_type": "code",
13 | "execution_count": null,
14 | "id": "1",
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "import cedalion\n",
19 | "import cedalion.io\n",
20 | "import cedalion.geometry.segmentation\n",
21 | "import cedalion.geometry.landmarks\n",
22 | "from cedalion.imagereco.forward_model import TwoSurfaceHeadModel\n",
23 | "import cedalion.datasets\n",
24 | "import os.path\n",
25 | "import pyvista\n",
26 | "\n",
27 | "#pyvista.set_jupyter_backend(\"html\")\n",
28 | "pyvista.set_jupyter_backend(\"static\")"
29 | ]
30 | },
31 | {
32 | "cell_type": "markdown",
33 | "id": "2",
34 | "metadata": {},
35 | "source": [
36 | "## Load segmentation masks\n",
37 | "\n",
38 | "This example constructs the 10-10 system on the Colin27 average brain."
39 | ]
40 | },
41 | {
42 | "cell_type": "code",
43 | "execution_count": null,
44 | "id": "3",
45 | "metadata": {},
46 | "outputs": [],
47 | "source": [
48 | "SEG_DATADIR, mask_files, landmarks_file = cedalion.datasets.get_colin27_segmentation()\n",
49 | "masks, t_ijk2ras = cedalion.io.read_segmentation_masks(SEG_DATADIR, mask_files)"
50 | ]
51 | },
52 | {
53 | "cell_type": "markdown",
54 | "id": "4",
55 | "metadata": {},
56 | "source": [
57 | "## Wrap the segmented head with derived surfaces in a TwoSurfaceHeadModel"
58 | ]
59 | },
60 | {
61 | "cell_type": "code",
62 | "execution_count": null,
63 | "id": "5",
64 | "metadata": {},
65 | "outputs": [],
66 | "source": [
67 | "head = TwoSurfaceHeadModel.from_surfaces(\n",
68 | " segmentation_dir=SEG_DATADIR,\n",
69 | " mask_files = mask_files,\n",
70 | " brain_surface_file= os.path.join(SEG_DATADIR, \"mask_brain.obj\"),\n",
71 | " scalp_surface_file= os.path.join(SEG_DATADIR, \"mask_scalp.obj\"),\n",
72 | " landmarks_ras_file=landmarks_file,\n",
73 | " brain_face_count=None,\n",
74 | " scalp_face_count=None\n",
75 | ")"
76 | ]
77 | },
78 | {
79 | "cell_type": "markdown",
80 | "id": "6",
81 | "metadata": {},
82 | "source": [
83 | "## Transform the scalp surface from voxel space ('ijk') to RAS space ('aligned')"
84 | ]
85 | },
86 | {
87 | "cell_type": "code",
88 | "execution_count": null,
89 | "id": "7",
90 | "metadata": {},
91 | "outputs": [],
92 | "source": [
93 | "scalp_surface = head.scalp\n",
94 | "display(scalp_surface)\n",
95 | "scalp_surface = scalp_surface.apply_transform(t_ijk2ras)\n",
96 | "display(scalp_surface)"
97 | ]
98 | },
99 | {
100 | "cell_type": "markdown",
101 | "id": "8",
102 | "metadata": {},
103 | "source": [
104 | "## Transform initial landmarks from voxel space ('ijk') to RAS space ('aligned')"
105 | ]
106 | },
107 | {
108 | "cell_type": "code",
109 | "execution_count": null,
110 | "id": "9",
111 | "metadata": {},
112 | "outputs": [],
113 | "source": [
114 | "landmarks_ras = head.landmarks.points.apply_transform(t_ijk2ras)"
115 | ]
116 | },
117 | {
118 | "cell_type": "markdown",
119 | "id": "10",
120 | "metadata": {},
121 | "source": [
122 | "## Construct landmarks"
123 | ]
124 | },
125 | {
126 | "cell_type": "code",
127 | "execution_count": null,
128 | "id": "11",
129 | "metadata": {},
130 | "outputs": [],
131 | "source": [
132 | "lmbuilder = cedalion.geometry.landmarks.LandmarksBuilder1010(scalp_surface, landmarks_ras)\n",
133 | "all_landmarks = lmbuilder.build()"
134 | ]
135 | },
136 | {
137 | "cell_type": "markdown",
138 | "id": "12",
139 | "metadata": {},
140 | "source": [
141 | "## Visualize"
142 | ]
143 | },
144 | {
145 | "cell_type": "code",
146 | "execution_count": null,
147 | "id": "13",
148 | "metadata": {
149 | "tags": [
150 | "nbsphinx-thumbnail"
151 | ]
152 | },
153 | "outputs": [],
154 | "source": [
155 | "lmbuilder.plot()"
156 | ]
157 | },
158 | {
159 | "cell_type": "code",
160 | "execution_count": null,
161 | "id": "14",
162 | "metadata": {},
163 | "outputs": [],
164 | "source": [
165 | "display(all_landmarks)"
166 | ]
167 | }
168 | ],
169 | "metadata": {
170 | "kernelspec": {
171 | "display_name": "Python 3 (ipykernel)",
172 | "language": "python",
173 | "name": "python3"
174 | },
175 | "language_info": {
176 | "codemirror_mode": {
177 | "name": "ipython",
178 | "version": 3
179 | },
180 | "file_extension": ".py",
181 | "mimetype": "text/x-python",
182 | "name": "python",
183 | "nbconvert_exporter": "python",
184 | "pygments_lexer": "ipython3",
185 | "version": "3.11.8"
186 | }
187 | },
188 | "nbformat": 4,
189 | "nbformat_minor": 5
190 | }
191 |
--------------------------------------------------------------------------------
/examples/head_models/43_crs_and_headmodel.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Head Models and Coordinate Reference Systems (CRS)"
8 | ]
9 | },
10 | {
11 | "cell_type": "code",
12 | "execution_count": null,
13 | "metadata": {},
14 | "outputs": [],
15 | "source": [
16 | "import pyvista as pv\n",
17 | "#pv.set_jupyter_backend('server')\n",
18 | "pv.set_jupyter_backend('static')\n",
19 | "\n",
20 | "import os\n",
21 | "import xarray as xr\n",
22 | "\n",
23 | "import cedalion\n",
24 | "import cedalion.io\n",
25 | "import cedalion.plots\n",
26 | "import cedalion.datasets\n",
27 | "import cedalion.imagereco.forward_model as fw\n",
28 | "\n",
29 | "xr.set_options(display_expand_data=False);"
30 | ]
31 | },
32 | {
33 | "cell_type": "markdown",
34 | "metadata": {},
35 | "source": [
36 | "## Loading the ICBM-152 head model\n",
37 | "\n",
38 | "- the `TwoSurfaceHeadModel` holds the segmented MRT image and derived cortex and scalp surfaces\n",
39 | "- we provide functionality to derive these surfaces from the masks or to load them from files"
40 | ]
41 | },
42 | {
43 | "cell_type": "code",
44 | "execution_count": null,
45 | "metadata": {},
46 | "outputs": [],
47 | "source": [
48 | "# load paths to segmentation data for the icbm-152 atlas\n",
49 | "SEG_DATADIR, mask_files, landmarks_file = cedalion.datasets.get_icbm152_segmentation()\n",
50 | "\n",
51 | "# create forward model class for icbm152 atlas\n",
52 | "head_icbm152 = fw.TwoSurfaceHeadModel.from_surfaces(\n",
53 | " segmentation_dir=SEG_DATADIR,\n",
54 | " mask_files=mask_files,\n",
55 | " brain_surface_file=os.path.join(SEG_DATADIR, \"mask_brain.obj\"),\n",
56 | " landmarks_ras_file=landmarks_file,\n",
57 | " brain_face_count=None,\n",
58 | " scalp_face_count=None,\n",
59 | ")"
60 | ]
61 | },
62 | {
63 | "cell_type": "markdown",
64 | "metadata": {},
65 | "source": [
66 | "## Visualization"
67 | ]
68 | },
69 | {
70 | "cell_type": "code",
71 | "execution_count": null,
72 | "metadata": {
73 | "tags": [
74 | "nbsphinx-thumbnail"
75 | ]
76 | },
77 | "outputs": [],
78 | "source": [
79 | "plt = pv.Plotter()\n",
80 | "cedalion.plots.plot_surface(plt, head_icbm152.brain, color=\"#d3a6a1\")\n",
81 | "cedalion.plots.plot_surface(plt, head_icbm152.scalp, opacity=.1)\n",
82 | "cedalion.plots.plot_labeled_points(plt, head_icbm152.landmarks, show_labels=True)\n",
83 | "plt.show()"
84 | ]
85 | },
86 | {
87 | "cell_type": "markdown",
88 | "metadata": {},
89 | "source": [
90 | "## Segmentation masks\n",
91 | "\n",
92 | "The head model comprises masks for different tissue types: CSF, Gray Matter, White Matter, Scalp and Skull"
93 | ]
94 | },
95 | {
96 | "cell_type": "code",
97 | "execution_count": null,
98 | "metadata": {},
99 | "outputs": [],
100 | "source": [
101 | "head_icbm152.segmentation_masks"
102 | ]
103 | },
104 | {
105 | "cell_type": "markdown",
106 | "metadata": {},
107 | "source": [
108 | "## Coordinate System\n",
109 | "\n",
110 | "- we need to distinguish several coordinate systems: voxel space, scanner space, subject space, ...\n",
111 | "- geometric data types carry information about which crs they use\n",
112 | "- transformations between coordinate systems through affine transformations\n",
113 | "\n",
114 | "\n",
115 | "The head model is loaded in voxel space ('ijk')"
116 | ]
117 | },
118 | {
119 | "cell_type": "code",
120 | "execution_count": null,
121 | "metadata": {},
122 | "outputs": [],
123 | "source": [
124 | "head_icbm152.crs"
125 | ]
126 | },
127 | {
128 | "cell_type": "markdown",
129 | "metadata": {},
130 | "source": [
131 | "The head model contains initial landmarks ('Nz', 'Iz', 'LPA' and 'RPA') stored as a LabeledPointCloud.\n",
132 | "The crs is stored as the name of the second dimension, easily retrievable through the `.points`-accessor "
133 | ]
134 | },
135 | {
136 | "cell_type": "code",
137 | "execution_count": null,
138 | "metadata": {},
139 | "outputs": [],
140 | "source": [
141 | "display(head_icbm152.landmarks)\n",
142 | "display(head_icbm152.landmarks.points.crs)"
143 | ]
144 | },
145 | {
146 | "cell_type": "markdown",
147 | "metadata": {},
148 | "source": [
149 | "Triangulated surface meshes of the scalp and brain:"
150 | ]
151 | },
152 | {
153 | "cell_type": "code",
154 | "execution_count": null,
155 | "metadata": {},
156 | "outputs": [],
157 | "source": [
158 | "display(head_icbm152.brain)\n",
159 | "display(head_icbm152.scalp)"
160 | ]
161 | },
162 | {
163 | "cell_type": "code",
164 | "execution_count": null,
165 | "metadata": {},
166 | "outputs": [],
167 | "source": [
168 | "head_icbm152.t_ijk2ras # transformation from voxel to subject (RAS) space"
169 | ]
170 | },
171 | {
172 | "cell_type": "markdown",
173 | "metadata": {},
174 | "source": [
175 | "Change to subject (RAS) space by applying an affine transformation on the head model.\n",
176 | "This transforms all components.\n",
177 | "\n",
178 | "Here, the subject space is called 'aligned' (the label is derived from information in the mask's nifti file)\n",
179 | "\n",
180 | "The subject (RAS) space also uses physical units whereas coordinates in voxel space are dimensionless."
181 | ]
182 | },
183 | {
184 | "cell_type": "code",
185 | "execution_count": null,
186 | "metadata": {},
187 | "outputs": [],
188 | "source": [
189 | "trafo = head_icbm152.t_ijk2ras\n",
190 | "\n",
191 | "head_icbm152_ras = head_icbm152.apply_transform(trafo)\n",
192 | "\n",
193 | "display(head_icbm152_ras.crs)\n",
194 | "display(head_icbm152_ras.landmarks.points.crs)\n",
195 | "display(head_icbm152_ras.brain.crs)\n",
196 | "\n",
197 | "display(head_icbm152.landmarks.pint.units)\n",
198 | "display(head_icbm152_ras.landmarks.pint.units)"
199 | ]
200 | }
201 | ],
202 | "metadata": {
203 | "kernelspec": {
204 | "display_name": "cedalion_240902",
205 | "language": "python",
206 | "name": "python3"
207 | },
208 | "language_info": {
209 | "codemirror_mode": {
210 | "name": "ipython",
211 | "version": 3
212 | },
213 | "file_extension": ".py",
214 | "mimetype": "text/x-python",
215 | "name": "python",
216 | "nbconvert_exporter": "python",
217 | "pygments_lexer": "ipython3",
218 | "version": "3.11.8"
219 | }
220 | },
221 | "nbformat": 4,
222 | "nbformat_minor": 2
223 | }
224 |
--------------------------------------------------------------------------------
/examples/modeling/31_glm_basis_functions.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# GLM Basis Functions"
8 | ]
9 | },
10 | {
11 | "cell_type": "code",
12 | "execution_count": null,
13 | "metadata": {},
14 | "outputs": [],
15 | "source": [
16 | "import cedalion\n",
17 | "import cedalion.models.glm.basis_functions as bf\n",
18 | "import cedalion.models.glm.design_matrix as dm\n",
19 | "\n",
20 | "import cedalion.dataclasses as cdc\n",
21 | "\n",
22 | "import matplotlib.pyplot as p\n",
23 | "import numpy as np\n",
24 | "import xarray as xr\n",
25 | "import matplotlib.pyplot as p\n",
26 | "import cedalion.datasets\n",
27 | "\n",
28 | "units = cedalion.units\n",
29 | "\n",
30 | "xr.set_options(display_expand_data=False)"
31 | ]
32 | },
33 | {
34 | "cell_type": "code",
35 | "execution_count": null,
36 | "metadata": {},
37 | "outputs": [],
38 | "source": [
39 | "# dummy time series\n",
40 | "fs = 8.0\n",
41 | "ts = cdc.build_timeseries(\n",
42 | " np.random.random((100, 1, 2)),\n",
43 | " dims=[\"time\", \"channel\", \"chromo\"],\n",
44 | " time=np.arange(100) / fs,\n",
45 | " channel=[\"S1D1\"],\n",
46 | " value_units=units.uM,\n",
47 | " time_units=units.s,\n",
48 | " other_coords={'chromo' : [\"HbO\", \"HbR\"]}\n",
49 | ")\n",
50 | "display(ts)"
51 | ]
52 | },
53 | {
54 | "cell_type": "code",
55 | "execution_count": null,
56 | "metadata": {},
57 | "outputs": [],
58 | "source": [
59 | "basis = bf.GaussianKernels(\n",
60 | " t_pre=5 * units.s,\n",
61 | " t_post=30 * units.s,\n",
62 | " t_delta=3 * units.s,\n",
63 | " t_std=3 * units.s,\n",
64 | ")\n",
65 | "hrf = basis(ts)\n",
66 | "\n",
67 | "p.figure()\n",
68 | "for i_comp, comp in enumerate(hrf.component.values):\n",
69 | " p.plot(hrf.time, hrf[:, i_comp], label=comp)\n",
70 | "\n",
71 | "p.axvline(-5, c=\"r\", ls=\":\")\n",
72 | "p.axvline(30, c=\"r\", ls=\":\")\n",
73 | "p.legend(ncols=3)"
74 | ]
75 | },
76 | {
77 | "cell_type": "code",
78 | "execution_count": null,
79 | "metadata": {},
80 | "outputs": [],
81 | "source": [
82 | "basis = bf.GaussianKernelsWithTails(\n",
83 | " t_pre=5 * units.s,\n",
84 | " t_post=30 * units.s,\n",
85 | " t_delta=3 * units.s,\n",
86 | " t_std=3 * units.s,\n",
87 | ")\n",
88 | "hrf = basis(ts)\n",
89 | "\n",
90 | "p.figure()\n",
91 | "for i_comp, comp in enumerate(hrf.component.values):\n",
92 | " p.plot(hrf.time, hrf[:, i_comp], label=comp)\n",
93 | "p.axvline(-5, c=\"r\", ls=\":\")\n",
94 | "p.axvline(30, c=\"r\", ls=\":\")\n",
95 | "p.legend(ncols=3)"
96 | ]
97 | },
98 | {
99 | "cell_type": "code",
100 | "execution_count": null,
101 | "metadata": {},
102 | "outputs": [],
103 | "source": [
104 | "basis = bf.Gamma(\n",
105 | " tau={\"HbO\": 0 * units.s, \"HbR\": 1 * units.s},\n",
106 | " sigma=3 * units.s,\n",
107 | " T=0 * units.s,\n",
108 | ")\n",
109 | "hrf = basis(ts)\n",
110 | "display(hrf)\n",
111 | "p.figure()\n",
112 | "for i_comp, comp in enumerate(hrf.component.values):\n",
113 | " for i_chromo, chromo in enumerate(hrf.chromo.values):\n",
114 | " p.plot(hrf.time, hrf[:, i_comp, i_chromo], label=f\"{comp} {chromo}\")\n",
115 | "\n",
116 | "p.legend()"
117 | ]
118 | },
119 | {
120 | "cell_type": "code",
121 | "execution_count": null,
122 | "metadata": {},
123 | "outputs": [],
124 | "source": [
125 | "basis = bf.Gamma(\n",
126 | " tau={\"HbO\": 0 * units.s, \"HbR\": 1 * units.s},\n",
127 | " sigma=2 * units.s,\n",
128 | " T=5 * units.s,\n",
129 | ")\n",
130 | "hrf = basis(ts)\n",
131 | "display(hrf)\n",
132 | "\n",
133 | "p.figure()\n",
134 | "for i_comp, comp in enumerate(hrf.component.values):\n",
135 | " for i_chromo, chromo in enumerate(hrf.chromo.values):\n",
136 | " p.plot(hrf.time, hrf[:, i_comp, i_chromo], label=f\"{comp} {chromo}\")\n",
137 | "\n",
138 | "p.legend()"
139 | ]
140 | },
141 | {
142 | "cell_type": "code",
143 | "execution_count": null,
144 | "metadata": {
145 | "tags": [
146 | "nbsphinx-thumbnail"
147 | ]
148 | },
149 | "outputs": [],
150 | "source": [
151 | "basis = bf.GammaDeriv(\n",
152 | " tau=1 * units.s,\n",
153 | " sigma=1 * units.s,\n",
154 | " T=10 * units.s,\n",
155 | ")\n",
156 | "hrf = basis(ts)\n",
157 | "display(hrf)\n",
158 | "p.figure()\n",
159 | "for i_comp, comp in enumerate(hrf.component.values):\n",
160 | " for i_chromo, chromo in enumerate([\"HbO\"]):\n",
161 | " p.plot(hrf.time, hrf[:, i_comp, i_chromo], label=f\"{comp} {chromo}\")\n",
162 | "\n",
163 | "p.legend()"
164 | ]
165 | },
166 | {
167 | "cell_type": "code",
168 | "execution_count": null,
169 | "metadata": {},
170 | "outputs": [],
171 | "source": [
172 | "basis = bf.AFNIGamma(\n",
173 | " p=1,\n",
174 | " q=0.7 * units.s,\n",
175 | " T=0 * units.s,\n",
176 | ")\n",
177 | "hrf = basis(ts)\n",
178 | "display(hrf)\n",
179 | "p.figure()\n",
180 | "for i_comp, comp in enumerate(hrf.component.values):\n",
181 | " for i_chromo, chromo in enumerate([\"HbO\"]):\n",
182 | " p.plot(hrf.time, hrf[:, i_comp, i_chromo], label=f\"{comp} {chromo}\")\n",
183 | "\n",
184 | "p.legend()"
185 | ]
186 | }
187 | ],
188 | "metadata": {
189 | "kernelspec": {
190 | "display_name": "cedalion_241112",
191 | "language": "python",
192 | "name": "python3"
193 | },
194 | "language_info": {
195 | "codemirror_mode": {
196 | "name": "ipython",
197 | "version": 3
198 | },
199 | "file_extension": ".py",
200 | "mimetype": "text/x-python",
201 | "name": "python",
202 | "nbconvert_exporter": "python",
203 | "pygments_lexer": "ipython3",
204 | "version": "3.11.8"
205 | }
206 | },
207 | "nbformat": 4,
208 | "nbformat_minor": 2
209 | }
210 |
--------------------------------------------------------------------------------
/examples/precompute_fluence.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "0",
6 | "metadata": {},
7 | "source": [
     8 | "# Precompute forward model results"
9 | ]
10 | },
11 | {
12 | "cell_type": "code",
13 | "execution_count": null,
14 | "id": "1",
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "import pyvista as pv\n",
19 | "pv.set_jupyter_backend('static')"
20 | ]
21 | },
22 | {
23 | "cell_type": "code",
24 | "execution_count": null,
25 | "id": "2",
26 | "metadata": {},
27 | "outputs": [],
28 | "source": [
29 | "from functools import lru_cache\n",
30 | "import os\n",
31 | "\n",
32 | "import numpy as np\n",
33 | "import xarray as xr\n",
34 | "\n",
35 | "import cedalion.datasets\n",
36 | "import cedalion.geometry.segmentation\n",
37 | "import cedalion.imagereco.forward_model as fw\n",
38 | "import cedalion.io.forward_model\n",
39 | "import cedalion.plots\n",
40 | "xr.set_options(display_expand_data=False);"
41 | ]
42 | },
43 | {
44 | "cell_type": "code",
45 | "execution_count": null,
46 | "id": "3",
47 | "metadata": {},
48 | "outputs": [],
49 | "source": [
50 | "def compute_fluence_mcx(rec, head):\n",
51 | " geo3d_snapped_ijk = head.align_and_snap_to_scalp(rec.geo3d)\n",
52 | "\n",
53 | " fwm = cedalion.imagereco.forward_model.ForwardModel(\n",
54 | " head, geo3d_snapped_ijk, rec._measurement_lists[\"amp\"]\n",
55 | " )\n",
56 | "\n",
57 | " fluence_all, fluence_at_optodes = fwm.compute_fluence_mcx()\n",
58 | "\n",
59 | " return fluence_all, fluence_at_optodes\n",
60 | "\n",
61 | "\n",
62 | "def plot_fluence(rec, head, fluence_all, src, det, wl):\n",
63 | " geo3d_snapped_ijk = head.align_and_snap_to_scalp(rec.geo3d)\n",
64 | "\n",
65 | " f = fluence_all.loc[src, wl].values * fluence_all.loc[det, wl].values\n",
66 | "\n",
67 | " # clip fluence to smallest positive value and transform to log\n",
68 | " f[f <= 0] = f[f > 0].min()\n",
69 | " f = np.log10(f)\n",
70 | "\n",
71 | " vf = pv.wrap(f)\n",
72 | "\n",
73 | " plt = pv.Plotter()\n",
74 | " plt.add_volume(\n",
75 | " vf,\n",
76 | " log_scale=False,\n",
77 | " cmap=\"plasma_r\",\n",
78 | " clim=(-10, 0),\n",
79 | " )\n",
80 | " cedalion.plots.plot_surface(plt, head.brain, color=\"w\")\n",
81 | " cedalion.plots.plot_labeled_points(plt, geo3d_snapped_ijk, show_labels=False)\n",
82 | "\n",
83 | " cog = head.brain.vertices.mean(\"label\").values\n",
84 | " plt.camera.position = cog + [-300, 30, 150]\n",
85 | " plt.camera.focal_point = cog\n",
86 | " plt.camera.up = [0, 0, 1]\n",
87 | "\n",
88 | " plt.show()\n",
89 | "\n",
90 | "\n",
91 | "@lru_cache\n",
92 | "def get_colin27():\n",
93 | " SEG_DATADIR, mask_files, landmarks_file = (\n",
94 | " cedalion.datasets.get_colin27_segmentation()\n",
95 | " )\n",
96 | "\n",
97 | " head = fw.TwoSurfaceHeadModel.from_segmentation(\n",
98 | " segmentation_dir=SEG_DATADIR,\n",
99 | " mask_files=mask_files,\n",
100 | " landmarks_ras_file=landmarks_file,\n",
101 | " )\n",
102 | "\n",
103 | " return head\n",
104 | "\n",
105 | "\n",
106 | "@lru_cache\n",
107 | "def get_icbm152():\n",
108 | " SEG_DATADIR, mask_files, landmarks_file = (\n",
109 | " cedalion.datasets.get_icbm152_segmentation()\n",
110 | " )\n",
111 | "\n",
112 | " head = fw.TwoSurfaceHeadModel.from_surfaces(\n",
113 | " segmentation_dir=SEG_DATADIR,\n",
114 | " mask_files=mask_files,\n",
115 | " brain_surface_file=os.path.join(SEG_DATADIR, \"mask_brain.obj\"),\n",
116 | " landmarks_ras_file=landmarks_file,\n",
117 | " brain_face_count=None,\n",
118 | " scalp_face_count=None,\n",
119 | " )\n",
120 | "\n",
121 | " return head\n",
122 | "\n",
123 | "\n",
124 | "# fluence_all, fluence_at_optodes = plot_fluence(rec, head, fluence_all, \"S4\", \"D2\", 760.)"
125 | ]
126 | },
127 | {
128 | "cell_type": "code",
129 | "execution_count": null,
130 | "id": "4",
131 | "metadata": {},
132 | "outputs": [],
133 | "source": [
134 | "rec = cedalion.datasets.get_fingertappingDOT()\n",
135 | "head = get_colin27()\n",
136 | "fluence_all, fluence_at_optodes = compute_fluence_mcx(rec, head)\n",
137 | "\n",
138 | "cedalion.io.forward_model.save_fluence(\"fluence_fingertappingDOT_colin27.h5\", fluence_all, fluence_at_optodes)"
139 | ]
140 | },
141 | {
142 | "cell_type": "code",
143 | "execution_count": null,
144 | "id": "5",
145 | "metadata": {},
146 | "outputs": [],
147 | "source": [
148 | "rec = cedalion.datasets.get_fingertappingDOT()\n",
149 | "head = get_icbm152()\n",
150 | "fluence_all, fluence_at_optodes = compute_fluence_mcx(rec, head)\n",
151 | "cedalion.io.forward_model.save_fluence(\"fluence_fingertappingDOT_icbm152.h5\", fluence_all, fluence_at_optodes)"
152 | ]
153 | },
154 | {
155 | "cell_type": "code",
156 | "execution_count": null,
157 | "id": "6",
158 | "metadata": {},
159 | "outputs": [],
160 | "source": [
161 | "rec = cedalion.datasets.get_fingertapping()\n",
162 | "head = get_colin27()\n",
163 | "fluence_all, fluence_at_optodes = compute_fluence_mcx(rec, head)\n",
164 | "cedalion.io.forward_model.save_fluence(\"fluence_fingertapping_colin27.h5\", fluence_all, fluence_at_optodes)"
165 | ]
166 | },
167 | {
168 | "cell_type": "code",
169 | "execution_count": null,
170 | "id": "7",
171 | "metadata": {},
172 | "outputs": [],
173 | "source": [
174 | "rec = cedalion.datasets.get_fingertapping()\n",
175 | "head = get_colin27()\n",
176 | "fluence_all, fluence_at_optodes = compute_fluence_mcx(rec, head)\n",
177 | "cedalion.io.forward_model.save_fluence(\"fluence_fingertapping_icbm152.h5\", fluence_all, fluence_at_optodes)"
178 | ]
179 | }
180 | ],
181 | "metadata": {
182 | "kernelspec": {
183 | "display_name": "Python 3 (ipykernel)",
184 | "language": "python",
185 | "name": "python3"
186 | },
187 | "language_info": {
188 | "codemirror_mode": {
189 | "name": "ipython",
190 | "version": 3
191 | },
192 | "file_extension": ".py",
193 | "mimetype": "text/x-python",
194 | "name": "python",
195 | "nbconvert_exporter": "python",
196 | "pygments_lexer": "ipython3",
197 | "version": "3.11.8"
198 | }
199 | },
200 | "nbformat": 4,
201 | "nbformat_minor": 5
202 | }
203 |
--------------------------------------------------------------------------------
/examples/splineSG_motion_correct.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Motion Correction: SplineSG\n",
8 | "\n",
9 | "This notebook sketches how splineSG works and how to implement it for motion correction.\n"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": null,
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "import cedalion\n",
19 | "import cedalion.nirs\n",
20 | "import cedalion.sigproc.quality as quality\n",
21 | "from cedalion.sigproc.artifact import detect_baselineshift, detect_outliers\n",
22 | "from cedalion.sigproc.motion_correct import motion_correct_spline, motion_correct_splineSG\n",
23 | "import cedalion.xrutils as xrutils\n",
24 | "import cedalion.datasets as datasets\n",
25 | "import xarray as xr\n",
26 | "import matplotlib.pyplot as p\n",
27 | "from functools import reduce\n",
28 | "\n",
29 | "\n",
30 | "from cedalion import Quantity, units"
31 | ]
32 | },
33 | {
34 | "cell_type": "code",
35 | "execution_count": null,
36 | "metadata": {},
37 | "outputs": [],
38 | "source": [
39 | "# get example finger tapping dataset\n",
40 | "snirf_element = datasets.get_fingertapping()\n",
41 | "amp = snirf_element[0].data[0]\n",
42 | "geo = snirf_element[0].geo3d\n",
43 | "od = cedalion.nirs.int2od(amp)\n",
44 | "\n",
45 | "data = xr.Dataset(\n",
46 | " data_vars = {\n",
47 | " \"amp\" : amp,\n",
48 | " \"od\" : od,\n",
49 | " \"geo3d\": geo\n",
50 | " })\n",
51 | "\n",
52 | "\n",
53 | "# Plot some data for visual validation\n",
54 | "f,ax = p.subplots(1,1, figsize=(12,4))\n",
55 | "ax.plot( data.amp.time, data.amp.sel(channel=\"S1D1\", wavelength=\"850\"), \"r-\", label=\"850nm\")\n",
56 | "ax.plot( data.amp.time, data.amp.sel(channel=\"S1D1\", wavelength=\"760\"), \"b-\", label=\"760nm\")\n",
57 | "p.legend()\n",
58 | "ax.set_xlabel(\"time / s\")\n",
59 | "ax.set_ylabel(\"Signal intensity / a.u.\")"
60 | ]
61 | },
62 | {
63 | "cell_type": "markdown",
64 | "metadata": {},
65 | "source": [
66 | "## Detecting outliers and baseline shifts\n",
67 | "\n",
    68 | "SplineSG has two stages. First, in the spline interpolation stage, baseline shifts are detected and spline interpolation is applied to these segments of the signal. Two functions are called initially, detect_outliers(), which returns a boolean mask containing False when outliers are detected and True otherwise, and detect_baselineshift(), which is passed the mask from detect_outliers and returns another mask that indicates where there are baseline shifts. Spline interpolation is then performed on the segments of data where baseline shifts are detected."
69 | ]
70 | },
71 | {
72 | "cell_type": "code",
73 | "execution_count": null,
74 | "metadata": {},
75 | "outputs": [],
76 | "source": [
77 | "# call detect_outliers first \n",
    78 | "# this returns M, a boolean mask that is False where outliers were detected in the fNIRSdata\n",
79 | "M = detect_outliers(ts = data.od, t_window_std = 1)\n",
80 | "\n",
81 | "# call detect_baselineshifts\n",
82 | "tInc = detect_baselineshift(ts = data.od, outlier_mask = M)\n",
83 | "\n",
84 | "# perform spline interpolation\n",
85 | "dodSpline = motion_correct_spline(fNIRSdata=data.od, tIncCh=tInc)\n"
86 | ]
87 | },
88 | {
89 | "cell_type": "code",
90 | "execution_count": null,
91 | "metadata": {},
92 | "outputs": [],
93 | "source": [
94 | "# plot difference between uncorrected OD and after spline correction\n",
95 | "f,ax = p.subplots(1,1, figsize=(12,4))\n",
96 | "ax.plot( data.od.time, data.od.sel(channel=\"S2D3\", wavelength=\"760\"), \"b-\", label=\"850nm OD\")\n",
97 | "ax.plot( dodSpline.time, dodSpline.sel(channel=\"S2D3\", wavelength=\"760\"), \"g-\", label=\"850nm OD post spline\")\n",
98 | "p.legend()\n",
99 | "ax.set_xlabel(\"time / s\")\n",
100 | "ax.set_ylabel(\"Optical density / a.u.\")"
101 | ]
102 | },
103 | {
104 | "cell_type": "markdown",
105 | "metadata": {},
106 | "source": [
107 | "## Savitzky-Golay filtering\n",
108 | "\n",
109 | "The second stage applies a Savitzky-Golay filter to the data to further smooth the signal."
110 | ]
111 | },
112 | {
113 | "cell_type": "code",
114 | "execution_count": null,
115 | "metadata": {},
116 | "outputs": [],
117 | "source": [
   118 | "# the function motion_correct_splineSG performs all the steps outlined above\n",
119 | "dodSplineSG = motion_correct_splineSG(fNIRSdata=data.od, framesize_sec=10)\n"
120 | ]
121 | },
122 | {
123 | "cell_type": "code",
124 | "execution_count": null,
125 | "metadata": {},
126 | "outputs": [],
127 | "source": [
128 | "# plot uncorrected data vs after splineSG motion correction \n",
129 | "f,ax = p.subplots(1,1, figsize=(12,4))\n",
130 | "ax.plot( data.od.time, data.od.sel(channel=\"S1D1\", wavelength=\"760\"), \"b-\", label=\"760nm OD\")\n",
131 | "ax.plot( dodSplineSG.time, dodSplineSG.sel(channel=\"S1D1\", wavelength=\"760\"), \"g-\", label=\"760nm OD post splineSG\")\n",
132 | "p.legend()\n",
133 | "ax.set_xlabel(\"time / s\")\n",
134 | "ax.set_ylabel(\"Optical density / a.u.\")"
135 | ]
136 | },
137 | {
138 | "cell_type": "code",
139 | "execution_count": null,
140 | "metadata": {},
141 | "outputs": [],
142 | "source": []
143 | }
144 | ],
145 | "metadata": {
146 | "kernelspec": {
147 | "display_name": "Python 3 (ipykernel)",
148 | "language": "python",
149 | "name": "python3"
150 | },
151 | "language_info": {
152 | "codemirror_mode": {
153 | "name": "ipython",
154 | "version": 3
155 | },
156 | "file_extension": ".py",
157 | "mimetype": "text/x-python",
158 | "name": "python",
159 | "nbconvert_exporter": "python",
160 | "pygments_lexer": "ipython3",
161 | "version": "3.10.13"
162 | }
163 | },
164 | "nbformat": 4,
165 | "nbformat_minor": 2
166 | }
167 |
--------------------------------------------------------------------------------
/install_nirfaster.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Download and install NIRFASTer into the ./plugins directory.
#
# Usage: ./install_nirfaster.sh CPU|GPU
#   CPU - install the CPU-only binaries
#   GPU - install CPU and GPU binaries (not available on macOS)

VARIANT="$1"

# validate the argument up front; the unquoted $1 previously made the
# later [ ] tests fail with a syntax error when no argument was given
if [ "$VARIANT" != "CPU" ] && [ "$VARIANT" != "GPU" ]; then
    echo "usage: $0 CPU|GPU"
    exit 1
fi

echo "Installing NIRFASTer..."

OS="$(uname -s)"

# map the kernel name to the platform tag used by the release artifacts
case "$OS" in
    Linux*)                 OS_NAME="linux";;
    Darwin*)                OS_NAME="mac";;
    CYGWIN*|MINGW*|MSYS*)   OS_NAME="win";;
    *)                      OS_NAME="Unknown";;
esac

echo "Operating System Detected: $OS_NAME"
if [ "$OS_NAME" = "Unknown" ]; then
    echo "Error: Failed at detecting the operating system. Please visit the NIRFASTer documentation and install it on your system: https://github.com/milabuob/nirfaster-uFF"
    exit 1
fi

# no GPU builds are published for macOS
if [ "$OS_NAME" = "mac" ] && [ "$VARIANT" = "GPU" ]; then
    echo "Error: No releases are available for your configuration. please visit: https://github.com/milabuob/nirfaster-uFF"
    exit 1
fi

# install into the plugins directory, creating it if necessary
DIR_NAME="plugins"
mkdir -p "$DIR_NAME"
cd "$DIR_NAME" || exit 1

ZIP_URL="https://github.com/milabuob/nirfaster-uFF/archive/refs/heads/main.zip"
ZIP_FILE="nirfaster-uFF-main.zip"

# fetch and unpack the python sources
curl -sL "$ZIP_URL" -o temp.zip && unzip temp.zip -d . && rm temp.zip

# strip the "-main" suffix from the unpacked folder name
FOLDER_NAME=$(basename "$ZIP_FILE" .zip)
mv "${FOLDER_NAME}" "${FOLDER_NAME%-main}"

SOURCE_URL="https://github.com/milabuob/nirfaster-uFF/releases/download/v0.9.6/"

if [ "$VARIANT" = "CPU" ]; then
    curl -sL "${SOURCE_URL}cpu-${OS_NAME}-python311.zip" -o temp.zip && unzip temp.zip -d "${FOLDER_NAME%-main}/nirfasteruff/" && rm temp.zip
elif [ "$VARIANT" = "GPU" ]; then
    # the GPU variant needs the CPU binaries as well
    curl -sL "${SOURCE_URL}cpu-${OS_NAME}-python311.zip" -o temp.zip && unzip temp.zip -d "${FOLDER_NAME%-main}/nirfasteruff/" && rm temp.zip
    curl -sL "${SOURCE_URL}gpu-${OS_NAME}-python311.zip" -o temp.zip && unzip temp.zip -d "${FOLDER_NAME%-main}/nirfasteruff/" && rm temp.zip
fi

if [ "$OS_NAME" = "mac" ]; then
    # clear quarantine attributes so macOS allows loading the library
    xattr -c "${FOLDER_NAME%-main}/nirfasteruff/nirfasteruff_cpu.cpython-311-darwin.so"
fi

echo "NIRFASTer installed successfully."
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "cedalion"
3 | description = "A fNIRS analysis framework"
4 | readme = "README.md"
5 | authors = [
6 | {name = "Eike Middell et al."}
7 | ]
8 | classifiers = [
9 | "Development Status :: 3 - Alpha",
10 | "Intended Audience :: Science/Research",
11 | "Topic :: Scientific/Engineering :: Medical Science Apps.",
12 | "Topic :: Scientific/Engineering :: Artificial Intelligence",
13 | "License :: OSI Approved :: MIT License",
14 | "Operating System :: OS Independent",
15 | "Programming Language :: Python",
16 | ]
17 | dynamic=["version"]
18 | requires-python = ">=3.11"
19 |
20 | [project.urls]
21 | Home = "https://ibs-lab.com/cedalion/"
22 | Documentation = "https://doc.ibs.tu-berlin.de/cedalion/doc/dev/"
23 | "Source Code" = "https://github.com/ibs-lab/cedalion/"
24 |
25 | [build-system]
26 | requires = ["hatch-vcs", "hatchling"]
27 | build-backend = "hatchling.build"
28 |
29 | [tool.hatch.build.hooks.vcs]
30 | version-file = "src/cedalion/_version.py"
31 |
32 | [tool.hatch.version]
33 | source = "vcs"
34 |
35 | [tool.hatch.env]
36 | requires = [
37 | "hatch-conda>=0.5.2",
38 | ]
39 |
40 |
41 | [tool.hatch.envs.default]
42 | type = "conda"
43 | conda-forge = true
44 | environment-file = "environment_dev.yml"
45 | python="3.11"
46 |
47 | [tool.hatch.envs.default.scripts]
48 | build_docs = "./scripts/build_docs.sh docs"
49 | check = "pre-commit run"
50 |
51 | [tool.hatch.envs.hatch-test]
52 | type = "conda"
53 | conda-forge = true
54 | environment-file = "environment_dev.yml"
55 | python="3.11"
56 |
57 | [tool.ruff.lint]
58 | select = ["E", "F", "W", "D"]
59 |
60 | ignore = [
61 | "D100", # Missing docstring in public module
62 | "D104", # Missing docstring in public package
63 | "D101", # Missing docstring in public class # FIXME
64 | "D102", # Missing docstring in public method # FIXME
65 | "D103", # Missing docstring in public function # FIXME
66 | "D105", # Missing docstring in magic method # FIXME
67 | "D107", # Missing docstring in __init__ # FIXME
68 | "D202", # no blank line allowed after function docstring
69 | ]
70 |
71 | [tool.ruff.lint.per-file-ignores]
72 | "__init__.py" = [
73 | "F401", # unused imports
74 | ]
75 | "test_*.py" = [
76 | "D103", # Missing docstring in public function
77 | ]
78 |
79 | [tool.ruff.lint.pydocstyle]
80 | convention = "google"
81 |
--------------------------------------------------------------------------------
/scripts/build_docs.sh:
--------------------------------------------------------------------------------
#!/bin/bash -e
# Build the example notebooks and the HTML documentation, then pack the
# rendered site into a tarball (_build/html.tar.gz).
#
# Usage: build_docs.sh <path_to_docs>

if [ "$#" -ne 1 ]; then
    # the original usage message omitted the argument name
    echo "usage: ${0} <path_to_docs>"
    exit 1
fi

path_to_docs=$1

cd "${path_to_docs}"

echo "Building example notebook"
cd examples
make notebooks

echo "Building html documentation"
cd ..
make clean

make html

echo "Building tarball"
cd _build/html
tar cvzf ../html.tar.gz *
--------------------------------------------------------------------------------
/scripts/run_tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import inspect
3 | import typing
4 |
5 | import click
6 | import yaml
7 |
8 | import cedalion
9 | import cedalion.io
10 | import cedalion.sigproc.tasks
11 | import cedalion.tasks
12 |
13 |
def is_quantity(hint):
    """Return True if *hint* annotates a pint Quantity.

    A quantity hint has the form ``typing.Annotated[cedalion.Quantity, ...]``.
    """
    if typing.get_origin(hint) is not typing.Annotated:
        return False
    return typing.get_args(hint)[0] is cedalion.Quantity
19 |
def is_dict_of_quantities(origin, args):
    """Return True if origin/args describe a dict whose values are quantities."""
    if origin is not dict:
        return False
    return is_quantity(args[1])
23 |
def is_list_of_quantities(origin, args):
    """Return True if origin/args describe a list of quantity elements."""
    if origin is not list or len(args) != 1:
        return False
    return is_quantity(args[0])
27 |
@click.command()
@click.argument("config", type=click.File("r"), required=True)
@click.argument("src", type=click.Path(exists=True), required=True)
@click.argument("dst", required=True)
def main(config, src, dst):
    """Apply a YAML-configured sequence of cedalion tasks to a snirf file.

    Args:
        config: open file handle of the YAML configuration. Expected to
            contain a top-level "tasks" list whose entries are either bare
            task names or one-entry dicts mapping a task name to a list of
            one-entry parameter dicts.
        src: path of the input snirf file.
        dst: path to which the processed recording is written.
    """
    config = yaml.safe_load(config)


    # read the first recording from the snirf file
    rec = cedalion.io.read_snirf(src)[0]

    for task in config["tasks"]:
        if isinstance(task, str):
            print(f"task '{task}' without parameters")

            # NOTE: the walrus operator rebinds 'task' from the task name
            # to the callable looked up in the registry
            if (task := cedalion.tasks.task_registry.get(task, None)) is None:
                raise ValueError(f"unknown task {task}")

            task(rec)
        elif isinstance(task, dict):
            # a parameterized task is a one-entry dict: {task_name: [params]}
            assert len(task) == 1
            task_name = next(iter(task.keys()))
            params = next(iter(task.values()))
            print(f"task '{task_name}' with parameters '{params}'")

            if (task := cedalion.tasks.task_registry.get(task_name, None)) is None:
                raise ValueError(f"unknown task {task_name}")

            # use the task's signature and type hints to validate and
            # convert the YAML-provided parameter values
            task_signature = inspect.signature(task)
            task_params = task_signature.parameters.keys()

            param_type_hints = typing.get_type_hints(task, include_extras=True)

            parsed_params = {}

            for param in params:
                # each parameter is a one-entry dict: {param_name: value}
                assert len(param) == 1
                param_name = next(iter(param.keys()))
                param_value = next(iter(param.values()))

                if param_name not in task_params:
                    raise ValueError(f"unknown param '{param}' for task {task_name}.")

                if param_name not in param_type_hints:
                    # unannotated parameter: pass the YAML value through as-is
                    parsed_params[param_name] = param_value
                    continue

                param_hint = param_type_hints[param_name]
                hint_origin = typing.get_origin(param_hint)
                hint_args = typing.get_args(param_hint)

                if is_quantity(param_hint):
                    # e.g. typing.Annotated[pint.Quantity, '[length]']
                    # parse the value as a quantity and check its dimensionality
                    dimension = hint_args[1]
                    q = cedalion.Quantity(param_value)
                    q.check(dimension)

                elif is_list_of_quantities(hint_origin, hint_args):
                    # e.g. list[typing.Annotated[pint.Quantity, '[concentration]']]
                    dimension = typing.get_args(hint_args[0])[1]
                    q = [cedalion.Quantity(v) for v in param_value]
                    for v in q:
                        v.check(dimension)

                elif is_dict_of_quantities(hint_origin, hint_args):
                    # e.g. dict[float, typing.Annotated[pint.Quantity, '[time]']]
                    dimension = typing.get_args(hint_args[1])[1]
                    q = {k: cedalion.Quantity(v) for k, v in param_value.items()}
                    for v in q.values():
                        v.check(dimension)
                else:
                    # hint is not quantity-related: pass the value through
                    q = param_value

                parsed_params[param_name] = q

            task(rec, **parsed_params)
        else:
            raise ValueError("unexpected task spec.")

    cedalion.io.write_snirf(dst, rec)
109 |
110 |
# allow running this module directly as a script
if __name__ == "__main__":
    main()
113 |
--------------------------------------------------------------------------------
/src/cedalion/__init__.py:
--------------------------------------------------------------------------------
1 | from .physunits import Quantity, units
2 |
3 | import cedalion.dataclasses
4 | import cedalion.dataclasses.accessors
5 | import cedalion.io
6 | import cedalion.nirs
7 | import cedalion.tasks
8 |
--------------------------------------------------------------------------------
/src/cedalion/data/__init__.py:
--------------------------------------------------------------------------------
1 | from importlib.resources import files
2 | from pathlib import Path
3 |
4 |
def get(fname: str | Path) -> Path:
    """Return the absolute path to a datafile.

    Args:
        fname: file name relative to the ``cedalion.data`` package.

    Returns:
        Path of the data file inside the installed package.
    """
    data_root = files("cedalion.data")
    return data_root.joinpath(fname)
9 |
--------------------------------------------------------------------------------
/src/cedalion/data/measfunc_table.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ibs-lab/cedalion/ec81bcf0f3b219186f0ac0de95af2fa9f832845b/src/cedalion/data/measfunc_table.npy
--------------------------------------------------------------------------------
/src/cedalion/dataclasses/__init__.py:
--------------------------------------------------------------------------------
1 | """Data classes used throughout cedalion."""
2 |
3 | # FIXME for easier access classes are pulled from the sub-modules into this scope.
4 | # Over time this may get crowded.
5 |
6 | from .geometry import (
7 | PointType,
8 | Surface,
9 | TrimeshSurface,
10 | VTKSurface,
11 | affine_transform_from_numpy,
12 | Voxels,
13 | )
14 | from .schemas import (
15 | build_labeled_points,
16 | build_timeseries,
17 | validate_schemas,
18 | build_stim_dataframe,
19 | )
20 | from .recording import Recording
21 |
--------------------------------------------------------------------------------
/src/cedalion/dataclasses/schemas.py:
--------------------------------------------------------------------------------
1 | """Data array schemas and utilities to build labeled data arrays."""
2 |
3 | import functools
4 | import inspect
5 | import typing
6 | from dataclasses import dataclass
7 | from typing import List, Optional
8 |
9 | import numpy as np
10 | import pandas as pd
11 | import pint
12 | import xarray as xr
13 | from numpy.typing import ArrayLike
14 |
15 | import cedalion.dataclasses as cdc
16 |
17 |
class ValidationError(Exception):
    """Raised when an object does not satisfy a data array or dataframe schema."""

    pass
20 |
21 |
@dataclass(frozen=True)
class DataArraySchema:
    """Declarative schema describing requirements on an xr.DataArray.

    Attributes:
        dims: names of dimensions the data array must have.
        coords: pairs of (dimension name, coordinate names) that must be
            present on the data array.
    """

    # FIX: tuple[str] means "a tuple of exactly one str"; these fields hold
    # variable-length tuples, so the annotations use tuple[..., ...].
    dims: tuple[str, ...]
    coords: tuple[tuple[str, tuple[str, ...]], ...]

    def validate(self, data_array: xr.DataArray):
        """Check that data_array satisfies this schema.

        Args:
            data_array: the array to check.

        Raises:
            ValidationError: if data_array is not an xr.DataArray, misses a
                required dimension or coordinate, or a coordinate is attached
                to the wrong dimension.
        """
        if not isinstance(data_array, xr.DataArray):
            raise ValidationError("object is not a xr.DataArray")

        for dim in self.dims:
            if dim not in data_array.dims:
                raise ValidationError(f"dimension '{dim}' not found in data array.")

        for dim, coordinate_names in self.coords:
            for name in coordinate_names:
                if name not in data_array.coords:
                    raise ValidationError(
                        f"coordinate '{name}' missing for " f"dimension '{dim}'"
                    )
                coords = data_array.coords[name]
                # a coordinate variable is attached to exactly one dimension
                actual_dim = coords.dims[0]

                if not actual_dim == dim:
                    raise ValidationError(
                        f"coordinate '{name}' belongs to dimension "
                        f"'{actual_dim}' instead of '{dim}'"
                    )
49 |
50 |
51 | # FIXME better location?
def validate_schemas(func):
    """Decorator that validates schema-annotated arguments at call time.

    For every parameter of *func* annotated with
    ``typing.Annotated[..., DataArraySchema(...)]`` the schema's ``validate``
    is run on the bound argument before the wrapped function is called.

    Args:
        func: the function whose annotated arguments should be validated.

    Returns:
        A wrapper with the same signature that validates and then delegates.

    Raises:
        ValidationError: propagated from failing schema validations.
    """

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        ba = inspect.signature(func).bind(*args, **kwargs)
        ba.apply_defaults()

        hints = typing.get_type_hints(func, include_extras=True)
        for arg_name, hint in hints.items():
            # FIX: use the public typing API instead of the private
            # typing._AnnotatedAlias class to detect Annotated hints
            if typing.get_origin(hint) is not typing.Annotated:
                continue

            if arg_name == "return":
                continue

            for md in hint.__metadata__:
                if isinstance(md, DataArraySchema):
                    md.validate(ba.arguments[arg_name])

        return func(*args, **kwargs)

    return wrapper
73 |
74 |
75 | # schemas describe the minimum requirements. LabeledPointClouds have an additional
76 | # dimension that's name denote the coordinate system and that is not enforced yet.
77 | # FIXME support wildcards in dims?
78 |
# Minimal schema for labeled point clouds: one 'label' dimension carrying
# 'label' and 'type' coordinates. The spatial dimension (named after the
# coordinate system) is not enforced here.
LabeledPointCloudSchema = DataArraySchema(
    dims=("label",), coords=(("label", ("label", "type")),)
)


# Minimal schema for time series: 'channel' and 'time' dimensions, where the
# time dimension carries both 'time' and 'samples' coordinates.
NDTimeSeriesSchema = DataArraySchema(
    dims=("channel", "time"),
    coords=(
        ("time", ("time", "samples")),
        ("channel", ("channel",)),
    ),
)
91 |
92 |
93 | # FIXME better location?
def build_timeseries(
    data: ArrayLike,
    dims: List[str],
    time: ArrayLike,
    channel: List[str],
    value_units: str,
    time_units: str,
    other_coords: Optional[dict[str, ArrayLike]] = None,
):
    """Build a labeled time series data array.

    Args:
        data (ArrayLike): The data values.
        dims (List[str]): The dimension names.
        time (ArrayLike): The time values.
        channel (List[str]): The channel names.
        value_units (str): The units of the data values.
        time_units (str): The units of the time values.
        other_coords (dict[str, ArrayLike] | None): Additional coordinates.
            Defaults to None.

    Returns:
        da (xr.DataArray): The labeled time series data array.
    """
    assert len(dims) == data.ndim
    assert "time" in dims
    assert "channel" in dims
    assert data.shape[dims.index("time")] == len(time)
    assert data.shape[dims.index("channel")] == len(channel)

    # sample indices act as a secondary coordinate on the time dimension
    samples = np.arange(len(time))

    coords = {
        "time": ("time", time),
        "samples": ("time", samples),
        "channel": ("channel", channel),
    }
    # FIX: previously other_coords defaulted to a mutable dict ({}), a
    # classic Python pitfall; use None as the default instead.
    if other_coords is not None:
        coords.update(other_coords)

    da = xr.DataArray(
        data,
        dims=dims,
        coords=coords,
    )
    # attach units to the data values and to the time coordinate
    da = da.pint.quantify(value_units)
    da = da.pint.quantify({"time": time_units})

    return da
141 |
142 |
def build_labeled_points(
    coordinates: ArrayLike | None = None,
    crs: str = "pos",
    units: Optional[pint.Unit | str] = "1",
    labels: Optional[list[str]] = None,
    types: Optional[list[str]] = None,
):
    """Build a labeled point cloud data array.

    Args:
        coordinates (ArrayLike, optional): The coordinates of the points. Default: None.
        crs (str, optional): The coordinate system. Defaults to "pos".
        units (Optional[pint.Unit | str], optional): The units of the coordinates.
            Defaults to "1".
        labels (Optional[list[str]], optional): The labels of the points. Defaults to
            None.
        types (Optional[list[str]], optional): The types of the points. Defaults to
            None.

    Returns:
        xr.DataArray: The labeled point cloud data array.
    """
    if coordinates is None:
        # no points given: create an empty (0, 3) float array
        coordinates = np.zeros((0, 3), dtype=float)
    else:
        coordinates = np.asarray(coordinates)
        assert coordinates.ndim == 2

    npoints = len(coordinates)

    if labels is None:
        # generate zero-padded numeric labels of a common width
        width = int(np.ceil(np.log10(npoints + 1)))
        labels = [str(i).zfill(width) for i in range(npoints)]

    if types is None:
        types = [cdc.PointType.UNKNOWN] * npoints

    da = xr.DataArray(
        coordinates,
        dims=["label", crs],
        coords={"label": ("label", labels), "type": ("label", types)},
    )
    return da.pint.quantify(units)
187 |
188 |
def validate_stim_schema(df: pd.DataFrame):
    """Check that a stimulus dataframe contains the required columns.

    Raises:
        ValidationError: if any required column is missing.
    """
    required = ("onset", "duration", "value", "trial_type")
    missing = [c for c in required if c not in df]
    if missing:
        raise ValidationError(f"DataFrame misses required column '{missing[0]}'.")
193 |
194 |
def build_stim_dataframe():
    """Return an empty stimulus dataframe with the canonical columns."""
    return pd.DataFrame(columns=["onset", "duration", "value", "trial_type"])
198 |
--------------------------------------------------------------------------------
/src/cedalion/errors.py:
--------------------------------------------------------------------------------
1 | """Cedalion-specific exceptions."""
2 |
3 |
class CRSMismatchError(ValueError):
    """Error when coordinate reference systems do not match."""

    @classmethod
    def unexpected_crs(cls, expected_crs: str, found_crs: str):
        """Build the error for coordinates found in an unexpected space."""
        message = (
            f"This operation expected coordinates to be in space "
            f"'{expected_crs}' but found them in '{found_crs}'."
        )
        return cls(message)

    @classmethod
    def wrong_transform(cls, current_crs: str, transform_crs: tuple[str]):
        """Build the error for a transform whose CRS pair does not apply."""
        joined = ",".join(transform_crs)
        message = (
            "The coordinate reference systems of this object "
            f"('{current_crs}') and of the transform ('{joined}') "
            "do not match."
        )
        return cls(message)
21 |
--------------------------------------------------------------------------------
/src/cedalion/geometry/__init__.py:
--------------------------------------------------------------------------------
1 | """Tools for geometric calculations."""
2 |
--------------------------------------------------------------------------------
/src/cedalion/geometry/photogrammetry/__init__.py:
--------------------------------------------------------------------------------
1 | """Modules relating to photogrammetry calculations."""
2 |
--------------------------------------------------------------------------------
/src/cedalion/geometry/utils.py:
--------------------------------------------------------------------------------
1 | """Utility functions for geometric calculations."""
2 |
3 | import numpy as np
4 |
5 |
def m_trans(t: np.ndarray) -> np.ndarray:
    """Calculate the affine transformation matrix for a translation t.

    Args:
        t: Translation vector (tx, ty, tz).

    Returns:
        A 4x4 affine transformation matrix that translates homogeneous
        coordinates by t.
    """
    tx, ty, tz = t
    # fmt: off
    return np.array([
        [1, 0, 0, tx],
        [0, 1, 0, ty],
        [0, 0, 1, tz],
        [0, 0, 0, 1]
    ])
    # fmt: on
17 |
18 |
def m_scale3(s: np.ndarray) -> np.ndarray:
    """Calculate the affine transformation matrix for scaling s.

    Apply different scaling factors for each dimension.
    """
    sx, sy, sz = s
    # diagonal matrix with per-axis scale factors and homogeneous 1
    return np.diag([sx, sy, sz, 1])
35 |
36 |
def m_scale1(s: np.ndarray) -> np.ndarray:
    """Calculate the affine transformation matrix for scaling s.

    Apply one scaling factor for all dimensions.
    """
    factor = s[0]
    # uniform scaling on the diagonal, homogeneous coordinate untouched
    return np.diag([factor, factor, factor, 1])
52 |
53 |
def m_rot(angles: np.ndarray) -> np.ndarray:
    """Calculate the affine transformation matrix for a 3D rotation.

    R = Rz(alpha)Ry(beta)Rx(gamma)

    https://en.wikipedia.org/wiki/Rotation_matrix#General_rotations
    """
    alpha, beta, gamma = angles

    ca, sa = np.cos(alpha), np.sin(alpha)
    cb, sb = np.cos(beta), np.sin(beta)
    cg, sg = np.cos(gamma), np.sin(gamma)

    # fmt: off
    return np.array([
        [ca * cb, ca * sb * sg - sa * cg, ca * sb * cg + sa * sg, 0.0],
        [sa * cb, sa * sb * sg + ca * cg, sa * sb * cg - ca * sg, 0.0],
        [    -sb,                cb * sg,                cb * cg, 0.0],
        [    0.0,                    0.0,                    0.0, 1.0],
    ])
    # fmt: on
76 |
77 |
def cart2sph(
    x: np.ndarray, y: np.ndarray, z: np.ndarray
) -> tuple[np.ndarray, np.ndarray, np.ndarray]:
    """Convert 3D cartesian into spherical coordinates.

    Args:
        x: cartesian x coordinates
        y: cartesian y coordinates
        z: cartesian z coordinates

    Returns:
        The spherical coordinates azimuth, elevation and radius as np.ndarrays.
    """
    azimuth = np.arctan2(y, x)
    dist_xy = np.hypot(x, y)  # distance in the xy-plane
    elevation = np.arctan2(z, dist_xy)
    radius = np.hypot(dist_xy, z)
    return azimuth, elevation, radius
96 |
97 |
def pol2cart(theta : np.ndarray, rho : np.ndarray) -> tuple[np.ndarray, np.ndarray]:
    """Convert 2D polar into 2D cartesian coordinates.

    Args:
        theta: polar theta/angle coordinates
        rho: polar rho/radius coordinates

    Returns:
        The cartesian coordinates x and y as np.ndarrays.
    """
    return rho * np.cos(theta), rho * np.sin(theta)
112 |
--------------------------------------------------------------------------------
/src/cedalion/imagereco/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ibs-lab/cedalion/ec81bcf0f3b219186f0ac0de95af2fa9f832845b/src/cedalion/imagereco/__init__.py
--------------------------------------------------------------------------------
/src/cedalion/imagereco/solver.py:
--------------------------------------------------------------------------------
1 | """Solver for the image reconstruction problem."""
2 |
3 | import numpy as np
4 | import pint
5 | import xarray as xr
6 | import cedalion.xrutils as xrutils
7 |
8 |
def pseudo_inverse_stacked(Adot, alpha=0.01):
    """Pseudo-inverse of a stacked matrix.

    Computes the Tikhonov-regularized pseudo-inverse
    B = A^T (A A^T + alpha * lambda_max * I)^-1, where lambda_max is the
    largest eigenvalue of A A^T.

    Args:
        Adot (xr.DataArray): Stacked matrix.
        alpha (float): Regularization parameter.

    Returns:
        xr.DataArray: Pseudo-inverse of the stacked matrix with units that
            are the inverse of Adot's units.
    """

    if "units" in Adot.attrs:
        units = pint.Unit(Adot.attrs["units"])
        inv_units = (1/units).units
    else:
        inv_units = pint.Unit("1")

    AA = Adot.values @ Adot.values.T

    # FIX: AA is symmetric positive semi-definite, so use eigvalsh and take
    # the largest eigenvalue (eigvalsh returns them in ascending order).
    # The original code used np.linalg.eig(AA)[0][0], but eig does not
    # guarantee any eigenvalue ordering, so that picked an arbitrary one.
    highest_eigenvalue = np.linalg.eigvalsh(AA)[-1]

    B = Adot.values.T @ np.linalg.pinv(
        AA + alpha * highest_eigenvalue * np.eye(AA.shape[0])
    )

    coords = xrutils.coords_from_other(Adot)

    # don't copy the MultiIndexes
    for k in ["flat_channel", "flat_vertex"]:
        if k in coords:
            del coords[k]

    B = xr.DataArray(
        B,
        dims=("flat_vertex", "flat_channel"),
        coords=coords,
        attrs={"units": str(inv_units)},
    )

    return B
48 |
--------------------------------------------------------------------------------
/src/cedalion/imagereco/tissue_properties.py:
--------------------------------------------------------------------------------
1 | """Tissue properties for light transport simulation."""
2 |
3 | from enum import Enum, auto
4 | from typing import Dict
5 |
6 | import numpy as np
7 | import xarray as xr
8 |
9 |
class TissueType(Enum):
    """Head tissue types distinguished in the light transport simulation."""

    SKIN = auto()  # scalp / skin
    SKULL = auto()  # skull / bone
    DM = auto()  # dura mater
    CSF = auto()  # cerebrospinal fluid
    GM = auto()  # gray matter
    WM = auto()  # white matter
    OTHER = auto()  # any tissue not covered by the types above
18 |
19 |
# Map lower-case segmentation labels to canonical TissueType values.
# Several synonyms (e.g. "skin"/"scalp", "skull"/"bone") map to the same type.
TISSUE_LABELS: Dict[str, TissueType] = {
    "skin": TissueType.SKIN,
    "scalp": TissueType.SKIN,
    "skull": TissueType.SKULL,
    "bone": TissueType.SKULL,
    "dura": TissueType.DM,
    "dura mater": TissueType.DM,
    "dm": TissueType.DM,
    "csf": TissueType.CSF,
    "cerebral spinal fluid": TissueType.CSF,
    "gm": TissueType.GM,
    "gray matter": TissueType.GM,
    "brain": TissueType.GM,
    "wm": TissueType.WM,
    "white matter": TissueType.WM,
}
36 |
# FIXME units, reference

# NOTE(review): the values below carry no units in code; scattering and
# absorption coefficients are presumably in 1/mm — confirm against the
# reference the FIXME above asks for.
# fmt: off
TISSUE_PROPS_SCATTERING = {
    TissueType.SKIN : 0.6600,
    TissueType.SKULL : 0.8600,
    TissueType.DM : 0.6600,
    TissueType.CSF : 0.0100,
    TissueType.GM : 1.1000,
    TissueType.WM : 1.1000,
    TissueType.OTHER : 0.8600,
}

TISSUE_PROPS_ABSORPTION = {
    TissueType.SKIN : 0.0191,
    TissueType.SKULL : 0.0136,
    TissueType.DM : 0.0191,
    TissueType.CSF : 0.0026,
    TissueType.GM : 0.0186,
    TissueType.WM : 0.0186,
    TissueType.OTHER : 0.0191,
}

# NOTE(review): g = 0.001 is far below typical tissue anisotropy (~0.9);
# presumably these values pair with reduced scattering coefficients — confirm.
TISSUE_PROPS_ANISOTROPY = {
    TissueType.SKIN : 0.0010,
    TissueType.SKULL : 0.0010,
    TissueType.DM : 0.0010,
    TissueType.CSF : 0.0010,
    TissueType.GM : 0.0010,
    TissueType.WM : 0.0010,
    TissueType.OTHER : 0.0010,
}

# NOTE(review): refractive index 1.0 for all tissues implies index-matched
# boundaries — confirm this is intended.
TISSUE_PROPS_REFRACTION = {
    TissueType.SKIN : 1.0000,
    TissueType.SKULL : 1.0000,
    TissueType.DM : 1.0000,
    TissueType.CSF : 1.0000,
    TissueType.GM : 1.0000,
    TissueType.WM : 1.0000,
    TissueType.OTHER : 1.0000,
}
# fmt: on

# FIXME allow for wavelength dependencies
82 |
83 |
def get_tissue_properties(segmentation_masks: xr.DataArray) -> np.ndarray:
    """Return tissue properties for the given segmentation mask.

    Args:
        segmentation_masks: Masks with dims (segmentation_type, i, j, k). Each
            mask must contain exactly one positive integer label.

    Returns:
        Array of shape (ntissues, 4) with columns (absorption, scattering,
        anisotropy, refraction), indexed by each mask's integer label.
        Row 0 holds the background properties.

    Raises:
        ValueError: If a segmentation type is unknown or a mask does not
            contain exactly one positive label.
    """
    ntissues = segmentation_masks.sizes["segmentation_type"] + 1
    tissue_props = np.zeros((ntissues, 4))
    tissue_props[0, :] = [0.0, 0.0, 1.0, 1.0]  # background

    for st in segmentation_masks.segmentation_type.values:
        m = segmentation_masks.sel(segmentation_type=st).values

        # FIX: the original called .item() directly, which raises a cryptic
        # ValueError for empty or multi-labeled masks. Check explicitly.
        labels = np.unique(m[m > 0])
        if len(labels) != 1:
            raise ValueError(
                f"expected exactly one positive label in mask '{st}' "
                f"but found {len(labels)}"
            )
        int_label = labels.item()

        if (tissue_type := TISSUE_LABELS.get(st, None)) is None:
            raise ValueError(f"unknown tissue type '{st}'")

        tissue_props[int_label, 0] = TISSUE_PROPS_ABSORPTION[tissue_type]
        tissue_props[int_label, 1] = TISSUE_PROPS_SCATTERING[tissue_type]
        tissue_props[int_label, 2] = TISSUE_PROPS_ANISOTROPY[tissue_type]
        tissue_props[int_label, 3] = TISSUE_PROPS_REFRACTION[tissue_type]

    return tissue_props
103 |
--------------------------------------------------------------------------------
/src/cedalion/imagereco/utils.py:
--------------------------------------------------------------------------------
1 | """Utility functions for image reconstruction."""
2 |
3 | import xarray as xr
4 | import numpy as np
5 | import cedalion
6 | from cedalion import units
7 | import cedalion.dataclasses as cdc
8 | import cedalion.typing as cdt
9 | import cedalion.geometry.segmentation as segm
10 | from scipy.sparse import coo_array
11 | import scipy.stats
12 |
13 | from cedalion import xrutils
14 |
15 |
# FIXME right location?
def map_segmentation_mask_to_surface(
    segmentation_mask: xr.DataArray,
    transform_vox2ras: cdt.AffineTransform,  # FIXME
    surface: cdc.Surface,
):
    """Find for each voxel the closest vertex on the surface.

    Args:
        segmentation_mask (xr.DataArray): A binary mask of shape (segmentation_type, i,
            j, k).
        transform_vox2ras (xr.DataArray): The affine transformation from voxel to RAS
            space.
        surface (cedalion.dataclasses.Surface): The surface to map the voxels to.

    Returns:
        coo_array: A sparse matrix of shape (ncells, nvertices) that maps voxels to
            surface vertices: entry (i, j) is 1 if vertex j is the closest vertex
            to voxel i.
    """

    assert surface.crs == transform_vox2ras.dims[0]

    cell_coords = segm.cell_coordinates(segmentation_mask, flat=True)
    cell_coords = cell_coords.points.apply_transform(transform_vox2ras)

    # convert to the surface's units before computing distances
    cell_coords = cell_coords.pint.to(surface.units).pint.dequantify()

    ncells = cell_coords.sizes["label"]
    nvertices = len(surface.vertices)

    # find indices of cells that belong to the mask
    cell_indices = np.flatnonzero(segmentation_mask.values)

    # for each cell query the closest vertex on the surface
    dists, vertex_indices = surface.kdtree.query(
        cell_coords.values[cell_indices, :], workers=-1
    )

    # construct a sparse matrix of shape (ncells, nvertices)
    # that maps voxels to their nearest surface vertices
    map_voxel_to_vertex = coo_array(
        (np.ones(len(cell_indices)), (cell_indices, vertex_indices)),
        shape=(ncells, nvertices),
    )

    return map_voxel_to_vertex
62 |
63 |
def normal_hrf(t, t_peak, t_std, vmax):
    """Create a Gaussian-shaped HRF scaled to a given peak amplitude.

    Args:
        t (np.ndarray): The time points.
        t_peak (float): The peak time.
        t_std (float): The standard deviation.
        vmax (float): The maximum value of the HRF.

    Returns:
        np.ndarray: The HRF sampled at t, with maximum vmax.
    """
    curve = scipy.stats.norm.pdf(t, loc=t_peak, scale=t_std)
    # rescale so that the sampled maximum equals vmax
    return curve * (vmax / curve.max())
79 |
80 |
def create_mock_activation_below_point(
    head_model: "cedalion.imagereco.forward_model.TwoSurfaceHeadModel",
    point: cdt.LabeledPointCloud,
    time_length: units.Quantity,
    sampling_rate: units.Quantity,
    spatial_size: units.Quantity,
    vmax: units.Quantity,
):
    """Create a mock activation below a point.

    The activation is separable into a spatial component (a Gaussian fall-off
    with distance from the brain vertex closest to `point`) multiplied by a
    temporal component (a normal-shaped HRF).

    Args:
        head_model (cedalion.imagereco.forward_model.TwoSurfaceHeadModel): The head
            model.
        point (cdt.LabeledPointCloud): The point below which to create the activation.
        time_length (units.Quantity): The length of the activation.
        sampling_rate (units.Quantity): The sampling rate.
        spatial_size (units.Quantity): The spatial size of the activation.
        vmax (units.Quantity): The maximum value of the activation.

    Returns:
        xr.DataArray: The activation with dims 'time' and 'vertex'.
    """
    # assert head_model.crs == point.points.crs

    _, vidx = head_model.brain.kdtree.query(point)

    # FIXME for simplicity use the euclidean distance here while the geodesic
    # distance would be the correct choice
    dists = xrutils.norm(
        head_model.brain.vertices - head_model.brain.vertices[vidx, :],
        head_model.brain.crs,
    )

    nsamples = int((time_length * sampling_rate).to_reduced_units().magnitude.item())
    t = np.arange(nsamples) / sampling_rate

    # Gaussian spatial profile centered on the closest vertex
    func_spat = np.exp(-((dists / spatial_size) ** 2)).rename({"label": "vertex"})
    # NOTE(review): HRF peak time (10) and width (3) are hard-coded —
    # presumably seconds; confirm against the units of t.
    func_temp = xr.DataArray(normal_hrf(t, 10, 3, vmax), dims="time")

    activation = func_temp * func_spat
    activation = activation.assign_coords({"time": t})
    return activation
123 |
--------------------------------------------------------------------------------
/src/cedalion/io/__init__.py:
--------------------------------------------------------------------------------
1 | from .snirf import read_snirf, write_snirf
2 | from .probe_geometry import read_mrk_json, read_digpts, read_einstar_obj, load_tsv
3 | from .anatomy import read_segmentation_masks
4 | from .photogrammetry import read_photogrammetry_einstar, read_einstar, opt_fid_to_xr
5 | from .forward_model import save_Adot, load_Adot
6 | from .bids import read_events_from_tsv
7 |
--------------------------------------------------------------------------------
/src/cedalion/io/anatomy.py:
--------------------------------------------------------------------------------
1 | """Functions for reading and processing anatomical data."""
2 |
3 | import nibabel
4 | import xarray as xr
5 | import os
6 | from typing import Dict, Tuple
7 | import numpy as np
8 | from cedalion.dataclasses import affine_transform_from_numpy
9 |
# FIXME
# NIFTI sform/qform code -> name of the corresponding coordinate space.
AFFINE_CODES = {
    0: "unknown", # sform not defined
    1: "scanner", # RAS+ in scanner coordinates
    2: "aligned", # RAS+ aligned to some other scan
    3: "talairach", # RAS+ in Talairach atlas space
    4: "mni", # RAS+ in MNI atlas space
}
18 |
19 |
def _get_affine_from_niftii(image: nibabel.nifti1.Nifti1Image):
    """Get affine transformation matrix from NIFTI image.

    Prefers the sform over the qform and falls back to nibabel's best-affine
    guess (with code 'unknown') when neither is explicitly coded.

    Args:
        image (nibabel.nifti1.Nifti1Image): NIFTI image object

    Returns:
        xr.DataArray: Affine transformation matrix
    """
    for get_form in (image.get_sform, image.get_qform):
        transform, code = get_form(coded=True)
        if code != 0:
            return affine_transform_from_numpy(
                transform, "ijk", AFFINE_CODES[code], "1", "mm"
            )

    transform = image.get_best_affine()
    return affine_transform_from_numpy(transform, "ijk", AFFINE_CODES[0], "1", "mm")
44 |
45 |
def read_segmentation_masks(
    basedir: str,
    mask_files: Dict[str, str] = {
        "csf": "csf.nii",
        "gm": "gm.nii",
        "scalp": "scalp.nii",
        "skull": "skull.nii",
        "wm": "wm.nii",
    },
) -> Tuple[xr.DataArray, np.ndarray]:
    """Read segmentation masks from NIFTI files.

    Args:
        basedir (str): Directory containing the mask files
        mask_files (Dict[str, str]): Dictionary mapping segmentation types to filenames

    Returns:
        Tuple[xr.DataArray, np.ndarray]:
            - masks (xr.DataArray): Concatenated segmentation masks with a new
              dimension `segmentation_type`.
            - affine (np.ndarray): Affine transformation matrix associated with the
              NIFTI files.

    Raises:
        FileNotFoundError: If a mask file does not exist.
        ValueError: If a mask is not binary or a voxel belongs to several masks.
    """
    # each segmentation type gets a distinct positive integer label
    mask_ids = {seg_type: i + 1 for i, seg_type in enumerate(mask_files.keys())}
    masks = []
    affines = []

    for seg_type, fname in mask_files.items():
        fpath = os.path.join(basedir, fname)
        if not os.path.exists(fpath):
            raise FileNotFoundError(f"file '{fpath}' does not exist.")

        f = nibabel.load(fpath)

        volume = f.get_fdata()

        if len(np.unique(volume)) > 2:
            raise ValueError(
                "expected binary mask but found more than two unique "
                f"values in '{fpath}'"
            )

        # mask volume should contain integers stored as floating point numbers.
        # Operations like resampling can introduce small deviations and non-integer
        # mask ids -> round them.
        volume = volume.round(6).astype(np.uint8)

        volume[volume != 0] = mask_ids[seg_type]

        masks.append(
            xr.DataArray(
                volume,
                dims=["i", "j", "k"],
                coords={"segmentation_type": seg_type},
            )
        )

        affines.append(_get_affine_from_niftii(f))

    # FIX: compare each affine against the first one. The original code
    # compared affines[i] with itself, which made this check a no-op.
    for i in range(1, len(affines)):
        assert np.all(affines[i] == affines[0])

    masks = xr.concat(masks, dim="segmentation_type")

    # check for voxel that belong to more than one mask # FIXME too strict?
    if (masks > 0).sum("segmentation_type").max() > 1:
        raise ValueError("found voxels with positive entries in more than one mask.")

    affine = affines[0]

    return masks, affine
119 |
120 |
def cell_coordinates(mask, affine, units="mm"):
    """Get the coordinates of each voxel in the transformed mask.

    Args:
        mask (xr.DataArray): A binary mask of shape (i, j, k).
        affine (np.ndarray): Affine transformation matrix.
        units (str): Units of the output coordinates.

    Returns:
        xr.DataArray: Coordinates of the center of each voxel in the mask.
    """
    # index vectors along the three voxel axes
    axes = [np.arange(size) for size in mask.shape[:3]]

    # grid of voxel indices with shape (ni, nj, nk, 3)
    voxel_grid = np.stack(np.meshgrid(*axes, indexing="ij"), axis=-1)

    transformed = xr.DataArray(
        nibabel.affines.apply_affine(affine, voxel_grid),
        dims=["i", "j", "k", "pos"],
        coords={"i": axes[0], "j": axes[1], "k": axes[2]},
        attrs={"units": units},
    )

    return transformed.pint.quantify()
150 |
--------------------------------------------------------------------------------
/src/cedalion/io/bids.py:
--------------------------------------------------------------------------------
1 | """Functions for reading BIDS data."""
2 |
3 | from pathlib import Path
4 |
5 | import pandas as pd
6 |
7 |
def read_events_from_tsv(fname: str | Path) -> pd.DataFrame:
    """Read a BIDS events table from a tab-separated values file.

    Args:
        fname: Path to the events .tsv file.

    Returns:
        The file's contents as a pandas DataFrame, one row per event.
    """
    return pd.read_csv(fname, delimiter="\t")
10 |
--------------------------------------------------------------------------------
/src/cedalion/io/forward_model.py:
--------------------------------------------------------------------------------
1 | """Module for saving and loading forward model computation results."""
2 |
3 | import h5py
4 | import xarray as xr
5 |
6 | import cedalion.dataclasses as cdc
7 |
8 |
def save_Adot(fn: str, Adot: xr.DataArray):
    """Save the sensitivity matrix Adot to a netCDF file.

    Args:
        fn (str): File name to save the data to.
        Adot (xr.DataArray): Data to save.
    """
    Adot.to_netcdf(fn)
22 |
def load_Adot(fn: str):
    """Load the sensitivity matrix Adot from a netCDF file.

    Args:
        fn (str): File name to load the data from.

    Returns:
        xr.DataArray: Data loaded from the file with dims
            (channel, vertex, wavelength).
    """
    ds = xr.open_dataset(fn)
    # the dataset contains a single variable; rebuild the DataArray with
    # explicit dims and coordinates
    data = ds.to_array()[0]
    return xr.DataArray(
        data,
        dims=["channel", "vertex", "wavelength"],
        coords={
            "channel": ("channel", ds.channel.values),
            "wavelength": ("wavelength", ds.wavelength.values),
            "is_brain": ("vertex", ds.is_brain.values),
        },
    )
43 |
44 |
45 |
def save_fluence(fn : str, fluence_all, fluence_at_optodes):
    """Save forward model computation results.

    This method uses a lossy compression algorithm to reduce file size.

    Args:
        fn (str): File name to save the data to.
        fluence_all (xr.DataArray): Fluence values with 'label', 'type' and
            'wavelength' coordinates.
        fluence_at_optodes (xr.DataArray): Fluence between optode pairs with
            'optode1', 'optode2' and 'wavelength' coordinates.
    """

    with h5py.File(fn, "w") as f:
        # with scaleoffset=14 this should be precise to the 14th decimal digit.
        f.create_dataset(
            "fluence_all",
            data=fluence_all,
            scaleoffset=14,
            shuffle=True,
            compression="lzf",
        )

        f["fluence_all"].attrs["dims"] = fluence_all.dims
        f["fluence_all"].attrs["label"] = [str(i) for i in fluence_all.label.values]
        f["fluence_all"].attrs["wavelength"] = fluence_all.wavelength
        f["fluence_all"].attrs["type"] = [i.value for i in fluence_all.type.values]

        f.create_dataset(
            "fluence_at_optodes",
            data=fluence_at_optodes,
            shuffle=True,
            compression="lzf",
        )

        f["fluence_at_optodes"].attrs["dims"] = fluence_at_optodes.dims
        f["fluence_at_optodes"].attrs["optode1"] = [
            str(i) for i in fluence_at_optodes.optode1.values
        ]
        f["fluence_at_optodes"].attrs["optode2"] = [
            str(i) for i in fluence_at_optodes.optode2.values
        ]
        # FIX: take the wavelengths from fluence_at_optodes itself. The
        # original copied them from fluence_all, which is only correct as long
        # as both arrays happen to share identical wavelength coordinates.
        f["fluence_at_optodes"].attrs["wavelength"] = fluence_at_optodes.wavelength

        f.flush()
84 |
85 |
def load_fluence(fn : str):
    """Load forward model computation results saved by save_fluence.

    Args:
        fn (str): File name to load the data from.

    Returns:
        Tuple[xr.DataArray, xr.DataArray]: Fluence data loaded from the file.
    """

    with h5py.File(fn, "r") as f:

        ds = f["fluence_all"]
        # dims and coordinate values were stored as HDF5 attributes by save_fluence
        fluence_all = xr.DataArray(
            ds,
            dims = ds.attrs["dims"],
            coords = {
                "label" : ("label", ds.attrs["label"]),
                "type" : ("label", [cdc.PointType(i) for i in ds.attrs["type"]]),
                "wavelength" : ds.attrs["wavelength"]
            }
        )
        # drop attributes carried over from the HDF5 dataset; they describe storage
        fluence_all.attrs.clear()

        ds = f["fluence_at_optodes"]

        fluence_at_optodes = xr.DataArray(
            ds,
            dims = ds.attrs["dims"],
            coords = {
                "optode1" : ds.attrs["optode1"],
                "optode2" : ds.attrs["optode2"],
                "wavelength" : ds.attrs["wavelength"]
            }
        )
        fluence_at_optodes.attrs.clear()

    # NOTE(review): the arrays are returned after the file is closed — this
    # presumably works because the DataArray constructor copies the h5py
    # dataset into memory; confirm.
    return fluence_all, fluence_at_optodes
124 |
--------------------------------------------------------------------------------
/src/cedalion/io/photogrammetry.py:
--------------------------------------------------------------------------------
1 | """Module for reading photogrammetry output file formats."""
2 |
3 | import cedalion.dataclasses as cdc
4 | import numpy as np
5 | from collections import OrderedDict
6 |
7 |
def read_photogrammetry_einstar(fn):
    """Read optodes and fiducials from photogrammetry pipeline.

    This method reads the output file as returned by the
    photogrammetry pipeline using an einstar device.

    Args:
        fn (str): The filename of the einstar photogrammetry output file.

    Returns:
        tuple: A tuple containing:
            - fiducials (cedalion.LabeledPoints): The fiducials as a cedalion
                LabeledPoints object.
            - optodes (cedalion.LabeledPoints): The optodes as a cedalion LabeledPoints
                object.
    """
    # parse the raw file, then convert both point sets to LabeledPoints
    return opt_fid_to_xr(*read_einstar(fn))
28 |
29 |
def read_einstar(fn):
    """Read optodes and fiducials from einstar devices.

    Args:
        fn (str): The filename of the einstar photogrammetry output file.

    Returns:
        tuple: A tuple containing:
            - fiducials (OrderedDict): The fiducials as an OrderedDict.
            - optodes (OrderedDict): The optodes as an OrderedDict.
    """

    with open(fn, "r") as f:
        rows = []
        for raw_line in f.readlines():
            fields = [field.strip() for field in raw_line.split(",")]
            rows.append([fields[0], [float(v) for v in fields[1:]]])

        # the first five entries must be the fiducials in this fixed order
        for idx, expected_label in enumerate(["Nz", "Iz", "Rpa", "Lpa", "Cz"]):
            assert rows[idx][0] == expected_label

        fiducials = OrderedDict(rows[:5])
        optodes = OrderedDict(rows[5:])
        return fiducials, optodes
53 |
54 |
def opt_fid_to_xr(fiducials, optodes):
    """Convert OrderedDicts fiducials and optodes to cedalion LabeledPoints objects.

    Args:
        fiducials (OrderedDict): The fiducials as an OrderedDict.
        optodes (OrderedDict): The optodes as an OrderedDict.

    Returns:
        tuple: A tuple containing:
            - fiducials (cedalion.LabeledPoints): The fiducials as a cedalion
                LabeledPoints object.
            - optodes (cedalion.LabeledPoints): The optodes as a cedalion LabeledPoints
                object.
    """

    # FIXME: this should get a different CRS
    CRS = "ijk"

    def _as_coords(mapping):
        # fall back to an empty (0, 3) array when no points are present
        if not mapping:
            return np.zeros((0, 3))
        return np.array(list(mapping.values()))

    fidu_points = cdc.build_labeled_points(
        _as_coords(fiducials), labels=list(fiducials.keys()), crs=CRS
    )  # , units="mm")

    # labels starting with 'S' are sources, 'D' are detectors, anything else
    # gets the unknown type
    opt_types = [
        cdc.PointType(1)
        if lab.startswith("S")
        else cdc.PointType(2)
        if lab.startswith("D")
        else cdc.PointType(0)
        for lab in optodes.keys()
    ]

    opt_points = cdc.build_labeled_points(
        _as_coords(optodes), labels=list(optodes.keys()), crs=CRS, types=opt_types
    )
    return fidu_points, opt_points
98 |
--------------------------------------------------------------------------------
/src/cedalion/io/probe_geometry.py:
--------------------------------------------------------------------------------
1 | """Module for reading and writing probe geometry files."""
2 |
3 | import numpy as np
4 | import xarray as xr
5 | import trimesh
6 | import json
7 | from collections import OrderedDict
8 |
9 | import cedalion
10 | from cedalion.dataclasses import PointType, TrimeshSurface, build_labeled_points
11 |
12 |
def load_tsv(tsv_fname: str, crs: str='digitized', units: str='mm') -> xr.DataArray:
    """Load a tsv file containing optodes or landmarks.

    Parameters
    ----------
    tsv_fname : str
        Path to the tsv file.
    crs : str
        Coordinate reference system of the points.
    units : str
        Physical units of the point coordinates (used only for optode files).

    Returns:
    -------
    xr.DataArray
        Optodes or landmarks as a DataArray.
    """
    with open(tsv_fname, 'r') as f:
        lines = f.readlines()
        lines = [line.split() for line in lines]

    data = OrderedDict([(line[0], np.array([float(line[1]), float(line[2]),
                                            float(line[3])])) for line in lines])

    # Files in which every label starts with 'S' or 'D' contain optodes,
    # everything else is treated as landmarks.
    if all([k[0] in ['S', 'D'] for k in data.keys()]):
        tsv_type = 'optodes'
    else:
        tsv_type = 'landmarks'

    if tsv_type == 'optodes':
        types = []
        for lab in data.keys():
            if lab[0] == 'S':
                types.append(PointType(1))  # sources
            elif lab[0] == 'D':
                types.append(PointType(2))  # detectors
            else:
                raise ValueError("Unknown optode type")

        geo3d = build_labeled_points(np.array(list(data.values())),
                                     labels=list(data.keys()), crs=crs,
                                     types=types, units=units)
        return geo3d
    else:
        # NOTE(review): the landmarks branch ignores the units parameter —
        # confirm whether quantification is intended here.
        landmarks = xr.DataArray(
            np.array(list(data.values())),
            dims=["label", crs],
            coords={
                "label": ("label", list(data.keys())),
                "type": ("label", [PointType.LANDMARK] * len(data)),
            },
        )
        return landmarks
    # FIX: removed the unreachable trailing 'return data' — both branches
    # above already return.
66 |
67 |
def read_mrk_json(fname: str, crs: str) -> xr.DataArray:
    """Read a JSON file containing landmarks.

    Parameters
    ----------
    fname : str
        Path to the JSON file.
    crs : str
        Coordinate reference system of the landmarks.

    Returns:
    -------
    xr.DataArray
        Landmarks as a DataArray.
    """
    with open(fname) as fin:
        x = json.load(fin)

    units = []
    labels = []
    positions = []
    types = []

    for markup in x["markups"]:
        units.append(markup["coordinateUnits"])  # FIXME handling of units

        for cp in markup["controlPoints"]:
            labels.append(cp["label"])

            # 3x3 matrix. column vectors are coordinate axes
            orientation = np.asarray(cp["orientation"]).reshape(3, 3)

            pos = cp["position"]
            positions.append(pos @ orientation)
            types.append(PointType.LANDMARK)

    unique_units = list(set(units))
    if len(unique_units) > 1:
        raise ValueError(f"more than one unit found in {fname}: {unique_units}")

    # FIX: stack all collected positions into one array. The original code
    # vstacked only the last control point's position ('pos') into a variable
    # that was never used.
    positions = np.vstack(positions)

    result = xr.DataArray(
        positions,
        dims=["label", crs],
        coords={"label": ("label", labels), "type": ("label", types)},
        attrs={"units": unique_units[0]},
    )

    result = result.pint.quantify()

    return result
120 |
121 |
def save_mrk_json(fname: str, landmarks: xr.DataArray, crs: str):
    """Save landmarks to a JSON file in the 3D Slicer markups schema.

    Parameters
    ----------
    fname : str
        Path to the output file.
    landmarks : xr.DataArray
        Landmarks to save.
    crs: str
        Coordinate system of the landmarks.
    """
    control_points = [{"id": i,
                       "label": lm.label.item(),
                       "position": list(np.array(landmarks[i])),
                       "orientation": [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0],
                       }
                      for i, lm in enumerate(landmarks)]
    data_dict = {"@schema": "https://raw.githubusercontent.com/slicer/slicer/master/Modules/Loadable/Markups/Resources/Schema/markups-schema-v1.0.3.json",
                 "markups": [{
                     "type": "Fiducial",
                     "coordinateSystem": crs,
                     "coordinateUnits": "mm", #landmark.units,
                     "controlPoints": control_points,
                 }]}
    # FIX: use a context manager so the file handle is reliably closed and
    # flushed; the original passed an anonymous open() result to json.dump.
    with open(fname, "w") as fout:
        json.dump(data_dict, fout, indent=2)
148 |
149 |
def read_digpts(fname: str, units: str="mm") -> xr.DataArray:
    """Read a file containing digitized points.

    Each line is expected to have the form "<label>: <x> <y> <z>".

    Parameters
    ----------
    fname : str
        Path to the file.
    units : str
        Units of the points.

    Returns:
    -------
    xr.DataArray
        Digitized points as a DataArray.
    """
    labels = []
    coordinates = []

    with open(fname) as fin:
        for raw_line in fin.readlines():
            label, coord_str = raw_line.strip().split(":")
            labels.append(label)
            coordinates.append([float(v) for v in coord_str.split()])

    result = xr.DataArray(
        coordinates,
        dims=["label", "pos"],
        coords={"label": labels},
        attrs={"units": units},
    )

    return result.pint.quantify()
186 |
187 |
def read_einstar_obj(fname: str) -> TrimeshSurface:
    """Read a textured triangle mesh generated by Einstar devices.

    Parameters
    ----------
    fname : str
        Path to the file.

    Returns:
    -------
    TrimeshSurface
        The triangle surface mesh in the 'digitized' coordinate system with
        coordinates in millimeters.
    """
    mesh = trimesh.load(fname)
    return TrimeshSurface(mesh, crs="digitized", units=cedalion.units.mm)
203 |
--------------------------------------------------------------------------------
/src/cedalion/models/__init__.py:
--------------------------------------------------------------------------------
1 | """Tools for modelling fNIRS signals."""
2 |
--------------------------------------------------------------------------------
/src/cedalion/models/glm/__init__.py:
--------------------------------------------------------------------------------
1 | """Tools for describing fNIRS data with general linear models."""
2 |
3 | from .basis_functions import TemporalBasisFunction, GaussianKernels, Gamma
4 | from .design_matrix import make_design_matrix
5 | from .solve import fit, predict
6 |
--------------------------------------------------------------------------------
/src/cedalion/physunits.py:
--------------------------------------------------------------------------------
1 | """Builds on pint_xarray's unit registry."""
2 | import pint_xarray
3 |
# unit registry and quantity type re-exported from pint_xarray
units = pint_xarray.unit_registry
Quantity = units.Quantity

# Aliases that we want to provide. FIXME: maybe a definition file is more convenient?
units.define("@alias deg = o")
units.define("@alias degC = oC")
units.define("@alias ohm = Ohm")

# FIXME temporarily define ADU unit in WINGS snirf datasets to avoid an error
units.define("ADU = 1")
14 |
--------------------------------------------------------------------------------
/src/cedalion/sigdecomp/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ibs-lab/cedalion/ec81bcf0f3b219186f0ac0de95af2fa9f832845b/src/cedalion/sigdecomp/__init__.py
--------------------------------------------------------------------------------
/src/cedalion/sigdecomp/dummy.py:
--------------------------------------------------------------------------------
1 | import xarray as xr
2 |
3 | import cedalion.dataclasses as cdc
4 | import cedalion.typing as cdt
5 | from cedalion import units
6 |
7 | from ..sigproc.frequency import freq_filter
8 |
9 |
@cdc.validate_schemas
def split_frequency_bands(ts: cdt.NDTimeSeries) -> cdt.NDTimeSeries:
    """Extract signal components in different frequency bands.

    This is not intended for real use but should act rather as an example how
    signal decomposition methods could be implemented.

    Args:
        ts: the input time series.

    Returns:
        A DataArray with an additional 'band' dimension that holds the
        band-pass filtered components ('cardiac' and 'respiratory').
    """

    # pass bands that contain different kinds of physiology
    bands = {
        "cardiac": (0.5 * units.Hz, 2.5 * units.Hz),
        "respiratory": (0.1 * units.Hz, 0.5 * units.Hz),
    }

    # band-pass filter the time series once per band
    components = [
        freq_filter(ts, fmin, fmax, butter_order=4) for fmin, fmax in bands.values()
    ]

    # stack the filtered components along a new 'band' dimension
    result = xr.concat(components, dim="band")
    return result.assign_coords({"band": ("band", list(bands.keys()))})
35 |
--------------------------------------------------------------------------------
/src/cedalion/sigdecomp/measfunc_table.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ibs-lab/cedalion/ec81bcf0f3b219186f0ac0de95af2fa9f832845b/src/cedalion/sigdecomp/measfunc_table.npy
--------------------------------------------------------------------------------
/src/cedalion/sigproc/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ibs-lab/cedalion/ec81bcf0f3b219186f0ac0de95af2fa9f832845b/src/cedalion/sigproc/__init__.py
--------------------------------------------------------------------------------
/src/cedalion/sigproc/epochs.py:
--------------------------------------------------------------------------------
1 | """Extract epochs from a time series based on stimulus events."""
2 |
3 | from __future__ import annotations
4 | import logging
5 |
6 | import numpy as np
7 | import pandas as pd
8 | import xarray as xr
9 | from scipy.interpolate import interp1d
10 |
11 | import cedalion.typing as cdt
12 | import cedalion.xrutils as xrutils
13 | from cedalion import Quantity
14 |
15 | from .frequency import sampling_rate
16 |
17 | logger = logging.getLogger("cedalion")
18 |
19 |
def to_epochs(
    ts: cdt.NDTimeSeries,
    df_stim: pd.DataFrame,
    trial_types: list[str],
    before: cdt.QTime,
    after: cdt.QTime,
):
    """Extract epochs from the time series based on stimulus events.

    Args:
        ts: the time series
        df_stim: DataFrame containing stimulus events.
        trial_types: List of trial types to include in the epochs.
        before: Time before stimulus event to include in epoch.
        after: Time after stimulus event to include in epoch.

    Returns:
        xarray.DataArray: Array containing the extracted epochs with a new
        'epoch' dimension and the time axis replaced by 'reltime' (time
        relative to stimulus onset). Events whose epoch window extends
        beyond the recorded time range are skipped.
    """

    if not isinstance(before, Quantity):
        raise ValueError("please specify 'before' as a Quantity with time units.")
    if not isinstance(after, Quantity):
        # fixed: this message previously referred to 'before'
        raise ValueError("please specify 'after' as a Quantity with time units.")

    # check if user-selected trial types are available
    available_trial_types = set(df_stim.trial_type)
    for trial_type in trial_types:
        if trial_type not in available_trial_types:
            raise ValueError(f"df_stim does not contain trial_type '{trial_type}'")

    # reduce df_stim to only the selected trial types
    df_stim = df_stim[df_stim.trial_type.isin(trial_types)]

    # get time axis in seconds
    if ts.time.pint.units is not None:
        time = ts.time.to("s").pint.dequantify().values
    else:
        # assume time coords are already in seconds
        time = ts.time.values

    before = before.to("s").magnitude.item()
    after = after.to("s").magnitude.item()
    fs = sampling_rate(ts).to("Hz")

    # the time stamps of the sampled time series and the events can have different
    # precision. Be explicit about how timestamps are assigned to samples in ts.
    # For samples i-1, i , i+1 in ts with timestamps t[i-1], t[i], t[i+1] we say
    # that sample i ranges from 0.5 * (t[i-1] + t[i]) till 0.5 * (t[i] + t[i+1])
    # (exclusive), i.e. the time stamp is centered in the bin.
    first_edge = time[0] - 0.5 * (time[1] - time[0])
    last_edge = time[-1] + 0.5 * (time[-1] - time[-2])
    bin_edges_between_timepoints = 0.5 * (time[:-1] + time[1:])
    sample_bin_edges = np.r_[first_edge, bin_edges_between_timepoints, last_edge]

    onset_indices = np.digitize(df_stim.onset, sample_bin_edges) - 1

    before_samples = int(np.ceil((before * fs).magnitude))
    after_samples = int(np.ceil((after * fs).magnitude))
    # clip to [-1, len(time)] so out-of-range events can be detected below
    start_indices = np.clip(onset_indices - before_samples, -1, len(time))
    stop_indices = np.clip(onset_indices + after_samples, -1, len(time))

    # Define time axis relative to onset. This time axis depends only on before,
    # after and the time series' sampling_rate. We round 1/fs to millisecond
    # precision. This way, epochs from different datasets with slightly different
    # sampling rates will have the same reltime axis and can be concatenated.
    fs = fs.magnitude.item()
    dT = np.round(1 / fs, 3)  # millisecond precision
    reltime = np.arange(-before_samples, after_samples + 1) * dT

    units = ts.pint.units
    ts = ts.pint.dequantify()

    interpolator = interp1d(
        ts.time.values, ts.values, axis=ts.dims.index("time"), fill_value="extrapolate"
    )

    epochs = []

    # dimensions of the epoch arrays. rename time to reltime
    dims = list(ts.dims)
    dims[dims.index("time")] = "reltime"

    for onset, trial_type, start, stop in zip(
        df_stim.onset, df_stim.trial_type, start_indices, stop_indices
    ):
        if start < 0:
            # start is outside time range -> skip this event
            continue

        if stop == len(time):
            # end is outside time range -> skip this event
            continue

        coords = xrutils.coords_from_other(
            ts, dims=dims, reltime=reltime, trial_type=trial_type
        )

        # Extract this epoch from ts. The timestamps in reltime and ts.time do
        # not have to agree. Hence, we linearly interpolate the original ts to
        # query ts between time samples.
        interpolated = interpolator(onset + reltime)

        epochs.append(xr.DataArray(interpolated, dims=dims, coords=coords))

    if not epochs:
        # no event survived -> return an empty array with the correct shape
        shape = list(ts.shape)
        shape[dims.index("reltime")] = len(reltime)
        shape = tuple([0] + shape)

        return xr.DataArray(
            np.zeros(shape),
            dims=["epoch"] + dims,
            coords=xrutils.coords_from_other(ts, dims=dims, reltime=reltime),
        )

    # concatenate and create epoch dimension
    epochs = xr.concat(epochs, dim="epoch")

    # if there is only one epoch or multiple epochs with the same trial_type
    # the coord 'trial_type' remains scalar. Transform it into an array.
    if epochs.trial_type.values.shape == tuple():
        epochs = epochs.assign_coords(
            {
                "trial_type": (
                    "epoch",
                    [epochs.trial_type.item()] * epochs.sizes["epoch"],
                )
            }
        )

    # add units back
    epochs = epochs.pint.quantify(units)

    return epochs
155 |
--------------------------------------------------------------------------------
/src/cedalion/sigproc/frequency.py:
--------------------------------------------------------------------------------
1 | """Frequency-related signal processing methods."""
2 |
3 | import numpy as np
4 | import scipy.signal
5 | import xarray as xr
6 | import cedalion.typing as cdt
7 | from cedalion import Quantity, units
8 | from cedalion.validators import check_dimensionality
9 | import cedalion.dataclasses as cdc
10 |
11 |
@cdc.validate_schemas
def sampling_rate(timeseries: cdt.NDTimeSeries) -> Quantity:
    """Estimate the sampling rate of the timeseries.

    Note:
        This functions assumes uniform sampling.

    Args:
        timeseries (:class:`NDTimeSeries`, (time,*)): the input time series

    Returns:
        The sampling rate estimated by averaging time differences between samples.
    """
    # the time axis must carry a unit annotation to interpret its values
    assert "units" in timeseries.time.attrs
    time_unit = units.Unit(timeseries.time.attrs["units"])

    # average sample spacing; report the inverse in Hz
    mean_sample_interval = np.diff(timeseries.time).mean() * time_unit
    return (1.0 / mean_sample_interval).to("Hz")
30 |
31 |
@cdc.validate_schemas
def freq_filter(
    timeseries: cdt.NDTimeSeries,
    fmin: cdt.QFrequency,
    fmax: cdt.QFrequency,
    butter_order: int = 4,
) -> cdt.NDTimeSeries:
    """Apply a Butterworth frequency filter.

    Depending on the thresholds a low-, high- or bandpass filter is applied:
    fmin == 0 yields a lowpass, fmax == 0 a highpass, otherwise a bandpass.
    Filtering runs forward and backward (sosfiltfilt), i.e. zero-phase.

    Args:
        timeseries (:class:`NDTimeSeries`, (time,*)): the input time series
        fmin (:class:`Quantity`, [frequency]): lower threshold of the pass band
        fmax (:class:`Quantity`, [frequency]): higher threshold of the pass band
        butter_order: order of the filter

    Returns:
        The frequency-filtered time series
    """

    check_dimensionality("fmin", fmin, "[frequency]")
    # fixed: the quantity was mislabeled "fax" in error messages before
    check_dimensionality("fmax", fmax, "[frequency]")

    # normalize cutoffs by the Nyquist frequency as scipy.signal.butter expects
    fny = sampling_rate(timeseries) / 2
    fmin = float(fmin / fny)
    fmax = float(fmax / fny)

    if fmin == 0:
        sos = scipy.signal.butter(butter_order, fmax, "low", output="sos")
    elif fmax == 0:
        sos = scipy.signal.butter(butter_order, fmin, "high", output="sos")
    else:
        sos = scipy.signal.butter(butter_order, [fmin, fmax], "bandpass", output="sos")

    # strip units before filtering; scipy operates on plain arrays
    if (units := timeseries.pint.units) is not None:
        timeseries = timeseries.pint.dequantify()

    # sosfiltfilt filters along the last axis, so move "time" there temporarily
    dims = timeseries.dims
    timeseries = timeseries.transpose(..., "time")
    result = xr.apply_ufunc(scipy.signal.sosfiltfilt, sos, timeseries)
    result = result.transpose(*dims)

    if units is not None:
        result = result.pint.quantify(units)

    return result
78 |
--------------------------------------------------------------------------------
/src/cedalion/sigproc/tasks.py:
--------------------------------------------------------------------------------
1 | import cedalion.dataclasses as cdc
2 | from cedalion.tasks import task
3 | import cedalion.nirs # FIXME cedalion.sigproc.nirs?
4 | import cedalion.sigproc.quality
5 | from typing import Annotated
6 | import xarray as xr
7 | from cedalion import Quantity
8 |
9 |
@task
def int2od(
    rec: cdc.Recording,
    ts_input: str | None = None,
    ts_output: str = "od",
):
    """Calculate optical density from intensity amplitude data.

    Args:
        rec (Recording): container of timeseries data
        ts_input (str): name of the intensity timeseries. If None, this task
            operates on the last timeseries in rec.timeseries.
        ts_output (str): name under which the optical density timeseries is
            stored.
    """

    intensity = rec.get_timeseries(ts_input)
    rec.set_timeseries(ts_output, cedalion.nirs.int2od(intensity))
28 |
29 |
@task
def od2conc(
    rec: cdc.Recording,
    dpf: dict[float, float],
    spectrum: str = "prahl",
    ts_input: str | None = None,
    ts_output: str = "conc",
):
    """Calculate hemoglobin concentrations from optical density data.

    Args:
        rec (Recording): container of timeseries data
        dpf (dict[float, float]): differential path length factors, keyed by
            wavelength.
        spectrum (str): label of the extinction coefficients to use (default: "prahl")
        ts_input (str | None): name of the optical density timeseries. If None,
            this task operates on the last timeseries in rec.timeseries.
        ts_output (str): name under which the concentration timeseries is stored
            (default: "conc").
    """

    od = rec.get_timeseries(ts_input)

    # translate the plain dict into a DataArray indexed by wavelength
    dpf_array = xr.DataArray(
        list(dpf.values()),
        dims="wavelength",
        coords={"wavelength": list(dpf.keys())},
    )

    conc = cedalion.nirs.od2conc(od, rec.geo3d, dpf_array, spectrum)
    rec.set_timeseries(ts_output, conc)
60 |
61 |
@task
def snr(
    rec: cdc.Recording,
    snr_thresh: float = 2.0,
    ts_input: str | None = None,
    aux_obj_output: str = "snr",
    mask_output: str = "snr",
):
    """Calculate signal-to-noise ratio (SNR) of timeseries data.

    Args:
        rec (Recording): The recording object containing the data.
        snr_thresh (float): The SNR threshold.
        ts_input (str | None, optional): The input time series. Defaults to None.
        aux_obj_output (str, optional): The key for storing the SNR in the auxiliary
            object. Defaults to "snr".
        mask_output (str, optional): The key for storing the mask in the recording
            object. Defaults to "snr".
    """
    ts = rec.get_timeseries(ts_input)

    # avoid shadowing this task's name: call the results snr_values / snr_mask
    snr_values, snr_mask = cedalion.sigproc.quality.snr(ts, snr_thresh)

    rec.aux_obj[aux_obj_output] = snr_values
    rec.set_mask(mask_output, snr_mask)
87 |
88 |
@task
def sd_dist(
    rec: cdc.Recording,
    sd_min: Annotated[Quantity, "[length]"],
    sd_max: Annotated[Quantity, "[length]"],
    ts_input: str | None = None,
    aux_obj_output: str = "sd_dist",
    mask_output: str = "sd_dist",
):
    """Calculate source-detector separations and mask channels outside a range.

    Args:
        rec (Recording): The recording object containing the data.
        sd_min (Annotated[Quantity, "[length]"]): The minimum source-detector
            separation.
        sd_max (Annotated[Quantity, "[length]"]): The maximum source-detector
            separation.
        ts_input (str | None, optional): The input time series. Defaults to None.
        aux_obj_output (str, optional): The key for storing the source-detector
            distances in the auxiliary object. Defaults to "sd_dist".
        mask_output (str, optional): The key for storing the mask in the recording
            object. Defaults to "sd_dist".
    """
    ts = rec.get_timeseries(ts_input)

    # avoid shadowing this task's name: call the result distances
    distances, mask = cedalion.sigproc.quality.sd_dist(ts, rec.geo3d, (sd_min, sd_max))

    rec.set_mask(mask_output, mask)
    rec.aux_obj[aux_obj_output] = distances
116 |
--------------------------------------------------------------------------------
/src/cedalion/sim/__init__.py:
--------------------------------------------------------------------------------
1 | """Tools for creating synthetic data."""
2 |
--------------------------------------------------------------------------------
/src/cedalion/tasks.py:
--------------------------------------------------------------------------------
1 | import functools
2 | from typing import Callable
3 |
# global registry mapping task names to their wrapped callables
task_registry = {}


def task(f: Callable):
    """Register a function as a task under its ``__name__``.

    Args:
        f: the function to register. Its name must be unique among tasks.

    Returns:
        A wrapper around f, which is also stored in task_registry.

    Raises:
        ValueError: if a task with the same name was already registered.
    """
    name = f.__name__

    if name in task_registry:
        raise ValueError(
            f"there is already a function with name '{name}' in the registry."
        )

    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        return f(*args, **kwargs)

    task_registry[name] = wrapper
    return wrapper
22 |
--------------------------------------------------------------------------------
/src/cedalion/typing.py:
--------------------------------------------------------------------------------
1 | """Type aliases for Cedalion dataclasses.
2 |
3 | Cedalion relies as much as possible on generic data types (like xarray DataArrays).
4 | We then use type aliases and annotations to augment these data types with additional
5 | information about the data they carry. For DataArrays there is a basic mechanism
6 | to specify and validate data schemas that specify dimension and coordinate names.
7 | This way we can distinguish between time series DataArrays (NDTimeSeries) and DataArrays
8 | representing points in space (LabeledPointCloud). By using these aliases in type hints
9 | we indicate to user which kind of DataArray is expected.
10 |
11 | Parameters with physical units are represented by cedalion.Quantity. Aliases are defined
12 | to indicate the dimensionality of quantities.
13 | """
14 |
15 | from __future__ import annotations
16 | from typing import Annotated, TypeAlias
17 |
18 | import xarray as xr
19 |
20 | from cedalion.dataclasses.schemas import LabeledPointCloudSchema, NDTimeSeriesSchema
21 | from cedalion import Quantity
22 |
#: DataArrays representing labeled points in space.
LabeledPointCloud: TypeAlias = Annotated[xr.DataArray, LabeledPointCloudSchema]

#: DataArrays representing time series.
NDTimeSeries: TypeAlias = Annotated[xr.DataArray, NDTimeSeriesSchema]

#: 4x4 DataArrays representing affine transformations.
AffineTransform: TypeAlias = xr.DataArray

#: Quantities with units of time.
QTime: TypeAlias = Annotated[Quantity, "[time]"]

#: Quantities with units of length.
QLength: TypeAlias = Annotated[Quantity, "[length]"]

#: Quantities with units of frequency.
QFrequency: TypeAlias = Annotated[Quantity, "[frequency]"]
40 |
--------------------------------------------------------------------------------
/src/cedalion/validators.py:
--------------------------------------------------------------------------------
1 | """Validators for common data structures."""
2 |
3 | from typing import List, Optional
4 |
5 | import pint
6 | import xarray as xr
7 |
8 |
def _assert_dims_and_coords(
    array: xr.DataArray, dimensions: List[str], coordinates: List[str]
):
    """Raise if `array` lacks any of the given dimensions or coordinates.

    Args:
        array: the DataArray to check.
        dimensions: names that must be present in array.dims.
        coordinates: names that must be present in array.coords.

    Raises:
        AttributeError: if a dimension or coordinate is missing.
    """
    for dim in dimensions:
        if dim not in array.dims:
            # the f-prefix was missing before, so errors showed '{dim}' literally
            raise AttributeError(f"Missing dimension '{dim}'")

    for coord in coordinates:
        if coord not in array.coords:
            raise AttributeError(f"Missing coordinate '{coord}'")
19 |
20 |
def has_time(array: xr.DataArray):
    """Assert that `array` has a 'time' dimension and coordinate."""
    _assert_dims_and_coords(array, ["time"], ["time"])
23 |
24 |
def has_wavelengths(array: xr.DataArray):
    """Assert that `array` has a 'wavelength' dimension and coordinate."""
    _assert_dims_and_coords(array, ["wavelength"], ["wavelength"])
27 |
28 |
def has_channel(array: xr.DataArray):
    """Assert that `array` has a 'channel' dimension with channel, source and detector coordinates."""
    _assert_dims_and_coords(array, ["channel"], ["channel", "source", "detector"])
31 |
32 |
def has_positions(array: xr.DataArray, npos: Optional[int] = None):
    """Assert that `array` has a dimension named after its CRS.

    Args:
        array: the point cloud to check.
        npos: if given, additionally require exactly this many positions.

    Raises:
        AttributeError: if the CRS dimension is missing or the count differs.
    """
    crs = array.points.crs
    _assert_dims_and_coords(array, [crs], [])

    if npos is None:
        return

    npos_found = array.sizes[crs]
    if npos_found != npos:
        raise AttributeError(
            f"Expected geometry with {npos} dimensions but found {npos_found}"
        )
43 |
44 |
def is_quantified(array: xr.DataArray):
    """Return True if the array's data is a pint.Quantity, i.e. carries units."""
    return isinstance(array.variable.data, pint.Quantity)
47 |
48 |
def check_dimensionality(name: str, q: pint.Quantity, dim: str):
    """Raise a ValueError if quantity `q` does not have dimensionality `dim`.

    Args:
        name: name of the quantity, used in the error message.
        q: the quantity to check.
        dim: the expected pint dimensionality string, e.g. "[frequency]".
    """
    if not q.check(dim):
        raise ValueError(f"quantity '{name}' does not have dimensionality '{dim}'")
52 |
--------------------------------------------------------------------------------
/src/cedalion/vis/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ibs-lab/cedalion/ec81bcf0f3b219186f0ac0de95af2fa9f832845b/src/cedalion/vis/__init__.py
--------------------------------------------------------------------------------
/src/cedalion/vis/plot_sensitivity_matrix.py:
--------------------------------------------------------------------------------
1 | """Plots an fNIRS probe's sensitivity profile on a 3D brain surface.
2 |
3 | Args:
4 | sensitivity:
5 | brain_surface:
6 | head_surface:
7 | labeled_points:
8 | wavelength: The wavelength of the light source in nm.
9 |
10 | Initial Contributors:
11 | - Shakiba Moradi | shakiba.moradi@tu-berlin.de | 2024
12 | """
13 |
14 | import numpy as np
15 | import matplotlib.pyplot as p
16 | from matplotlib.colors import ListedColormap
17 | import pyvista as pv
18 |
19 | import cedalion
20 | import cedalion.dataclasses as cdc
21 |
22 |
class Main:
    """Plot an fNIRS probe's sensitivity profile on a 3D brain surface."""

    def __init__(
        self,
        sensitivity,
        brain_surface,
        head_surface=None,
        labeled_points=None,
        wavelength=760,
    ):
        """Store the objects to plot and create the pyvista plotter.

        Args:
            sensitivity: sensitivity DataArray with channel and wavelength
                dimensions and an 'is_brain' vertex coordinate.
            brain_surface: the brain surface mesh.
            head_surface: optional head surface, rendered transparently.
            labeled_points: optional labeled points (e.g. optodes) to render.
            wavelength: the wavelength of the light source in nm.
        """
        self.brain = brain_surface
        self.head = head_surface
        self.sensitivity = sensitivity
        self.wavelength = wavelength
        self.labeled_points = labeled_points

        self.plt = pv.Plotter()

    def plot(self, low_th=-3, high_th=0):
        """Render the log10 sensitivity on the brain surface.

        Args:
            low_th: lower color limit on the log10 scale.
            high_th: upper color limit on the log10 scale.
        """
        if self.head is not None:
            cedalion.plots.plot_surface(self.plt, self.head, opacity=0.1)
        if self.labeled_points is not None:
            cedalion.plots.plot_labeled_points(self.plt, self.labeled_points)

        vtk_brain = cdc.VTKSurface.from_trimeshsurface(self.brain)
        vtk_brain = pv.wrap(vtk_brain.mesh)

        # keep only brain vertices, select the wavelength, sum over channels
        sens = self.sensitivity.where(self.sensitivity["is_brain"], drop=True)
        sens = sens.sel(wavelength=self.wavelength).sum(dim="channel").values

        # replace non-positive entries with the smallest positive value so
        # that the subsequent log10 is well defined
        sens[sens <= 0] = sens[sens > 0].min()
        sens = np.log10(sens)

        # build a colormap whose lowest bin is white (RGBA [1,1,1,1]),
        # followed by 255 colors sampled from 'jet'
        base_cmap = p.cm.get_cmap("jet", 256)
        lowest_bin = [1, 1, 1, 1]
        cmap_colors = np.vstack((lowest_bin, base_cmap(np.linspace(0, 1, 255))))
        custom_cmap = ListedColormap(cmap_colors)

        self.plt.add_mesh(
            vtk_brain,
            scalars=sens,
            cmap=custom_cmap,
            smooth_shading=True,
            clim=(low_th, high_th),
            scalar_bar_args={
                "title": "Sensitivity (m⁻¹): Logarithmic Scale",
                "shadow": True,
            },
        )
80 |
--------------------------------------------------------------------------------
/src/cedalion/vtktutils.py:
--------------------------------------------------------------------------------
1 | import vtk
2 | from vtk.util.numpy_support import numpy_to_vtkIdTypeArray, numpy_to_vtk
3 | import trimesh
4 | import pyvista as pv
5 | import numpy as np
6 |
7 |
def trimesh_to_vtk_polydata(mesh: trimesh.Trimesh):
    """Convert a Trimesh object to a VTK PolyData object.

    Vertex colors are carried over; texture visuals are converted to
    per-vertex colors first.

    Args:
        mesh (trimesh.Trimesh): The input trimesh object.

    Returns:
        vtk.vtkPolyData: The converted VTK PolyData object.
    """
    ntris, ndim_cells = mesh.faces.shape
    nvertices, ndim_vertices = mesh.vertices.shape

    assert ndim_cells == 3  # operate only on triangle meshes
    assert ndim_vertices == 3  # operate only in 3D space

    # figure out if vtk uses 32 or 64 bits for IDs
    id_size = vtk.vtkIdTypeArray().GetDataTypeSize()
    id_dtype = np.int32 if id_size == 4 else np.int64

    cell_npoints = np.full(ntris, ndim_cells)
    # triangle definition: (points per cell, *point ids)
    point_ids = np.hstack((cell_npoints[:, None], mesh.faces)).astype(id_dtype).ravel()
    # deep=1 copies the data so VTK does not reference freed numpy memory
    point_ids = numpy_to_vtkIdTypeArray(point_ids, deep=1)

    cells = vtk.vtkCellArray()
    cells.SetCells(ntris, point_ids)

    points = vtk.vtkPoints()
    points.SetData(numpy_to_vtk(mesh.vertices, deep=1))

    vtk_mesh = vtk.vtkPolyData()
    vtk_mesh.SetPoints(points)
    vtk_mesh.SetPolys(cells)

    # if the trimesh was textured copy the color information, too
    if isinstance(mesh.visual, trimesh.visual.color.ColorVisuals):
        colors = mesh.visual.vertex_colors
    else:
        # texture visuals: derive per-vertex colors from the texture
        colors = mesh.visual.to_color().vertex_colors

    colors = numpy_to_vtk(colors)
    colors.SetName("colors")
    vtk_mesh.GetPointData().SetScalars(colors)

    return vtk_mesh
53 |
54 |
def pyvista_polydata_to_trimesh(polydata: pv.PolyData) -> trimesh.Trimesh:
    """Convert a PyVista PolyData object to a Trimesh object.

    Args:
        polydata (pv.PolyData): The input PyVista PolyData object.

    Returns:
        trimesh.Trimesh: The converted Trimesh object.
    """
    # FIXME scalars to texture?
    return trimesh.Trimesh(polydata.points, polydata.regular_faces)
69 |
--------------------------------------------------------------------------------
/tests/test_bids.py:
--------------------------------------------------------------------------------
1 |
2 | import cedalion.io
3 | import cedalion.datasets
4 | from cedalion.dataclasses.schemas import validate_stim_schema
5 |
def test_read_events_from_tsv():
    """Read every *_events.tsv in the BIDS dataset and validate its schema."""
    bids_dir = cedalion.datasets.get_multisubject_fingertapping_path()

    for events_file in bids_dir.glob("**/*_events.tsv"):
        stim = cedalion.io.read_events_from_tsv(events_file)
        validate_stim_schema(stim)
13 |
--------------------------------------------------------------------------------
/tests/test_dataclasses_geometry.py:
--------------------------------------------------------------------------------
1 | import trimesh
2 | from numpy.testing import assert_allclose
3 | import pytest
4 |
5 | import cedalion
6 | import cedalion.dataclasses as cdc
7 | import cedalion.xrutils as xrutils
8 |
9 |
def test_normal_normalization():
    """Vertex normals are normalized on demand; zero-length normals raise."""
    # normal vectors have length 1
    sphere_orig = trimesh.creation.icosphere(4)

    # mesh with scaled normal vectors
    sphere = trimesh.Trimesh(
        vertices=sphere_orig.vertices,
        faces=sphere_orig.faces,
        vertex_normals=2 * sphere_orig.vertex_normals,
    )

    s = cdc.TrimeshSurface(sphere, crs="crs", units=cedalion.units.millimeter)

    norm_raw = xrutils.norm(s.get_vertex_normals(s.vertices, normalized=False), s.crs)
    norm_explicit = xrutils.norm(s.get_vertex_normals(s.vertices, normalized=True), s.crs)
    norm_default = xrutils.norm(s.get_vertex_normals(s.vertices), s.crs)

    assert_allclose(norm_raw, 2)
    assert_allclose(norm_explicit, 1)
    assert_allclose(norm_default, 1)  # normalization is the default

    # set one normal to zero
    vertex_normals = 2 * sphere_orig.vertex_normals.copy()
    vertex_normals[5, :] = 0.0

    sphere = trimesh.Trimesh(
        vertices=sphere_orig.vertices,
        faces=sphere_orig.faces,
        vertex_normals=vertex_normals,
    )

    s = cdc.TrimeshSurface(sphere, crs="crs", units=cedalion.units.millimeter)

    # without normalization the zero-length normal is acceptable ...
    xrutils.norm(s.get_vertex_normals(s.vertices, normalized=False), s.crs)

    # ... but normalizing a zero-length vector must fail
    with pytest.raises(ValueError):
        xrutils.norm(s.get_vertex_normals(s.vertices, normalized=True), s.crs)
47 |
--------------------------------------------------------------------------------
/tests/test_fwmodel.py:
--------------------------------------------------------------------------------
1 | import cedalion.imagereco.forward_model as fw
--------------------------------------------------------------------------------
/tests/test_geodesics.py:
--------------------------------------------------------------------------------
1 | from cedalion.dataclasses.geometry import PycortexSurface, TrimeshSurface
2 | import trimesh.creation
3 | from cedalion import units
4 | import numpy as np
5 | from numpy.testing import assert_allclose
6 |
7 |
def test_pycortex_geodesic_distance_on_sphere():
    """Test geodesic distance calculation of PycortexSurface on a spherical mesh.

    Calculate distances between all vertices and the top-most point of the sphere.
    Use the analytic formula of distance on a spherical surface to assess the result.
    """
    RADIUS = 10
    CRS = "some_crs"

    # sphere centered at the origin
    mesh = trimesh.creation.icosphere(subdivisions=5, radius=RADIUS)
    surface = PycortexSurface.from_trimeshsurface(TrimeshSurface(mesh, CRS, units.cm))

    # find index of highest vertex (0,0,10)
    vertices = surface.vertices.pint.dequantify().values
    top_idx = np.argmax(vertices[:, 2])
    assert all(vertices[top_idx, :] == (0, 0, 10))

    # distance of all vertices to (0,0,10)
    distances = surface.geodesic_distance([top_idx], m=10)

    # analytic geodesic distance on a sphere: radius times central angle
    top_dir = np.array([0, 0, 1.])
    unit_vertices = vertices / np.linalg.norm(vertices, axis=1)[:, None]  # (nverts,3)
    angles = np.arccos(np.dot(top_dir, unit_vertices.T))  # (nverts,), radians
    expected_distances = RADIUS * angles

    # FIXME 16 % relative tolerance is needed to make this test pass. That's quite high.
    # Absolute tolerance of 0.1 at a radius of 10 works, too.
    assert_allclose(distances, expected_distances, atol=0.1)
42 |
--------------------------------------------------------------------------------
/tests/test_imagereco_forward_model.py:
--------------------------------------------------------------------------------
1 | import os
2 | import tempfile
3 |
4 | import numpy as np
5 | from scipy.sparse import find
6 |
7 | import cedalion.datasets
8 | import cedalion.imagereco.forward_model as fw
9 |
10 |
def allclose(A, B, atol=1e-8):
    """Check if two sparse matrices are equal within a tolerance.

    Args:
        A, B: scipy sparse matrices.
        atol: absolute tolerance for comparing the stored values.

    Returns:
        True if both matrices have the same shape, the same sparsity pattern
        and element-wise close values, otherwise False.
    """
    # shapes must match before anything else
    if not np.array_equal(A.shape, B.shape):
        return False

    r1, c1, v1 = find(A)
    r2, c2, v2 = find(B)

    # the sparsity patterns (row/column indices) must match exactly ...
    if not (np.array_equal(r1, r2) and np.array_equal(c1, c2)):
        return False

    # ... and the stored values must agree within the tolerance
    return np.allclose(v1, v2, atol=atol)
22 |
23 |
def test_TwoSurfaceHeadModel():
    """Round-trip TwoSurfaceHeadModel through save() and load()."""
    # prepare test head
    seg_datadir, mask_files, landmarks_file = cedalion.datasets.get_colin27_segmentation(
        downsampled=True
    )
    head = fw.TwoSurfaceHeadModel.from_segmentation(
        segmentation_dir=seg_datadir,
        mask_files=mask_files,
        landmarks_ras_file=landmarks_file,
        # disable mesh smoothing and decimation to speed up runtime
        smoothing=0,
        brain_face_count=None,
        scalp_face_count=None
    )

    with tempfile.TemporaryDirectory() as dirpath:
        # save to folder and load it back
        folder = os.path.join(dirpath, "test_head")
        head.save(folder)
        reloaded = fw.TwoSurfaceHeadModel.load(folder)

        # the reloaded model must match the original
        assert (head.landmarks == reloaded.landmarks).all()
        assert (head.segmentation_masks == reloaded.segmentation_masks).all()
        assert (head.brain.mesh.vertices == reloaded.brain.mesh.vertices).all()
        assert (head.brain.mesh.faces == reloaded.brain.mesh.faces).all()
        assert (head.t_ijk2ras.values == reloaded.t_ijk2ras.values).all()
        assert (head.t_ras2ijk.values == reloaded.t_ras2ijk.values).all()
        assert allclose(head.voxel_to_vertex_brain, reloaded.voxel_to_vertex_brain)
        assert allclose(head.voxel_to_vertex_scalp, reloaded.voxel_to_vertex_scalp)
57 |
--------------------------------------------------------------------------------
/tests/test_io_forward_model.py:
--------------------------------------------------------------------------------
1 | import os
2 | import tempfile
3 | import numpy as np
4 | import xarray as xr
5 | import cedalion.io as cio
6 |
def create_dummy_Adot():
    """Create a dummy Adot matrix for testing."""
    # random selection (with replacement) from the possible channel names
    all_channels = [f"S{i}D{j}" for i in range(14) for j in range(30)]
    channel = np.random.choice(all_channels, 100)

    num_verts = np.random.randint(500, 5000)
    wavelength = np.array([760.0, 850.0])

    values = np.random.rand(len(channel), num_verts, len(wavelength))
    return xr.DataArray(
        values,
        dims=["channel", "vertex", "wavelength"],
        coords={
            "channel": ("channel", channel),
            "is_brain": ("vertex", np.random.randint(0, 2, num_verts)),
            "wavelength": ("wavelength", wavelength),
        },
    )
19 |
20 |
def test_save_Adot():
    """Round-trip: write Adot with xarray, read it back with cio.load_Adot."""
    Adot = create_dummy_Adot()
    # TemporaryDirectory cleans up after the test (mkdtemp leaked the dir before)
    with tempfile.TemporaryDirectory() as dirpath:
        tmp_fn = os.path.join(dirpath, "test_Adot.nc")
        Adot.to_netcdf(tmp_fn)
        # load from file
        Adot2 = cio.load_Adot(tmp_fn)
        # compare
        assert np.all(Adot.values == Adot2.values)
        assert np.all(Adot.channel.values == Adot2.channel.values)
        assert np.all(Adot.vertex.values == Adot2.vertex.values)
        assert np.all(Adot.wavelength.values == Adot2.wavelength.values)
34 |
35 |
def test_load_Adot():
    """A matrix written with cio.save_Adot can be opened with plain xarray.

    NOTE(review): despite its name this exercises save_Adot, while the sibling
    test exercises load_Adot — the names look swapped. TODO confirm intent.
    """
    Adot = create_dummy_Adot()
    # FIX: use a self-cleaning TemporaryDirectory; tempfile.mkdtemp leaked one
    # directory per test run.
    with tempfile.TemporaryDirectory() as dirpath:
        tmp_fn = os.path.join(dirpath, "test_Adot.nc")
        cio.save_Adot(tmp_fn, Adot)
        # load from file
        Adot2 = xr.open_dataset(tmp_fn)
        # compare while the file still exists (opening is lazy)
        assert np.all(Adot.values == Adot2.to_array()[0])
        assert np.all(Adot.channel.values == Adot2.channel.values)
        assert np.all(Adot.vertex.values == Adot2.vertex.values)
        assert np.all(Adot.wavelength.values == Adot2.wavelength.values)
        # release the file handle so the directory can be removed on Windows
        Adot2.close()
49 |
50 |
51 |
--------------------------------------------------------------------------------
/tests/test_io_photogrammetry.py:
--------------------------------------------------------------------------------
1 | import os
2 | import tempfile
3 | import numpy as np
4 | import xarray as xr
5 | import cedalion
6 | import cedalion.dataclasses as cdc
7 | from cedalion.io import read_photogrammetry_einstar, opt_fid_to_xr
8 | from numpy.testing import assert_array_almost_equal
9 |
10 |
def test_read_einstar():
    """read_einstar returns fiducials and optodes in file order."""
    fiducials = np.random.rand(5, 3)
    for n_optodes in [1, 10, 100]:
        optodes = np.random.rand(n_optodes, 3)
        fname = write_test_photo_fn(fiducials, optodes)

        # read the freshly written test file back
        fid, opt = cedalion.io.read_einstar(fname)

        # fiducials come back under their canonical labels, values unchanged
        assert list(fid.keys()) == ["Nz", "Iz", "Rpa", "Lpa", "Cz"]
        assert_array_almost_equal(np.array(list(fid.values())), fiducials)
        assert_array_almost_equal(np.array(list(opt.values())), optodes)
23 |
24 |
def test_opt_fid_to_xr():
    """opt_fid_to_xr handles 0..9 fiducials/optodes and assigns point types."""
    for n_fidu in range(10):
        for n_opts in range(10):
            fiducials = {
                "F" + str(i): row for i, row in enumerate(np.random.rand(n_fidu, 3))
            }
            optodes = {
                "S" + str(i): row for i, row in enumerate(np.random.rand(n_opts, 3))
            }

            fid, opt = opt_fid_to_xr(fiducials, optodes)
            assert isinstance(fid, xr.DataArray)
            assert isinstance(opt, xr.DataArray)

            # empty inputs still yield a correctly shaped (0, 3) array
            if n_fidu > 0:
                assert_array_almost_equal(fid.values, list(fiducials.values()))
            else:
                assert fid.shape == (0, 3)

            if n_opts > 0:
                assert_array_almost_equal(opt.values, list(optodes.values()))
            else:
                assert opt.shape == (0, 3)

            assert (opt.type == cdc.PointType.SOURCE).all()

            # relabel the sources as detectors and convert again
            optodes = {lbl.replace("S", "D"): v for lbl, v in optodes.items()}
            fid, opt = opt_fid_to_xr(fiducials, optodes)
            assert (opt.type == cdc.PointType.DETECTOR).all()
            assert list(opt.label.values) == ["D%d" % i for i in range(len(opt))]
52 |
53 |
def test_read_photogrammetry_einstar():
    """Full reader: 5 fiducials plus 100 optodes (10 sources, 90 detectors)."""
    fiducials = np.random.rand(5, 3)
    optodes = np.random.rand(100, 3)
    fname = write_test_photo_fn(fiducials, optodes)

    fid, opt = read_photogrammetry_einstar(fname)

    assert isinstance(fid, xr.DataArray)
    assert isinstance(opt, xr.DataArray)
    assert_array_almost_equal(fid.values, fiducials)
    assert_array_almost_equal(opt.values, optodes)
    # write_test_photo_fn labels the first 10 points S*, the rest D*
    assert sum(opt.type == cdc.PointType.SOURCE) == 10
    assert sum(opt.type == cdc.PointType.DETECTOR) == 90
66 |
67 |
def write_test_photo_fn(fid, opt):
    """Write fiducial and optode coordinates to a temporary einstar-style file.

    Args:
        fid: (5, 3) array-like of fiducial coordinates, in the order
            Nz, Iz, Rpa, Lpa, Cz.
        opt: (N, 3) array-like of optode coordinates. The first 10 rows are
            labeled S0..S9, the remaining rows D10..D{N-1} (the detector index
            keeps the absolute position in `opt`).

    Returns:
        Path of the written file. The enclosing temporary directory is NOT
        cleaned up automatically; callers need the file to persist.
    """
    dirpath = tempfile.mkdtemp()
    tmp_fn = os.path.join(dirpath, "tmp.txt")
    # fiducial lines have no space after the label's comma; optode lines do —
    # both formats are preserved exactly as the reader tests expect them.
    fiducial_labels = ["Nz", "Iz", "Rpa", "Lpa", "Cz"]
    with open(tmp_fn, "w") as f:
        for label, row in zip(fiducial_labels, fid):
            f.write("%s,%f, %f, %f\n" % (label, row[0], row[1], row[2]))
        for i, v in enumerate(opt):
            label = "S%d" % i if i < 10 else "D%d" % i
            f.write("%s, %f, %f, %f\n" % (label, v[0], v[1], v[2]))
    return tmp_fn
85 |
--------------------------------------------------------------------------------
/tests/test_io_probe_geometry.py:
--------------------------------------------------------------------------------
1 | import os
2 | import tempfile
3 | import numpy as np
4 | import xarray as xr
5 | from numpy.testing import assert_array_almost_equal
6 |
7 | import cedalion.dataclasses as cdc
8 | from cedalion.io import load_tsv
9 |
10 |
def test_load_tsv():
    """load_tsv parses an optode TSV into a typed point cloud."""
    num = 10
    pos = np.random.rand(num, 3)
    # random mix of source ("S<i>") and detector ("D<i>") labels
    labels = [np.random.choice(["S%d", "D%d"]) % (i + 1) for i in range(num)]

    # write the test data to a file
    dirpath = tempfile.mkdtemp()
    tsv_path = os.path.join(dirpath, "optodes.tsv")
    with open(tsv_path, "w") as f:
        for label, p in zip(labels, pos):
            f.write("%s\t%f\t%f\t%f\n" % (label, p[0], p[1], p[2]))

    optodes = load_tsv(tsv_path)

    assert isinstance(optodes, xr.DataArray)
    assert_array_almost_equal(optodes.pint.dequantify().values, pos)
    # every point was classified as either source or detector
    n_sources = sum(optodes.type == cdc.PointType.SOURCE)
    n_detectors = sum(optodes.type == cdc.PointType.DETECTOR)
    assert n_sources + n_detectors == num
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/tests/test_io_snirf.py:
--------------------------------------------------------------------------------
1 | """Tests for cedalion.io.read_snirf."""
2 |
3 | import pytest
4 | import os
5 | from pathlib import Path
6 | import cedalion.io
7 | import cedalion.io.snirf
8 | import cedalion.datasets
9 | from tempfile import TemporaryDirectory
10 |
11 | # Edge cases in the handling of snirf files are often discovered in files provided
12 | # by users. Ideally, we assemble a zoo of such edge case files and regularly test
13 | # against them. But we won't get permission to share all of these files. Hence, this
14 | # test looks for an environment variable "SNIRF_ZOO" that points to a local directory.
15 | # The test tries to read all snirf files in it. The test is skipped if the directory is
16 | # not available.
17 |
skip_if_snirf_zoo_unavailable = pytest.mark.skipif(
    "SNIRF_ZOO" not in os.environ, reason="snirf zoo not available"
)

# collect every *.snirf file below $SNIRF_ZOO (empty list when it is unset)
testfiles = []

if "SNIRF_ZOO" in os.environ:
    zoo_dir = Path(os.environ["SNIRF_ZOO"])
    testfiles += sorted(str(p) for p in zoo_dir.glob("**/*.snirf"))
27 |
28 |
@skip_if_snirf_zoo_unavailable
@pytest.mark.parametrize("fname", testfiles)
def test_read_snirf(fname):
    # smoke test: reading each snirf-zoo file must not raise
    cedalion.io.read_snirf(fname)
33 |
34 |
@skip_if_snirf_zoo_unavailable
@pytest.mark.parametrize("fname", testfiles)
def test_write_snirf(fname):
    """Smoke test: a recording read from disk can be written back out."""
    recordings = cedalion.io.read_snirf(fname)

    with TemporaryDirectory() as tmpdir:
        out_path = Path(tmpdir) / "test.snirf"
        cedalion.io.snirf.write_snirf(out_path, recordings)
43 |
44 |
def test_add_number_to_name():
    """add_number_to_name appends the next free two-digit suffix."""
    add = cedalion.io.snirf.add_number_to_name

    assert add("amp", ["amp"]) == "amp_02"

    assert add("amp", ["amp", "amp_02"]) == "amp_03"

    # suffixes are counted per base name, independent of other entries
    mixed = ["amp", "od", "od_02", "od_03", "amp_02"]
    assert add("amp", mixed) == "amp_03"
    assert add("od", mixed) == "od_04"
55 |
56 |
def test_read_snirf_crs():
    """The geo3d crs defaults to 'pos' and can be overridden via crs=."""
    path = cedalion.datasets.get_fingertapping_snirf_path()

    default_rec = cedalion.io.read_snirf(path)[0]
    assert default_rec.geo3d.points.crs == "pos"

    custom_rec = cedalion.io.read_snirf(path, crs="another_crs")[0]
    assert custom_rec.geo3d.points.crs == "another_crs"
65 |
--------------------------------------------------------------------------------
/tests/test_labeled_points.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import xarray as xr
3 | import numpy as np
4 | from cedalion.dataclasses.schemas import LabeledPointCloudSchema
5 | from cedalion.dataclasses import (
6 | PointType,
7 | affine_transform_from_numpy,
8 | build_labeled_points,
9 | )
10 | from cedalion import units
11 |
12 |
@pytest.fixture
def labeled_points():
    """Four labeled points (source, detector, two landmarks) in mm, crs 'mni'."""
    point_types = [
        PointType.SOURCE,
        PointType.DETECTOR,
        PointType.LANDMARK,
        PointType.LANDMARK,
    ]
    points = xr.DataArray(
        np.arange(12).reshape(4, 3),
        dims=["label", "mni"],
        coords={
            "label": ("label", ["S1", "D1", "Nz", "Iz"]),
            "type": ("label", point_types),
        },
        attrs={"units": "mm"},
    )
    return points.pint.quantify()
32 |
33 |
def test_schema_validate(labeled_points):
    # the fixture must conform to the LabeledPointCloud schema (raises if not)
    LabeledPointCloudSchema.validate(labeled_points)
36 |
37 |
def test_points_add_single(labeled_points):
    """points.add with a single label appends one typed point."""
    result = labeled_points.points.add("Cz", [1.0, 2.0, 3.0], PointType.LANDMARK)

    assert result.pint.units == units.Unit("mm")
    assert len(result) == 5
    assert "Cz" in result.label
    assert result.loc["Cz"].type.item() == PointType.LANDMARK

    # BUG FIX: the original called all(...) without asserting its result,
    # so the coordinate check never actually ran.
    assert all(result.loc["Cz"] == units.Quantity([1.0, 2.0, 3.0], "mm"))
47 |
48 |
def test_points_add_multiple(labeled_points):
    """points.add with lists appends several typed points at once."""
    result = labeled_points.points.add(
        ["S5", "D5"], np.arange(6).reshape(2, 3), [PointType.SOURCE, PointType.DETECTOR]
    )

    assert result.pint.units == units.Unit("mm")
    assert len(result) == 6
    assert "S5" in result.label
    assert "D5" in result.label

    assert result.loc["S5"].type.item() == PointType.SOURCE
    assert result.loc["D5"].type.item() == PointType.DETECTOR

    # BUG FIX: the original called all(...) without asserting the results,
    # so the coordinate checks never actually ran.
    assert all(result.loc["S5"] == units.Quantity([0.0, 1.0, 2.0], "mm"))
    assert all(result.loc["D5"] == units.Quantity([3.0, 4.0, 5.0], "mm"))
64 |
65 |
def test_transform_numpy(labeled_points):
    """A plain numpy affine scales coordinates but keeps dims and units."""
    # uniform scaling by 2 (homogeneous 4x4 matrix)
    scale_by_two = np.diag([2.0, 2.0, 2.0, 1.0])

    transformed = labeled_points.points.apply_transform(scale_by_two)

    # the input stays untouched, the output is scaled
    assert all(labeled_points[0, :] == units.Quantity([0.0, 1.0, 2.0], "mm"))
    assert all(transformed[0, :] == units.Quantity([0.0, 2.0, 4.0], "mm"))
    assert transformed.dims == labeled_points.dims
    assert transformed.pint.units == labeled_points.pint.units
82 |
83 |
def test_transform_AffineTransform(labeled_points):
    """A typed AffineTransform changes crs and units along with coordinates."""
    trafo = affine_transform_from_numpy(
        np.diag([2.0, 2.0, 2.0, 1.0]),  # uniform scaling by 2
        from_crs="mni",
        to_crs="other_crs",
        from_units="mm",
        to_units="cm",
    )

    transformed = labeled_points.points.apply_transform(trafo)

    # input untouched; output scaled AND converted to the target units/crs
    assert all(labeled_points[0, :] == units.Quantity([0.0, 1.0, 2.0], "mm"))
    assert all(transformed[0, :] == units.Quantity([0.0, 2.0, 4.0], "cm"))
    assert transformed.dims != labeled_points.dims
    assert labeled_points.dims[1] == "mni"
    assert transformed.dims[1] == "other_crs"
    assert transformed.pint.units == units.cm
106 |
107 |
def test_build_labeled_points_simple():
    """build_labeled_points defaults to dimensionless units."""
    crs = "a_crs"

    pts = build_labeled_points([[0, 0, 0], [1, 1, 1]], crs=crs)

    LabeledPointCloudSchema.validate(pts)
    assert pts.points.crs == crs
    assert len(pts) == 2
    assert pts.pint.units == units.parse_units("1")
117 |
118 |
def test_build_labeled_points_units():
    """An explicit units= argument is applied; type defaults to UNKNOWN."""
    pts = build_labeled_points([[0, 0, 0], [1, 1, 1]], crs="a_crs", units="mm")

    LabeledPointCloudSchema.validate(pts)
    assert pts.pint.units == units.mm
    assert all(pts.type == PointType.UNKNOWN)
127 |
128 |
def test_build_labeled_points_labels():
    """Explicit labels are attached to the points; type defaults to UNKNOWN."""
    pts = build_labeled_points(
        [[0, 0, 0], [1, 1, 1]], crs="a_crs", labels=["A", "B"]
    )

    LabeledPointCloudSchema.validate(pts)

    assert all(pts.label == ["A", "B"])
    assert all(pts.type == PointType.UNKNOWN)
138 |
139 |
def test_build_labeled_points_dynamic_labels():
    """Auto-generated labels are zero-padded to the width of the point count."""
    crs = "a_crs"

    # expected second label for increasing point counts: the padding grows
    # with the number of digits needed for the largest index
    expected = {5: "1", 97: "01", 134: "001", 4352: "0001"}

    for npts, second_label in expected.items():
        pts = build_labeled_points(np.random.random((npts, 3)), crs=crs)
        LabeledPointCloudSchema.validate(pts)

        assert pts.label.values[1] == second_label
154 |
--------------------------------------------------------------------------------
/tests/test_model_glm_design_matrix.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import xarray as xr
3 | from pint.testsuite.helpers import assert_quantity_almost_equal as assert_approx
4 |
5 | import cedalion
6 | import cedalion.datasets
7 | import cedalion.models.glm as glm
8 | import cedalion.models.glm.design_matrix as dm
9 |
10 |
@pytest.fixture
def rec():
    """Test recording with amp, od and conc time series."""
    recording = cedalion.datasets.get_snirf_test_data()[0]
    recording["od"] = cedalion.nirs.int2od(recording["amp"])

    # differential pathlength factors, one per wavelength
    dpf = xr.DataArray(
        [6, 6],
        dims="wavelength",
        coords={"wavelength": recording["amp"].wavelength},
    )

    recording["conc"] = cedalion.nirs.od2conc(
        recording["od"], recording.geo3d, dpf, spectrum="prahl"
    )

    return recording
26 |
27 |
def test_avg_short_channel(rec):
    """The short-channel regressor equals the channel mean at each time point."""
    ts_long, ts_short = cedalion.nirs.split_long_short_channels(
        rec["conc"], rec.geo3d, distance_threshold=1.5 * cedalion.units.cm
    )

    regressor = dm.average_short_channel(ts_short)

    assert regressor.dims == ("time", "regressor", "chromo")

    mean_hbo_0 = ts_short.sel(chromo="HbO", time=0).mean().item()
    mean_hbr_0 = ts_short.sel(chromo="HbR", time=0).mean().item()

    # FIX: select time with the numeric label 0 (as in the selections above),
    # not the string "0".
    assert_approx(regressor.sel(chromo="HbO", time=0).item(), mean_hbo_0.magnitude)
    assert_approx(regressor.sel(chromo="HbR", time=0).item(), mean_hbr_0.magnitude)
42 |
43 |
def test_make_design_matrix(rec):
    """Smoke test: make_design_matrix runs with every short-channel method."""
    # split the time series into two based on channel distance
    ts_long, ts_short = cedalion.nirs.split_long_short_channels(
        rec["conc"], rec.geo3d, distance_threshold=1.5 * cedalion.units.cm
    )

    for method in (None, "closest", "max_corr", "mean"):
        design_matrix, channel_wise_regressors = dm.make_design_matrix(
            ts_long,
            ts_short,
            rec.stim,
            rec.geo3d,
            basis_function=glm.Gamma(
                tau=0 * cedalion.units.s,
                sigma=3 * cedalion.units.s,
                T=3 * cedalion.units.s,
            ),
            drift_order=1,
            short_channel_method=method,
        )

        # FIXME only checks that the methods run without raising.
66 |
--------------------------------------------------------------------------------
/tests/test_nirs.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pint
3 | import pytest
4 | import xarray as xr
5 | from numpy.testing import assert_allclose
6 |
7 | import cedalion.dataclasses as cdc
8 | import cedalion.datasets
9 | import cedalion.nirs
10 |
11 |
def test_get_extinction_coefficients_notexistant():
    """An unknown spectrum name raises ValueError."""
    with pytest.raises(ValueError):
        cedalion.nirs.get_extinction_coefficients(
            "nonsupported spectrum", [750, 850]
        )
17 |
18 |
def test_get_extinction_coefficients_prahl():
    """The 'prahl' spectrum yields coefficients in mm^-1/M per chromophore."""
    wavelengths = [750, 850]

    E = cedalion.nirs.get_extinction_coefficients("prahl", wavelengths)

    assert "chromo" in E.dims
    assert "wavelength" in E.dims
    assert E.pint.units == pint.Unit("mm^-1 / M")
    # the requested wavelengths are preserved as a coordinate
    assert (E.wavelength.values == wavelengths).all()
28 |
29 |
@pytest.fixture
def ts():
    """A single-channel, two-wavelength time series with three samples."""
    data = np.asarray([[[10, 20, 30], [30, 20, 10]]])
    return cdc.build_timeseries(
        data,
        dims=["channel", "wavelength", "time"],
        time=[1, 2, 3],
        channel=["S1D1"],
        value_units="mV",
        time_units="s",
        other_coords={"wavelength": [760.0, 850.0]},
    )
41 |
42 |
def test_int2od(ts):
    """int2od returns dimensionless optical density values."""
    od = cedalion.nirs.int2od(ts)
    assert od.pint.units == 1

    od = od.pint.dequantify()
    # intensities 10,20,30 / 30,20,10 yield ratios 0.5,1.0,1.5 and reversed
    assert_allclose(od.loc["S1D1", 760.0, :], -np.log([0.5, 1.0, 1.5]))
    assert_allclose(od.loc["S1D1", 850.0, :], -np.log([1.5, 1.0, 0.5]))
50 |
51 |
def test_od2conc2od():
    """od -> conc -> od round-trips for several wavelength pairs."""
    rec = cedalion.datasets.get_snirf_test_data()[0]

    for wl1, wl2 in [(760., 850.), (700, 900), (810, 820)]:
        amp = rec["amp"].copy()
        amp.wavelength.values[:] = [wl1, wl2]

        dpf = xr.DataArray(
            [6, 6], dims="wavelength", coords={"wavelength": [wl1, wl2]}
        )

        # BUG FIX: convert the copy carrying the modified wavelengths, not the
        # unmodified rec["amp"] — otherwise the od wavelengths never match the
        # dpf wavelengths for the non-default pairs.
        od1 = cedalion.nirs.int2od(amp)
        conc = cedalion.nirs.od2conc(od1, rec.geo3d, dpf, "prahl")
        od2 = cedalion.nirs.conc2od(conc, rec.geo3d, dpf, "prahl")

        assert od1.pint.units == od2.pint.units
        od1 = od1.pint.dequantify()
        od2 = od2.pint.dequantify()

        assert_allclose(
            od1.transpose("channel", "wavelength", "time"),
            od2.transpose("channel", "wavelength", "time"),
        )
75 |
--------------------------------------------------------------------------------
/tests/test_points_accessors.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import cedalion.datasets
3 | import cedalion.geometry.utils as geoutils
4 | from cedalion.dataclasses.geometry import affine_transform_from_numpy
5 | from cedalion.errors import CRSMismatchError
6 |
@pytest.fixture
def geo3d():
    """geo3d of the test dataset with its crs renamed to 'digitized'."""
    rec = cedalion.datasets.get_snirf_test_data()[0]
    return rec.geo3d.rename({"pos": "digitized"})
14 |
15 |
@pytest.fixture
def np_transform():
    # plain 4x4 affine: translation by (1, 2, 3)
    return geoutils.m_trans([1, 2, 3])
19 |
20 |
@pytest.fixture
def xr_transform(np_transform):
    # typed transform mapping crs "digitized" -> "coreg", units m -> m
    return affine_transform_from_numpy(np_transform, "digitized", "coreg", "m", "m")
24 |
25 |
def test_crs(geo3d):
    # the second dimension of a point cloud is its crs; .points.crs exposes it
    assert geo3d.dims == ("label", "digitized")
    assert geo3d.points.crs == "digitized"
29 |
30 |
def test_apply_transform(geo3d, xr_transform):
    """A typed transform moves the points into its target crs."""
    assert geo3d.points.crs == "digitized"
    assert xr_transform.dims == ("coreg", "digitized")

    result = geo3d.points.apply_transform(xr_transform)

    assert result.points.crs == "coreg"
40 |
41 |
def test_apply_transform_crs_mismatch(geo3d, xr_transform):
    """Transforming points whose crs differs from the transform's raises."""
    renamed = geo3d.rename({"digitized": "some_other_crs"})
    with pytest.raises(CRSMismatchError):
        renamed.points.apply_transform(xr_transform)
46 |
47 |
def test_apply_transform_numpy(geo3d, np_transform):
    """Plain numpy transforms leave both the crs and the units unchanged."""
    transformed = geo3d.points.apply_transform(np_transform)

    # numpy transforms carry no crs/unit metadata, so both are preserved
    assert transformed.points.crs == geo3d.points.crs
    assert transformed.pint.units == geo3d.pint.units
56 |
--------------------------------------------------------------------------------
/tests/test_sigdecomp.py:
--------------------------------------------------------------------------------
1 | from cedalion.sigdecomp import ERBM, ICA_EBM
2 |
--------------------------------------------------------------------------------
/tests/test_sigdecomp_dummy.py:
--------------------------------------------------------------------------------
1 | import cedalion.datasets
2 | import cedalion.sigdecomp.dummy
3 |
4 |
def test_split_frequency_bands():
    """split_frequency_bands adds a leading 'band' dimension."""
    recordings = cedalion.datasets.get_snirf_test_data()
    amp = recordings[0].timeseries["amp"]

    assert amp.dims == ("channel", "wavelength", "time")

    bands = cedalion.sigdecomp.dummy.split_frequency_bands(amp)

    # one sub-series per band, original shape otherwise unchanged
    assert bands.dims == ("band", "channel", "wavelength", "time")
    assert all(bands.band == ["cardiac", "respiratory"])
    assert bands.shape[1:] == amp.shape
16 |
--------------------------------------------------------------------------------
/tests/test_sigproc_frequency.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import numpy as np
3 | from cedalion.dataclasses import build_timeseries
4 | from cedalion.sigproc.frequency import freq_filter, sampling_rate
5 | from cedalion import units
6 |
7 |
@pytest.fixture
def timeseries():
    """Three channels: a 0.1 Hz sine, a 1.0 Hz sine, and their sum.

    Sampled at 3.14 Hz for 1000 samples; used to check which frequency
    components survive filtering.
    """
    # FIX: renamed the local from 'sampling_rate' to avoid shadowing the
    # sampling_rate function imported at module level.
    fs = 3.14
    t = np.arange(1000) / fs

    f1 = 0.1
    f2 = 1.0

    y1 = 10 * np.sin(2 * np.pi * f1 * t)
    y2 = 10 * np.sin(2 * np.pi * f2 * t)

    return build_timeseries(
        np.vstack((y1, y2, y1 + y2)),
        ["channel", "time"],
        t,
        ["y1", "y2", "y1+y2"],
        "V",
        "s",
    )
27 |
28 |
def test_fixture(timeseries):
    # sanity check: the fixture carries the three expected channels
    assert all(timeseries.channel.values == ["y1", "y2", "y1+y2"])
31 |
32 |
def test_sampling_rate(timeseries):
    """sampling_rate recovers the fixture's 3.14 Hz rate as a quantity."""
    rate = sampling_rate(timeseries)

    assert rate.units == units.Hz
    assert rate.magnitude == pytest.approx(3.14)
38 |
39 |
40 | def _proj(a, b):
41 | return np.dot(a, b)
42 |
43 |
def assert_freq_filter_result(timeseries, filtered):
    """Assert that filtering removed the f1 component and kept f2 intact."""
    # reference energies of the pure component channels
    energy_y1 = _proj(timeseries.loc["y1"], timeseries.loc["y1"])
    energy_y2 = _proj(timeseries.loc["y2"], timeseries.loc["y2"])

    # overlap of each filtered channel with the pure components
    overlap_y1 = _proj(timeseries.loc["y1"], filtered.loc["y1"])
    overlap_y2 = _proj(timeseries.loc["y2"], filtered.loc["y2"])
    overlap_sum_y1 = _proj(timeseries.loc["y1"], filtered.loc["y1+y2"])
    overlap_sum_y2 = _proj(timeseries.loc["y2"], filtered.loc["y1+y2"])

    assert overlap_y1 < (energy_y1 / 100)  # f1 got filtered out
    assert overlap_y2 == pytest.approx(energy_y2, rel=0.005)  # f2 remains intact
    assert overlap_sum_y1 < (energy_y1 / 100)
    assert overlap_sum_y2 == pytest.approx(energy_y2, rel=0.005)
59 |
60 |
def test_freq_filter(timeseries):
    """Band-pass 0.8-1.2 Hz keeps the 1.0 Hz channel and removes 0.1 Hz."""
    filtered = freq_filter(timeseries, 0.8 * units.Hz, 1.2 * units.Hz)

    assert_freq_filter_result(
        timeseries.pint.dequantify(), filtered.pint.dequantify()
    )
67 |
68 |
def test_freq_filter_units(timeseries):
    """The same band expressed in mHz yields the same filtering result."""
    filtered = freq_filter(timeseries, 800 * units.mHz, 1200 * units.mHz)

    assert_freq_filter_result(
        timeseries.pint.dequantify(), filtered.pint.dequantify()
    )
75 |
76 |
def test_freq_filter_accessor(timeseries):
    """The .cd accessor accepts plain floats for the band edges."""
    filtered = timeseries.cd.freq_filter(0.8, 1.2)

    assert_freq_filter_result(
        timeseries.pint.dequantify(), filtered.pint.dequantify()
    )
84 |
85 |
def test_freq_filter_accessor_units(timeseries):
    """The .cd accessor accepts quantities with mixed (Hz/mHz) units."""
    filtered = timeseries.cd.freq_filter(0.8 * units.Hz, 1200 * units.mHz)

    assert_freq_filter_result(
        timeseries.pint.dequantify(), filtered.pint.dequantify()
    )
93 |
94 |
def test_freq_filter_highpass(timeseries):
    """fmax=0 selects a high-pass; 0.1 Hz is removed, 1.0 Hz kept."""
    filtered = freq_filter(timeseries, fmin=0.5 * units.Hz, fmax=0 * units.Hz)

    assert_freq_filter_result(
        timeseries.pint.dequantify(), filtered.pint.dequantify()
    )
102 |
103 |
def test_freq_filter_lowpass(timeseries):
    """fmin=0 selects a low-pass; 0.1 Hz survives, 1.0 Hz is removed."""
    filtered = freq_filter(timeseries, fmin=0. * units.Hz, fmax=0.5 * units.Hz)

    filtered = filtered.pint.dequantify()
    timeseries = timeseries.pint.dequantify()

    energy_y1 = _proj(timeseries.loc["y1"], timeseries.loc["y1"])
    energy_y2 = _proj(timeseries.loc["y2"], timeseries.loc["y2"])

    overlap_y1 = _proj(timeseries.loc["y1"], filtered.loc["y1"])
    overlap_y2 = _proj(timeseries.loc["y2"], filtered.loc["y2"])
    overlap_sum_y1 = _proj(timeseries.loc["y1"], filtered.loc["y1+y2"])
    overlap_sum_y2 = _proj(timeseries.loc["y2"], filtered.loc["y1+y2"])

    assert overlap_y1 == pytest.approx(energy_y1, rel=0.005)  # f1 remains intact
    assert overlap_y2 < (energy_y2 / 100)  # f2 got filtered out
    assert overlap_sum_y1 == pytest.approx(energy_y1, rel=0.005)
    assert overlap_sum_y2 < (energy_y2 / 100)
122 |
--------------------------------------------------------------------------------
/tests/test_sigproc_motion_correct.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | import cedalion.datasets
4 | import cedalion.nirs
5 | import cedalion.sigproc.motion_correct as mc
6 | from cedalion import units
7 |
8 |
@pytest.fixture
def rec():
    """Test recording with amp (quantified in volts) and derived od series."""
    recording = cedalion.datasets.get_snirf_test_data()[0]
    recording["amp"] = recording["amp"].pint.dequantify().pint.quantify(units.V)
    recording["od"] = cedalion.nirs.int2od(recording["amp"])
    return recording
15 |
16 |
def test_motion_correct_splineSG_default_param(rec):
    # smoke test: splineSG with p=1.0 and default frame size must not raise
    mc.motion_correct_splineSG(rec["od"], p=1.0)
19 |
def test_motion_correct_splineSG_custom_param(rec):
    # smoke test: splineSG with an explicit 3 s frame size must not raise
    mc.motion_correct_splineSG(rec["od"], p=1.0, frame_size=3 * units.s)
22 |
def test_motion_correct_tddr(rec):
    # smoke test: tddr on optical density data must not raise
    mc.tddr(rec["od"])
25 |
def test_motion_correct_wavelets(rec):
    # smoke test: wavelet correction with db2, 4 levels, iqr=1.5 must not raise
    mc.motion_correct_wavelet(rec["od"], iqr=1.5, wavelet='db2', level=4)
--------------------------------------------------------------------------------
/tests/test_sigproc_quality.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from numpy.testing import assert_allclose
3 | import cedalion.sigproc.quality as quality
4 | import cedalion.datasets
5 | from cedalion import units
6 |
7 |
@pytest.fixture
def rec():
    """Test recording whose amplitude data is quantified in volts."""
    recording = cedalion.datasets.get_snirf_test_data()[0]
    recording["amp"] = recording["amp"].pint.dequantify().pint.quantify(units.V)
    return recording
13 |
14 |
def test_sci(rec):
    # smoke test: quality.sci over 5 s windows with threshold 0.7 must not raise
    _, _ = quality.sci(rec["amp"], 5 * units.s, sci_thresh=0.7)
17 |
18 |
def test_psp(rec):
    # smoke test: quality.psp over 2 s windows with threshold 0.1 must not raise
    _, _ = quality.psp(rec["amp"], 2 * units.s, psp_thresh=0.1)
21 |
def test_snr(rec):
    # smoke test: quality.snr with threshold 2.0 must not raise
    _, _ = quality.snr(rec["amp"], snr_thresh=2.0)
24 |
@pytest.mark.parametrize(
    "stat_type",
    [
        "default",
        "histogram_mode",
        "kdensity_mode",
        "parabolic_mode",
        "median",
        "mean",
        "MAD",
    ],
)
def test_gvtd(rec, stat_type):
    # smoke test: quality.gvtd must run with every supported statistic type
    _, _ = quality.gvtd(rec["amp"], stat_type=stat_type)
39 |
40 |
def test_mean_amp(rec):
    """Smoke test: mean_amp runs with a (min, max) amplitude range."""
    amp_range = (0.5 * units.V, 1.0 * units.V)
    _, _ = quality.mean_amp(rec["amp"], amp_range=amp_range)
46 |
47 |
def test_sd_dist(rec):
    """sd_dist reproduces known channel distances and flags out-of-range ones."""
    # expected source-detector distances in mm
    expected_mm = {
        "S3D3": 45.3,
        "S8D6": 47.5,
        "S8D12": 48.1,
        "S1D16": 7.1,
        "S15D23": 8.5,
    }

    metric, mask = quality.sd_dist(
        rec["amp"],
        rec.geo3d,
        sd_range=(1.5 * units.cm, 4.5 * units.cm),
    )

    selected = (
        metric.sel(channel=list(expected_mm.keys()))
        .pint.to("mm")
        .pint.dequantify()
    )

    assert_allclose(selected, list(expected_mm.values()), atol=0.1)
    # every listed channel lies outside the 1.5-4.5 cm range
    for channel in expected_mm:
        assert mask.sel(channel=channel).item() == quality.TAINTED
73 |
74 |
def test_id_motion(rec):
    """Smoke test: id_motion runs on optical density data."""
    # FIX: import explicitly — this module never imports cedalion.nirs itself
    # and previously relied on it being pulled in transitively.
    import cedalion.nirs

    rec["od"] = cedalion.nirs.int2od(rec["amp"])

    _ = quality.id_motion(rec["od"])
79 |
80 |
def test_id_motion_refine(rec):
    """Smoke test: id_motion_refine works with both mask-combining operators."""
    # FIX: import explicitly — this module never imports cedalion.nirs itself
    # and previously relied on it being pulled in transitively.
    import cedalion.nirs

    rec["od"] = cedalion.nirs.int2od(rec["amp"])

    ma_mask = quality.id_motion(rec["od"])

    for operator in ["by_channel", "all"]:
        _, _ = quality.id_motion_refine(ma_mask, operator)
88 |
89 |
def test_detect_outliers(rec):
    # smoke test: outlier detection with a 2 s std window must not raise
    _ = quality.detect_outliers(rec["amp"], t_window_std=2 * units.s)
92 |
93 |
def test_detect_baselineshift(rec):
    # baseline-shift detection consumes the mask produced by detect_outliers
    outlier_mask = quality.detect_outliers(rec["amp"], t_window_std=2 * units.s)
    _ = quality.detect_baselineshift(rec["amp"], outlier_mask)
97 |
--------------------------------------------------------------------------------
/tests/test_vtkutils.py:
--------------------------------------------------------------------------------
1 | import cedalion
2 | import cedalion.dataclasses as cdc
3 | import trimesh
4 | import numpy as np
5 |
def test_conversion():
    """Trimesh -> VTK -> Trimesh round-trip preserves geometry and metadata."""
    # a unit square in the z=0 plane, split into two triangles
    vertices = np.asarray(
        [
            [0.0, 0.0, 0.0],
            [1.0, 0.0, 0.0],
            [1.0, 1.0, 0.0],
            [0.0, 1.0, 0.0],
        ]
    )
    faces = np.asarray([[0.0, 1.0, 2.0], [1.0, 2.0, 3.0]])

    trimesh_surface = cdc.TrimeshSurface(
        trimesh.Trimesh(vertices, faces),
        crs="my_crs",
        units=cedalion.units.mm,
    )

    vtk_surface = cdc.VTKSurface.from_trimeshsurface(trimesh_surface)

    # sizes and metadata carry over to the VTK representation
    assert vtk_surface.nfaces == trimesh_surface.nfaces
    assert vtk_surface.nvertices == trimesh_surface.nvertices
    assert vtk_surface.crs == trimesh_surface.crs
    assert vtk_surface.units == trimesh_surface.units

    roundtripped = cdc.TrimeshSurface.from_vtksurface(vtk_surface)

    # geometry survives the round trip unchanged
    assert np.all(roundtripped.mesh.vertices == vertices)
    assert np.all(roundtripped.mesh.faces == faces)
35 |
--------------------------------------------------------------------------------
/tests/test_xrutils.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import cedalion.xrutils as xrutils
3 | import pint
4 | import numpy as np
5 | import xarray as xr
6 |
7 |
def test_pinv():
    """xrutils.pinv inverts both the matrix values and the units."""
    a = np.asarray([[1, 2], [3, 4]])
    expected_inv = np.asarray([[-2, 1], [1.5, -0.5]])  # analytic inverse of a

    A = xr.DataArray(a, dims=["x", "y"]).pint.quantify("kg")

    Ainv = xrutils.pinv(A)

    # units get inverted
    assert Ainv.pint.units == pint.Unit("kg^-1")

    Ainv = Ainv.pint.dequantify()
    A = A.pint.dequantify()

    # values match the analytic inverse up to rounding at 14 decimals
    assert ((Ainv.values - expected_inv).round(14) == np.zeros((2, 2))).all()

    # matrix product of the plain values yields the identity
    assert ((Ainv.values @ A.values).round(14) == np.eye(2)).all()

    # DataArray matmul contracts over BOTH shared dims -> trace of identity = 2
    assert Ainv @ A == pytest.approx(2.0)
31 |
--------------------------------------------------------------------------------