├── .coveragerc
├── .flake8
├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── feature_request.md
│   │   └── syncopy-bug-report.md
│   ├── pull_request_template.md
│   └── workflows
│       ├── benchmarks_asv_workflow.yml
│       ├── cov_test_workflow.yml
│       ├── fullparallel_workflow.yml
│       └── macos_tests_on_demand.yml
├── .gitignore
├── .gitlab-ci.yml
├── .readthedocs.yml
├── CHANGELOG.md
├── CITATION.cff
├── CONTRIBUTING.md
├── LICENSE
├── MANIFEST.in
├── README.rst
├── README_DEV.md
├── asv.conf.json
├── benchmarks
│   ├── README.md
│   ├── __init__.py
│   └── benchmarks.py
├── codecov.yml
├── doc
│   ├── Makefile
│   ├── README.md
│   ├── make.bat
│   └── source
│       ├── README.rst
│       ├── _static
│       │   ├── ComputationalRoutine.ai
│       │   ├── ComputationalRoutine.png
│       │   ├── adata.png
│       │   ├── class_diagramm.png
│       │   ├── colors.css
│       │   ├── dask_logo.png
│       │   ├── esi-style.css
│       │   ├── fooof_out_aperiodic.png
│       │   ├── fooof_out_first_try.png
│       │   ├── fooof_out_tuned.png
│       │   ├── fooof_signal_spectrum.png
│       │   ├── fooof_signal_time.png
│       │   ├── scientific_python.jpg
│       │   ├── select_example1.png
│       │   ├── select_example2.png
│       │   ├── spec.png
│       │   ├── syncopy_icon.png
│       │   ├── syncopy_icon.svg
│       │   ├── syncopy_logo.png
│       │   ├── syncopy_logo.svg
│       │   ├── syncopy_logo_small.png
│       │   ├── synth_data1.png
│       │   ├── synth_data1_spec.png
│       │   ├── synth_data_pdiff_spec.png
│       │   ├── synth_data_spec.png
│       │   ├── welch_basic_power.png
│       │   ├── welch_params.png
│       │   ├── welch_params.txt
│       │   ├── welch_raw_fft_power.png
│       │   └── workFlow.png
│       ├── _templates
│       │   ├── syncopy_base.rst
│       │   └── syncopy_class.rst
│       ├── conf.py
│       ├── developer
│       │   ├── compute_kernels.rst
│       │   ├── developer_api.rst
│       │   ├── developers.rst
│       │   ├── io.rst
│       │   ├── logging.rst
│       │   └── tools.rst
│       ├── index.rst
│       ├── quickstart
│       │   ├── damped_harm.png
│       │   ├── damped_signals.png
│       │   ├── mtmfft_spec.png
│       │   ├── quickstart.rst
│       │   ├── wavelet_spec.png
│       │   └── wavelet_spec2.png
│       ├── scripts
│       │   ├── select_example.py
│       │   └── synth_data1.py
│       ├── setup.rst
│       ├── sitemap.rst
│       ├── tutorials
│       │   ├── ar2_bp_corr.png
│       │   ├── ar2_bp_corr2.png
│       │   ├── ar2_coh.png
│       │   ├── ar2_corr.png
│       │   ├── ar2_granger.png
│       │   ├── ar2_nw.py
│       │   ├── ar2_signals.png
│       │   ├── ar2_specs.png
│       │   ├── connectivity.rst
│       │   ├── fft_nui_spec.png
│       │   ├── fft_pp_spec.png
│       │   ├── fooof.rst
│       │   ├── freqanalysis.rst
│       │   ├── preproc_synthdata.py
│       │   ├── preprocessing.rst
│       │   ├── res_ds_spec.png
│       │   ├── res_lpds_spec.png
│       │   ├── res_lporderds_spec.png
│       │   ├── res_orig_spec.png
│       │   ├── res_rs_spec.png
│       │   ├── res_synthdata.py
│       │   ├── resampling.rst
│       │   └── welch.rst
│       └── user
│           ├── class_diagramm.mmd
│           ├── complete_api.rst
│           ├── concepts.rst
│           ├── data.rst
│           ├── data_basics.rst
│           ├── datatype.rst
│           ├── fieldtrip.rst
│           ├── logging.rst
│           ├── matlab_io.rst
│           ├── parallel.rst
│           ├── selectdata.rst
│           ├── synth_data.rst
│           ├── user_api.rst
│           └── work-flow.mmd
├── poetry.lock
├── pyproject.toml
├── setup.py
├── syncopy.yml
├── syncopy
│   ├── __init__.py
│   ├── connectivity
│   │   ├── AV_compRoutines.py
│   │   ├── ST_compRoutines.py
│   │   ├── __init__.py
│   │   ├── connectivity_analysis.py
│   │   ├── csd.py
│   │   ├── granger.py
│   │   └── wilson_sf.py
│   ├── datatype
│   │   ├── __init__.py
│   │   ├── base_data.py
│   │   ├── continuous_data.py
│   │   ├── discrete_data.py
│   │   ├── methods
│   │   │   ├── arithmetic.py
│   │   │   ├── concat.py
│   │   │   ├── copy.py
│   │   │   ├── definetrial.py
│   │   │   ├── redefinetrial.py
│   │   │   ├── selectdata.py
│   │   │   └── show.py
│   │   ├── selector.py
│   │   └── util.py
│   ├── io
│   │   ├── __init__.py
│   │   ├── load_ft.py
│   │   ├── load_nwb.py
│   │   ├── load_spy_container.py
│   │   ├── load_tdt.py
│   │   ├── mne_conv.py
│   │   ├── nwb.py
│   │   ├── save_spy_container.py
│   │   └── utils.py
│   ├── plotting
│   │   ├── __init__.py
│   │   ├── _helpers.py
│   │   ├── _plotting.py
│   │   ├── config.py
│   │   ├── helpers.py
│   │   ├── mp_plotting.py
│   │   ├── sp_plotting.py
│   │   ├── spike_plotting.py
│   │   └── spy_plotting.py
│   ├── preproc
│   │   ├── __init__.py
│   │   ├── compRoutines.py
│   │   ├── firws.py
│   │   ├── preprocessing.py
│   │   ├── resampledata.py
│   │   └── resampling.py
│   ├── shared
│   │   ├── __init__.py
│   │   ├── computational_routine.py
│   │   ├── const_def.py
│   │   ├── dask_helpers.py
│   │   ├── errors.py
│   │   ├── filetypes.py
│   │   ├── input_processors.py
│   │   ├── kwarg_decorators.py
│   │   ├── latency.py
│   │   ├── log.py
│   │   ├── metadata.py
│   │   ├── parsers.py
│   │   ├── queries.py
│   │   └── tools.py
│   ├── specest
│   │   ├── README.md
│   │   ├── __init__.py
│   │   ├── _norm_spec.py
│   │   ├── compRoutines.py
│   │   ├── fooofspy.py
│   │   ├── freqanalysis.py
│   │   ├── mtmconvol.py
│   │   ├── mtmfft.py
│   │   ├── stft.py
│   │   ├── superlet.py
│   │   ├── wavelet.py
│   │   └── wavelets
│   │       ├── __init__.py
│   │       ├── transform.py
│   │       └── wavelets.py
│   ├── statistics
│   │   ├── __init__.py
│   │   ├── compRoutines.py
│   │   ├── jackknifing.py
│   │   ├── psth.py
│   │   ├── spike_psth.py
│   │   ├── summary_stats.py
│   │   └── timelockanalysis.py
│   ├── synthdata
│   │   ├── __init__.py
│   │   ├── analog.py
│   │   ├── spikes.py
│   │   └── utils.py
│   └── tests
│       ├── README.md
│       ├── __init__.py
│       ├── backend
│       │   ├── __init__.py
│       │   ├── run_tests.sh
│       │   ├── test_conn.py
│       │   ├── test_fooofspy.py
│       │   ├── test_resampling.py
│       │   └── test_timefreq.py
│       ├── conftest.py
│       ├── helpers.py
│       ├── local_spy.py
│       ├── misc.py
│       ├── no_slurm.sh
│       ├── run_tests.cmd
│       ├── run_tests.sh
│       ├── test_attach_dataset.py
│       ├── test_basedata.py
│       ├── test_cfg.py
│       ├── test_computationalroutine.py
│       ├── test_concat.py
│       ├── test_connectivity.py
│       ├── test_continuousdata.py
│       ├── test_datatype_util.py
│       ├── test_decorators.py
│       ├── test_discretedata.py
│       ├── test_info.py
│       ├── test_logging.py
│       ├── test_metadata.py
│       ├── test_mne_conv.py
│       ├── test_nwb.py
│       ├── test_packagesetup.py
│       ├── test_parsers.py
│       ├── test_plotting.py
│       ├── test_preproc.py
│       ├── test_redefinetrial.py
│       ├── test_resampledata.py
│       ├── test_selectdata.py
│       ├── test_specest.py
│       ├── test_specest_fooof.py
│       ├── test_spike_psth.py
│       ├── test_spyio.py
│       ├── test_spytools.py
│       ├── test_statistics.py
│       ├── test_synthdata.py
│       ├── test_timelockanalysis.py
│       ├── test_tools.py
│       └── test_welch.py
└── syncopy_m1macos.yml
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | branch = True
3 | source = syncopy
4 |
5 | [report]
6 | exclude_lines =
7 | if self.debug:
8 | if debug:
9 | raise NotImplementedError
10 | if __name__ == .__main__.:
11 | ignore_errors = True
12 | omit =
13 | syncopy/tests/*
14 | *conda2pip.py
15 | *setup.py
16 | test_*
17 |
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | ignore = E501, E731
3 | extend_ignore = W503, W504
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for Syncopy
4 | title: ''
5 | labels: Feature Request
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/syncopy-bug-report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Syncopy Bug Report
3 | about: File a report to help us improve Syncopy
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | Thank you for taking the time to report an issue! We know that writing detailed error reports takes effort, and we appreciate it.
11 |
12 | **Describe the bug**
13 | Please provide a clear and concise description of the problem.
14 |
15 | **To Reproduce**
16 | Steps to reproduce the behavior:
17 | 1. Load/Import dataset `xyz`
18 | 2. Perform Preprocessing Step 1, Step 2, ...
19 | 3. Run analysis ``spy.somefunction(input1, input2, ....)``.
20 | 4. Error message/erroneous result.
21 |
22 | **Expected behavior**
23 | Please provide a clear and concise description of the expected result/behavior.
24 |
25 | **System Profile:**
26 | - OS: [e.g. Windows 8/10, Ubuntu 18.04, macOS Catalina]
27 | - Please paste the output of the following command here
28 | ```python
29 | import syncopy as spy; spy.AnalogData().log
30 | ```
31 |
32 | **Additional Information**
33 | Please add any other context concerning the problem here.
34 |
35 | **Thank you again for your time!**
36 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | Changes Summary
2 | ----------------
3 | -
4 | -
5 |
6 |
7 | Reviewer Checklist
8 | ------------------
9 | - [ ] Are testing routines present?
10 | - [ ] Do objects in the global package namespace perform proper parsing of their input?
11 | - [ ] Are all docstrings complete and accurate?
12 | - [ ] Is the CHANGELOG.md up to date?
13 |
--------------------------------------------------------------------------------
/.github/workflows/benchmarks_asv_workflow.yml:
--------------------------------------------------------------------------------
1 | name: Run asv benchmarks
2 |
3 | on:
4 |
5 | # Allows you to run this workflow manually from the Actions tab on GitHub
6 | workflow_dispatch:
7 |
8 |
9 | jobs:
10 | build-linux:
11 | runs-on: ubuntu-latest
12 | strategy:
13 | max-parallel: 1
14 | steps:
15 | - uses: actions/checkout@v2
16 | - name: Set up Python 3.10
17 | uses: actions/setup-python@v2
18 | with:
19 | python-version: "3.10"
20 | - name: Install poetry
21 | run: |
22 | pip install poetry
23 | - name: Install SyNCoPy
24 | run: |
25 | poetry install
26 | - name: Run benchmarks
27 | run: |
28 | poetry run asv machine --yes
29 | poetry run asv run HEAD^! --show-stderr
30 |
31 |
--------------------------------------------------------------------------------
/.github/workflows/cov_test_workflow.yml:
--------------------------------------------------------------------------------
1 | name: Run Basic Tests
2 |
3 | on:
4 | # Triggers the workflow on push or pull request events
5 | push:
6 | branches:
7 | - master
8 | - dev
9 | pull_request:
10 |
11 | # Allows you to run this workflow manually from the Actions tab
12 | workflow_dispatch:
13 |
14 | jobs:
15 | build-linux:
16 | runs-on: ubuntu-latest
17 | strategy:
18 | max-parallel: 1
19 | steps:
20 | - uses: actions/checkout@v2
21 | - name: Set up Python 3.10
22 | uses: actions/setup-python@v2
23 | with:
24 | python-version: "3.10"
25 | - name: Install poetry
26 | run: |
27 | pip install poetry
28 | - name: Install SyNCoPy
29 | run: |
30 | poetry install
31 | - name: Lint with flake8
32 | run: |
33 | # stop the build if there are Python syntax errors or undefined names
34 | poetry run flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
35 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
36 | poetry run flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
37 | - name: Install optional dependency pynwb
38 | run: |
39 | poetry add pynwb
40 | - name: Test with pytest and get coverage
41 | run: |
42 | cd syncopy/tests
43 | # run parallel tests only for base CR
44 | poetry run pytest -k 'computationalroutine and parallel'
45 | # don't run general parallel tests
46 | poetry run pytest -k 'not parallel' --color=yes --tb=short --verbose --cov=../../syncopy --cov-config=../../.coveragerc --cov-report=xml
47 | - name: Upload coverage to Codecov
48 | uses: codecov/codecov-action@v2
49 | with:
50 | name: syncopy-codecov
51 | verbose: true
52 |
--------------------------------------------------------------------------------
/.github/workflows/fullparallel_workflow.yml:
--------------------------------------------------------------------------------
1 | name: Run all tests
2 |
3 | on:
4 | # only manual trigger here
5 | workflow_dispatch:
6 |
7 | jobs:
8 | build-linux:
9 | runs-on: ubuntu-latest
10 | strategy:
11 | max-parallel: 1
12 | steps:
13 | - uses: actions/checkout@v2
14 | - name: Set up Python 3.8
15 | uses: actions/setup-python@v2
16 | with:
17 | python-version: 3.8
18 | - name: Install poetry
19 | run: |
20 | pip install poetry
21 | - name: Install SyNCoPy
22 | run: |
23 | poetry install
24 | - name: Lint with flake8
25 | run: |
26 | # stop the build if there are Python syntax errors or undefined names
27 | poetry run flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
28 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
29 | poetry run flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
30 | - name: Test sequential with pytest
31 | run: |
32 | cd syncopy/tests
33 | # run all sequential tests
34 | poetry run pytest -ra --color=yes -k 'not parallel'
35 | - name: Test basic parallel with pytest
36 | run: |
37 | cd syncopy/tests
38 | # run all parallel tests
39 | poetry run pytest --disable-warnings --color=yes -k 'parallel'
40 |
41 |
--------------------------------------------------------------------------------
/.github/workflows/macos_tests_on_demand.yml:
--------------------------------------------------------------------------------
1 | name: Run Tests under macOS
2 |
3 | on:
4 | # Allows you to run this workflow manually from the Actions tab
5 | workflow_dispatch:
6 |
7 | jobs:
8 |
9 | test-macos:
10 | runs-on: macos-latest
11 | strategy:
12 | max-parallel: 1
13 | steps:
14 | - uses: actions/checkout@v3
15 | - name: Set up Python 3.10
16 | uses: actions/setup-python@v3
17 | with:
18 | python-version: "3.10"
19 | - name: Install poetry
20 | run: |
21 | pip install poetry
22 | - name: Install SyNCoPy
23 | run: |
24 | poetry install
25 | - name: Lint with flake8
26 | run: |
27 | # stop the build if there are Python syntax errors or undefined names
28 | poetry run flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
29 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
30 | poetry run flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
31 | - name: Install optional dependency pynwb
32 | run: |
33 | poetry add pynwb
34 | - name: Test with pytest and get coverage
35 | run: |
36 | cd syncopy/tests
37 | # run parallel tests only for base CR
38 | poetry run pytest -k 'computationalroutine and parallel'
39 | # don't run general parallel tests
40 | poetry run pytest -k 'not parallel'
41 |
42 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Compiled Object files
2 | *.slo
3 | *.lo
4 | *.o
5 | *.obj
6 |
7 | # Precompiled Headers
8 | *.gch
9 | *.pch
10 |
11 | # Compiled Dynamic libraries
12 | *.so
13 | *.dylib
14 | *.dll
15 |
16 | # Fortran module files
17 | *.mod
18 |
19 | # Compiled Static libraries
20 | *.lai
21 | *.la
22 | *.a
23 | *.lib
24 |
25 | # Executables
26 | *.exe
27 | *.out
28 | *.app
29 |
30 | # HTML documentation
31 | doc/build
32 | doc/source/_stubs
33 | *_stubs
34 | doc/source/api
35 |
36 | # Canonical /build directory
37 | /build
38 |
39 | # setuptools /dist directory
40 | /dist
41 | *.egg-info
42 | .eggs/
43 |
44 | # Python
45 | # Byte-compiled / optimized / DLL files
46 | __pycache__/
47 | *.py[cod]
48 | *$py.class
49 |
50 | # Jupyter notebook clutter
51 | .ipynb_checkpoints/
52 |
53 | # Dask-related stuff
54 | dask-worker-space
55 |
56 | # Test-related files
57 | .tox
58 |
59 | # Auto-generated files
60 | requirements.txt
61 | requirements-test.txt
62 |
63 | # Code coverage reports
64 | .coverage
65 | .coverage.*
66 |
67 | # spy containers
68 | *.spy
69 |
70 | # Editor-related stuff
71 | .vscode
72 |
73 | # Mac OS related stuff
74 | .DS_Store
75 |
76 | # airspeed velocity virtual environments for performance testing
77 | .asv/env/
78 |
79 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yml
2 | # Read the Docs configuration file
3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 |
5 | # Required
6 | version: 2
7 |
8 | # Build documentation in the docs/ directory with Sphinx
9 | sphinx:
10 | configuration: doc/source/conf.py
11 |
12 | build:
13 | os: ubuntu-22.04
14 | tools:
15 | python: '3.10'
16 | apt_packages:
17 | - graphviz
18 | jobs:
19 | post_install:
20 | - pip install poetry
21 | - poetry config virtualenvs.create false
22 | - poetry install
23 |
--------------------------------------------------------------------------------
/CITATION.cff:
--------------------------------------------------------------------------------
1 | cff-version: 1.1.0
2 | title: 'SyNCoPy: Systems Neuroscience Computing in Python'
3 | message: 'If you use this software, please cite it based on metadata found in this
4 | file. SyNCoPy is heavily inspired by and intended to be used complementary to the
5 | MATLAB toolbox FieldTrip: Robert Oostenveld, Pascal Fries, Eric Maris, and Jan-Mathijs
6 | Schoffelen. FieldTrip: Open Source Software for Advanced Analysis of MEG, EEG, and
7 | Invasive Electrophysiological Data. Computational Intelligence and Neuroscience,
8 | vol. 2011, Article ID 156869, 9 pages, 2011. doi:10.1155/2011/156869.'
9 | authors:
10 | - affiliation: Ernst Strüngmann Institute for Neuroscience in Cooperation with Max
11 | Planck Society
12 | family-names: Mönke
13 | given-names: Gregor
14 | orcid: https://orcid.org/0000-0002-3521-715X
15 | - affiliation: Ernst Strüngmann Institute for Neuroscience in Cooperation with Max
16 | Planck Society
17 | family-names: Schäfer
18 | given-names: Tim
19 | orcid: https://orcid.org/0000-0002-3683-8070
20 | - affiliation: Ernst Strüngmann Institute for Neuroscience in Cooperation with Max
21 | Planck Society
22 | family-names: Fuertinger
23 | given-names: Stefan
24 | orcid: https://orcid.org/0000-0002-8118-036X
25 | - affiliation: Ernst Strüngmann Institute for Neuroscience in Cooperation with Max
26 | Planck Society
27 | family-names: Schmiedt
28 | given-names: Joscha
29 | orcid: https://orcid.org/0000-0001-6233-1866
30 | - affiliation: Ernst Strüngmann Institute for Neuroscience in Cooperation with Max
31 | Planck Society
32 | family-names: Fries
33 | given-names: Pascal
34 | orcid: https://orcid.org/0000-0002-4270-1468
35 | license: BSD-3-Clause
36 | keywords:
37 | - large-scale electrophysiology
38 | - computational-neuroscience
39 | - high-performance computing
40 | - parallel computing
41 | - systems-neuroscience
42 | - spectral-methods
43 | - brain
44 | repository-code: https://github.com/esi-neuroscience/syncopy
45 | version: 2022.12
46 | date-released: '2022-12-15'
47 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | ## Contributing to Syncopy
2 |
3 | We are very happy to accept [pull requests](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request), provided you are fine with publishing your work under the [license of this project](./LICENSE).
4 |
5 | If your contribution is not a bug fix but a new feature that changes or adds lots of code, please get in touch by [opening an issue](https://github.com/esi-neuroscience/syncopy/issues) *before* starting to code so we can discuss and coordinate.
6 |
7 | Development happens on the *dev* branch. Please note that we do not accept PRs against other branches.
8 |
9 |
10 | ### Contribution workflow -- Overview
11 |
12 | If you want to contribute something, the general workflow is:
13 |
14 | - Fork the repo to your GitHub account.
15 | - Clone your copy of the repo to your local computer.
16 | - Create a new branch off the `dev` branch for your changes.
17 | - Add tests relevant to your changes and run tests locally.
18 | - When happy, create a PR on GitHub and wait for CI to run tests. Make sure to request merging into `dev`.
19 | - When CI is green, we will review your code and get back to you to prepare merging into `dev`.
20 | - On the next Syncopy release, `dev` will be merged into our stable branch, and your changes will be part of Syncopy.
21 |
22 |
23 | ### Contribution workflow -- Detailed instructions
24 |
25 | Here are detailed instructions for the contribution workflow steps listed above:
26 |
27 | - Log into your GitHub account, visit the [Syncopy repo page](https://github.com/esi-neuroscience/syncopy), and click [fork](https://github.com/esi-neuroscience/syncopy/fork) to fork the Syncopy repository to your account.
28 | - Check out **your forked** repository on your computer. You will be on the master branch. Make sure to **switch the branch** to *dev*. E.g.:
29 |
30 | ```shell
31 | git clone https://github.com/your_user/syncopy
32 | cd syncopy
33 | git checkout dev
34 | ```
35 | - Now install the development version of Syncopy that you just checked out, so you can actually test your changes to the code. We highly recommend installing into a new conda virtual environment, so that you do not
36 | break your system-wide stable installation of Syncopy.
37 |
38 | ```shell
39 | conda env create --file syncopy.yml --name syncopy-dev # The file syncopy.yml comes with the repo.
40 | conda activate syncopy-dev
41 | pip install -e .
42 | ```
43 |
44 | This allows you to run the Syncopy unit tests locally, and to run and test your changes. E.g., run a single test file:
45 |
46 | ```shell
47 | python -m pytest syncopy/tests/test_preproc.py
48 | ```
49 |
50 | We recommend running all unit tests once now to be sure that everything works. That way, if tests fail later after you have changed some code, you can be sure the failures are related to your changes rather than to your Syncopy installation. To run all tests:
51 |
52 | ```shell
53 | python -m pytest -k "not parallel"
54 | ```
55 |
56 | This should take roughly 5 minutes and will open some plot windows. Please be patient. Testing parallel processing on a local machine is usually not necessary. If you still want to run the full parallel tests as well, just leave out `-k "not parallel"` in the command above.
57 |
58 |
59 | - Now you have a verified installation and you are ready to make changes. Create a new branch off *dev* and name it after your feature, e.g., `add_cool_new_feature` or `fix_issue_17`:
60 |
61 | ```shell
62 | git checkout dev # Just to be sure we are still on the correct branch. This is important.
63 | git checkout -b fix_issue_17
64 | ```
65 |
66 | - Make changes to the Syncopy code and commit them into your branch. Repeat as needed. Add some tests.
67 | - Make sure the unit tests run locally on your machine:
68 |
69 | ```shell
70 | python -m pytest -k "not parallel"
71 | ```
72 |
73 |
74 | - When you are happy with your changes, push your branch to your forked repo on GitHub.
75 |
76 | ```shell
77 | git push --set-upstream origin fix_issue_17 # If your branch is named 'fix_issue_17'.
78 | ```
79 |
80 |
81 | - Then create a pull request on the GitHub website by visiting your copy of the repo. Make sure to request to merge your branch into the *dev* branch of the official Syncopy repo (the default is `master`, which is not what you want). You can verify that the branch is correct by clicking on the `Files changed` tab of the PR. It should list exactly your changes. If this is not the case, edit the PR and change the base to `dev`.
82 |
83 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2020, Ernst Strüngmann Institute (ESI) for Neuroscience in Cooperation with Max Planck Society
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | 1. Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | 2. Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | 3. Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | exclude .gitignore
2 | exclude .gitlab-ci.yml
3 | exclude MANIFEST.in
4 | exclude .readthedocs.yml
5 | recursive-exclude .github *
6 | recursive-exclude doc *
7 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | .. image:: https://raw.githubusercontent.com/esi-neuroscience/syncopy/master/doc/source/_static/syncopy_logo_small.png
2 | :alt: Syncopy-Logo
3 |
4 | Systems Neuroscience Computing in Python
5 | ========================================
6 |
7 |
8 | |Conda Version| |PyPI version| |License| |DOI|
9 |
10 | .. |Conda Version| image:: https://img.shields.io/conda/vn/conda-forge/esi-syncopy.svg
11 | :target: https://anaconda.org/conda-forge/esi-syncopy
12 | .. |PyPI version| image:: https://badge.fury.io/py/esi-syncopy.svg
13 | :target: https://badge.fury.io/py/esi-syncopy
14 | .. |License| image:: https://img.shields.io/github/license/esi-neuroscience/syncopy
15 | .. |DOI| image:: https://zenodo.org/badge/DOI/10.5281/zenodo.8191941.svg
16 | :target: https://doi.org/10.5281/zenodo.8191941
17 |
18 | |Master Tests| |Master Coverage|
19 |
20 | .. |Master Tests| image:: https://github.com/esi-neuroscience/syncopy/actions/workflows/cov_test_workflow.yml/badge.svg?branch=master
21 | :target: https://github.com/esi-neuroscience/syncopy/actions/workflows/cov_test_workflow.yml
22 | .. |Master Coverage| image:: https://codecov.io/gh/esi-neuroscience/syncopy/branch/master/graph/badge.svg?token=JEI3QQGNBQ
23 | :target: https://codecov.io/gh/esi-neuroscience/syncopy
24 |
25 | Syncopy aims to be a user-friendly toolkit for *large-scale*
26 | electrophysiology data analysis in Python. We strive to achieve the following goals:
27 |
28 | 1. Syncopy is a *fully open source Python* environment for electrophysiology
29 | data analysis.
30 | 2. Syncopy is *scalable* and built for *very large datasets*. It automatically
31 | makes use of available computing resources and is developed with built-in
32 | parallelism in mind.
33 | 3. Syncopy is *compatible with FieldTrip*. Data and results can be loaded into
34 | MATLAB and Python, and parameter names and function call syntax are as similar as possible.
35 |
36 | Syncopy is developed at the
37 | `Ernst Strüngmann Institute (ESI) gGmbH for Neuroscience in Cooperation with Max Planck Society `_
38 | and released free of charge under the
39 | `BSD 3-Clause "New" or "Revised" License `_.
40 |
41 | News
42 | -----
43 | * 2024-04, **Pre-print on Syncopy available.** A pre-print paper on Syncopy is now available `here on bioRxiv, with DOI 10.1101/2024.04.15.589590 `_. Please cite this pre-print if you use Syncopy. In APA style, the citation is: Mönke, G., Schäfer, T., Parto-Dezfouli, M., Kajal, D. S., Fürtinger, S., Schmiedt, J. T., & Fries, P. (2024). *Systems Neuroscience Computing in Python (SyNCoPy): A Python Package for Large-scale Analysis of Electrophysiological Data.* bioRxiv, 2024-04.
44 |
45 | Contact
46 | -------
47 | To report bugs or ask questions please use our `GitHub issue tracker `_.
48 | For general inquiries please contact syncopy (at) esi-frankfurt.de.
49 |
50 | Installation
51 | ============
52 |
53 | We recommend installing SyNCoPy into a new conda environment:
54 |
55 | #. Install the `Anaconda Distribution for your Operating System `_ if you do not yet have it.
56 | #. Start a new terminal.
57 |
58 | * You can do this by starting ``Anaconda navigator``, selecting ``Environments`` in the left tab, selecting the ``base (root)`` environment, and clicking the green play button and then ``Open Terminal``.
59 | * Alternatively, under Linux, you can just type ``bash`` in your active terminal to start a new session.
60 |
61 | You should see a terminal with a command prompt that starts with ``(base)``, indicating that you are
62 | in the conda ``base`` environment.
63 |
64 | Now we create a new environment named ``syncopy`` and install Syncopy into this environment:
65 |
66 | .. code-block:: bash
67 |
68 | conda create -y --name syncopy
69 | conda activate syncopy
70 | conda install -y -c conda-forge esi-syncopy
71 |
72 | Getting Started
73 | ===============
74 | Please visit our `online documentation `_.
75 |
76 | Developer Installation
77 | -----------------------
78 |
79 | To get the latest development version, please clone our GitHub repository and change to the `dev` branch. We highly recommend to install into a new conda virtual environment, so that this development version does not interfere with your existing installation.
80 |
81 | .. code-block:: bash
82 |
83 | git clone https://github.com/esi-neuroscience/syncopy.git
84 | cd syncopy/
85 | conda env create --name syncopy-dev --file syncopy.yml
86 | conda activate syncopy-dev
87 | pip install -e .
88 |
89 |
90 | We recommend verifying your development installation by running the unit tests. You can skip the parallel tests to save some time; the tests should then run in about 5 minutes:
91 |
92 |
93 | .. code-block:: bash
94 |
95 | python -m pytest -k "not parallel"
96 |
97 |
98 | You now have a verified developer installation of Syncopy. Please refer to our `contributing guide `_ if you want to contribute to Syncopy.
99 |
100 |
--------------------------------------------------------------------------------
/README_DEV.md:
--------------------------------------------------------------------------------
1 | # Syncopy Developer Information
2 |
3 | These development instructions are only relevant for people on the Syncopy core team.
4 |
5 | ## Making a new Release
6 |
7 | ### PyPI
8 |
9 | On GitHub:
10 |
11 | * Keep the latest version/changes to be released on the `dev` branch for now
12 | * Set the new package version in `pyproject.toml` (see the sketch after this list) and make sure the release notes in the `CHANGELOG.md` file are up-to-date.
13 | * After the last commit on GitHub, log into the local ESI GitLab installation from within ESI and wait for the sync from GitHub to happen. The CI pipeline should start all runners:
14 | - stage 1: single-machine runners for all architectures, like intelllinux, intelwin, macos
15 | - stage 2: slurm / HPC runners
16 | * Check + fix all failures. Note that the pipeline on the internal GitLab differs from the CI run on GitHub in several ways:
17 | - parallel tests are run
18 | - platforms other than linux x64 are used
19 | - the ESI filesystem/cluster is available, so tests that require large local test data from the cluster's filesystem are run.
20 | * Once tests are all green, you can do the following:
21 | - in the GitLab "CI -- pipeline" tab, click on the name of the completed pipeline. You should see the stages. If parts of stages 1 or 2 are still running, you can cancel them to unlock stage 3. There is a manual stage 3 'upload' entry named 'pypitest'. Click it to run the pypitest test deployment.
22 | - If it succeeded: there is a manual stage 4 'deploy' entry named 'pypideploy'. Click it to run the final deployment to PyPI.
23 | - merge dev into master
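For illustration, the version bump mentioned above is a one-line change in the `[tool.poetry]` table of `pyproject.toml` (a sketch with placeholder values, not the actual file contents):

```toml
[tool.poetry]
name = "esi-syncopy"
version = "2024.04"  # <- bump this for the release; Syncopy uses date-based versions
```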
24 |
25 | This concludes the release to PyPI.
26 |
27 | ### Conda
28 |
29 | Note: You need to release to PyPI first to start the conda release. This requires that you have proper permissions on your GitHub account, i.e., that you are a maintainer of the esi-syncopy package on conda-forge.
30 |
31 | * Go to https://github.com/conda-forge/esi-syncopy-feedstock and clone the repo to your private GitHub account. If you already have a clone from the last release, navigate to it and click *sync fork* to update it.
32 | * In your up-to-date clone of the `esi-syncopy-feedstock` repo, create a new branch off main, e.g., `release-2023.09`. In the new branch, in the file `recipe/meta.yaml`, do the following steps:
33 | - Update the version of the Syncopy package and the file hash of the release you uploaded to PyPI earlier (you can see the hash [here on PyPI](https://pypi.org/project/esi-syncopy/#files)).
34 | * Beware: we typically have versions like `2023.07` on GitHub, and conda is fine with a version like that, too. However, PyPI removes the zero from `2023.07` in the package URL, so you cannot use `{{ version }}` in the `source`..`url` field of the `meta.yaml` file (see the sketch after this list).
35 | - Check the versions of packages in `meta.yaml` against the versions in `pyproject.toml`/`syncopy.yml` in the root of the Syncopy GitHub repo (they need not be 100% identical, but having too old versions in there may lead to security risks or unexpected behaviour with older/buggy package versions).
36 | * Commit your changes and push to GitHub. Then go to the GitHub website and create a PR (against https://github.com/conda-forge/esi-syncopy-feedstock, main branch).
37 | * Fill out the PR checklist in the PR. If the conditions listed in section [When to Rerender](https://conda-forge.org/docs/maintainer/updating_pkgs.html#when-to-rerender) in the conda documentation apply to the current change/release: request `@conda-forge-admin please rerender` via comment on GitHub in the PR.
38 | * Wait for the conda checks. If they are all green, merge.
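To make the URL caveat above concrete, here is a minimal sketch of the relevant `recipe/meta.yaml` fields (all values are placeholders, not the real recipe):

```yaml
{% set version = "2023.07" %}

package:
  name: esi-syncopy
  version: {{ version }}

source:
  # PyPI normalizes "2023.07" to "2023.7" in the download URL, so the URL
  # is spelled out here instead of being built from {{ version }}:
  url: https://pypi.io/packages/source/e/esi-syncopy/esi-syncopy-2023.7.tar.gz
  sha256: "<sdist hash copied from pypi.org/project/esi-syncopy/#files>"
```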
39 |
40 | This concludes the release to conda-forge.
41 |
42 |
--------------------------------------------------------------------------------
/benchmarks/README.md:
--------------------------------------------------------------------------------
1 | # Syncopy Performance Benchmarks
2 |
3 | This directory contains the Syncopy performance benchmarks, implemented with [Airspeed Velocity](https://asv.readthedocs.io), i.e., the `asv` Python package.
4 |
5 | Note: The current version of `asv` does not seem to work with poetry at all, and it still defaults to using the rather outdated `setup.py` method instead of `pyproject.toml`. We do not have a `setup.py`, nor do we want to ship one, so we convert our `pyproject.toml` to a `setup.py` on the fly before running the performance benchmarks.
6 |
7 | ## Converting to setup.py
8 |
9 | At the moment, asv does NOT support `pyproject.toml` directly. To (re-)convert to `setup.py`, run:
10 |
11 | ```shell
12 | pip install dephell
13 | dephell convert deps --from-path pyproject.toml --from-format pyproject --to-path setup.py --to-format setuppy
14 | ```
15 |
16 | ## Running the benchmarks
17 |
18 | First change into the Syncopy repo. To run the benchmarks for the latest commit on your current branch:
19 |
20 | ```shell
21 | poetry run asv machine --yes
22 | poetry run asv run HEAD^!
23 | ```
24 |
25 |
26 | ## Common issues
27 |
28 | If you are getting errors when running the benchmarks, e.g., `no module named syncopy`, you most likely have changed something with the `asv` configuration that broke the installation. In addition to fixing that, you will have to manually delete the old environments so that `asv` creates new ones afterwards:
29 |
30 | ```shell
31 | rm -rf .asv/env
32 | ```
33 |
34 | ## Adding benchmarks
35 |
36 | The benchmarks can be found in `/benchmarks/benchmarks.py`. See the [asv docs](https://asv.readthedocs.io) for info on adding more.
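As a starting point, a hypothetical new suite could look like the sketch below. It only uses calls that already appear elsewhere in this repo (`white_noise`, `.show()`) and follows the `asv` naming conventions: methods prefixed `time_` are wall-clock benchmarks, and `peakmem_` methods measure peak memory.

```python
from syncopy.synthdata.analog import white_noise


class ShowSuite:
    """Benchmark pulling NumPy arrays out of an AnalogData object via .show()."""

    def setup(self):
        # Runs before each benchmark: create fresh synthetic data.
        self.adata = white_noise(nSamples=10_000, nChannels=32, nTrials=100, samplerate=1000)

    def teardown(self):
        del self.adata

    def time_show_single_channel(self):
        _ = self.adata.show(channel=0)

    def peakmem_show_single_channel(self):
        _ = self.adata.show(channel=0)
```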
37 |
38 |
39 |
40 |
41 |
--------------------------------------------------------------------------------
/benchmarks/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/benchmarks/benchmarks.py:
--------------------------------------------------------------------------------
1 | # Syncopy benchmark suite.
2 | # See "Writing benchmarks" in the asv docs for more information.
3 |
4 | import syncopy as spy
5 | from syncopy.synthdata.analog import white_noise
6 |
7 |
8 | class SelectionSuite:
9 | """
10 | Benchmark selections on AnalogData objects.
11 | """
12 | def setup(self):
13 | self.adata = white_noise(nSamples=25000, nChannels=32, nTrials=250, samplerate=1000)
14 |
15 | def teardown(self):
16 | del self.adata
17 |
18 | def time_external_channel_selection(self):
19 | _ = spy.selectdata(self.adata, channel=[0, 1, 7], inplace=False)
20 |
21 | def time_inplace_channel_selection(self):
22 | spy.selectdata(self.adata, channel=[0, 1, 7], inplace=True)
23 |
24 |
25 | class MTMFFT:
26 | """
27 | Benchmark multi-tapered fft
28 | """
29 | def setup(self):
30 | self.adata = white_noise(nSamples=5000, nChannels=32, nTrials=250, samplerate=1000)
31 |
32 | def teardown(self):
33 | del self.adata
34 |
35 | def time_mtmfft_untapered(self):
36 | _ = spy.freqanalysis(self.adata, taper=None)
37 |
38 | def time_mtmfft_multitaper(self):
39 | _ = spy.freqanalysis(self.adata, tapsmofrq=2)
40 |
41 |
42 | class Arithmetic:
43 | """
44 | Benchmark Syncopy's arithmetic
45 | """
46 |
47 | def setup(self):
48 | self.adata = white_noise(nSamples=25000, nChannels=32, nTrials=250, samplerate=1000)
49 | self.adata2 = self.adata.copy()
50 |
51 | def teardown(self):
52 | del self.adata
53 | del self.adata2
54 |
55 | def time_scalar_mult(self):
56 | _ = 3 * self.adata
57 |
58 | def time_scalar_add(self):
59 | _ = 3 + self.adata
60 |
61 | def time_dset_add(self):
62 | _ = self.adata + self.adata2
63 |
64 |
65 | class MemSuite:
66 | """Test memory usage of data classes.
67 | Note that this is intended to test memory usage of Python objects, not of a function call.
68 | Use the peakmem_ prefix for that.
69 | """
70 |
71 | def setup(self):
72 | self.adata = white_noise(nSamples=10_000, nChannels=32, nTrials=250, samplerate=1000)
73 |
74 | def teardown(self):
75 | del self.adata
76 |
77 | def mem_analogdata(self):
78 | """Test memory usage of AnalogData object."""
79 | return self.adata
80 |
81 | def peakmem_mtmfft(self):
82 | """Test memory usage of mtmfft"""
83 | _ = spy.freqanalysis(self.adata, tapsmofrq=2)
84 |
--------------------------------------------------------------------------------
/codecov.yml:
--------------------------------------------------------------------------------
1 | coverage:
2 | status:
3 | project:
4 | default:
5 | # Allow coverage to drop `threshold` percent in PRs to master/dev
6 | target: auto
7 | threshold: 5%
8 | base: auto
9 | branches:
10 | - master
11 | - dev
12 | if_ci_failed: error #success, failure, error, ignore
13 | informational: false
14 | only_pulls: true
15 |
--------------------------------------------------------------------------------
/doc/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SOURCEDIR = source
8 | BUILDDIR = build
9 |
10 | # Put it first so that "make" without argument is like "make help".
11 | help:
12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
13 |
14 | .PHONY: help Makefile
15 |
16 | # Catch-all target: route all unknown targets to Sphinx using the new
17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
18 | %: Makefile
19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
20 |
21 | # Custom directive to clean up build dir and stub files
22 | clean:
23 | rm -rf $(BUILDDIR)/
24 | rm -f $(SOURCEDIR)/api/*
25 | find $(SOURCEDIR) -name _stubs -prune -exec rm -rf {} +
26 |
--------------------------------------------------------------------------------
/doc/README.md:
--------------------------------------------------------------------------------
1 |
2 | # (Online-) Documentation
3 |
4 | ## Build Requirements
5 |
6 | Install (Debian-based packages):
7 | - `sphinx-common`
8 | - `python3-sphinx-book-theme`
9 | - `graphviz`
10 |
11 | Then run `make html` from this folder.
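On a Debian-based system, the full sequence might look like this (a sketch; package names as listed above, output location per the `Makefile`):

```shell
sudo apt install sphinx-common python3-sphinx-book-theme graphviz
cd doc
make html  # rendered pages end up in build/html/
```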
12 |
13 | ### Useful links
14 |
15 | - [nice introduction to rst and Sphinx](https://software.belle2.org/sphinx/recommended-training/framework/doc/atend-doctools.html)
16 |
17 | ### LaTeX in docstrings
18 |
19 | Use raw strings (r"""..."""):
20 | - https://stackoverflow.com/questions/16468397/math-expression-in-docstring-is-not-rendered-correctly-by-sphinx
21 |
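A minimal, hypothetical example of why the raw string matters: the `\t` at the start of `\tau` below would be turned into a literal tab character by Python's escape handling if the docstring were not prefixed with `r`:

```python
import numpy as np

def damped_oscillation(time, tau, freq):
    r"""Generate a damped harmonic oscillation.

    Computes :math:`x(t) = e^{-t/\tau} \cos(2 \pi f t)`.
    """
    return np.exp(-time / tau) * np.cos(2 * np.pi * freq * time)
```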
--------------------------------------------------------------------------------
/doc/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 |
13 | if "%1" == "" goto help
14 | if "%1" == "clean" goto clean
15 |
16 | %SPHINXBUILD% >NUL 2>NUL
17 | if errorlevel 9009 (
18 | echo.
19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
20 | echo.installed, then set the SPHINXBUILD environment variable to point
21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
22 | echo.may add the Sphinx directory to PATH.
23 | echo.
24 | echo.If you don't have Sphinx installed, grab it from
25 | echo.http://sphinx-doc.org/
26 | exit /b 1
27 | )
28 |
29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
30 | goto end
31 |
32 | :help
33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
34 |
35 | REM Custom directive to clean up build dir and stub files
36 | :clean
37 | del /Q /S %BUILDDIR%"\*" > nul
38 | rmdir /Q /S %BUILDDIR% > nul
39 | del /Q /S %SOURCEDIR%"\_stubs\*" > nul
40 | del /Q /S %SOURCEDIR%"\api\*" > nul
41 | goto end
42 |
43 | :end
44 | popd
45 |
--------------------------------------------------------------------------------
/doc/source/README.rst:
--------------------------------------------------------------------------------
1 | .. Syncopy documentation master file
2 |
3 | .. title:: Syncopy Documentation
4 |
5 | .. image:: _static/syncopy_logo.png
6 | :alt: Syncopy logo
7 | :height: 200px
8 | :align: center
9 |
10 |
11 | Welcome to the Documentation of SyNCoPy!
12 | ========================================
13 |
14 | SyNCoPy (**Sy**\stems **N**\euroscience **Co**\mputing in **Py**\thon, spelled Syncopy in the following)
15 | is a Python toolkit for user-friendly, large-scale electrophysiology data analysis.
16 | We strive to achieve the following goals:
17 |
18 | 1. Syncopy provides a full *open source* Python environment for reproducible
19 | electrophysiology data analysis.
20 | 2. Syncopy is *scalable* to accommodate *very large* datasets. It automatically
21 | makes use of available computing resources and is developed with built-in
22 | parallelism in mind.
23 | 3. Syncopy is *compatible* with the MATLAB toolbox `FieldTrip `_.
24 |
25 | .. toctree::
26 | :maxdepth: 1
27 | :caption: Getting Started
28 |
29 | Install Syncopy
30 | Quickstart Guide
31 |
32 | Want to contribute or just curious how the sausage
33 | is made? Take a look at our :doc:`Developer Guide `.
34 |
35 | Citing Syncopy
36 | -----------------
37 |
38 | A pre-print paper on Syncopy is available `here on bioRxiv, with DOI 10.1101/2024.04.15.589590 `_. Please cite this pre-print if you use Syncopy. In APA style, the citation is: Mönke, G., Schäfer, T., Parto-Dezfouli, M., Kajal, D. S., Fürtinger, S., Schmiedt, J. T., & Fries, P. (2024). *Systems Neuroscience Computing in Python (SyNCoPy): A Python Package for Large-scale Analysis of Electrophysiological Data.* bioRxiv, 2024-04.
39 |
40 |
41 | Tutorials and in-depth Guides
42 | -----------------------------
43 |
44 | .. toctree::
45 | :maxdepth: 1
46 | :caption: Tutorials
47 |
48 | Preprocessing
49 | Resampling
50 | Spectral Analysis
51 | Connectivity Analysis
52 |
53 | .. toctree::
54 | :maxdepth: 1
55 | :caption: Guides
56 |
57 | Basic Concepts
58 | Syncopy for FieldTrip Users
59 | Handling Data
60 | Parallel Processing
61 |
62 | API
63 | ---
64 |
65 | .. toctree::
66 | :maxdepth: 1
67 | :caption: API Reference
68 |
69 | User API
70 | Complete API
71 |
72 | Contact
73 | -------
74 | To report bugs or ask questions please use our `GitHub issue tracker `_.
75 | For general inquiries please contact syncopy (at) esi-frankfurt.de.
76 |
--------------------------------------------------------------------------------
/doc/source/_static/ComputationalRoutine.ai:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/ComputationalRoutine.ai
--------------------------------------------------------------------------------
/doc/source/_static/ComputationalRoutine.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/ComputationalRoutine.png
--------------------------------------------------------------------------------
/doc/source/_static/adata.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/adata.png
--------------------------------------------------------------------------------
/doc/source/_static/class_diagramm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/class_diagramm.png
--------------------------------------------------------------------------------
/doc/source/_static/colors.css:
--------------------------------------------------------------------------------
1 | .green {
2 | color: #78ffc7;
3 | }
4 |
5 | .red {
6 | color: #ff6969;
7 | font-weight: bold;
8 | }
9 |
10 | .blue {
11 | color: #29d5ff;
12 | font-weight: bold;
13 | }
14 |
--------------------------------------------------------------------------------
/doc/source/_static/dask_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/dask_logo.png
--------------------------------------------------------------------------------
/doc/source/_static/esi-style.css:
--------------------------------------------------------------------------------
1 |
2 | a.reference.internal {
3 | color: #25565E !important;
4 | }
5 | a.reference.external{
6 | color:#3572b0;
7 | }
8 |
9 | p.rubric {
10 | font-size: 16px;
11 | }
12 |
13 | p {
14 | font-size: 1.1em
15 | }
16 |
--------------------------------------------------------------------------------
/doc/source/_static/fooof_out_aperiodic.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/fooof_out_aperiodic.png
--------------------------------------------------------------------------------
/doc/source/_static/fooof_out_first_try.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/fooof_out_first_try.png
--------------------------------------------------------------------------------
/doc/source/_static/fooof_out_tuned.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/fooof_out_tuned.png
--------------------------------------------------------------------------------
/doc/source/_static/fooof_signal_spectrum.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/fooof_signal_spectrum.png
--------------------------------------------------------------------------------
/doc/source/_static/fooof_signal_time.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/fooof_signal_time.png
--------------------------------------------------------------------------------
/doc/source/_static/scientific_python.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/scientific_python.jpg
--------------------------------------------------------------------------------
/doc/source/_static/select_example1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/select_example1.png
--------------------------------------------------------------------------------
/doc/source/_static/select_example2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/select_example2.png
--------------------------------------------------------------------------------
/doc/source/_static/spec.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/spec.png
--------------------------------------------------------------------------------
/doc/source/_static/syncopy_icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/syncopy_icon.png
--------------------------------------------------------------------------------
/doc/source/_static/syncopy_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/syncopy_logo.png
--------------------------------------------------------------------------------
/doc/source/_static/syncopy_logo_small.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/syncopy_logo_small.png
--------------------------------------------------------------------------------
/doc/source/_static/synth_data1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/synth_data1.png
--------------------------------------------------------------------------------
/doc/source/_static/synth_data1_spec.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/synth_data1_spec.png
--------------------------------------------------------------------------------
/doc/source/_static/synth_data_pdiff_spec.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/synth_data_pdiff_spec.png
--------------------------------------------------------------------------------
/doc/source/_static/synth_data_spec.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/synth_data_spec.png
--------------------------------------------------------------------------------
/doc/source/_static/welch_basic_power.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/welch_basic_power.png
--------------------------------------------------------------------------------
/doc/source/_static/welch_params.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/welch_params.png
--------------------------------------------------------------------------------
/doc/source/_static/welch_params.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import syncopy as spy
4 | import syncopy.synthdata.analog as synth_data  # white_noise lives here per the current package layout
5 | import numpy as np
6 | import matplotlib.pyplot as plt
7 |
8 | sig_lengths = np.linspace(1000, 4000, num=4, dtype=int)
9 | overlaps = np.linspace(0.0, 0.99, num=10)
10 | variances = np.zeros((sig_lengths.size, overlaps.size), dtype=float) # Filled in loop below.
11 |
12 | foilim = [5, 200] # Frequency selection, shared between cases.
13 | f_timwin = 0.2 # Window length in seconds, also shared.
14 |
15 | def get_welch_cfg():
16 | """
17 | Get a reasonable Welch cfg for testing purposes.
18 | """
19 | cfg = spy.get_defaults(spy.freqanalysis)
20 | cfg.method = "welch"
21 | cfg.t_ftimwin = 0.5 # Window length in seconds.
22 | cfg.toi = 0.0 # Overlap between periodograms (0.5 = 50 percent overlap).
23 | return cfg
24 |
25 | for sigl_idx, sig_len in enumerate(sig_lengths):
26 | for overl_idx, overlap in enumerate(overlaps):
27 | wn = synth_data.white_noise(nTrials=20, nChannels=1, nSamples=sig_len, samplerate=1000)
28 |
29 | cfg = get_welch_cfg()
30 | cfg.toi = overlap
31 | cfg.t_ftimwin = f_timwin
32 | cfg.foilim = foilim
33 |
34 | spec = spy.freqanalysis(cfg, wn)
35 |
36 | # We got one Welch estimate per trial so far. Now compute the variance over trials:
37 | spec_var = spy.var(spec, dim='trials')
38 | mvar = np.mean(spec_var.show(channel=0)) # We get one variance per frequency bin, and average over those.
39 | variances[sigl_idx, overl_idx] = mvar
40 |
41 | fig = plt.figure()
42 | ax = fig.add_subplot(projection='3d')
43 | for row_idx in range(variances.shape[0]):
44 | ax.scatter(np.tile(sig_lengths[row_idx], overlaps.size), overlaps, variances[row_idx, :], label=f"Signal len {sig_lengths[row_idx]}")
45 | ax.set_xlabel('Signal length (number of samples)')
46 | ax.set_ylabel('Window overlap')
47 | ax.set_zlabel('var of Welch estimate')
48 | ax.set_title('Variance of Welch estimate as a function of signal length and overlap.\nColors represent different signal lengths.')
--------------------------------------------------------------------------------
/doc/source/_static/welch_raw_fft_power.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/welch_raw_fft_power.png
--------------------------------------------------------------------------------
/doc/source/_static/workFlow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/_static/workFlow.png
--------------------------------------------------------------------------------
/doc/source/_templates/syncopy_base.rst:
--------------------------------------------------------------------------------
1 | {{ fullname | escape | underline}}
2 |
3 | .. currentmodule:: {{ module }}
4 |
5 | .. auto{{ objtype }}:: {{ objname }}
6 | :members:
--------------------------------------------------------------------------------
/doc/source/_templates/syncopy_class.rst:
--------------------------------------------------------------------------------
1 | {{ fullname | escape | underline}}
2 |
3 | .. currentmodule:: {{ module }}
4 |
5 | .. autoclass:: {{ objname }}
6 | :members:
7 | :private-members:
8 |
9 | {% block methods %}
10 | .. automethod:: __init__
11 |
12 | {% if methods %}
13 | .. rubric:: Methods
14 |
15 | .. autosummary::
16 | {% for item in methods|unique %}
17 | ~{{ name }}.{{ item }}
18 | {%- endfor %}
19 |
20 | .. autosummary::
21 | {% for item in members %}
22 | {% if item[0] == "_" %}
23 | {% if item[1] != "_" %}
24 | ~{{ name }}.{{ item }}
25 | {% endif %}
26 | {% endif %}
27 | {%- endfor %}
28 |
29 | {% endif %}
30 | {% endblock %}
31 |
32 | {% block attributes %}
33 | {% if attributes %}
34 | .. rubric:: Attributes
35 |
36 | .. autosummary::
37 | {% for item in attributes %}
38 | ~{{ name }}.{{ item }}
39 | {%- endfor %}
40 | {% endif %}
41 | {% endblock %}
42 |
--------------------------------------------------------------------------------
/doc/source/developer/developer_api.rst:
--------------------------------------------------------------------------------
1 | API for Developers
2 | ------------------
3 |
4 | syncopy.datatype
5 | ^^^^^^^^^^^^^^^^
6 |
7 | .. autosummary::
8 | :toctree: _stubs
9 | :template: syncopy_class.rst
10 |
11 | syncopy.datatype.base_data.BaseData
12 | syncopy.datatype.base_data.Selector
13 | syncopy.datatype.base_data.FauxTrial
14 | syncopy.shared.StructDict
15 | syncopy.datatype.continuous_data.ContinuousData
16 | syncopy.datatype.discrete_data.DiscreteData
17 |
18 |
19 | syncopy.misc
20 | ^^^^^^^^^^^^
21 |
22 | .. autosummary::
23 | :toctree: _stubs
24 |
25 | syncopy.tests.misc.generate_artificial_data
26 |
27 |
28 | syncopy.shared
29 | ^^^^^^^^^^^^^^
30 |
31 | .. autosummary::
32 | :toctree: _stubs
33 |
34 | syncopy.shared.computational_routine.ComputationalRoutine
35 | syncopy.shared.errors.SPYError
36 | syncopy.shared.errors.SPYTypeError
37 | syncopy.shared.errors.SPYValueError
38 | syncopy.shared.errors.SPYIOError
39 | syncopy.shared.errors.SPYWarning
40 | syncopy.shared.kwarg_decorators.unwrap_cfg
41 | syncopy.shared.kwarg_decorators.unwrap_select
42 | syncopy.shared.kwarg_decorators.process_io
43 | syncopy.shared.kwarg_decorators.detect_parallel_client
44 | syncopy.shared.kwarg_decorators._append_docstring
45 | syncopy.shared.kwarg_decorators._append_signature
46 | syncopy.shared.tools.best_match
47 |
48 |
49 | syncopy.specest
50 | ^^^^^^^^^^^^^^^
51 |
52 | .. autosummary::
53 | :toctree: _stubs
54 |
55 | syncopy.specest.mtmfft.mtmfft
56 | syncopy.specest.compRoutines.MultiTaperFFT
57 | syncopy.specest.compRoutines.mtmfft_cF
58 | syncopy.specest.mtmconvol.mtmconvol
59 | syncopy.specest.compRoutines.MultiTaperFFTConvol
60 | syncopy.specest.compRoutines.mtmconvol_cF
61 | syncopy.specest.compRoutines._make_trialdef
62 | syncopy.specest.wavelet.wavelet
63 | syncopy.specest.compRoutines.WaveletTransform
64 | syncopy.specest.compRoutines.wavelet_cF
65 | syncopy.specest.compRoutines.SuperletTransform
66 | syncopy.specest.compRoutines.superlet_cF
68 | syncopy.specest.superlet.superlet
69 | syncopy.specest.wavelet.get_optimal_wavelet_scales
70 | syncopy.specest.compRoutines.FooofSpy
71 |
72 |
73 | syncopy.connectivity
74 | ^^^^^^^^^^^^^^^^^^^^
75 |
76 | .. autosummary::
77 | :toctree: _stubs
78 |
79 | syncopy.connectivity.AV_compRoutines.GrangerCausality
80 |
81 |
82 | syncopy.plotting
83 | ^^^^^^^^^^^^^^^^
84 |
85 | .. autosummary::
86 | :toctree: _stubs
87 |
88 | syncopy.plotting.spy_plotting.singlepanelplot
89 | syncopy.plotting.spy_plotting.multipanelplot
90 |
--------------------------------------------------------------------------------
/doc/source/developer/developers.rst:
--------------------------------------------------------------------------------
1 | ***********************
2 | Syncopy Developer Guide
3 | ***********************
4 |
5 | The following information is meant for advanced users with an understanding of
6 | class hierarchies that want to extend and/or modify Syncopy's base functionality.
7 |
8 | .. toctree::
9 | :glob:
10 | :maxdepth: 2
11 |
12 | datatype
13 | io
14 | tools
15 | compute_kernels
16 | logging
17 | developer_api
18 |
--------------------------------------------------------------------------------
/doc/source/developer/logging.rst:
--------------------------------------------------------------------------------
1 | .. _syncopy-logging:
2 |
3 | Controlling Logging in Syncopy
4 | ===============================
5 |
6 | Syncopy uses the `Python logging module <https://docs.python.org/3/library/logging.html>`_ for logging. It uses two different loggers:
7 | one for code that runs on the local machine, and another one for the parallelized code that
8 | is run by the remote workers in a high performance computing (HPC) cluster environment.
9 |
10 |
11 | Log levels
12 | -----------
13 |
14 | The default log level for the Syncopy logger is `'logging.IMPORTANT'` (from now on referred to as `'IMPORTANT'`). This means that you will not see any Syncopy messages below that threshold, i.e., messages printed with log levels `'DEBUG'` and `'INFO'`. To change the log level, you can either use the logging API in your application code as explained below, or set the environment variable `'SPYLOGLEVEL'` to one of the values supported by the logging module, e.g., 'CRITICAL', 'WARNING', 'INFO', or 'DEBUG'. See the `official docs of the logging module <https://docs.python.org/3/library/logging.html#logging-levels>`_ for details on the supported log levels. Note that IMPORTANT is a custom log level with importance 25, i.e., between INFO and WARNING.
15 |
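For illustration, the custom level can also be used directly through the standard logging API (a minimal sketch; the numeric value 25 for IMPORTANT is stated above):

.. code-block:: python

    import logging

    logger = logging.getLogger('syncopy')
    # IMPORTANT is a custom level with numeric value 25,
    # sitting between INFO (20) and WARNING (30).
    logger.log(25, "Visible under the default 'IMPORTANT' threshold.")
    logger.info("Filtered out at the default threshold.")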
16 |
17 | Log file location
18 | -----------------
19 |
20 | All Syncopy log files are saved in a configurable directory which we refer to as `SPYLOGDIR`. By default, `SPYLOGDIR` is set to the directory `.spy/logs/` in your home directory (accessible as `~/.spy/logs/` under Linux and Mac OS), and it can be adapted by setting the environment variable `SPYLOGDIR` before running your application.
21 |
22 | E.g., if your Python script using Syncopy is `~/neuro/paperfig1.py`, you can set the log level and log directory on the command line like this in the Bash shell:
23 |
24 | .. code-block:: shell
25 | 
26 |    export SPYLOGDIR=/tmp/spy SPYLOGLEVEL=DEBUG
27 |    ~/neuro/paperfig1.py
28 |
29 |
30 | Logging code that runs locally
31 | -------------------------------
32 |
33 | For all code that is run on the local machine, Syncopy logs to a logger named `'syncopy'` which is handled by both the console and the logfile `'SPYLOGDIR/syncopy.log'`.
34 |
35 | To adapt the local logging behaviour of Syncopy, one can configure the logger as explained in the documentation for the logging module, e.g., in your application that uses Syncopy:
36 |
37 | .. code-block:: python
38 |
39 | import syncopy
40 | import logging
41 | # Get the logger used by syncopy
42 | logger = logging.getLogger('syncopy')
43 |
44 | # Change the log level:
45 | logger.setLevel(logging.DEBUG)
46 |
47 | # Add another handler that logs to a file:
48 | fh = logging.FileHandler('syncopy_debug_log.log')
49 | logger.addHandler(fh)
50 |
51 | logger.info("My app starts now.")
52 | # The rest of your application code goes here.
53 |
54 |
55 | Logging code that potentially runs remotely
56 | --------------------------------------------
57 |
58 | The parallel code that performs the heavy lifting on the Syncopy data (i.e., what we call `compute functions`) will be executed on remote machines when Syncopy is run in an HPC environment. Therefore,
59 | special handling is required for these parts of the code, and we need to log to one log file per remote machine.
60 |
61 | Syncopy automatically configures a suitable logger named ``syncopy_<hostname>`` on each host, where ``<hostname>`` is the hostname. Each of these loggers is attached to the respective logfile ``SPYLOGDIR/syncopy_<hostname>.log``, which ensures that logging works properly even if you log into the same directory on all remote machines (e.g., a home directory that is mounted on all machines via a network file system).
62 |
63 | Here is how to log with the remote logger:
64 |
65 | .. code-block:: python
66 |
67 | import syncopy
68 | import logging, platform
69 |
70 | # ...
71 | # In some cF or backend function:
72 | par_logger = logging.getLogger("syncopy_" + platform.node())
73 | par_logger.info("Code run on remote machine is being run.")
74 |
75 | This is all you need to do. If you want to configure different log levels for the remote logger and the local one, you can configure the environment variable `SPYPARLOGLEVEL` in addition to `SPYLOGLEVEL`.
76 |
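For example, to get verbose logs from the remote workers while keeping the local console quiet, one could set (a sketch using the environment variables introduced above):

.. code-block:: shell

    export SPYLOGLEVEL=WARNING      # local logger
    export SPYPARLOGLEVEL=DEBUG     # per-host remote loggers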
--------------------------------------------------------------------------------
/doc/source/developer/tools.rst:
--------------------------------------------------------------------------------
1 | Tools for Developing Syncopy
2 | ============================
3 | The following is a collection of routines, decorators and classes that constitute
4 | the basic building blocks of Syncopy. Syncopy's entire source code follows
5 | a modular structure in which basic building blocks are written (and tested)
6 | once and then re-used throughout the entire package.
7 |
8 | Input Parsing and Error Checking
9 | --------------------------------
10 |
11 | .. autosummary::
12 | :toctree: _stubs
13 |
14 | syncopy.shared.parsers.array_parser
15 | syncopy.shared.parsers.data_parser
16 | syncopy.shared.parsers.filename_parser
17 | syncopy.shared.parsers.io_parser
18 | syncopy.shared.parsers.scalar_parser
19 |
20 | Decorators
21 | ----------
22 |
23 | .. autosummary::
24 | :toctree: _stubs
25 |
26 | syncopy.shared.kwarg_decorators.unwrap_cfg
27 | syncopy.shared.kwarg_decorators.unwrap_select
28 | syncopy.shared.kwarg_decorators.process_io
29 | syncopy.shared.kwarg_decorators.detect_parallel_client
30 |
31 |
32 | Writing A New Analysis Routine
33 | ------------------------------
34 | Any analysis routine that operates on Syncopy data is always structured in three
35 | (hierarchical) parts:
36 |
37 | 1. A numerical function based only on NumPy/SciPy that works on a
38 | :class:`numpy.ndarray` and returns a :class:`numpy.ndarray`.
39 | 2. A wrapper class that handles output initialization, potential
40 | parallelization and post-computation cleanup. The class should be based on the
41 | abstract class :class:`syncopy.shared.computational_routine.ComputationalRoutine`
42 | 3. Another wrapping metafunction handling method selection, parameterization and
43 | error checking is then provided for user interaction.
44 |
45 | An example of this type of structure is the multi-taper Fourier analysis. The
46 | corresponding stages here are
47 |
48 | 1. Numerical function: :func:`syncopy.specest.mtmfft.mtmfft`
49 | 2. Wrapper class: :class:`syncopy.specest.compRoutines.MultiTaperFFT`
50 | 3. Metafunction: :func:`syncopy.freqanalysis`
51 |
52 | .. image:: ../_static/ComputationalRoutine.png
53 |
54 | For a detailed walk-through explaining the intricacies of writing an analysis
55 | routine, please refer to the :doc:`compute_kernels`.
56 |
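To make stage 1 more concrete, here is a minimal sketch of such a numerical function (a hypothetical example, not part of Syncopy): it depends only on NumPy and maps a plain array to a plain array, which is what allows the wrapper class to parallelize it trial by trial.

.. code-block:: python

    import numpy as np

    def rectify(trial_dat):
        """Hypothetical stage-1 kernel: full-wave rectification.

        Operates on a plain (nSamples x nChannels) :class:`numpy.ndarray`
        and returns an array of the same shape, with no Syncopy-specific
        dependencies.
        """
        return np.abs(trial_dat)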
--------------------------------------------------------------------------------
/doc/source/index.rst:
--------------------------------------------------------------------------------
1 | .. Dummy index master file pointing to `README.rst`
2 |
3 | :orphan:
4 |
5 | .. include:: README.rst
6 |
--------------------------------------------------------------------------------
/doc/source/quickstart/damped_harm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/quickstart/damped_harm.png
--------------------------------------------------------------------------------
/doc/source/quickstart/damped_signals.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/quickstart/damped_signals.png
--------------------------------------------------------------------------------
/doc/source/quickstart/mtmfft_spec.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/quickstart/mtmfft_spec.png
--------------------------------------------------------------------------------
/doc/source/quickstart/wavelet_spec.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/quickstart/wavelet_spec.png
--------------------------------------------------------------------------------
/doc/source/quickstart/wavelet_spec2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/quickstart/wavelet_spec2.png
--------------------------------------------------------------------------------
/doc/source/scripts/select_example.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import syncopy as spy
3 | from syncopy.tests import synth_data
4 |
5 | # 100 trials of two phase diffusing signals with 40Hz
6 | adata = synth_data.phase_diffusion(nTrials=100,
7 | freq=40,
8 | samplerate=200,
9 | nSamples=500,
10 | nChannels=2,
11 | eps=0.01)
12 |
13 | # coherence for full dataset
14 | coh1 = spy.connectivityanalysis(adata, method='coh')
15 |
16 | # plot coherence of channel1 vs channel2
17 | coh1.singlepanelplot(channel_i='channel1', channel_j='channel2')
18 |
--------------------------------------------------------------------------------
/doc/source/scripts/synth_data1.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import syncopy as spy
3 |
4 |
5 | def generate_noisy_harmonics(nSamples, nChannels, samplerate):
6 |
7 | f1, f2 = 20, 50 # the harmonic frequencies in Hz
8 |
9 | # the sampling times vector
10 | tvec = np.arange(nSamples) * 1 / samplerate
11 |
12 | # define the two harmonics
13 | ch1 = np.cos(2 * np.pi * f1 * tvec)
14 | ch2 = np.cos(2 * np.pi * f2 * tvec)
15 |
16 | # concatenate channels into a trial array
17 | trial = np.column_stack([ch1, ch2])
18 |
19 | # add some white noise
20 | trial += 0.5 * np.random.randn(nSamples, nChannels)
21 |
22 | return trial
23 |
24 |
25 | nTrials = 50
26 | nSamples = 1000
27 | nChannels = 2
28 | samplerate = 500 # in Hz
29 |
30 | # collect trials
31 | trials = []
32 | for _ in range(nTrials):
33 | trial = generate_noisy_harmonics(nSamples, nChannels, samplerate)
34 | trials.append(trial)
35 |
36 | synth_data = spy.AnalogData(trials, samplerate=samplerate)
37 |
--------------------------------------------------------------------------------
/doc/source/setup.rst:
--------------------------------------------------------------------------------
1 | .. _install:
2 |
3 | Install Syncopy
4 | ===============
5 |
6 | Syncopy can be installed using `conda <https://docs.conda.io/en/latest/>`_.
7 | 
8 | We recommend installing Syncopy into a new conda environment:
9 |
10 | .. code-block:: bash
11 |
12 | conda create -y --name syncopy esi-syncopy
13 | conda activate syncopy
14 |
15 | If you're working on the ESI cluster, installing Syncopy is only necessary if
16 | you create your own Conda environment.
17 |
18 | .. _install_acme:
19 |
20 | Installing parallel processing engine ACME
21 | --------------------------------------------
22 |
23 | To harness the parallel processing capabilities of Syncopy on the ESI cluster
24 | it is helpful to install `ACME <https://github.com/esi-neuroscience/acme>`_.
25 | 
26 | Again, either via conda
27 |
28 | .. code-block:: bash
29 |
30 | conda install -c conda-forge esi-acme
31 |
32 | or pip
33 |
34 | .. code-block:: bash
35 |
36 | pip install esi-acme
37 |
38 | .. note::
39 | See :ref:`parallel` for details about parallel processing setup
40 |
41 | Importing Syncopy
42 | -----------------
43 |
44 | To start using Syncopy you have to import it in your Python code:
45 |
46 | .. code-block:: python
47 |
48 | import syncopy as spy
49 |
50 | All :doc:`user-facing functions and classes <user/user_api>` can then be
51 | accessed with the ``spy.`` prefix, e.g.
52 |
53 | .. code-block:: python
54 |
55 | spy.load("~/testdata.spy")
56 |
57 |
58 | To display your Syncopy version, run:
59 |
60 | .. code-block:: python
61 |
62 | spy.__version__
63 |
64 | .. _setup_env:
65 |
66 | Setting Up Your Python Environment
67 | ----------------------------------
68 |
69 | On the ESI cluster, ``/opt/conda/envs/syncopy`` provides a
70 | pre-configured and tested Conda environment with the most recent Syncopy
71 | version. This environment can be easily started using the ESI JupyterHub.
72 | 
73 |
74 | Syncopy makes heavy use of temporary files, which may become large (> 100 GB).
75 | The storage location can be set using the environment variable
76 | :envvar:`SPYTMPDIR`, which
77 | by default points to your home directory:
78 |
79 | .. code-block:: bash
80 |
81 | SPYTMPDIR=~/.spy
82 |
83 | The performance of Syncopy strongly depends on the read and write speed in
84 | this folder. On the ESI cluster, the variable is set to use the high performance
85 | storage:
86 |
87 | .. code-block:: bash
88 |
89 | SPYTMPDIR=/cs/home/$USER/.spy
90 |
--------------------------------------------------------------------------------
/doc/source/sitemap.rst:
--------------------------------------------------------------------------------
1 | :orphan:
2 |
3 | Resources by Topic
4 | ==================
5 | Looking for information regarding a specific analysis method? The table below
6 | might help.
7 |
8 | .. cssclass:: table-hover
9 |
10 | +-------------------+-----------------------+---------------------------+
11 | | **Topic** | **Resources** | **Description** |
12 | +-------------------+-----------------------+---------------------------+
13 | | |TnW| | |Quick| | |QuickDesc| |
14 | | +-----------------------+---------------------------+
15 | | | |Spy4FT| | |Spy4FTDesc| |
16 | | +-----------------------+---------------------------+
17 | | | |SpyBasic| | |SpyBasicDesc| |
18 | | +-----------------------+---------------------------+
19 | | | |SpyData| | |SpyDataDesc| |
20 | +-------------------+-----------------------+---------------------------+
21 | | |RDoc| | |UserAPI| | |UserAPIDesc| |
22 | | +-----------------------+---------------------------+
23 | | | |DevAPI| | |DevAPIDesc| |
24 | | +-----------------------+---------------------------+
25 | | | |DevTools| | |DevToolsDesc| |
26 | | +-----------------------+---------------------------+
27 | | | |Indx| | |IndxDesc| |
28 | +-------------------+-----------------------+---------------------------+
29 | | |Spec| | |SpecTut| | |SpecTutDesc| |
30 | | +-----------------------+---------------------------+
31 | | | |SpecEx| | |SpecExDesc| |
32 | | +-----------------------+---------------------------+
33 | | | |SpecAdv| | |SpecAdvDesc| |
34 | | +-----------------------+---------------------------+
35 | | | |SpecFof| | |SpecFofDesc| |
36 | +-------------------+-----------------------+---------------------------+
37 | | |Con| | |ConTut| | |ConTutDesc| |
38 | | +-----------------------+---------------------------+
39 | | | |ConEx| | |ConExDesc| |
40 | | +-----------------------+---------------------------+
41 | | | |ConAdv| | |ConAdvDesc| |
42 | +-------------------+-----------------------+---------------------------+
43 |
44 | .. |TnW| replace:: *Tutorials & Walkthroughs*
45 | .. |RDoc| replace:: *Reference Documentation*
46 | .. |Spec| replace:: *Spectral Estimation*
47 | .. |Con| replace:: *Connectivity*
48 |
49 | .. |Quick| replace:: :doc:`Quickstart <quickstart/quickstart>`
50 | .. |QuickDesc| replace:: A quick tour through Syncopy
51 | .. |Spy4FT| replace:: :doc:`Syncopy for FieldTrip Users <user/fieldtrip>`
52 | .. |Spy4FTDesc| replace:: Quick introduction to Syncopy from a FieldTrip user's perspective
53 | .. |SpyData| replace:: :doc:`Data Handling in Syncopy <user/data>`
54 | .. |SpyDataDesc| replace:: Overview of Syncopy's data management
55 | .. |SpyBasic| replace:: :doc:`Basic Concepts <user/concepts>`
56 | .. |SpyBasicDesc| replace:: Overview of Syncopy's approach to neural data analysis
57 | 
58 | .. |UserAPI| replace:: :doc:`User API <user/user_api>`
59 | .. |UserAPIDesc| replace:: The public API for users
60 | .. |DevAPI| replace:: :doc:`Developer API <developer/developer_api>`
61 | .. |DevAPIDesc| replace:: The parts of Syncopy mostly interesting for developers
62 | .. |Indx| replace:: :ref:`Package Index <genindex>`
63 | .. |IndxDesc| replace:: Index of all functions/classes
64 | .. |DevTools| replace:: :doc:`Syncopy Developer Tools <developer/tools>`
65 | .. |DevToolsDesc| replace:: Tools for contributing new functionality to Syncopy
66 |
67 | .. |SpecTut| replace:: Spectral Estimation Tutorial
68 | .. |SpecTutDesc| replace:: An introduction to the available spectral estimation methods in Syncopy
69 | .. |SpecEx| replace:: Spectral Estimation Examples
70 | .. |SpecExDesc| replace:: Example scripts and notebooks illustrating spectral estimation in Syncopy
71 | .. |SpecAdv| replace:: Advanced Topics in Spectral Estimation
72 | .. |SpecAdvDesc| replace:: Technical details and notes for advanced users/developers
73 | .. |SpecFof| replace:: :doc:`Applying FOOOF <tutorials/fooof>`
74 | .. |SpecFofDesc| replace:: Post-processing spectral data with FOOOF: fitting oscillations and one over f
75 |
76 | .. |ConTut| replace:: Connectivity Tutorial
77 | .. |ConTutDesc| replace:: An introduction to connectivity estimation in Syncopy
78 | .. |ConEx| replace:: Connectivity Examples
79 | .. |ConExDesc| replace:: Example scripts and notebooks illustrating the use of connectivity metrics in Syncopy
80 | .. |ConAdv| replace:: Advanced Topics in Connectivity
81 | .. |ConAdvDesc| replace:: Technical details and notes for advanced users/developers
82 |
83 | Sitemap
84 | =======
85 |
86 | .. toctree::
87 | :maxdepth: 2
88 |
89 | quickstart/quickstart.rst
90 | user/concepts.rst
91 | developer/developers.rst
92 |
93 | Still no luck finding what you're looking for? Try using the :ref:`search <search>` function.
94 |
--------------------------------------------------------------------------------
/doc/source/tutorials/ar2_bp_corr.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/tutorials/ar2_bp_corr.png
--------------------------------------------------------------------------------
/doc/source/tutorials/ar2_bp_corr2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/tutorials/ar2_bp_corr2.png
--------------------------------------------------------------------------------
/doc/source/tutorials/ar2_coh.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/tutorials/ar2_coh.png
--------------------------------------------------------------------------------
/doc/source/tutorials/ar2_corr.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/tutorials/ar2_corr.png
--------------------------------------------------------------------------------
/doc/source/tutorials/ar2_granger.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/tutorials/ar2_granger.png
--------------------------------------------------------------------------------
/doc/source/tutorials/ar2_nw.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import syncopy as spy
3 | from syncopy import synthdata
4 |
5 | cfg = spy.StructDict()
6 | cfg.nTrials = 50
7 | cfg.nSamples = 2000
8 | cfg.samplerate = 250
9 |
10 | # 3x3 Adjacency matrix to define coupling
11 | AdjMat = np.zeros((3, 3))
12 | # only coupling 0 -> 1
13 | AdjMat[0, 1] = 0.2
14 |
15 | data = synthdata.ar2_network(AdjMat, cfg=cfg, seed=42)
16 |
17 | # add some red noise as 1/f surrogate
18 | data = data + 2 * synthdata.red_noise(cfg, alpha=0.95, nChannels=3, seed=42)
19 |
20 | spec = spy.freqanalysis(data, tapsmofrq=3, keeptrials=False)
21 |
--------------------------------------------------------------------------------
/doc/source/tutorials/ar2_signals.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/tutorials/ar2_signals.png
--------------------------------------------------------------------------------
/doc/source/tutorials/ar2_specs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/tutorials/ar2_specs.png
--------------------------------------------------------------------------------
/doc/source/tutorials/fft_nui_spec.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/tutorials/fft_nui_spec.png
--------------------------------------------------------------------------------
/doc/source/tutorials/fft_pp_spec.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/tutorials/fft_pp_spec.png
--------------------------------------------------------------------------------
/doc/source/tutorials/freqanalysis.rst:
--------------------------------------------------------------------------------
1 | Spectral Analysis
2 | =================
3 |
4 | Many signals in neuroscience have oscillatory components, and hence an analysis in the *frequency domain* is often advisable. The high-level function :func:`~syncopy.freqanalysis` offers many standard methods for spectral analysis, like the multi-tapered Fourier transform or time-frequency methods like wavelet transforms. In the following we introduce each method individually:
5 |
6 | .. toctree::
7 | :maxdepth: 1
8 |
9 |    Welch's Method <welch>
10 |    Fooof <fooof>
11 |
12 |
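As a quick orientation before diving into the individual methods, a minimal call looks like this (a sketch; ``method='mtmfft'`` with ``output='pow'`` is the default multi-tapered Fourier analysis used throughout these tutorials)::

    import syncopy as spy

    # data is any syncopy.AnalogData instance
    spec = spy.freqanalysis(data, method='mtmfft', output='pow')
    spec.singlepanelplot()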
--------------------------------------------------------------------------------
/doc/source/tutorials/preproc_synthdata.py:
--------------------------------------------------------------------------------
1 | # built-in synthetic data generators
2 | import syncopy as spy
3 | from syncopy import synthdata as spy_synth
4 |
5 | cfg_synth = spy.StructDict()
6 | cfg_synth.nTrials = 150
7 | cfg_synth.samplerate = 500
8 | cfg_synth.nSamples = 1000
9 | cfg_synth.nChannels = 2
10 |
11 | # 30Hz undamped harmonic
12 | harm = spy_synth.harmonic(cfg_synth, freq=30)
13 |
14 | # a linear trend
15 | lin_trend = spy_synth.linear_trend(cfg_synth, y_max=3)
16 |
17 | # a 2nd 'nuisance' harmonic
18 | harm50 = spy_synth.harmonic(cfg_synth, freq=50)
19 |
20 | # finally the white noise floor
21 | wn = spy_synth.white_noise(cfg_synth)
22 |
--------------------------------------------------------------------------------
/doc/source/tutorials/preprocessing.rst:
--------------------------------------------------------------------------------
1 | .. _preproc:
2 |
3 | Preprocessing
4 | =============
5 |
6 | Raw data often contains unwanted signal components: offsets, trends or even oscillatory nuisance signals. Syncopy has a dedicated :func:`~syncopy.preprocessing` function to clean up and/or transform the data.
7 |
8 | .. contents:: Topics covered
9 | :local:
10 |
11 | Let's start by creating a new synthetic signal with confounding components:
12 |
13 | .. literalinclude:: /tutorials/preproc_synthdata.py
14 |
15 | Here we used a ``cfg`` dictionary to assemble all needed parameters, a concept we adopted from `FieldTrip <https://www.fieldtriptoolbox.org>`_.
16 |
17 | .. _arithmetics:
18 |
19 | Dataset Arithmetics
20 | -------------------
21 |
22 | If the *shape* of different Syncopy objects match exactly (``nSamples``, ``nChannels`` and ``nTrials`` are all the same), we can use **standard Python arithmetic operators** like **+**, **-**, ***** and **/** directly. Here we want a linear superposition, so we simply add everything together::
23 |
24 | # add noise, trend and the nuisance harmonic
25 | data_nui = harm + wn + lin_trend + harm50
26 | # also works for scalars
27 | data_nui = data_nui + 5
28 |
29 | If we now do a spectral analysis, the power spectra are confounded by all our new signal components::
30 |
31 | cfg = spy.StructDict()
32 | cfg.tapsmofrq = 1
33 | cfg.foilim = [0, 60]
34 | cfg.polyremoval = None
35 | cfg.keeptrials = False # trial averaging
36 | fft_nui_spectra = spy.freqanalysis(data_nui, cfg)
37 |
38 | .. note::
39 | We explicitly set ``polyremoval=None`` to see the full effect of our confounding signal components. The default for :func:`~syncopy.freqanalysis` is ``polyremoval=0``, which removes polynomials of 0th order: constant offsets (*de-meaning*).
40 |
41 | .. hint::
42 | We did not specify the ``method`` parameter for the :func:`~syncopy.freqanalysis` call, as multi-tapered Fourier analysis (``method='mtmfft'``) is the default. To learn about the defaults of any Python function you can inspect its signature with ``spy.freqanalysis?`` or ``help(spy.freqanalysis)`` typed into an interpreter.
43 |
44 | Let's see what we got::
45 |
46 | fft_nui_spectra.singlepanelplot()
47 |
48 | .. image:: fft_nui_spec.png
49 | :height: 250px
50 |
51 | We see strong low-frequency components, originating from both the offset and the trend. We also see the nuisance signal spectral peak at 50Hz.
52 |
53 | Filtering
54 | ---------
55 | Filtering of signals in general removes/suppresses unwanted signal components. This can be done both in the *time-domain* and in the *frequency-domain*. For offsets and (low-order) polynomial trends, fitting a model directly in the time domain, and subtracting the obtained trend, is the preferred solution. This can be controlled in Syncopy with the ``polyremoval`` parameter, which is also directly available in :func:`~syncopy.freqanalysis`.
56 |
57 | Removing signal components in the frequency domain is typically done with *finite impulse response* (FIR) filters or *infinite impulse response* (IIR) filters. Syncopy supports one of each kind: a FIR `windowed sinc <https://en.wikipedia.org/wiki/Sinc_filter>`_ and the `Butterworth filter <https://en.wikipedia.org/wiki/Butterworth_filter>`_ from the IIR family. For both filters we have low-pass (``'lp'``), high-pass (``'hp'``), band-pass (``'bp'``) and band-stop (notch, ``'bs'``) designs available.
58 |
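For instance, the 50Hz nuisance harmonic alone could be targeted with a band-stop (notch) design (a sketch using the same :func:`~syncopy.preprocessing` API as below, assuming the ``'bs'`` filter type)::

    # notch out a narrow band around 50Hz
    data_notch = spy.preprocessing(data_nui,
                                   filter_class='but',
                                   filter_type='bs',
                                   freq=[49, 51])
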
59 | To clean up our dataset above, we remove the linear trend and apply a low-pass 12th order Butterworth filter::
60 |
61 | data_pp = spy.preprocessing(data_nui,
62 | filter_class='but',
63 | filter_type='lp',
64 | polyremoval=1,
65 | freq=40,
66 | order=12)
67 |
68 | Now let's reuse our ``cfg`` from above to repeat the spectral analysis with the preprocessed data::
69 |
70 | spec_pp = spy.freqanalysis(data_pp, cfg)
71 | spec_pp.singlepanelplot()
72 |
73 | .. image:: fft_pp_spec.png
74 | :height: 250px
75 |
76 | As expected for a low-pass filter, all frequencies above 40Hz are strongly attenuated (note the log scale: the suppression is around 2 orders of magnitude). We also removed the low-frequency components from the offset and trend, though we also lost a bit of the original white noise power around 0-2Hz. Importantly, the spectral power of our frequency band of interest, around 30Hz, remained virtually unchanged.
77 |
--------------------------------------------------------------------------------
/doc/source/tutorials/res_ds_spec.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/tutorials/res_ds_spec.png
--------------------------------------------------------------------------------
/doc/source/tutorials/res_lpds_spec.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/tutorials/res_lpds_spec.png
--------------------------------------------------------------------------------
/doc/source/tutorials/res_lporderds_spec.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/tutorials/res_lporderds_spec.png
--------------------------------------------------------------------------------
/doc/source/tutorials/res_orig_spec.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/tutorials/res_orig_spec.png
--------------------------------------------------------------------------------
/doc/source/tutorials/res_rs_spec.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/doc/source/tutorials/res_rs_spec.png
--------------------------------------------------------------------------------
/doc/source/tutorials/res_synthdata.py:
--------------------------------------------------------------------------------
1 | # Import package
2 | import syncopy as spy
3 | from syncopy.tests import synth_data
4 |
5 | # basic dataset properties
6 | nTrials = 100
7 | samplerate = 5000 # in Hz
8 |
9 | # add a harmonic with 200Hz
10 | adata = 2 * synth_data.harmonic(nTrials, freq=200, samplerate=samplerate)
11 |
12 | # add another harmonic with 1300Hz
13 | adata += synth_data.harmonic(nTrials, freq=1300, samplerate=samplerate)
14 |
15 | # white noise floor
16 | adata += synth_data.white_noise(nTrials, samplerate=samplerate)
17 |
18 | # compute the trial averaged spectrum and plot
19 | spec = spy.freqanalysis(adata, keeptrials=False)
20 | f1, ax1 = spec.singlepanelplot(channel=0)
21 | f1.set_size_inches(6.4, 3)
22 | f1.savefig('res_orig_spec.png')
23 |
24 | # naive downsampling
25 | ds_adata = spy.resampledata(adata, method='downsample', resamplefs=1000)
26 | ds_spec = spy.freqanalysis(ds_adata, keeptrials=False)
27 | f2, ax2 = ds_spec.singlepanelplot(channel=0)
28 | ax2.annotate('?', (315, -.5), fontsize=25)
29 | f2.set_size_inches(6.4, 3)
30 | f2.savefig('res_ds_spec.png')
31 |
32 |
33 | ds_adata2 = spy.resampledata(adata, method='downsample', resamplefs=1000, lpfreq=500)
34 | ds_spec2 = spy.freqanalysis(ds_adata2, keeptrials=False)
35 | f3, ax3 = ds_spec2.singlepanelplot(channel=0)
36 | f3.set_size_inches(6.4, 3)
37 | f3.savefig('res_lpds_spec.png')
38 |
39 | ds_adata3 = spy.resampledata(adata, method='downsample', resamplefs=1000, lpfreq=500, order=5000)
40 | ds_spec3 = spy.freqanalysis(ds_adata3, keeptrials=False)
41 | f4, ax4 = ds_spec3.singlepanelplot(channel=0)
42 | f4.set_size_inches(6.4, 3)
43 | f4.savefig('res_lporderds_spec.png')
44 |
45 | # resampling
46 |
47 | # rs_adata = spy.resampledata(adata, method='resample', resamplefs=1202, order=20000)
48 | rs_adata = spy.resampledata(adata, method='resample', resamplefs=1200)
49 | rs_spec = spy.freqanalysis(rs_adata, keeptrials=False)
50 | f5, ax5 = rs_spec.singlepanelplot(channel=0)
51 | f5.set_size_inches(6.4, 3)
52 | f5.savefig('res_rs_spec.png')
53 |
--------------------------------------------------------------------------------
/doc/source/user/class_diagramm.mmd:
--------------------------------------------------------------------------------
1 | %%{init: {'theme':'forest'}}%%
2 | classDiagram
3 |
4 | BaseData <|-- ContinuousData
5 | BaseData <|-- DiscreteData
6 |
7 | ContinuousData <|-- AnalogData
8 | ContinuousData <|-- SpectralData
9 | ContinuousData <|-- CrossSpectralData
10 |
11 | DiscreteData <|-- SpikeData
12 | DiscreteData <|-- EventData
13 |
14 | BaseData : float samplerate
15 | BaseData : np.ndarray trialdefinition
16 | BaseData : iterable trials
17 | BaseData : np.ndarray~str~ channel
18 | BaseData : h5py.Dataset data
19 |
20 | BaseData : show()
21 | BaseData : selectdata()
22 | BaseData : save()
23 | BaseData : save_nwb()
24 | BaseData : singlepanelplot()*
25 |
26 | ContinuousData : np.ndarray~float~ time
27 | DiscreteData : np.ndarray~int~ sample
28 |
29 | class SpectralData{
30 | np.ndarray~float~ freq
31 | np.ndarray~str~ taper
32 | }
33 |
34 | class CrossSpectralData{
35 | np.ndarray~float~ freq
36 | np.ndarray~str~ taper
37 | np.ndarray~str~ channel_i
38 | np.ndarray~str~ channel_j
39 | }
40 |
41 | class SpikeData{
42 | np.ndarray~str~ unit
43 | np.ndarray~float~ waveform
44 | }
45 |
46 | class EventData{
47 | np.ndarray~int~ eventid
48 | }
--------------------------------------------------------------------------------
/doc/source/user/complete_api.rst:
--------------------------------------------------------------------------------
1 | Complete API
2 | =============
3 |
4 | Reference to the complete Syncopy API
5 |
6 | .. contents:: Sections
7 | :local:
8 |
9 | .. automodapi:: syncopy
10 | .. automodapi:: syncopy.synthdata
11 |
--------------------------------------------------------------------------------
/doc/source/user/concepts.rst:
--------------------------------------------------------------------------------
1 | **************
2 | Basic Concepts
3 | **************
4 |
5 | Using Syncopy usually entails writing Python analysis scripts operating on a given list of data files. For new users we prepared a :ref:`quick_start`. Here we want to present the general concepts behind Syncopy.
6 |
7 | Data analysis pipelines are inspired by the well-established and feature-rich
8 | `MATLAB <https://mathworks.com>`_ toolbox `FieldTrip <https://www.fieldtriptoolbox.org>`_.
9 | Syncopy aims to emulate FieldTrip's basic usage concepts.
10 |
11 | .. contents:: Topics covered
12 | :local:
13 |
14 | .. _workflow:
15 |
16 | General Workflow
17 | ----------------
18 |
19 | A typical analysis workflow with Syncopy might look like this:
20 |
21 | .. image:: /_static/workFlow.png
22 |
23 |
24 | We start with data import (or simply loading if already in ``.spy`` format) which will create one of Syncopy's datatypes like :class:`~syncopy.AnalogData`. Then actual (parallel) processing of the data is triggered by calling a *meta-function* (see also below), for example :func:`~syncopy.freqanalysis`. An analysis output often results in a different datatype, e.g. :class:`~syncopy.SpectralData`. All indicated methods (:func:`~syncopy.show`, :func:`~syncopy.singlepanelplot` and :func:`~syncopy.save`) for data access are available for all of Syncopy's datatypes. Hence, at any processing step the data can be plotted, extracted as NumPy :class:`~numpy.ndarray`\ s, or saved to disc as (intermediate) results in ``.spy`` containers.
25 |
26 | .. note::
27 | Have a look at :doc:`Data Basics <data_basics>` for further details about Syncopy's data formats and interfaces.
28 |
29 |
30 | Memory Management
31 | ~~~~~~~~~~~~~~~~~
32 |
33 | One of the key concepts of Syncopy is mindful computing resource management, especially keeping a low **memory footprint**. In the depicted workflow, data processed :blue:`on disc` is indicated in :blue:`blue`, whereas potentially :red:`memory exhausting operations` are indicated in :red:`red`. So care has to be taken when using :func:`~syncopy.show` or the plotting routines :func:`~syncopy.singlepanelplot` and :func:`~syncopy.multipanelplot`, as these potentially pipe the whole dataset into the system's memory. It is advised to either perform some averaging beforehand, or to cautiously select only a few channels/trials for these operations.
34 |
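A sketch of this pattern, cutting the dataset down before any plotting (the ``selectdata`` method and its parameters are covered in the data handling section):

.. code-block:: python

    # restrict to one channel and a few trials before
    # pulling anything into memory
    small = data.selectdata(channel=0, trials=[0, 1, 2])
    small.singlepanelplot()
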
35 | .. _meta_functions:
36 |
37 | Syncopy Meta-Functions
38 | ----------------------
39 | All of Syncopy's computing managers (like :func:`~syncopy.freqanalysis`) can be
40 | either called using positional/keyword arguments following standard Python syntax,
41 | e.g.,
42 |
43 | .. code-block:: python
44 |
45 | spec = spy.freqanalysis(data, method="mtmfft", foilim=[1, 150], output="pow", taper="dpss", tapsmofrq=10)
46 |
47 | or using a ``cfg`` configuration structure:
48 |
49 | .. code-block:: python
50 |
51 | cfg = spy.get_defaults(spy.freqanalysis)
52 | cfg.method = 'mtmfft'
53 | cfg.foilim = [1, 150]
54 | cfg.output = 'pow'
55 | cfg.taper = 'dpss'
56 | cfg.tapsmofrq = 10
57 | spec = spy.freqanalysis(cfg, data)
58 |
59 |
--------------------------------------------------------------------------------
/doc/source/user/data.rst:
--------------------------------------------------------------------------------
1 | .. _data_handling:
2 |
3 | ************************
4 | Handling Data in Syncopy
5 | ************************
6 |
7 | This section of the Syncopy documentation contains information about how data is handled in Syncopy.
8 |
9 | .. toctree::
10 | :maxdepth: 2
11 |
12 | data_basics
13 | selectdata
14 | logging
15 | synth_data
16 | matlab_io
17 |
18 |
19 |
20 | Advanced Topics
21 | ---------------
22 | More information about Syncopy's data class structure and file format.
23 |
24 | .. toctree::
25 |
26 | datatype
27 | ../developer/io
28 |
29 |
--------------------------------------------------------------------------------
/doc/source/user/data_basics.rst:
--------------------------------------------------------------------------------
1 | .. _data_basics:
2 |
3 | Syncopy Data Basics
4 | ===================
5 |
6 | Syncopy utilizes a simple data format based on `HDF5
7 | `_ and `JSON
8 | `_ (see :doc:`../developer/io` for details).
9 | These formats were chosen for their *ubiquity* as they can be handled well in
10 | virtually all popular programming languages, and for allowing *streaming,
11 | parallel access* enabling computing on parallel architectures.
12 |
13 | .. contents:: Topics covered
14 | :local:
15 |
16 |
17 | Loading and Saving Syncopy (``*.spy``) Data
18 | -------------------------------------------
19 | Reading and writing data with Syncopy
20 |
21 | .. autosummary::
22 |
23 | syncopy.load
24 | syncopy.save
25 |
26 | Functions for Inspecting/Editing Syncopy Data Objects
27 | -----------------------------------------------------
28 | Defining trials, data selection and NumPy :class:`~numpy.ndarray` interface
29 |
30 | .. autosummary::
31 |
32 | syncopy.definetrial
33 | syncopy.selectdata
34 | syncopy.show
35 |
36 | Plotting Functions
37 | ------------------
38 |
39 | .. autosummary::
40 |
41 | syncopy.singlepanelplot
42 | syncopy.multipanelplot
43 |
44 | .. hint::
45 | The :ref:`selections` section details how :func:`~syncopy.singlepanelplot` and :func:`~syncopy.show` all work based on the same :func:`~syncopy.selectdata` API.
46 |
47 |
48 | Importing Data into Syncopy
49 | ---------------------------
50 |
51 | Importing Data from different file formats into Syncopy
52 | -------------------------------------------------------
53 |
54 | Currently, Syncopy supports importing data from the `FieldTrip <https://www.fieldtriptoolbox.org>`_ raw data format, from `NWB <https://www.nwb.org>`_ and `TDT <https://www.tdt.com>`_:
55 |
56 | .. autosummary::
57 |
58 | syncopy.io.load_ft_raw
59 | syncopy.io.load_nwb
60 | syncopy.io.load_tdt
61 |
62 |
63 | Importing Data from NumPy
64 | -------------------------
65 |
66 | If you have an electrical time series as a :class:`~numpy.ndarray` and want to import it into Syncopy, you can initialize an :class:`~syncopy.AnalogData` object directly::
67 |
68 | import syncopy as spy
69 | import numpy as np
70 |
71 | # 3 channel surrogate data
72 | np_data = np.random.randn(10_000, 3)
73 |
74 | # initialize AnalogData
75 | spy_data = spy.AnalogData(np_data, samplerate=1000)
76 |
77 | Without an explicit **trialdefinition** the default all-to-all definition is used, meaning all data is merged into a single trial. Setting a trialdefinition requires building an ``M x 3`` matrix, with ``M`` being the number of trials, and each row containing ``[start, stop, offset]`` **in samples**::
78 |
79 | spy_data.trialdefinition = np.array([[0, 3000, -1000], [3000, 6000, -1000]])
80 |
81 | With this we have 2 trials, each 3000 samples long starting at -1 seconds.
82 |
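For regularly segmented recordings such a matrix can also be built programmatically; a small sketch (assuming equally long trials and a fixed pre-trigger offset)::

    nTrials, nSamples, offset = 3, 3000, -1000
    starts = np.arange(nTrials) * nSamples
    spy_data.trialdefinition = np.column_stack(
        [starts, starts + nSamples, np.full(nTrials, offset)])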
83 |
84 | .. autosummary::
85 |
86 | syncopy.AnalogData
87 |
88 |
89 | Creating Synthetic Example Data
90 | -------------------------------
91 |
92 | Syncopy contains the `synthdata` module, which can be used to create synthetic data for testing and demonstration purposes.
93 |
94 |
95 | .. autosummary::
96 |
97 | syncopy.synthdata
98 |
99 |
100 |
101 | Exporting Data from Syncopy to NWB
102 | ----------------------------------
103 |
104 | Syncopy supports export of data to the `NWB <https://www.nwb.org>`_ format for objects of type :class:`~syncopy.AnalogData`, :class:`~syncopy.TimeLockData` and :class:`~syncopy.SpikeData`.
105 |
106 |
107 | .. autosummary::
108 |
109 | syncopy.AnalogData.save_nwb
110 | syncopy.TimeLockData.save_nwb
111 | syncopy.SpikeData.save_nwb
112 |
113 | Here is a little example::
114 |
115 | import syncopy as spy
116 |
117 | raw_data = spy.synthdata.red_noise(alpha=0.9)
118 |
119 | # some processing, bandpass filter and (here meaningless) phase extraction
120 | processed_data = spy.preprocessing(raw_data, filter_type='bp', freq=[35, 40], hilbert='angle')
121 |
122 | # save raw data to NWB
123 | nwb_path = 'test.nwb'
124 | nwbfile = raw_data.save_nwb(nwb_path)
125 |
126 | # save processed data into same NWB file
127 | processed_data.save_nwb(nwb_path, nwbfile=nwbfile, is_raw=False)
128 |
129 | Note that NWB is a very general container format, so loading an NWB container created by one software package into the internal data structures used by another package requires some interpretation of the fields, which users may need to do manually. One can inspect NWB files online using tools like the `NWB Explorer <https://nwbexplorer.opensourcebrain.org>`_.
130 |
131 |
132 | Data exchange and interoperability between Syncopy and MNE Python
133 | -----------------------------------------------------------------
134 |
135 | The MNE Python package is a popular open-source package for analyzing electrophysiological data. Syncopy comes with conversion functions for the MNE data classes, so data can be exchanged easily between the two packages. In order to use these functions, users need to manually install MNE into the Syncopy environment.
136 |
137 | The following conversion functions are available:
138 |
139 | .. autosummary::
140 |
141 | syncopy.raw_adata_to_mne_raw
142 | syncopy.raw_mne_to_adata
143 | syncopy.tldata_to_mne_epochs
144 | syncopy.mne_epochs_to_tldata
145 |
146 |
147 | Here is an example of how to import data from MNE Python into Syncopy. Once more, make sure you have `mne` installed::
148 |
149 | import syncopy as spy
150 | import mne
151 | import os
152 | # Load data in MNE Python
153 | sample_data_folder = mne.datasets.sample.data_path()
154 | sample_data_raw_file = os.path.join(
155 | sample_data_folder, "MEG", "sample", "sample_audvis_raw.fif"
156 | )
157 | mne_data = mne.io.read_raw_fif(sample_data_raw_file, preload=True)
158 |
159 | # Convert to Syncopy AnalogData
160 | spy_data = spy.io.mne_conv.raw_mne_to_adata(mne_data)
161 |
162 | # save to Syncopy HDF5 format
163 | spy_data.save('sample_audvis_raw.spy')
164 |
--------------------------------------------------------------------------------
/doc/source/user/datatype.rst:
--------------------------------------------------------------------------------
1 | .. _syncopy-data-classes:
2 |
3 | Syncopy Data Classes
4 | ====================
5 |
6 | The data structure in Syncopy is based around the idea that all
7 | electrophysiology data can be represented as multidimensional arrays. For
8 | example, a multi-channel local field potential can be stored as a
9 | two-dimensional `float` array with the dimensions being time (sample) and
10 | channel. This array is always stored in the :attr:`data` property and can be
11 | indexed using `NumPy indexing
12 | `_.
13 |
14 | .. note:: Each Syncopy data object is simply an annotated multi-dimensional array.
15 |
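As a small illustration of this idea, consider a sketch constructing an :class:`~syncopy.AnalogData` object from a NumPy array and indexing into it (the constructor from a list of per-trial arrays is shown in the data handling section)::

    import numpy as np
    import syncopy as spy

    # one trial of 2-channel data sampled at 500 Hz
    adata = spy.AnalogData([np.random.randn(1000, 2)], samplerate=500)

    # the underlying array lives in the data property and
    # supports NumPy-style indexing
    first_100 = adata.data[:100, :]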
16 | Different types of electrophysiology data often share common properties (e.g.
17 | having channel/electrode labels, having a time axis, etc.). An efficient way of
18 | organizing these different data types are `classes
19 | `_ organized in a
20 | hierarchy, with shared properties inherited from the top level to the bottom
21 | classes (see also `Wikipedia
22 | `_).
23 |
24 | .. image:: /_static/class_diagramm.png
25 |
26 | The bottom classes in the class tree are for active use in analyses.
27 |
28 | .. _data_classes:
29 |
30 | Syncopy Data Classes
31 | --------------------
32 | The following classes can be instanced at the package-level (``spy.AnalogData(...)`` etc.)
33 |
34 | .. autosummary::
35 |
36 | syncopy.AnalogData
37 | syncopy.SpectralData
38 | syncopy.CrossSpectralData
39 | syncopy.TimeLockData
40 | syncopy.SpikeData
41 | syncopy.EventData
42 |
43 |
44 |
45 |
46 |
--------------------------------------------------------------------------------
/doc/source/user/logging.rst:
--------------------------------------------------------------------------------
1 | .. _logging:
2 |
3 | Trace Your Steps: Data Logs
4 | ===========================
5 |
6 | An important feature of Syncopy fostering reproducibility is a ``log`` which gets attached to, and propagated between, all datasets. Suppose we have some :class:`~syncopy.SpectralData` and we want to know how we arrived at these results. Typing::
7 |
8 | spectral_data.log
9 |
10 | gives an output like this::
11 |
12 | |=== user@machine: Thu Feb 3 17:05:59 2022 ===|
13 |
14 | __init__: created AnalogData object
15 |
16 | |=== user@machine: Thu Feb 3 17:12:11 2022 ===|
17 |
18 | __init__: created SpectralData object
19 |
20 | |=== user@machine: Thu Feb 3 17:12:11 2022 ===|
21 |
22 | definetrial: updated trial-definition with [50 x 3] element array
23 |
24 | |=== user@machine: Thu Feb 3 17:12:11 2022 ===|
25 |
26 | write_log: computed mtmfft_cF with settings
27 | method = mtmfft
28 | output = pow
29 | keeptapers = False
30 | keeptrials = True
31 | polyremoval = None
32 | pad_to_length = None
33 | foi = [ 0. 0.5 1. 1.5 2. 2.5, ..., 47.5 48. 48.5 49. 49.5 50. ]
34 | taper = dpss
35 | nTaper = 5
36 | tapsmofrq = 3
37 |
38 |
39 | We see that from the creation of the original :class:`~syncopy.AnalogData` all steps needed to compute the new :class:`~syncopy.SpectralData` were recorded. In this example the spectra were computed via the multi-tapered FFT, with a spectral smoothing box (``tapsmofrq``) of 3Hz, which required 5 Slepian tapers. The frequencies of interest (``foi``) range from 0Hz to 50Hz with 0.5Hz stepping, and ``keeptrials`` was set to ``True``, meaning that this dataset contains the results for all trials separately.
40 |
--------------------------------------------------------------------------------
/doc/source/user/matlab_io.rst:
--------------------------------------------------------------------------------
1 | .. _matlab_io:
2 |
3 | Roundtrip from MATLAB/FieldTrip to Syncopy and Back
4 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
5 | Data created with Syncopy can be loaded into MATLAB using the `matlab-syncopy
6 | <https://github.com/esi-neuroscience/syncopy-matlab>`_ interface. It's still in early
7 | development and supports only a subset of data classes. Also, the MATLAB
8 | interface does not support loading data that does not fit into local memory.
9 |
10 |
11 | For this illustrative example we start by generating synthetic data in FieldTrip:
12 |
13 | .. code-block:: matlab
14 |
15 | cfg = [];
16 | cfg.method = 'superimposed';
17 | cfg.fsample = 1000;
18 | cfg.numtrl = 13;
19 | cfg.trllen = 7;
20 | cfg.s1.freq = 50;
21 | cfg.s1.ampl = 1;
22 | cfg.s1.phase = 0;
23 | cfg.noise.ampl = 0;
24 | data = ft_freqsimulation(cfg);
25 | data.dimord = '{rpt}_label_time';
26 |
27 | Next, `download the latest release <https://github.com/esi-neuroscience/syncopy-matlab/releases>`_
28 | of Syncopy's MATLAB interface and add the folder containing the `+spy` directory to your
29 | MATLAB path.
30 |
31 | .. code-block:: matlab
32 |
33 | addpath('/path/to/syncopy-matlab/')
34 |
35 | Now, we save the synthetic dataset as Syncopy :class:`~syncopy.AnalogData` dataset in the
36 | respective user home
37 |
38 | .. code-block:: matlab
39 |
40 | cfg = []; cfg.filename = '~/syn_data.analog';
41 | spy.ft_save_spy(cfg, data)
42 |
43 | The previous call generated two files: an HDF5 data-file ``~/syn_data.analog``
44 | and the accompanying JSON meta-data ``~/syn_data.analog.info`` (please refer to
45 | :ref:`syncopy-data-format` for more information about Syncopy's file format).
46 |
47 | We start an (i)Python session, import Syncopy and use :func:`~syncopy.load` to read the
48 | data from disk:
49 |
50 | .. code-block:: python
51 |
52 | import syncopy as spy
53 | data = spy.load('~/syn_data.analog')
54 |
55 | Now, let's compute a power-spectrum using Syncopy's parallel computing engine:
56 |
57 | .. code-block:: python
58 |
59 | cfg = spy.get_defaults(spy.freqanalysis)
60 | cfg.method = 'mtmfft'
61 | cfg.output = 'pow'
62 | cfg.parallel = True
63 | spec = spy.freqanalysis(cfg, data)
64 |
65 | .. note::
66 |
67 | Using SLURM on HPC clusters for datasets this small usually does not
68 | yield any performance gain due to the comparatively large overhead of starting
69 | a SLURM worker pool compared to the total computation time.
70 |
71 | We save the resulting :class:`~syncopy.SpectralData` object alongside the corresponding
72 | :class:`~syncopy.AnalogData` source:
73 |
74 | .. code-block:: python
75 |
76 | spy.save(spec, filename='~/syn_data')
77 |
78 | Note that :func:`syncopy.save` automatically appends the appropriate filename
79 | extension (``.spectral`` in this case).
80 |
81 | Back in MATLAB, we can import the computed spectrum using:
82 |
83 | .. code-block:: matlab
84 |
85 | spec = spy.ft_load_spy('~/syn_data.spectral')
86 |
--------------------------------------------------------------------------------
/doc/source/user/user_api.rst:
--------------------------------------------------------------------------------
1 | API for Users
2 | =============
3 |
4 | This page gives an overview over all public functions and classes of Syncopy.
5 |
6 | .. contents:: Sections
7 | :local:
8 |
9 | High-level functions
10 | --------------------
11 | These *meta-functions* bundle many related analysis methods into one high-level function.
12 |
13 | .. autosummary::
14 |
15 | syncopy.preprocessing
16 | syncopy.resampledata
17 | syncopy.freqanalysis
18 | syncopy.connectivityanalysis
19 | syncopy.timelockanalysis
20 |
21 | Descriptive Statistics
22 | ----------------------
23 | .. autosummary::
24 |
25 | syncopy.mean
26 | syncopy.var
27 | syncopy.std
28 | syncopy.median
29 | syncopy.itc
30 | syncopy.spike_psth
31 |
32 | Utility
33 | --------
34 |
35 | .. autosummary::
36 |
37 | syncopy.definetrial
38 | syncopy.selectdata
39 | syncopy.redefinetrial
40 | syncopy.show
41 | syncopy.cleanup
42 |
43 | I/O
44 | ---
45 | Functions to import and export data in Syncopy
46 |
47 | .. autosummary::
48 |
49 | syncopy.load
50 | syncopy.save
51 | syncopy.load_ft_raw
52 | syncopy.load_tdt
53 | syncopy.load_nwb
54 | syncopy.copy
55 |
56 |
57 | Data exchange with MNE-Python
58 | -----------------------------
59 |
60 | .. autosummary::
61 |
62 | syncopy.raw_adata_to_mne_raw
63 | syncopy.raw_mne_to_adata
64 | syncopy.tldata_to_mne_epochs
65 | syncopy.mne_epochs_to_tldata
66 |
67 |
68 | Plotting
69 | -----------
70 |
71 | These convenience functions are intended for a quick visual inspection of data and results.
72 |
73 | .. autosummary::
74 |
75 | syncopy.singlepanelplot
76 | syncopy.multipanelplot
77 |
78 | Data Types
79 | --------------------
80 |
81 | Syncopy data types are Python classes, which offer convenient ways for data access and manipulation.
82 |
83 | .. autosummary::
84 |
85 | syncopy.AnalogData
86 | syncopy.SpectralData
87 | syncopy.CrossSpectralData
88 | syncopy.SpikeData
89 | syncopy.EventData
90 |
--------------------------------------------------------------------------------
/doc/source/user/work-flow.mmd:
--------------------------------------------------------------------------------
1 | graph LR
2 | classDef MEM fill:#ff908a,stroke:#333,stroke-width:2px;
3 | classDef DISC fill:#78e4ff,stroke:#333,stroke-width:1px;
4 | classDef Other fill:#9cfcff,stroke:#333,stroke-width:1px;
5 |
6 | spyRead[/.spy\] -->|load| Data
7 | FTread[/.mat\] -->|load_ft_raw| Data
8 | NWBread[/nwb\] -->|load_nwb| Data
9 | npInput[NumPy arrays] -.->|collect_trials| Data(AnalogData)
10 |
11 | Data -->|freqanalysis| SData(SpectralData)
12 | Data -.-> |singlepanelplot|PlotD1[Figures]
13 | Data -.-> |show| ShowD1[NumPy arrays]
14 |
15 | SData -.-> |singlepanelplot|PlotD[Figures]
16 | SData -.-> |show| ShowD2[NumPy arrays]
17 | SData --> |save| SaveD2[/.spy\]
18 |
19 | class npInput,PlotD,ShowD1,PlotD1,ShowD2 MEM;
20 | class Data,SData DISC;
21 | class spyRead,FTread,NWBread,SaveD2 Other;
22 | linkStyle 4 stroke:#78e4ff,stroke-width:3px;
23 |
24 | subgraph one [ Legend ]
25 | disc[/on disc\]:::DISC
26 | mem[in memory]:::MEM
27 | end
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "esi-syncopy"
3 | packages = [
4 | {include = "syncopy"}
5 | ]
6 | version = "2023.09"
7 | license = "BSD-3-Clause"
8 | readme="README.rst"
9 | homepage="https://syncopy.org"
10 | repository="https://github.com/esi-neuroscience/syncopy"
11 | include = [
12 | "LICENSE",
13 | ]
14 | classifiers = [
15 | "Topic :: Scientific/Engineering",
16 | "Environment :: Console",
17 | "Framework :: Jupyter",
18 | "Operating System :: OS Independent"
19 | ]
20 | description = "A toolkit for user-friendly large-scale electrophysiology data analysis. Syncopy is compatible with the Matlab toolbox FieldTrip."
21 | authors = ["Stefan Fürtinger ", "Tim Schäfer ", "Joscha Schmiedt ", "Gregor Mönke "]
22 |
23 | [tool.poetry.dependencies]
24 | python = "^3.8,<3.12"
25 | h5py = ">=2.9"
26 | dask = {version=">=2022.6", extras=["distributed"]}
27 | dask-jobqueue = ">=0.8"
28 | numpy = ">=1.10"
29 | scipy = ">=1.10.0"
30 | matplotlib = ">=3.5"
31 | tqdm = ">=4.31"
32 | natsort = "^8.1.0"
33 | psutil = ">=5.9"
34 | fooof = ">=1.0"
35 | bokeh = "^3.1.1"
36 |
37 | [tool.poetry.group.dev.dependencies]
38 | black = ">=22.6,<25.0"
39 | pytest = "^7.0"
40 | ipython = ">=8.10"
41 | pytest-cov = "^3.0.0"
42 | sphinx-book-theme = ">=1.0.1"
43 | sphinx-automodapi = "^0.14.1"
44 | pydata-sphinx-theme = ">=0.13.3"
45 | numpydoc = "^1.4.0"
46 | ipdb = "^0.13.9"
47 | memory-profiler = "^0.60.0"
48 | flake8 = "^3.9"
49 | asv = "^0.5.1"
50 | virtualenv = "^20.23.0"
51 |
52 | [build-system]
53 | requires = ["poetry-core>=1.0.0"]
54 | build-backend = "poetry.core.masonry.api"
55 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 |
3 | setup(
4 | name='syncopy',
5 | version='2023.05',
6 | packages=find_packages(),
7 | install_requires=[ "h5py>=2.9", "dask>2022.6", "dask-jobqueue>=0.8", "numpy >=1.10", "scipy>=1.5", "matplotlib>=3.5", "tqdm>=4.31", "natsort>=8.1.0", "psutil>=5.9", "fooof>=1.0" ],
8 | )
--------------------------------------------------------------------------------
/syncopy.yml:
--------------------------------------------------------------------------------
1 | name: syncopy
2 | channels:
3 | - defaults
4 | - conda-forge
5 | dependencies:
6 | # this is to comply with acme 2022.11
7 | - python >= 3.10, < 3.11
8 | - dask[complete] >= 2023.3.2
9 | - distributed >= 2023.1
10 | - dask-jobqueue >= 0.8.2
11 | - h5py >=3.7, < 4
12 | - numpy >= 1.25, < 2.0
13 | - scipy >= 1.5
14 | - matplotlib >= 3.5
15 | - natsort >= 7.1
16 | - pip >= 22.0
17 | - psutil >= 5.9.0
18 | - tqdm >= 4.5
19 | - fooof >= 1.0
20 | - bokeh >= 3.1.1
21 | - pynwb >= 2.4.0
22 |
23 | # Optional packages required for running the test-suite and building the HTML docs
24 | - ipdb >= 0.12
25 | - memory_profiler >= 0.50
26 | - numpydoc
27 | - pylint >= 2.8
28 | - python-graphviz >= 0.20
29 | - pytest-cov >= 4
30 | - ruamel.yaml >=0.16
31 | - setuptools_scm >= 7.0
32 | - sphinx_bootstrap_theme >= 0.8
33 | - pip:
34 | # Optional: only necessary when building the HTML documentation
35 | - sphinx_automodapi >= 0.14
36 |
--------------------------------------------------------------------------------
/syncopy/connectivity/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Populate namespace with user exposed
4 | # connectivity methods
5 | #
6 |
7 | from .connectivity_analysis import connectivityanalysis
8 |
9 | # Populate local __all__ namespace
10 | # with the user-exposed frontend
11 | __all__ = ["connectivityanalysis"]
12 |
--------------------------------------------------------------------------------
/syncopy/connectivity/granger.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Implementation of Granger-Geweke causality
4 | #
5 | #
6 | # Builtin/3rd party package imports
7 | import numpy as np
8 |
9 |
10 | def granger(CSD, Hfunc, Sigma):
11 | """
12 | Computes the pairwise Granger-Geweke causalities
13 | for all (non-symmetric!) channel combinations
14 | according to Equation 8 in [1]_.
15 |
16 | The transfer functions `Hfunc` and noise covariance
17 | `Sigma` are expected to have been already computed.
18 |
19 | Parameters
20 | ----------
21 | CSD : (nFreq, N, N) :class:`numpy.ndarray`
22 | Complex cross spectra for all channel combinations ``i,j``;
23 | `N` corresponds to the number of input channels.
24 | Hfunc : (nFreq, N, N) :class:`numpy.ndarray`
25 | Spectral transfer functions for all channel combinations ``i,j``
26 | Sigma : (N, N) :class:`numpy.ndarray`
27 | The noise covariances
28 |
29 | Returns
30 | -------
31 | Granger : (nFreq, N, N) :class:`numpy.ndarray`
32 | Spectral Granger-Geweke causality between all channel
33 | combinations. Directionality follows array
34 | notation: causality from ``i -> j`` is ``Granger[:,i,j]``,
35 | causality from ``j -> i`` is ``Granger[:,j,i]``
36 |
37 | See also
38 | --------
39 | wilson_sf : :func:`~syncopy.connectivity.wilson_sf.wilson_sf`
40 | Spectral matrix factorization that yields the
41 | transfer functions and noise covariances
42 | from a cross spectral density.
43 |
44 | Notes
45 | -----
46 | .. [1] Dhamala Mukeshwar, Govindan Rangarajan, and Mingzhou Ding.
47 | "Estimating Granger causality from Fourier and wavelet transforms
48 | of time series data." Physical review letters 100.1 (2008): 018701.
49 |
50 | """
51 |
52 | nChannels = CSD.shape[1]
53 | auto_spectra = CSD.transpose(1, 2, 0).diagonal()
54 | auto_spectra = np.abs(auto_spectra) # auto-spectra are real
55 |
56 | # we need the stacked auto-spectra of the form (nChannel=3):
57 | # S_11 S_22 S_33
58 | # Smat(f) = S_11 S_22 S_33
59 | # S_11 S_22 S_33
60 | Smat = auto_spectra[:, None, :] * np.ones(nChannels)[:, None]
61 |
62 | # Granger i->j needs H_ji entry
63 | Hmat = np.abs(Hfunc.transpose(0, 2, 1)) ** 2
64 | # Granger i->j needs Sigma_ji entry
65 | SigmaJI = np.abs(Sigma.T)
66 |
67 | # imag part should be 0
68 | auto_cov = np.abs(Sigma.diagonal())
69 | # same stacking as for the auto spectra (without freq axis)
70 | SigmaII = auto_cov[None, :] * np.ones(nChannels)[:, None]
71 |
72 | # the denominator
73 | denom = SigmaII.T - SigmaJI**2 / SigmaII
74 | denom = Smat - denom * Hmat
75 |
76 | # linear causality i -> j
77 | Granger = np.log(Smat / denom)
78 |
79 | return Granger
80 |
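81 |
82 | # --- Usage sketch (added for illustration, not part of the Syncopy API) ---
83 | # A minimal, self-contained call with mock inputs of the documented shapes;
84 | # in practice `Hfunc` and `Sigma` come from a spectral matrix factorization
85 | # of the CSD (see `wilson_sf`).
86 | if __name__ == "__main__":
87 |     nFreq, N = 16, 2
88 |     rng = np.random.default_rng(42)
89 |     CSD = rng.standard_normal((nFreq, N, N)) + 1j * rng.standard_normal((nFreq, N, N))
90 |     CSD = CSD + CSD.conj().transpose(0, 2, 1)  # make each frequency slice Hermitian
91 |     CSD[:, range(N), range(N)] = np.abs(CSD[:, range(N), range(N)]) + N  # positive auto-spectra
92 |     Hfunc = np.eye(N)[None] * np.ones((nFreq, 1, 1), dtype=complex)  # identity transfer functions
93 |     Sigma = np.eye(N)  # uncorrelated unit-variance noise
94 |     G = granger(CSD, Hfunc, Sigma)
95 |     print(G.shape)  # (16, 2, 2); causality i -> j sits in G[:, i, j]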
--------------------------------------------------------------------------------
/syncopy/datatype/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Populate namespace with datatype routines and classes
4 | #
5 |
6 | # Import __all__ routines from local modules
7 | from . import base_data, continuous_data, discrete_data, methods
8 | from .base_data import *
9 | from .continuous_data import *
10 | from .discrete_data import *
11 | from .methods.definetrial import *
12 | from .methods.selectdata import *
13 | from .methods.show import *
14 | from .methods.copy import *
15 | from .methods.redefinetrial import *
16 | from .methods.concat import *
17 | from .util import *
18 |
19 | # Populate local __all__ namespace
20 | __all__ = []
21 | __all__.extend(base_data.__all__)
22 | __all__.extend(continuous_data.__all__)
23 | __all__.extend(discrete_data.__all__)
24 | __all__.extend(util.__all__)
25 | __all__.extend(methods.definetrial.__all__)
26 | __all__.extend(methods.selectdata.__all__)
27 | __all__.extend(methods.show.__all__)
28 | __all__.extend(methods.copy.__all__)
29 | __all__.extend(methods.redefinetrial.__all__)
30 | __all__.extend(methods.concat.__all__)
31 |
--------------------------------------------------------------------------------
/syncopy/datatype/methods/copy.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Syncopy's (deep) copy function
4 | #
5 |
6 | # Builtin/3rd party package imports
7 | from copy import copy as py_copy
8 | import shutil
9 | import h5py
10 | import numpy as np
11 |
12 | # Syncopy imports
13 | from syncopy.shared.parsers import data_parser
14 | from syncopy.shared.errors import SPYInfo
15 |
16 | __all__ = ["copy"]
17 |
18 |
19 | # Return a deep copy of the current class instance
20 | def copy(spydata):
21 | """
22 | Create a copy of the entire Syncopy object `spydata` on disk
23 |
24 | Parameters
25 | ----------
26 | spydata : Syncopy data object
27 | Object to be copied on disk
28 |
29 | Returns
30 | -------
31 | cpy : Syncopy data object
32 | Reference to the copied data object
33 | on disk
34 |
35 | Notes
36 | -----
37 | For copying only a subset of `spydata` use :func:`syncopy.selectdata` directly
38 | with the default `inplace=False` parameter.
39 |
40 | Syncopy objects may also be copied using the class method ``.copy`` that
41 | acts as a wrapper for :func:`syncopy.copy`
42 |
43 | See also
44 | --------
45 | :func:`syncopy.save` : save to specific file path
46 | :func:`syncopy.selectdata` : creates copy of a selection with `inplace=False`
47 | """
48 |
49 | # Make sure `data` is a valid Syncopy data object
50 | data_parser(spydata, varname="data", writable=None, empty=False)
51 |
52 | dsize = np.prod(spydata.data.shape) * spydata.data.dtype.itemsize / 1024**2
53 | msg = f"Copying {dsize:.2f} MB of data " f"to create new {spydata.__class__.__name__} object on disk"
54 | SPYInfo(msg)
55 |
56 | # Shallow copy, captures also non-default/temporary attributes.
57 | copy_spydata = py_copy(spydata)
58 | copy_filename = spydata._gen_filename()
59 | copy_spydata.filename = copy_filename
60 | spydata.clear()
61 |
62 | spydata._close()
63 |
64 | # Copy data on disk.
65 | shutil.copyfile(spydata.filename, copy_filename, follow_symlinks=False)
66 |
67 | spydata._reopen()
68 |
69 | # Reattach properties
70 | for propertyName in spydata._hdfFileDatasetProperties:
71 | prop = getattr(spydata, "_" + propertyName)
72 | if isinstance(prop, h5py.Dataset):
73 | sourceName = prop.name
74 | setattr(
75 | copy_spydata,
76 | "_" + propertyName,
77 | h5py.File(copy_filename, mode=copy_spydata.mode)[sourceName],
78 | )
79 | else: # np.ndarray
80 | setattr(copy_spydata, "_" + propertyName, prop)
81 |
82 | return copy_spydata
83 |
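84 |
85 | # --- Usage sketch (added for illustration, not part of the Syncopy API) ---
86 | # `copy` duplicates the object's backing HDF5 file and returns a new object
87 | # attached to the duplicate, e.g. (assuming `adata` is any Syncopy data object):
88 | #
89 | #     import syncopy as spy
90 | #     adata2 = spy.copy(adata)             # same content, new file on disk
91 | #     assert adata2.filename != adata.filename
92 | #     adata2 = adata.copy()                # equivalent class-method wrapper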
--------------------------------------------------------------------------------
/syncopy/datatype/util.py:
--------------------------------------------------------------------------------
1 | """
2 | Helpers and tools for Syncopy data classes
3 | """
4 |
5 | import os
6 | from numbers import Number
7 | import numpy as np
8 |
9 | # Syncopy imports
10 | from syncopy import __storage__, __storagelimit__, __sessionid__
11 | from syncopy.shared.errors import SPYTypeError, SPYValueError
12 |
13 | __all__ = ["TrialIndexer"]
14 |
15 |
16 | class TrialIndexer:
17 | def __init__(self, data_object, idx_list):
18 | """
19 | Class to obtain an indexable trials iterable from
20 | an instantiated Syncopy data class `data_object`.
21 | Relies on the `_get_trial` method of the
22 | respective `data_object`.
23 |
24 | Parameters
25 | ----------
26 | data_object : Syncopy data class, e.g. AnalogData
27 |
28 | idx_list : list
29 | List of valid trial indices for `_get_trial`
30 | """
31 |
32 | self.data_object = data_object
33 | self.idx_set = set(idx_list)
34 | self._len = len(idx_list)
35 |
36 | def __getitem__(self, trialno):
37 | # single trial access via index operator []
38 | if not isinstance(trialno, Number):
39 | raise SPYTypeError(trialno, "trial index", "single number to index a single trial")
40 | if trialno not in self.idx_set:
41 | lgl = "index of existing trials"
42 | raise SPYValueError(lgl, "trial index", trialno)
43 | return self.data_object._get_trial(trialno)
44 |
45 | def __iter__(self):
46 | # this generator gets freshly created and exhausted
47 | # for each new iteration, with only 1 trial being in memory
48 | # at any given time
49 | yield from (self[i] for i in sorted(self.idx_set))
50 |
51 | def __len__(self):
52 | return self._len
53 |
54 | def __repr__(self):
55 | return self.__str__()
56 |
57 | def __str__(self):
58 | return "{} element iterable".format(self._len)
59 |
60 |
61 | class TimeIndexer:
62 | def __init__(self, trialdefinition, samplerate, idx_list):
63 | """
64 | Class to obtain an indexable time array iterable from
65 | an instantiated Syncopy data class `data_object`.
66 | Relies on the `trialdefinition` of the respective
67 | `data_object`.
68 |
69 | Parameters
70 | ----------
71 | data_object : Syncopy data class, e.g. AnalogData
72 |
73 | idx_list : list
74 | List of valid trial indices
75 | """
76 |
77 | self.trialdefinition = trialdefinition
78 | self.samplerate = samplerate
79 | self.idx_set = set(idx_list)
80 | self._len = len(idx_list)
81 |
82 | def construct_time_array(self, trialno):
83 |
84 | start, stop, offset = self.trialdefinition[trialno, :3]
85 | return (np.arange(0, stop - start) + offset) / self.samplerate
86 |
87 | def __getitem__(self, trialno):
88 | # single trial access via index operator []
89 | if not isinstance(trialno, Number):
90 | raise SPYTypeError(trialno, "trial index", "single number to index a single trial")
91 | if trialno not in self.idx_set:
92 | lgl = "index of existing trials"
93 | raise SPYValueError(lgl, "trial index", trialno)
94 | return self.construct_time_array(trialno)
95 |
96 | def __iter__(self):
97 | # this generator gets freshly created and exhausted
98 | # for each new iteration, with only 1 time array being in memory
99 | # at any given time
100 | yield from (self[i] for i in sorted(self.idx_set))
101 |
102 | def __len__(self):
103 | return self._len
104 |
105 | def __repr__(self):
106 | return self.__str__()
107 |
108 | def __str__(self):
109 | return "{} element iterable".format(self._len)
110 |
111 |
112 | def get_dir_size(start_path=".", out="byte"):
113 | """
114 | Compute size of all files in directory (and its subdirectories), in bytes or GB.
115 | """
116 | total_size_bytes = 0
117 | num_files = 0
118 | for dirpath, _, filenames in os.walk(start_path):
119 | for f in filenames:
120 | fp = os.path.join(dirpath, f)
121 | # skip if it is symbolic link
122 | try:
123 | if not os.path.islink(fp):
124 | total_size_bytes += os.path.getsize(fp)
125 | num_files += 1
126 | except Exception as ex: # Ignore issues from several parallel cleanup processes.
127 | pass
128 |
129 | if out == "GB":
130 | total_size = total_size_bytes / 1e9
131 | elif out == "byte":
132 | total_size = total_size_bytes
133 | else:
134 | raise ValueError("Invalid 'out' unit: '{}', expected one of 'byte' or 'GB'".format(out))
135 | return total_size, num_files
136 |
137 |
138 | def setup_storage(storage_dir=__storage__):
139 | """
140 | Create temporary storage dir if needed, and report on its size.
141 |
142 | Returns
143 | -------
144 | storage_size: Size of files in temporary storage directory, in GB.
145 | storage_num_files: Number of files in temporary storage directory.
146 | """
147 |
148 | # Create package-wide tmp directory if not already present
149 | if not os.path.exists(storage_dir):
150 | try:
151 | os.mkdir(storage_dir)
152 | except Exception as exc:
153 | err = (
154 | "Syncopy core: cannot create temporary storage directory {}. "
155 | + "Original error message below\n{}"
156 | )
157 | raise IOError(err.format(storage_dir, str(exc)))
158 |
159 | return get_dir_size(storage_dir, out="GB")
160 |
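161 |
162 | # --- Usage sketch (added for illustration, not part of the Syncopy API) ---
163 | # `TrialIndexer` is what powers lazy trial access on Syncopy data objects, e.g.:
164 | #
165 | #     trials = TrialIndexer(data, list(range(len(data.trialdefinition))))
166 | #     trl0 = trials[0]        # only trial 0 is loaded into memory
167 | #     for trl in trials:      # iterate lazily, one trial at a time
168 | #         print(trl.shape)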
--------------------------------------------------------------------------------
/syncopy/io/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Populate namespace with io routines
4 | #
5 |
6 | # Import __all__ routines from local modules
7 | from . import (
8 | utils,
9 | load_spy_container,
10 | save_spy_container,
11 | load_ft,
12 | load_tdt,
13 | load_nwb,
14 | nwb,
15 | mne_conv,
16 | )
17 | from .utils import *
18 | from .load_spy_container import *
19 | from .save_spy_container import *
20 | from .load_ft import *
21 | from .load_tdt import *
22 | from .load_nwb import *
23 | from .nwb import *
24 | from .mne_conv import *
25 |
26 | # Populate local __all__ namespace
27 | __all__ = ["load_ft_raw", "load_tdt", "load_nwb", "mne_conv"]
28 | __all__.extend(utils.__all__)
29 | __all__.extend(load_spy_container.__all__)
30 | __all__.extend(save_spy_container.__all__)
31 | __all__.extend(nwb.__all__)
32 | __all__.extend(mne_conv.__all__)
33 |
--------------------------------------------------------------------------------
/syncopy/plotting/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Populate namespace with plotting routines
4 | #
5 |
6 | # Import local modules, but only import routines from spy_plotting.py
7 | from .spy_plotting import *
8 |
9 | # Populate local __all__ namespace
10 | __all__ = []
11 | __all__.extend(spy_plotting.__all__)
12 |
--------------------------------------------------------------------------------
/syncopy/plotting/_plotting.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Syncopy plotting backend
4 | #
5 |
6 | # 3rd party imports
7 | import numpy as np
8 |
9 | from syncopy.plotting.config import pltConfig, rc_props, foreground
10 | from syncopy import __plt__
11 | from syncopy.plotting import _helpers
12 |
13 | if __plt__:
14 | import matplotlib
15 | import matplotlib.pyplot as ppl
16 |
17 |
18 | # for the legends
19 | ncol_max = 3
20 |
21 |
22 | # -- 2d-line plots --
23 | @matplotlib.rc_context(rc_props)
24 | def mk_line_figax(xlabel="time (s)", ylabel="signal (a.u.)"):
25 |
26 | """
27 | Create the figure and axis for a
28 | standard 2d-line plot
29 | """
30 |
31 | fig, ax = ppl.subplots(figsize=pltConfig["sFigSize"])
32 | # Hide the right and top spines
33 | ax.spines["right"].set_visible(False)
34 | ax.spines["top"].set_visible(False)
35 | ax.tick_params(axis="both", which="major", labelsize=pltConfig["sTickSize"])
36 |
37 | ax.set_xlabel(xlabel, fontsize=pltConfig["sLabelSize"])
38 | ax.set_ylabel(ylabel, fontsize=pltConfig["sLabelSize"])
39 |
40 | return fig, ax
41 |
42 |
43 | @matplotlib.rc_context(rc_props)
44 | def mk_multi_line_figax(nrows, ncols, xlabel="time (s)", ylabel="signal (a.u.)", x_size=None, y_size=None):
45 |
46 | """
47 | Create the figure and axes for a
48 | multipanel 2d-line plot
49 | """
50 |
51 | # ncols and nrows get
52 | # restricted via the plotting frontends
53 | if x_size is None:
54 | x_size = ncols * pltConfig["mXSize"]
55 | else:
56 | x_size = ncols * x_size
57 | if y_size is None:
58 | y_size = nrows * pltConfig["mYSize"]
59 | else:
60 | y_size = nrows * y_size
61 |
62 | fig, axs = ppl.subplots(nrows, ncols, figsize=(x_size, y_size), sharex=True, sharey=True, squeeze=False)
63 |
64 | # Hide the right and top spines
65 | # and remove all tick labels
66 | for ax in axs.flatten():
67 | ax.spines["right"].set_visible(False)
68 | ax.spines["top"].set_visible(False)
69 | ax.tick_params(labelsize=0)
70 |
71 | # determine axis layout
72 | y_left = axs[:, 0]
73 | x_bottom = axs[-1, :]
74 |
75 | # write tick and axis labels only on outer axes to save space
76 | for ax in y_left:
77 | ax.tick_params(labelsize=pltConfig["mTickSize"])
78 | ax.set_ylabel(ylabel, fontsize=pltConfig["mLabelSize"])
79 |
80 | for ax in x_bottom:
81 | ax.tick_params(labelsize=pltConfig["mTickSize"])
82 | ax.set_xlabel(xlabel, fontsize=pltConfig["mLabelSize"])
83 |
84 | return fig, axs
85 |
86 |
87 | @matplotlib.rc_context(rc_props)
88 | def plot_lines(ax, data_x, data_y, leg_fontsize=pltConfig["sLegendSize"], shifted=False, **pkwargs):
89 |
90 | if shifted:
91 | offsets = _helpers.shift_multichan(data_y)
92 | data_y = data_y + offsets
93 | # no colors needed
94 | pkwargs["color"] = foreground
95 |
96 | if "alpha" not in pkwargs:
97 | pkwargs["alpha"] = 0.9
98 |
99 | ax.plot(data_x, data_y, **pkwargs)
100 |
101 | # plot the legend
102 | if "label" in pkwargs:
103 | # multi-chan stacking, use labels as ticks
104 | if shifted and data_y.ndim > 1:
105 | pos = np.array(data_y.mean(axis=0))
106 | ax.set_yticks(pos, pkwargs["label"])
107 |
108 | else:
109 | ax.legend(
110 | ncol=ncol_max,
111 | loc="best",
112 | frameon=False,
113 | fontsize=leg_fontsize,
114 | )
115 | # make room for the legend
116 | mn, mx = data_y.min(), data_y.max()
117 |
118 | stretch = lambda x, fac: np.abs((fac - 1) * x)
119 |
120 | ax.set_ylim((mn - stretch(mn, 1.1), mx + stretch(mx, 1.1)))
121 |
122 |
123 | # -- image plots --
124 | @matplotlib.rc_context(rc_props)
125 | def mk_img_figax(xlabel="time (s)", ylabel="frequency (Hz)"):
126 |
127 | """
128 | Create the figure and axes for an
129 | image plot with `imshow`
130 | """
131 |
132 | fig, ax = ppl.subplots(figsize=pltConfig["sFigSize"])
133 |
134 | ax.tick_params(axis="both", which="major", labelsize=pltConfig["sTickSize"])
135 | ax.set_xlabel(xlabel, fontsize=pltConfig["sLabelSize"])
136 | ax.set_ylabel(ylabel, fontsize=pltConfig["sLabelSize"])
137 |
138 | return fig, ax
139 |
140 |
141 | @matplotlib.rc_context(rc_props)
142 | def mk_multi_img_figax(nrows, ncols, xlabel="time (s)", ylabel="frequency (Hz)"):
143 |
144 | """
145 | Create the figure and axes for an
146 | image plot with `imshow` for multiple
147 | sub plots
148 | """
149 | # ncols and nrows get
150 | # restricted via the plotting frontend
151 | x_size = ncols * pltConfig["mXSize"]
152 | y_size = nrows * pltConfig["mYSize"]
153 |
154 | fig, axs = ppl.subplots(nrows, ncols, figsize=(x_size, y_size), sharex=True, sharey=True, squeeze=False)
155 |
156 | # determine axis layout
157 | y_left = axs[:, 0]
158 | x_bottom = axs[-1, :]
159 |
160 | # write tick and axis labels only on outer axes to save space
161 | for ax in y_left:
162 | ax.tick_params(labelsize=pltConfig["mTickSize"])
163 | ax.set_ylabel(ylabel, fontsize=pltConfig["mLabelSize"])
164 |
165 | for ax in x_bottom:
166 | ax.tick_params(labelsize=pltConfig["mTickSize"])
167 | ax.set_xlabel(xlabel, fontsize=pltConfig["mLabelSize"])
168 |
169 | return fig, axs
170 |
171 |
172 | @matplotlib.rc_context(rc_props)
173 | def plot_tfreq(ax, data_yx, times, freqs, **pkwargs):
174 |
175 | """
176 | Plot time frequency data on a 2d grid, expects standard
177 | row-column (freq-time) axis ordering.
178 |
179 | Needs frequencies (`freqs`) and sampling rate (`fs`)
180 | for correct units.
181 | """
182 |
183 | # extent is defined in xy order
184 | df = freqs[1] - freqs[0]
185 | extent = [times[0], times[-1], freqs[0] - df / 2, freqs[-1] - df / 2]
186 |
187 | cmap = pkwargs.pop("cmap", pltConfig["cmap"])
188 | ax.imshow(data_yx[::-1], aspect="auto", cmap=cmap, extent=extent, **pkwargs)
189 |
--------------------------------------------------------------------------------
/syncopy/plotting/config.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Syncopy plotting setup
4 | #
5 |
6 | from syncopy import __plt__
7 | from packaging.version import parse
8 |
9 | foreground = "#2E3440" # nord0
10 | background = "#fcfcfc" # hint of gray
11 |
12 | # dark mode
13 | # foreground = "#D8DEE9" # nord4
14 | # background = "#2E3440" # nord0
15 |
16 | if __plt__:
17 | import matplotlib as mpl
18 |
19 | # to allow both older and newer matplotlib versions
20 | if parse(mpl.__version__) < parse("3.6"):
21 | mpl.style.use("seaborn-colorblind")
22 | else:
23 | mpl.style.use("seaborn-v0_8-colorblind")
24 | # a hint of gray
25 | rc_props = {
26 | "patch.edgecolor": foreground,
27 | "text.color": foreground,
28 | "axes.facecolor": background,
29 | "axes.edgecolor": foreground,
30 | "axes.labelcolor": foreground,
31 | "xtick.color": foreground,
32 | "ytick.color": foreground,
33 | "legend.framealpha": 0,
34 | "figure.facecolor": background,
35 | "figure.edgecolor": background,
36 | "savefig.facecolor": background,
37 | "savefig.edgecolor": background,
38 | }
44 |
45 |
46 | # Global style settings for single-/multi-plots
47 | pltConfig = {
48 | "sTitleSize": 15,
49 | "sLabelSize": 16,
50 | "sTickSize": 12,
51 | "sLegendSize": 12,
52 | "sFigSize": (6.4, 4.2),
53 | "mTitleSize": 12.5,
54 | "mLabelSize": 12.5,
55 | "mTickSize": 11,
56 | "mLegendSize": 10,
57 | "mXSize": 3.2,
58 | "mYSize": 2.4,
59 | "mMaxAxes": 25,
60 | "cmap": "magma",
61 | }
62 |
63 | # Global consistent error message if matplotlib is missing
64 | pltErrMsg = (
65 | "\nSyncopy WARNING: Could not import 'matplotlib'. \n"
66 | + "{} requires a working matplotlib installation. \n"
67 | + "Please consider installing 'matplotlib', e.g., via conda: \n"
68 | + "\tconda install matplotlib\n"
69 | + "or using pip:\n"
70 | + "\tpip install matplotlib"
71 | )
72 |
--------------------------------------------------------------------------------
/syncopy/plotting/spy_plotting.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Top-level interfaces for the plotting functionality
4 | #
5 |
6 | from syncopy import __plt__
7 | from syncopy.plotting.config import pltErrMsg
8 | from syncopy.shared.errors import SPYWarning
9 |
10 | __all__ = ["singlepanelplot", "multipanelplot"]
11 |
12 |
13 | def singlepanelplot(data, **show_kwargs):
14 |
15 | """
16 | Plot Syncopy data in a single panel
17 |
18 | Careful with selecting too many trials/channels
19 | as this can quickly lead to memory exhaustion for
20 | big datasets.
21 |
22 | Parameters
23 | ----------
24 | data : :class:`~syncopy.datatype.base_data`
25 | Any (derived) Syncopy data type
26 | show_kwargs : dict
27 | :func:`~syncopy.datatype.methods.show.show` arguments to select
28 | which parts of the data to plot
29 |
30 | Examples
31 | --------
32 |
33 | Plot the 1st trial of `data`:
34 |
35 | >>> spy.singlepanelplot(data, trials=0)
36 |
37 | Alternatively directly use the method attached to `data`:
38 |
39 | >>> data.singlepanelplot(trials=0)
40 |
41 | Select a time- and frequency window (e.g. for :class:`~syncopy.SpectralData`):
42 |
43 | >>> data.singlepanelplot(trials=0, foilim=[20, 50], toilim=[0, 0.25])
44 | """
45 |
46 | if not __plt__:
47 | SPYWarning(pltErrMsg)
48 | return
49 |
50 | data.singlepanelplot(**show_kwargs)
51 |
52 |
53 | def multipanelplot(data, **show_kwargs):
54 |
55 | """
56 | Plot Syncopy data in multiple panels
57 |
58 | Careful with selecting too many trials/channels
59 | as this can quickly lead to memory exhaustion for
60 | big datasets.
61 |
62 | Parameters
63 | ----------
64 | data : :class:`~syncopy.datatype.base_data`
65 | Any (derived) Syncopy data type
66 | show_kwargs : dict
67 | :func:`~syncopy.datatype.methods.show.show` arguments to select
68 | which parts of the data to plot
69 |
70 | Examples
71 | --------
72 |
73 | Plot 4 channels of the 1st trial of `data`:
74 |
75 | >>> spy.multipanelplot(data, trials=0, channel=[1, 2, 3, 4])
76 |
77 | Alternatively directly use the method attached to `data`:
78 |
79 | >>> data.multipanelplot(trials=0, channel=[1, 2, 3, 4])
80 |
81 | Select a time- and frequency window (e.g. for :class:`~syncopy.SpectralData`):
82 |
83 | >>> data.multipanelplot(trials=0, foilim=[20, 50], toilim=[0, 0.25], channel=['chanA', 'chanB'])
84 | """
85 |
86 | if not __plt__:
87 | SPYWarning(pltErrMsg)
88 | return
89 |
90 | data.multipanelplot(**show_kwargs)
91 |
--------------------------------------------------------------------------------
/syncopy/preproc/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Populate namespace with preprocessing frontend
4 | #
5 |
6 | from .preprocessing import *
7 | from .resampledata import *
8 |
9 | # Populate local __all__ namespace
10 | # with the user-exposed frontend
11 | __all__ = ["preprocessing", "resampledata"]
12 |
--------------------------------------------------------------------------------
/syncopy/preproc/resampling.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Backend methods/functions for
4 | # trivial down- and rational (p/q) resampling
5 | #
6 |
7 | # Builtin/3rd party package imports
8 | import fractions
9 | import scipy.signal as sci_sig
10 |
11 | # Syncopy imports
12 | from syncopy.preproc import firws
13 |
14 |
15 | def resample(data, orig_fs, new_fs, lpfreq=None, order=None):
16 |
17 | """
18 | Uses SciPy's polyphase method for the implementation
19 | of the standard resampling procedure:
20 | upsampling : FIR filtering : downsampling
21 |
22 | SciPy's default FIR filter has a slow roll-off,
23 | so the default is to design and use a homegrown firws.
24 |
25 | Parameters
26 | ----------
27 | data : (N, K) :class:`numpy.ndarray`
28 | Uniformly sampled multi-channel time-series data
29 | The 1st dimension is interpreted as the time axis,
30 | columns represent individual channels.
31 | orig_fs : float
32 | The original sampling rate
33 | new_fs : float
34 | The target sampling rate after resampling
35 | lpfreq : None or float, optional
36 | Leave at `None` for standard anti-alias filtering with
37 | the new Nyquist or set explicitly in Hz
38 | If set to `-1` use SciPy's default kaiser windowed FIR
39 | order : None or int, optional
40 | Order (length) of the firws anti-aliasing filter.
41 | The default `None` will create a filter of
42 | maximal order which is the number of samples times the upsampling
43 | factor of the trial, or 10 000 if that is smaller
44 |
45 | Returns
46 | -------
47 | resampled : (N, K) :class:`~numpy.ndarray`
48 | The resampled signals
49 |
50 | See also
51 | --------
52 | :func:`scipy.signal.resample_poly` : SciPy's polyphase resampling implementation
53 | syncopy.preproc.compRoutines.downsample_cF : Straightforward and cheap downsampling
54 | """
55 |
56 | nSamples = data.shape[0]
57 |
58 | # get up/down sampling factors
59 | up, down = _get_updn(orig_fs, new_fs)
60 | fs_ratio = new_fs / orig_fs
61 |
62 | # -- design firws low-pass filter --
63 |
64 | # default cuts at new Nyquist
65 | if lpfreq is None:
66 | f_c = 0.5 * fs_ratio
67 | # for backend tests only,
68 | # negative values don't pass the frontend
69 | elif lpfreq == -1:
70 | f_c = None
71 | # explicit cut-off
72 | else:
73 | f_c = lpfreq / orig_fs
74 | if order is None:
75 | order = nSamples * up
76 | # limit maximal order
77 | order = 10000 if order > 10000 else order
78 |
79 | if f_c:
80 | # filter has to be applied to the upsampled data
81 | window = firws.design_wsinc("hamming", order=order, f_c=f_c / up)
82 | else:
83 | window = ("kaiser", 5.0) # triggers SciPy default filter design
84 |
85 | resampled = sci_sig.resample_poly(data, up, down, window=window, axis=0)
86 |
87 | return resampled
88 |
89 |
90 | def downsample(
91 | dat,
92 | samplerate=1,
93 | new_samplerate=1,
94 | ):
95 | """
96 | Provides basic downsampling of signals. The `new_samplerate` should be
97 | an integer division of the original `samplerate`.
98 |
99 | Parameters
100 | ----------
101 | dat : (N, K) :class:`numpy.ndarray`
102 | Uniformly sampled multi-channel time-series data
103 | The 1st dimension is interpreted as the time axis,
104 | columns represent individual channels.
105 | samplerate : float
106 | Sample rate of the input data
107 | new_samplerate : float
108 | Sample rate of the output data
109 |
110 | Returns
111 | -------
112 | resampled : (X, K) :class:`~numpy.ndarray`
113 | The downsampled data
114 |
115 | """
116 |
117 | # we need integers for slicing
118 | skipped = int(samplerate // new_samplerate)
119 |
120 | return dat[::skipped]
121 |
122 |
123 | def _get_updn(orig_fs, new_fs):
124 |
125 | """
126 | Get the up/down sampling
127 | factors from the original and target
128 | sampling rate.
129 |
130 | NOTE: Can return very large factors for
131 | "almost irrational" sampling rate ratios!
132 | """
133 |
134 | frac = fractions.Fraction.from_float(new_fs / orig_fs)
135 | # trim down
136 | frac = frac.limit_denominator()
137 |
138 | return frac.numerator, frac.denominator
139 |
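140 |
141 | # --- Usage sketch (added for illustration, not part of the Syncopy API) ---
142 | # A self-contained example resampling two harmonic channels from 1 kHz to 400 Hz.
143 | if __name__ == "__main__":
144 |     import numpy as np
145 |     fs_old, fs_new = 1000.0, 400.0
146 |     t = np.arange(1000) / fs_old
147 |     # two channels: 10 Hz and 30 Hz harmonics
148 |     data = np.column_stack([np.sin(2 * np.pi * 10 * t), np.sin(2 * np.pi * 30 * t)])
149 |     print(_get_updn(fs_old, fs_new))      # (2, 5): upsample by 2, downsample by 5
150 |     res = resample(data, fs_old, fs_new)  # firws anti-aliasing + polyphase filtering
151 |     print(res.shape)                      # (400, 2)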
--------------------------------------------------------------------------------
/syncopy/shared/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Import utility functions mainly used internally
4 | #
5 |
6 | # Import __all__ routines from local modules
7 | from . import queries, errors, parsers, kwarg_decorators, computational_routine, tools
8 | from .queries import *
9 | from .errors import *
10 | from .parsers import *
11 | from .kwarg_decorators import *
12 | from .computational_routine import *
13 | from .tools import *
14 |
15 | # Populate local __all__ namespace
16 | __all__ = []
17 | __all__.extend(computational_routine.__all__)
18 | __all__.extend(errors.__all__)
19 | __all__.extend(parsers.__all__)
20 | __all__.extend(kwarg_decorators.__all__)
21 | __all__.extend(queries.__all__)
22 | __all__.extend(tools.__all__)
23 |
--------------------------------------------------------------------------------
/syncopy/shared/const_def.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Constant definitions used throughout SyNCoPy
4 | #
5 |
6 | # Builtin/3rd party package imports
7 | import numpy as np
8 | from scipy.signal import windows
9 |
10 |
11 | # Module-wide output specs
12 | spectralDTypes = {
13 | "pow": np.float32,
14 | "abs": np.float32,
15 | "real": np.float32,
16 | "imag": np.float32,
17 | "angle": np.float32,
18 | "absreal": np.float32,
19 | "absimag": np.float32,
20 | "fourier": np.complex64,
21 | "complex": np.complex64,
22 | }
23 |
24 | #: output conversion of complex fourier coefficients
25 | spectralConversions = {
26 | "pow": lambda x: (x * np.conj(x)).real.astype(spectralDTypes["pow"]),
27 | "abs": lambda x: (np.absolute(x)).real.astype(spectralDTypes["abs"]),
28 | "fourier": lambda x: x.astype(spectralDTypes["fourier"]),
29 | "real": lambda x: np.real(x).astype(spectralDTypes["real"]),
30 | "imag": lambda x: np.imag(x).astype(spectralDTypes["imag"]),
31 | "angle": lambda x: np.angle(x).astype(spectralDTypes["angle"]),
32 | "absreal": lambda x: np.abs(np.real(x)).astype(spectralDTypes["absreal"]),
33 | "absimag": lambda x: np.abs(np.imag(x)).astype(spectralDTypes["absimag"]),
34 | }
35 |
36 | # FT compat
37 | spectralConversions["complex"] = spectralConversions["fourier"]
38 |
39 |
40 | #: available tapers of :func:`~syncopy.freqanalysis` and :func:`~syncopy.connectivity`
41 | all_windows = windows.__all__
42 | all_windows.remove("get_window") # aux. function
43 | all_windows.remove("exponential") # not symmetric
44 | all_windows.remove("dpss") # activated via `tapsmofrq`
45 |
46 | availableTapers = all_windows
47 | availablePaddingOpt = ["maxperlen", "nextpow2"]
48 |
49 | #: general, method agnostic, parameters for our CRs
50 | generalParameters = (
51 | "method",
52 | "keeptrials",
53 | "samplerate",
54 | "foi",
55 | "foilim",
56 | "polyremoval",
57 | "out",
58 | "pad",
59 | )
60 |
--------------------------------------------------------------------------------
/syncopy/shared/dask_helpers.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Basic checkers to facilitate direct Dask interface
4 | #
5 |
6 | import subprocess
7 | from time import sleep
8 |
9 | # Syncopy imports
10 | from syncopy.shared.errors import SPYWarning, SPYInfo
11 | from .log import get_logger
12 |
13 |
14 | def check_slurm_available():
15 | """
16 | Returns `True` if a SLURM instance could be reached via
17 | a `sinfo` call, `False` otherwise.
18 | """
19 |
20 | # Check if SLURM's `sinfo` can be accessed
21 | proc = subprocess.Popen("sinfo", stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, shell=True)
22 | _, err = proc.communicate()
23 | # Any non-zero return-code means SLURM is not available
24 | # so we disable ACME
25 | has_slurm = proc.returncode == 0
29 |
30 | return has_slurm
31 |
32 |
33 | def check_workers_available(client, n_workers=1, timeout=120):
34 | """
35 | Checks for available (alive) Dask workers and waits max `timeout` seconds
36 | until at least ``n_workers`` workers are available.
37 | """
38 |
39 | logger = get_logger()
40 | totalWorkers = len(client.cluster.requested)
41 |
42 | # dictionary of workers
43 | workers = client.cluster.scheduler_info["workers"]
44 |
45 | # some small initial wait
46 | sleep(0.25)
47 |
48 | if len(workers) < n_workers:
49 | logger.important(
50 | f"waiting for at least {n_workers}/{totalWorkers} workers being available, timeout after {timeout} seconds.."
51 | )
52 | client.wait_for_workers(n_workers, timeout=timeout)
53 |
54 | sleep(0.25)
55 |
56 | # report what we have
57 | logger.important(f"{len(workers)}/{totalWorkers} workers available, starting computation..")
58 |
59 | # wait a little more to get consistent client print out
60 | sleep(0.25)
61 |
--------------------------------------------------------------------------------
/syncopy/shared/filetypes.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Supported Syncopy classes and file extensions
4 | #
5 |
6 |
7 | def _data_classname_to_extension(name):
8 | return "." + name.split("Data")[0].lower()
9 |
10 |
11 | # data file extensions are first word of data class name in lower-case
12 | supportedClasses = (
13 | "AnalogData",
14 | "SpectralData",
15 | "CrossSpectralData", # ContinousData
16 | "SpikeData",
17 | "EventData", # DiscreteData
18 | "TimelockData",
19 | ) # StatisticalData
20 |
21 | supportedDataExtensions = tuple([_data_classname_to_extension(cls) for cls in supportedClasses])
22 |
23 | # Define SynCoPy's general file-/directory-naming conventions
24 | FILE_EXT = {"dir": ".spy", "info": ".info", "data": supportedDataExtensions}
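25 |
26 | # e.g. _data_classname_to_extension("AnalogData") -> ".analog"; a saved container
27 | # thus holds a `*.analog` HDF5 data file next to its `*.info` metadata file.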
25 |
--------------------------------------------------------------------------------
/syncopy/shared/queries.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Auxiliary functions for querying things/people
4 | #
5 |
6 | __all__ = []
7 |
8 |
9 | def user_yesno(msg, default=None):
10 | """
11 | Ask a yes/no question via `input` and return the answer as `True`/`False`.
12 | """
13 |
14 | # Parse optional `default` answer
15 | valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False}
16 | if default is None:
17 | suffix = " [y/n] "
18 | elif default in ("yes", "no"):
19 | suffix = " [Y/n] " if default == "yes" else " [y/N] "
20 | else:
21 | raise ValueError("Invalid `default` answer: '{}'".format(default))
22 |
23 | # Wait for valid user input, if received return `True`/`False`
24 | while True:
25 | choice = input(msg + suffix).lower()
26 | if default is not None and choice == "":
27 | return valid[default]
28 | elif choice in valid.keys():
29 | return valid[choice]
30 | else:
31 | print("Please respond with 'yes' or 'no' (or 'y' or 'n').\n")
32 |
33 |
34 | def user_input(msg, valid, default=None):
35 | """
36 | Prompt the user to choose among the `valid` options and return the choice.
37 |
38 | msg = str (message)
39 | valid = list (avail. options, no need specifying 'a', and '[a]', code strips brackets)
40 | default = str (default option, same as above)
41 | """
42 |
43 | # Add trailing whitespace to `msg` if not already present and append
44 | # default reply (if provided)
45 | suffix = "" + " " * (not msg.endswith(" "))
46 | if default is not None:
47 | default = default.replace("[", "").replace("]", "")
48 | assert default in valid
49 | suffix = "[Default: '{}'] ".format(default)
50 |
51 | # Wait for valid user input and return choice upon receipt
52 | while True:
53 | choice = input(msg + suffix)
54 | if default is not None and choice == "":
55 | return default
56 | elif choice in valid:
57 | return choice
58 | else:
59 | print("Please respond with '" + "or '".join(opt + "' " for opt in valid) + "\n")
60 |
--------------------------------------------------------------------------------
/syncopy/specest/README.md:
--------------------------------------------------------------------------------
1 | # specest - Power Spectral Estimation
2 |
3 | ## User Frontend
4 |
5 | - [freqanalysis.py](./freqanalysis.py): parent metafunction to access all implemented spectral estimation methods
6 |
7 | ## Available Methods
8 |
9 | - [mtmfft](./mtmfft.py): (multi-)tapered windowed Fourier transform, returns a periodogram estimate
10 | - [mtmconvol](./mtmconvol.py): (multi-)tapered windowed Fourier transform, returns time-frequency representation
11 | - [wavelet](./wavelet.py): Wavelet analysis based on [Torrence and Compo, 1998](https://cobblab.eas.gatech.edu/seminar/torrence&compo98.pdf)
12 | - [superlet](./superlet.py): Superlet transform as proposed in [Moca et al. 2021](https://www.nature.com/articles/s41467-020-20539-9) (coming soon..)
13 |
14 | ## Usage Examples
15 |
16 | A minimal sketch (illustrative only; see [freqanalysis.py](./freqanalysis.py) for the authoritative parameter list):
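17 |
18 | ```python
19 | import syncopy as spy
20 | from syncopy.synthdata import white_noise  # synthetic AnalogData for demonstration
21 |
22 | data = white_noise(nTrials=5, nChannels=2, samplerate=1000)
23 | # multi-tapered power spectrum, spectrally smoothed over a 2 Hz band
24 | spec = spy.freqanalysis(data, method="mtmfft", output="pow", tapsmofrq=2)
25 | spec.singlepanelplot(trials=0)
26 | ```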
27 |
28 | ## Sources
29 |
30 | - [Wavelet core library](./wavelets/) from GitHub: https://github.com/aaren/wavelets
21 |
--------------------------------------------------------------------------------
/syncopy/specest/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Populate namespace with specest routines
4 | #
5 |
6 | # Import __all__ routines from local modules
7 | from .freqanalysis import freqanalysis
8 |
9 | # Populate local __all__ namespace
10 | # with the user-exposed frontend
11 | __all__ = ["freqanalysis"]
12 |
--------------------------------------------------------------------------------
/syncopy/specest/_norm_spec.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Helper routines to normalize Fourier spectra
4 | #
5 |
6 | import numpy as np
7 |
8 |
9 | def _norm_spec(ftr, nSamples, fs, mode="bins"):
10 |
11 | """
12 | Normalizes the complex Fourier transform to
13 | power spectral density or 1Hz-bin units.
14 | """
15 |
16 | # frequency bins
17 | if mode == "density":
18 | delta_f = fs / nSamples
19 | elif mode == "bins":
20 | delta_f = 1
21 |
22 | ftr *= np.sqrt(2) / (nSamples * np.sqrt(delta_f))
23 |
24 | return ftr
25 |
26 |
27 | def _norm_taper(taper, windows, nSamples):
28 |
29 | """
30 | Helper function to normalize tapers such
31 | that the resulting spectra are normalized
32 | with respect to the sum of the window. Meaning
33 | that the total original (untapered) power gets
34 | distributed over the spectral window response.
35 | """
36 |
37 | if taper == "dpss":
38 | windows *= np.sqrt(nSamples)
39 | elif taper == "boxcar":
40 | windows *= np.sqrt(nSamples / windows.sum())
41 | # weird 3 point normalization,
42 | # checks out exactly for 'hann' though
43 | else:
44 | windows *= np.sqrt(4 / 3) * np.sqrt(nSamples / windows.sum())
45 |
46 | return windows
47 |
--------------------------------------------------------------------------------
/syncopy/specest/mtmconvol.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Time-frequency analysis based on a short-time Fourier transform
4 | #
5 |
6 | # Builtin/3rd party package imports
7 | import numpy as np
8 | import logging
9 | import platform
10 | from scipy import signal
11 |
12 | # local imports
13 | from .stft import stft
14 | from ._norm_spec import _norm_taper
15 |
16 |
17 | def mtmconvol(
18 | data_arr,
19 | samplerate,
20 | nperseg,
21 | noverlap=None,
22 | taper="hann",
23 | taper_opt=None,
24 | boundary="zeros",
25 | padded=True,
26 | detrend=False,
27 | ):
28 |
29 | """
30 | (Multi-)tapered short time fast Fourier transform. Returns
31 | full complex Fourier transform for each taper.
32 | Multi-tapering is only supported with Slepian windows (`taper="dpss"`).
33 |
34 | Parameters
35 | ----------
36 | data_arr : (N,) :class:`numpy.ndarray`
37 | Uniformly sampled multi-channel time-series data
38 | The 1st dimension is interpreted as the time axis
39 | samplerate : float
40 | Samplerate in Hz
41 | nperseg : int
42 | Sliding window size in sample units
43 | noverlap : int
44 | Overlap between consecutive windows, set to ``nperseg - 1``
45 | to cover the whole signal
46 | taper : str or None
47 | Taper function to use, one of `scipy.signal.windows`
48 | Set to `None` for no tapering.
49 | taper_opt : dict or None
50 | Additional keyword arguments passed to the `taper` function.
51 | For multi-tapering with ``taper='dpss'`` set the keys
52 | `'Kmax'` and `'NW'`.
53 | For further details, please refer to the
54 | :mod:`scipy.signal.windows` documentation
55 | boundary : str or None
56 | Whether or not to auto-pad the signal such that a window is centered on each
57 | sample. If set to `None` half the window size (`nperseg`) will be lost
58 | on each side of the signal. Defaults to `'zeros'` for zero-padding extension.
59 | padded : bool
60 | Additional padding in case ``noverlap != nperseg - 1`` to fit an integer number
61 | of windows.
62 |
63 | Returns
64 | -------
65 | ftr : 4D :class:`numpy.ndarray`
66 | The Fourier transforms, complex output has shape:
67 | ``(nTime x nTapers x nFreq x nChannels)``
68 | freqs : 1D :class:`numpy.ndarray`
69 | Array of Fourier frequencies
70 |
71 | Notes
72 | -----
73 | For a (MTM) power spectral estimate average the absolute squared
74 | transforms across the taper axis (axis 1 of the returned `ftr`):
75 |
76 | ``Sxx = np.real(ftr * ftr.conj()).mean(axis=1)``
77 |
78 | The STFT result is normalized such that this yields the power
79 | spectral density. For a clean harmonic and a frequency bin
80 | width of `dF` this will give a peak power of `A**2 / 2 * dF`,
81 | with `A` as harmonic amplitude.
82 | """
83 |
84 | # attach dummy channel axis in case only a
85 | # single signal/channel is the input
86 | if data_arr.ndim < 2:
87 | data_arr = data_arr[:, np.newaxis]
88 |
89 | nSamples = data_arr.shape[0]
90 | nChannels = data_arr.shape[1]
91 |
92 | # FFT frequencies from the window size
93 | freqs = np.fft.rfftfreq(nperseg, 1 / samplerate)
94 | nFreq = freqs.size
95 | # frequency bins
96 | dFreq = freqs[1] - freqs[0]
97 |
98 | if taper is None:
99 | taper = "boxcar"
100 |
101 | taper_func = getattr(signal.windows, taper)
102 |
103 | if taper_opt is None:
104 | taper_opt = {}
105 |
106 | # this parameter mitigates the sum-to-zero problem for the odd slepians
107 | # as signal.stft has hardcoded scaling='spectrum'
108 | # -> normalizes with win.sum() :/
109 | # see also https://github.com/scipy/scipy/issues/14740
110 | if taper == "dpss":
111 | taper_opt["sym"] = False
112 |
113 | # only truly 2d for multi-taper "dpss"
114 | windows = np.atleast_2d(taper_func(nperseg, **taper_opt))
115 |
116 | # normalize window(s)
117 | windows = _norm_taper(taper, windows, nperseg)
118 |
119 | # number of time points in the output
120 | if boundary is None:
121 | # no padding: we lose half the window on each side
122 | nTime = int(np.ceil(nSamples / (nperseg - noverlap))) - nperseg
123 | else:
124 | # the signal is padded on each side as to cover
125 | # the whole signal
126 | nTime = int(np.ceil(nSamples / (nperseg - noverlap)))
127 |
128 | # Short time Fourier transforms (nTime x nTapers x nFreq x nChannels)
129 | ftr = np.zeros((nTime, windows.shape[0], nFreq, nChannels), dtype="complex64")
130 |
131 | logger = logging.getLogger("syncopy_" + platform.node())
132 | logger.debug(
133 | f"Running mtmconvol on {len(windows)} windows, data chunk has {nSamples} samples and {nChannels} channels."
134 | )
135 |
136 | for taperIdx, win in enumerate(windows):
137 | # ftr has shape (nFreq, nChannels, nTime)
138 | pxx, _, _ = stft(
139 | data_arr,
140 | samplerate,
141 | window=win,
142 | nperseg=nperseg,
143 | noverlap=noverlap,
144 | boundary=boundary,
145 | padded=padded,
146 | axis=0,
147 | detrend=detrend,
148 | )
149 |
150 | ftr[:, taperIdx, ...] = pxx.transpose(2, 0, 1)[:nTime, ...]
151 |
152 | return ftr, freqs
153 |
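154 |
155 | # --- Usage sketch (added for illustration, not part of the Syncopy API) ---
156 | # A 40 Hz harmonic analyzed with the default hann taper; the taper axis of
157 | # the returned `ftr` is axis 1, as noted in the docstring.
158 | if __name__ == "__main__":
159 |     fs, nper = 1000, 256
160 |     t = np.arange(2000) / fs
161 |     sig = np.sin(2 * np.pi * 40 * t)  # single 40 Hz channel
162 |     ftr, freqs = mtmconvol(sig, fs, nperseg=nper, noverlap=nper - 1)
163 |     # taper-averaged power: shape (nTime, nFreq, nChannels)
164 |     Sxx = np.real(ftr * ftr.conj()).mean(axis=1)
165 |     print(Sxx.shape, freqs[Sxx[1000].argmax(axis=0)])  # spectral peak near 40 Hz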
--------------------------------------------------------------------------------
/syncopy/specest/mtmfft.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Spectral estimation with (multi-)tapered FFT
4 | #
5 |
6 | # Builtin/3rd party package imports
7 | import numpy as np
8 | from scipy import signal
9 | import logging
10 | import platform
11 |
12 | # local imports
13 | from ._norm_spec import _norm_spec, _norm_taper
14 |
15 |
16 | def mtmfft(
17 | data_arr,
18 | samplerate,
19 | nSamples=None,
20 | taper="hann",
21 | taper_opt=None,
22 | demean_taper=False,
23 | ft_compat=False,
24 | ):
25 | """
26 | (Multi-)tapered fast Fourier transform. Returns
27 | full complex Fourier transform for each taper.
28 | Multi-tapering is only supported with Slepian windows (`taper="dpss"`).
29 |
30 | Parameters
31 | ----------
32 | data_arr : (N,) :class:`numpy.ndarray`
33 | Uniformly sampled multi-channel time-series data
34 | The 1st dimension is interpreted as the time axis
35 | samplerate : float
36 | Samplerate in Hz
37 | nSamples : int or None
38 | Absolute length of the (potentially to be padded) signals
39 | or `None` for no padding.
40 | taper : str or None
41 | Taper function to use, one of `scipy.signal.windows`
42 | Set to `None` for no tapering.
43 | taper_opt : dict or None
44 | Additional keyword arguments passed to the `taper` function.
45 | For multi-tapering with ``taper='dpss'`` set the keys
46 | `'Kmax'` and `'NW'`.
47 | For further details, please refer to the
48 | :mod:`scipy.signal.windows` documentation
49 | demean_taper : bool
50 | Set to `True` to perform de-meaning after tapering
51 | ft_compat : bool
52 | Set to `True` to use FieldTrip's normalization,
53 | which is NOT independent of the padding size
54 |
55 | Returns
56 | -------
57 | ftr : 3D :class:`numpy.ndarray`
58 | Complex output has shape ``(nTapers x nFreq x nChannels)``.
59 | freqs : 1D :class:`numpy.ndarray`
60 | Array of Fourier frequencies
61 |
62 | Notes
63 | -----
64 | For a (MTM) power spectral estimate average the absolute squared
65 | transforms across tapers:
66 |
67 | ``Sxx = np.real(ftr * ftr.conj()).mean(axis=0)``
68 |
69 | The FFT result is normalized such that this yields the
70 | spectral power. For a clean harmonic this will give a
71 | peak power of `A**2 / 2`, with `A` as harmonic amplitude.
72 | """
73 |
74 | # attach dummy channel axis in case only a
75 | # single signal/channel is the input
76 | if data_arr.ndim < 2:
77 | data_arr = data_arr[:, np.newaxis]
78 |
79 | # raw length without padding
80 | signal_length = data_arr.shape[0]
81 | if nSamples is None:
82 | nSamples = signal_length
83 |
84 | nChannels = data_arr.shape[1]
85 |
86 | freqs = np.fft.rfftfreq(nSamples, 1 / samplerate)
87 | nFreq = freqs.size
88 |
89 | # no taper is boxcar
90 | if taper is None:
91 | taper = "boxcar"
92 |
93 | if taper_opt is None:
94 | taper_opt = {}
95 |
96 | taper_func = getattr(signal.windows, taper)
97 | # only really 2d if taper='dpss' with Kmax > 1
98 | # here we take the actual signal lengths!
99 | windows = np.atleast_2d(taper_func(signal_length, **taper_opt))
100 | # normalize window with total (after padding) length
101 | windows = _norm_taper(taper, windows, nSamples)
102 |
103 | # Fourier transforms (nTapers x nFreq x nChannels)
104 | ftr = np.zeros((windows.shape[0], nFreq, nChannels), dtype="complex64")
105 |
106 | logger = logging.getLogger("syncopy_" + platform.node())
107 | logger.debug(
108 | f"Running mtmfft on {len(windows)} windows, data chunk has {nSamples} samples and {nChannels} channels."
109 | )
110 |
111 | for taperIdx, win in enumerate(windows):
112 | win = np.tile(win, (nChannels, 1)).T
113 | win *= data_arr
114 | # de-mean again after tapering - needed for Granger!
115 | if demean_taper:
116 | win -= win.mean(axis=0)
117 | ftr[taperIdx] = np.fft.rfft(win, n=nSamples, axis=0)
118 | # FT uses potentially padded length `nSamples`, which dilutes the power
119 | if ft_compat:
120 | ftr[taperIdx] = _norm_spec(ftr[taperIdx], nSamples, samplerate)
121 | # here the normalization adapts such that padding is NOT changing power
122 | else:
123 | ftr[taperIdx] = _norm_spec(
124 | ftr[taperIdx],
125 | signal_length * np.sqrt(nSamples / signal_length),
126 | samplerate,
127 | )
128 |
129 | return ftr, freqs
130 |
131 |
132 | def _get_dpss_pars(tapsmofrq, nSamples, samplerate):
133 |
134 | """Helper function to retrieve dpss parameters from tapsmofrq"""
135 |
136 | # taper width parameter in sample units
137 | NW = tapsmofrq * nSamples / samplerate
138 |
139 | # from the minBw formula in `input_processors.process_taper`
140 | # Kmax is at least 1!
141 | Kmax = int(2 * NW - 1) # optimal number of tapers
142 |
143 | # ..but NW can be 0.9999999999999999..
144 | # catch those floating point issues
145 | Kmax = Kmax if Kmax > 1 else 1
146 |
147 | return NW, Kmax
148 |
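149 |
150 | # --- Usage sketch (added for illustration, not part of the Syncopy API) ---
151 | # Checks the docstring's normalization claim with an on-bin 100 Hz harmonic.
152 | if __name__ == "__main__":
153 |     fs = 1000
154 |     t = np.arange(1000) / fs
155 |     sig = 2 * np.sin(2 * np.pi * 100 * t)  # 100 Hz harmonic, amplitude A = 2
156 |     ftr, freqs = mtmfft(sig, fs)  # default single hann taper
157 |     Sxx = np.real(ftr * ftr.conj()).mean(axis=0)
158 |     idx = int(np.argmin(np.abs(freqs - 100)))
159 |     # power summed over the taper's mainlobe recovers A**2 / 2 = 2
160 |     print(Sxx[idx - 1 : idx + 2].sum())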
--------------------------------------------------------------------------------
/syncopy/specest/wavelet.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Time-frequency analysis with wavelets
4 | #
5 |
6 | # Builtin/3rd party package imports
7 | import numpy as np
8 | import logging
9 | import platform
10 |
11 | # Local imports
12 | from syncopy.specest.wavelets import cwt
13 |
14 |
15 | def wavelet(data_arr, samplerate, scales, wavelet):
16 |
17 | """
18 | Perform time-frequency analysis on multi-channel time series data
19 | using a wavelet transform
20 |
21 | Parameters
22 | ----------
23 |
24 | data_arr : 2D :class:`numpy.ndarray`
25 | Uniformly sampled multi-channel time-series
26 | The 1st dimension is interpreted as the time axis
27 | samplerate : float
28 | Samplerate of `data_arr` in Hz
29 | scales : 1D :class:`numpy.ndarray`
30 | Set of scales to use in wavelet transform.
31 | wavelet : callable
32 | Wavelet function to use, one of
33 | :data:`~syncopy.specest.const_def.availableWavelets`
34 |
35 | Returns
36 | -------
37 | spec : :class:`numpy.ndarray`
38 | Complex time-frequency representation of the input data.
39 | Shape is (len(scales),) + data_arr.shape
40 | """
41 |
42 | logger = logging.getLogger("syncopy_" + platform.node())
43 | logger.debug(
44 | f"Running wavelet transform on data with shape {data_arr.shape} and samplerate {samplerate}."
45 | )
46 |
47 | spec = cwt(data_arr, wavelet=wavelet, widths=scales, dt=1 / samplerate, axis=0)
48 |
49 | return spec
50 |
51 |
52 | def get_optimal_wavelet_scales(scale_from_period, nSamples, dt, dj=0.25, s0=None):
53 | """
54 | Local helper to compute an "optimally spaced" set of scales for wavelet analysis
55 |
56 | Parameters
57 | ----------
58 | scale_from_period : func
59 | Function to convert periods to Wavelet specific scales.
60 | nSamples : int
61 | Sample-count (i.e., length) of time-series that is analyzed
62 | dt : float
63 | Time-series step-size; temporal spacing between consecutive samples
64 | (1 / sampling rate)
65 | dj : float
66 | Spectral resolution of scales. The choice of `dj` depends on the spectral
67 | width of the employed wavelet function. For instance, ``dj = 0.5`` is the
68 | largest value that still yields adequate sampling in scale for the Morlet
69 | wavelet. Other wavelets allow larger values of `dj` while still providing
70 | sufficient spectral resolution. Small values of `dj` yield finer scale
71 | resolution.
72 | s0 : float or None
73 | Smallest resolvable scale; should be chosen such that the equivalent
74 | Fourier period is approximately ``2 * dt``. If `None`, `s0` is computed
75 | to satisfy this criterion.
76 |
77 | Returns
78 | -------
79 | scales : 1D :class:`numpy.ndarray`
80 | Set of scales to use in the wavelet transform, ordered
81 | from largest scale (lowest frequency) to smallest scale (highest frequency)
82 |
83 | Notes
84 | -----
85 | The calculation of an "optimal" set of scales follows [ToCo98]_.
86 | This routine is a local auxiliary method that is purely intended for internal
87 | use. Thus, no error checking is performed.
88 |
89 | .. [ToCo98] C. Torrence and G. P. Compo. A Practical Guide to Wavelet Analysis.
90 | Bulletin of the American Meteorological Society. Vol. 79, No. 1, January 1998.
91 |
92 | See also
93 | --------
94 | syncopy.specest.wavelet.wavelet : :meth:`~syncopy.shared.computational_routine.ComputationalRoutine.computeFunction`
95 | performing time-frequency analysis using non-orthogonal continuous wavelet transform
96 | """
97 |
98 | # Compute `s0` such that the equivalent Fourier period is approximately ``2 * dt``
99 | if s0 is None:
100 | s0 = scale_from_period(2 * dt)
101 |
102 | # Largest scale
103 | J = int((1 / dj) * np.log2(nSamples * dt / s0))
104 | scales = s0 * 2 ** (dj * np.arange(0, J + 1))
105 | # we want the low frequencies first
106 | return scales[::-1]
107 |
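108 | 
109 | # Minimal usage sketch (illustrative; assumes the bundled Morlet wavelet
110 | # exposes `scale_from_period`, as in the wavelets module shipped with syncopy):
111 | #
112 | #     >>> from syncopy.specest.wavelets import Morlet
113 | #     >>> wav = Morlet()
114 | #     >>> scales = get_optimal_wavelet_scales(wav.scale_from_period, nSamples=1000, dt=1e-3)
115 | #     >>> spec = wavelet(data_arr, samplerate=1000, scales=scales, wavelet=wav)  # data_arr: (time x channel) array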
--------------------------------------------------------------------------------
/syncopy/specest/wavelets/__init__.py:
--------------------------------------------------------------------------------
1 | from .wavelets import Morlet, Paul, DOG, Ricker, Marr, Mexican_hat
2 | from .transform import cwt, WaveletAnalysis, WaveletTransform
3 |
--------------------------------------------------------------------------------
/syncopy/statistics/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Populate namespace with statistics routines and classes
4 | #
5 |
6 | # Import __all__ routines from local modules
7 | from . import summary_stats
8 | from .spike_psth import spike_psth
9 | from .timelockanalysis import timelockanalysis
10 | from .summary_stats import (
11 | mean,
12 | var,
13 | std,
14 | median,
15 | itc,
16 | )
17 |
18 | # Populate local __all__ namespace
19 | __all__ = ["spike_psth", "timelockanalysis"]
20 | __all__.extend(summary_stats.__all__)
21 |
--------------------------------------------------------------------------------
/syncopy/synthdata/__init__.py:
--------------------------------------------------------------------------------
1 | # only relative imports for the functionality to be accessible via spy.synthdata
2 |
3 | from .utils import *
4 | from .analog import *
5 | from .spikes import *
6 |
--------------------------------------------------------------------------------
/syncopy/synthdata/spikes.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Synthetic spike data generators for testing and tutorials
4 | #
5 |
6 | # Builtin/3rd party package imports
7 | import numpy as np
8 |
9 | # syncopy imports
10 | from syncopy import SpikeData
11 | from syncopy.shared.kwarg_decorators import unwrap_cfg
12 |
13 | # ---- Synthetic SpikeData ----
14 |
15 |
16 | @unwrap_cfg
17 | def poisson_noise(
18 | nTrials=10,
19 | nSpikes=10000,
20 | nChannels=3,
21 | nUnits=10,
22 | intensity=0.1,
23 | samplerate=10000,
24 | seed=None,
25 | ):
26 |
27 | """
28 | Poisson (Shot-)noise generator
29 |
30 | The expected trial length in samples is given by:
31 |
32 | ``nSpikes`` / (``intensity`` * ``nTrials``)
33 |
34 | Dividing again by the ``samplerate`` gives the
35 | expected trial length in seconds.
36 |
37 | Individual trial lengths get randomly
38 | shortened by up to 10% of this expected length.
39 |
40 | The trigger offsets are also
41 | randomized between 5% and 20% of the shortest
42 | trial length.
43 |
44 | Lastly, the distribution of the Poisson ``intensity`` along channels and units
45 | has uniformly randomized weights, meaning that typically
46 | you get some very active channels/units and some which are nearly silent.
47 |
48 | Parameters
49 | ----------
50 | nTrials : int
51 | Number of trials
52 | nSpikes : int
53 | The total number of spikes over all trials to generate
54 | nChannels : int
55 | Number of channels
56 | nUnits : int
57 | Number of units
58 | intensity : float
59 | Expected number of spikes per sampling interval
60 | samplerate : float
61 | Sampling rate in Hz
62 | seed : int or None
63 | Seed passed on to `np.random.default_rng`; set to an int for reproducible results.
64 |
65 | Returns
66 | -------
67 | sdata : :class:`~syncopy.SpikeData`
68 | The generated spike data
69 |
70 | Notes
71 | -----
72 | Originally conceived by `Alejandro Tlaie Boria <https://github.com/atlaie>`_
73 |
74 | Examples
75 | --------
76 | With `nSpikes=20_000`, `samplerate=10_000`, `nTrials=10` and the default `intensity=0.1`
77 | we can expect a trial length of about 2 seconds:
78 |
79 | >>> spike_data = poisson_noise(nTrials=10, nSpikes=20_000, samplerate=10_000)
80 |
81 | Example output of the 1st trial [start, end] in seconds:
82 |
83 | >>> spike_data.trialintervals[0]
84 | array([-0.3004, 1.6459])
85 |
86 | Which is close to 2 seconds.
87 |
88 | """
89 |
90 | # uniform random weights
91 | def get_rdm_weights(size, seed=seed):
92 | rng = np.random.default_rng(seed)
93 | pvec = rng.uniform(size=size)
94 | return pvec / pvec.sum()
95 |
96 | # total length of all trials combined
97 | rng = np.random.default_rng(seed)
98 | T_max = int(nSpikes / intensity)
99 |
100 | spike_samples = np.sort(rng.choice(range(T_max), size=nSpikes, replace=False))
101 | channels = rng.choice(np.arange(nChannels), p=get_rdm_weights(nChannels), size=nSpikes, replace=True)
102 |
103 | uvec = np.arange(nUnits)
104 | pvec = get_rdm_weights(nUnits)
105 | units = rng.choice(uvec, p=pvec, size=nSpikes, replace=True)
106 |
107 | # originally fixed trial size
108 | step = T_max // nTrials
109 | trl_intervals = np.arange(T_max + 1, step=step)
110 |
111 | # start and end indices for all trials
112 | idx_start = trl_intervals[:-1]
113 | idx_end = trl_intervals[1:] - 1
114 |
115 | # now randomize trial length a bit, max 10% size difference
116 | idx_end = idx_end - np.r_[rng.integers(step // 10, size=nTrials - 1), 0]
117 |
118 | shortest_trial = np.min(idx_end - idx_start)
119 | idx_offset = -rng.choice(
120 | np.arange(0.05 * shortest_trial, 0.2 * shortest_trial, dtype=int),
121 | size=nTrials,
122 | replace=True,
123 | )
124 |
125 | trldef = np.vstack([idx_start, idx_end, idx_offset]).T
126 | data = np.vstack([spike_samples, channels, units]).T
127 | sdata = SpikeData(
128 | data=data,
129 | trialdefinition=trldef,
130 | dimord=["sample", "channel", "unit"],
131 | samplerate=samplerate,
132 | )
133 |
134 | return sdata
135 |
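136 | 
137 | # Quick sanity check of the docstring formula (illustrative): with
138 | # nSpikes=20_000, intensity=0.1 and nTrials=10 the expected trial length is
139 | # 20_000 / (0.1 * 10) = 20_000 samples, i.e. 2 s at samplerate=10_000 Hz:
140 | #
141 | #     >>> sdata = poisson_noise(nTrials=10, nSpikes=20_000, samplerate=10_000, seed=42)
142 | #     >>> np.diff(sdata.trialintervals, axis=1)  # per-trial lengths, each close to 2 s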
--------------------------------------------------------------------------------
/syncopy/synthdata/utils.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Utilities for syncopy's synthetic data generators
4 | #
5 |
6 | # Builtin/3rd party package imports
7 | from inspect import signature
8 | import numpy as np
9 | import functools
10 |
11 | from syncopy import AnalogData
12 | from syncopy.shared.parsers import scalar_parser
13 | from syncopy.shared.kwarg_decorators import (
14 | unwrap_cfg,
15 | _append_docstring,
16 | _append_signature,
17 | )
18 |
19 |
20 | def collect_trials(trial_func):
21 | """
22 | Decorator to wrap around a single-trial (nSamples x nChannels shaped np.ndarray)
23 | synthetic data function. Creates a generator expression to construct
24 | a multi-trial :class:`~syncopy.AnalogData` object in a memory-safe manner.
25 |
26 |
27 | All single trial producing functions (the ``trial_func``) should
28 | accept `nChannels` and `nSamples` as keyword arguments, OR provide
29 | other means to define those numbers, e.g.
30 | `AdjMat` for :func:`~syncopy.synth_data.ar2_network`
31 |
32 | If the single-trial function also accepts a `samplerate` parameter, it is forwarded directly.
33 |
34 | If the underlying trial-generating function also accepts
35 | a `seed`, it is forwarded as well. Set `seed_per_trial=False` to use
36 | the same seed for all trials, or leave `seed_per_trial=True` (the default)
37 | to have this decorator internally generate a list of `nTrials` seeds
38 | from the given seed, one per trial.
39 |
40 | Setting `seed` to `None` selects a random seed each time
41 | (which then also differs between trials).
42 |
43 | The default `nTrials=None` is the identity wrapper and
44 | just returns the output of the trial generating function
45 | directly, so a single trial :class:`numpy.ndarray`.
46 | """
47 |
48 | @unwrap_cfg
49 | @functools.wraps(trial_func)
50 | def wrapper_synth(*args, nTrials=100, samplerate=1000, seed=None, seed_per_trial=True, **tf_kwargs):
51 | seed_array = None # One seed per trial.
52 | # Use the single seed to create one seed per trial.
53 | if nTrials is not None and seed is not None and seed_per_trial:
54 | rng = np.random.default_rng(seed)
55 | seed_array = rng.integers(1_000_000, size=nTrials)
56 |
57 | # append samplerate parameter if also needed by the generator
58 | if "samplerate" in signature(trial_func).parameters.keys():
59 | tf_kwargs["samplerate"] = samplerate
60 |
61 | # bypass: directly return a single trial (may pass on the scalar seed if the function supports it)
62 | if nTrials is None:
63 | if "seed" in signature(trial_func).parameters.keys():
64 | tf_kwargs["seed"] = seed
65 | return trial_func(**tf_kwargs)
66 |
67 | # collect trials
68 | else:
69 | scalar_parser(nTrials, "nTrials", ntype="int_like", lims=[1, np.inf])
70 |
71 | # create the trial generator
72 | def mk_trl_generator():
73 |
74 | for trial_idx in range(nTrials):
75 | if "seed" in signature(trial_func).parameters.keys():
76 | if seed_array is not None:
77 | tf_kwargs["seed"] = seed_array[trial_idx]
78 | else:
79 | tf_kwargs["seed"] = seed
80 | yield trial_func(*args, **tf_kwargs)
81 |
82 | trl_generator = mk_trl_generator()
83 |
84 | data = AnalogData(trl_generator, samplerate=samplerate)
85 |
86 | return data
87 |
88 | # Append `nTrials` and `seed` keyword entry to wrapped function's docstring and signature
89 | nTrialsDocEntry = (
90 | " nTrials : int or None\n"
91 | " Number of trials for the returned :class:`~syncopy.AnalogData` object.\n"
92 | " When set to `None` a single-trial :class:`~numpy.ndarray`\n"
93 | " is returned."
94 | )
95 |
96 | wrapper_synth.__doc__ = _append_docstring(trial_func, nTrialsDocEntry)
97 | wrapper_synth.__signature__ = _append_signature(trial_func, "nTrials", kwdefault=100)
98 |
99 | return wrapper_synth
100 |
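101 | 
102 | # Minimal decorator sketch (illustrative): any function returning a single
103 | # (nSamples x nChannels) trial array becomes a multi-trial AnalogData factory.
104 | # `flat_noise` is a hypothetical example, not part of syncopy:
105 | #
106 | #     >>> @collect_trials
107 | #     ... def flat_noise(nSamples=1000, nChannels=2, seed=None):
108 | #     ...     rng = np.random.default_rng(seed)
109 | #     ...     return rng.normal(size=(nSamples, nChannels))
110 | #     >>> adata = flat_noise(nTrials=5, samplerate=500, seed=42)  # 5-trial AnalogData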
--------------------------------------------------------------------------------
/syncopy/tests/README.md:
--------------------------------------------------------------------------------
1 | ## Syncopy Testing Routines
2 |
3 | Frontends and general architecture, for explicit backend methods see `/backend` subdirectory.
4 |
5 | ### Run all
6 |
7 | Just launch the `run_tests.sh` script.
8 |
9 | ### Manually start specific tests
10 |
11 | Assuming you are in this `/tests` directory,
12 | amend your Python path with the `/syncopy` module directory:
13 |
14 | ```bash
15 | export PYTHONPATH=../../
16 | ```
17 |
18 | To run all connectivity tests except the parallel routines:
19 |
20 | ```bash
21 | pytest -v test_connectivity.py -k 'not parallel'
22 | ```
23 |
24 | ### Running tests interactively in ipython
25 |
26 | To run the tests interactively, first make sure you are in a proper environment to run syncopy (e.g., your conda syncopy-dev environment).
27 |
28 | Then start ipython from the Syncopy repo root, run a test file, and execute a test. E.g.:
29 |
30 |
31 | ```bash
32 | cd ~/develop/syncopy
33 | ipython
34 | ```
35 |
36 | And in ipython:
37 |
38 | ```python
39 | run syncopy/tests/test_basedata.py # Just runs the file, does not execute the tests.
40 | TestBaseData().test_data_alloc() # Run a single test.
41 | ```
42 |
43 |
44 |
45 |
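46 | You can also run a single test directly from the shell via its pytest node id
47 | (standard pytest syntax):
48 | 
49 | ```bash
50 | pytest -v syncopy/tests/test_basedata.py::TestBaseData::test_data_alloc
51 | ```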
--------------------------------------------------------------------------------
/syncopy/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/syncopy/tests/__init__.py
--------------------------------------------------------------------------------
/syncopy/tests/backend/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/esi-neuroscience/syncopy/a86199ac2db67e32dd5ac76a10a74e296c0929f5/syncopy/tests/backend/__init__.py
--------------------------------------------------------------------------------
/syncopy/tests/backend/run_tests.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Some quick shortcuts to ease running Syncopy's testing pipeline
3 |
4 | # First and foremost, check if `srun` is available
5 | _useSLURM=$(command -v srun)
6 |
7 | # Stuff only relevant in here
8 | _self=$(basename "$BASH_SOURCE")
9 | _selfie="${_self%.*}"
10 | _ppname="<$_selfie>"
11 |
12 | # Brief help message explaining script usage
13 | usage()
14 | {
15 | echo "
16 | usage: $_selfie COMMAND
17 |
18 | Run Syncopy's testing pipeline via SLURM
19 |
20 | Arguments:
21 | COMMAND
22 | pytest perform testing using pytest in current user environment
23 | (if SLURM is available, tests are executed via `srun`)
24 | tox use tox to set up a new virtual environment (as defined in tox.ini)
25 | and run tests within this newly created env
26 | -h or --help show this help message and exit
27 | Example:
28 | $_selfie pytest
29 | "
30 | }
31 |
32 | # Running this script w/no arguments displays the above help message
33 | if [ "$1" == "" ]; then
34 | usage
35 | fi
36 |
37 | # Set up "global" pytest options for running test-suite
38 | export PYTEST_ADDOPTS="--color=yes --tb=short --verbose --ignore=syncopy/acme"
39 |
40 | # The while construction allows parsing of multiple positional/optional args (future-proofing...)
41 | while [ "$1" != "" ]; do
42 | case "$1" in
43 | pytest)
44 | shift
45 | export PYTHONPATH=$(cd ../../../ && pwd)
46 | if [ $_useSLURM ]; then
47 | srun -p DEV --mem=8000m -c 4 pytest
48 | else
49 | pytest
50 | fi
51 | ;;
52 | tox)
53 | shift
54 | if [ $_useSLURM ]; then
55 | srun -p DEV --mem=8000m -c 4 tox -r
56 | else
57 | tox -r
58 | fi
59 | ;;
60 | -h | --help)
61 | shift
62 | usage
63 | ;;
64 | *)
65 | echo "$_ppname invalid argument '$1'"
66 | shift
67 | ;;
68 | esac
69 | done
70 |
--------------------------------------------------------------------------------
/syncopy/tests/backend/test_resampling.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # syncopy.preproc resampling tests
4 | #
5 | import numpy as np
6 | import scipy.signal as sci_sig
7 | import matplotlib.pyplot as ppl
8 |
9 | from syncopy.preproc import resampling, firws
10 | from syncopy.specest import mtmfft
11 |
12 |
13 | def test_resample():
14 |
15 | """
16 | Tests both the non-trivial resampling method
17 | with non-integer sampling rate division (polyphase method)
18 | and also the much simpler downsampling (leave out every
19 | nth sample)
20 |
21 | Test data consists of white noise; the criterion
22 | is the (absence of) gain in the remaining frequency
23 | band after resampling.
24 | 
25 | This strategy was inspired by a FieldTrip tutorial written by Jan Mathijs
26 | Schoffelen: https://www.fieldtriptoolbox.org/faq/resampling_lowpassfilter/
27 | """
28 |
29 | nSamples = 2000
30 | nTrials = 100
31 | orig_fs = 500 # Hz
32 | data = [np.random.randn(nSamples) for _ in range(nTrials)]
33 |
34 | # get original trial-averaged power spectrum
35 | orig_power, orig_freqs = trl_av_power(data, nSamples, orig_fs)
36 |
37 | # -- test simple downsampling w/o low-pass filtering --
38 |
39 | ds_fs = orig_fs // 2 # half the original sampling rate
40 | # make sure it's an integer division
41 | assert orig_fs % ds_fs == 0
42 | ds_data = [resampling.downsample(signal, orig_fs, ds_fs) for signal in data]
43 | ds_power, ds_freqs = trl_av_power(ds_data, nSamples, ds_fs)
44 | # w/o low-pass filtering, the high frequencies above the
45 | # new Nyquist frequency wrap around and give a gain
46 | # directly proportional to the ratio orig_fs / ds_fs
47 | gain = ds_power.mean() / orig_power.mean()
48 | fs_ratio = orig_fs / ds_fs
49 | assert 0.95 * fs_ratio < gain < 1.05 * fs_ratio
50 |
51 | # -- test simple downsampling with low-pass filtering --
52 |
53 | # design filter with cut off at new Nyquist
54 | lpfilter = firws.design_wsinc("hamming", order=nSamples, f_c=0.5 / fs_ratio)
55 |
56 | # apply to all signals BEFORE downsampling
57 | lp_data = [firws.apply_fir(signal, lpfilter) for signal in data]
58 | ds_lp_data = [resampling.downsample(signal, orig_fs, ds_fs) for signal in lp_data]
59 | ds_lp_power, ds_lp_freqs = trl_av_power(ds_lp_data, nSamples, ds_fs)
60 |
61 | # with low-pass filtering, the high frequencies above the
62 | # new Nyquist frequency are removed and hence there should
63 | # be no gain
64 | gain = ds_lp_power.mean() / orig_power.mean()
65 | assert 0.98 < gain < 1.02
66 |
67 | # -- test resampling --
68 |
69 | rs_fs = 205
70 | # make sure we have a non-integer division
71 | assert orig_fs % rs_fs > 1 # strictly > 0 would be enough..
72 |
73 | # -- test SciPy default --
74 | rs_dataSP = [resampling.resample(signal, orig_fs, rs_fs, lpfreq=-1) for signal in data]
75 |
76 | rs_powerSP, rs_freqsSP = trl_av_power(rs_dataSP, nSamples, rs_fs)
77 |
78 | # here we have implicit FIR filtering built in,
79 | # hence there should be again no gain
80 | # NOTE: There is however a quite slow roll-off
81 | # relax gain condition to tolerate losses up to 6%
82 | gain = rs_powerSP.mean() / orig_power.mean()
83 | assert 0.94 < gain < 1.02
84 |
85 | # -- use backend with homegrown default firws --
86 |
87 | rs_data = [resampling.resample(signals, orig_fs, rs_fs, lpfreq=None, order=None) for signals in data]
88 | rs_power, rs_freqs = trl_av_power(rs_data, nSamples, rs_fs)
89 | gain = rs_power.mean() / orig_power.mean()
90 | # NOTE: this works very well and we can
91 | # give again harder constraints on the gain (2%)
92 | assert 0.98 < gain < 1.02
93 |
94 | # -- plot all the power spectra --
95 |
96 | fig, ax = ppl.subplots()
97 | ax.set_xlabel("frequency (Hz)")
98 | ax.set_ylabel("power (a.u.)")
99 |
100 | ax.plot(orig_freqs, orig_power, label="original", lw=1.5, alpha=0.5)
101 | ax.plot(ds_freqs, ds_power, label="downsampled")
102 | ax.plot(ds_lp_freqs, ds_lp_power, label="downsampled + FIRWS")
103 | ax.plot(rs_freqsSP, rs_powerSP, label="resample_poly + default")
104 | ax.plot(rs_freqs, rs_power, label="resample_poly + FIRWS")
105 | ax.set_ylim((0, ds_power.mean() * 1.2))
106 | ax.legend()
107 | fig.tight_layout()
108 |
109 |
110 | def trl_av_power(data, nSamples, fs, tapsmofrq=1):
111 |
112 | power = []
113 | for signal in data:
114 | NW, Kmax = mtmfft._get_dpss_pars(tapsmofrq, nSamples, fs)
115 | ftr, freqs = mtmfft.mtmfft(signal, samplerate=fs, taper="dpss", taper_opt={"Kmax": Kmax, "NW": NW})
116 | power.append(np.real(ftr * ftr.conj()).mean(axis=0))
117 | # trial averaging
118 | power = np.mean(power, axis=0)
119 | return power, freqs
120 |
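121 | 
122 | # Note on the expected aliasing gain (illustrative reasoning, matching the
123 | # assertions above): downsampling by a factor of orig_fs/ds_fs = 2 without
124 | # low-pass filtering folds the power above the new Nyquist back into the
125 | # remaining band; for white noise this doubles the measured power (gain ~ 2).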
--------------------------------------------------------------------------------
/syncopy/tests/conftest.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # central pytest configuration
4 | #
5 |
6 | # Builtin/3rd party package imports
7 | import sys
8 | import pytest
9 | from syncopy import __acme__
10 | import syncopy.tests.test_packagesetup as setupTestModule
11 | import dask.distributed as dd
12 | import dask_jobqueue as dj
13 | from syncopy.tests.misc import is_slurm_node
14 |
15 | # If acme is available, either launch a SLURM cluster on a cluster node or
16 | # create a `LocalCluster` object if tests are run on a single machine. If
17 | # acme is not available, launch a custom SLURM cluster or again just a local
18 | # cluster as fallback
19 | cluster = None
20 | if __acme__:
21 | from acme.dask_helpers import esi_cluster_setup
22 |
23 | if sys.platform != "win32":
24 | import resource
25 |
26 | if max(resource.getrlimit(resource.RLIMIT_NOFILE)) < 1024:
27 | msg = (
28 | "Not enough open file descriptors allowed. Consider increasing "
29 | + "the limit using, e.g., `ulimit -Sn 1024`"
30 | )
31 | raise ValueError(msg)
32 | if is_slurm_node():
33 | cluster = esi_cluster_setup(
34 | partition="8GB",
35 | n_jobs=4,
36 | timeout=360,
37 | interactive=False,
38 | start_client=False,
39 | )
40 | else:
41 | cluster = dd.LocalCluster(n_workers=4)
42 | else:
43 | # manually start slurm cluster
44 | if is_slurm_node():
45 | n_jobs = 3
46 | reqMem = 32
47 | ESIQueue = "S"
48 | slurm_wdir = "/cs/slurm/syncopy/"
49 |
50 | cluster = dj.SLURMCluster(
51 | cores=1,
52 | memory=f"{reqMem} GB",
53 | processes=1,
54 | local_directory=slurm_wdir,
55 | queue=f"{reqMem}GB{ESIQueue}",
56 | python=sys.executable,
57 | )
58 | cluster.scale(n_jobs)
59 | else:
60 | cluster = dd.LocalCluster(n_workers=4)
61 |
62 | # Set up a pytest fixture `testcluster` that uses the constructed cluster object
63 | @pytest.fixture
64 | def testcluster():
65 | return cluster
66 |
67 |
68 | # Re-order tests to first run stuff in test_packagesetup.py, then everything else
69 | def pytest_collection_modifyitems(items):
70 |
71 | # Collect tests to be run in this session and registered setup-related tests
72 | allTests = [testFunc.name if hasattr(testFunc, "name") else "" for testFunc in items]
73 | setupTests = [
74 | name for name in dir(setupTestModule) if not name.startswith("__") and not name.startswith("@")
75 | ]
76 |
77 | # If queried tests contain setup-tests, prioritize them
78 | newOrder = []
79 | for testFirst in setupTests:
80 | if testFirst in allTests:
81 | newOrder.append(allTests.index(testFirst))
82 | newOrder += [allTests.index(testFunc) for testFunc in allTests if testFunc not in setupTests]
83 |
84 | # Save potentially re-ordered test sequence
85 | items[:] = [items[idx] for idx in newOrder]
86 |
--------------------------------------------------------------------------------
/syncopy/tests/helpers.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Helper functions for frontend test design
4 | #
5 | # The `run_` function signatures take a callable,
6 | # the `method_call`, as 1st argument
7 | #
8 |
9 | # 3rd party imports
10 | import itertools
11 | import numpy as np
12 | import os
13 | import matplotlib.pyplot as plt
14 | from syncopy.shared.errors import SPYValueError, SPYTypeError
15 | from os.path import expanduser
16 |
17 | # fix random generators
18 | test_seed = 42
19 |
20 |
21 | def run_padding_test(method_call, pad_length):
22 | """
23 | The callable should test a solution and support
24 | a single keyword argument `pad`
25 | """
26 |
27 | pad_options = [pad_length, "nextpow2", "maxperlen"]
28 | for pad in pad_options:
29 | method_call(pad=pad)
30 |
31 | # test invalid pads
32 | try:
33 | method_call(pad=-0.1) # trials should be longer than 0.1 seconds
34 | except SPYValueError as err:
35 | assert "pad" in str(err)
36 | assert "expected value to be greater" in str(err)
37 |
38 | try:
39 | method_call(pad="IamNoPad")
40 | except SPYValueError as err:
41 | assert "Invalid value of `pad`" in str(err)
42 | assert "nextpow2" in str(err)
43 |
44 | try:
45 | method_call(pad=np.array([1000]))
46 | except SPYValueError as err:
47 | assert "Invalid value of `pad`" in str(err)
48 | assert "nextpow2" in str(err)
49 |
50 |
51 | def run_polyremoval_test(method_call):
52 | """
53 | The callable should test a solution and support
54 | a single keyword argument `polyremoval`
55 | """
56 |
57 | poly_options = [0, 1]
58 | for poly in poly_options:
59 | method_call(polyremoval=poly)
60 |
61 | # test invalid polyremoval options
62 | try:
63 | method_call(polyremoval=2)
64 | except SPYValueError as err:
65 | assert "polyremoval" in str(err)
66 | assert "expected value to be greater" in str(err)
67 |
68 | try:
69 | method_call(polyremoval="IamNoPad")
70 | except SPYTypeError as err:
71 | assert "Wrong type of `polyremoval`" in str(err)
72 |
73 | try:
74 | method_call(polyremoval=np.array([1000]))
75 | except SPYTypeError as err:
76 | assert "Wrong type of `polyremoval`" in str(err)
77 |
78 |
79 | def mk_selection_dicts(nTrials, nChannels, toi_min, toi_max, min_len=0.25):
80 |
81 | """
82 | Takes 5 numbers, the last three describing a `latency` time interval,
83 | and creates Cartesian-product-like `select` keyword
84 | arguments. One random selection is enough!
85 |
86 | Returns
87 | -------
88 | selections : list
89 | The list of dicts holding the keys and values for
90 | Syncopy selections.
91 | """
92 | # at least 10 trials
93 | assert nTrials > 9
94 | # at least 2 channels
95 | assert nChannels > 1
96 | # at least 250ms
97 | assert (toi_max - toi_min) > 0.25
98 |
99 | # create 1 random trial and channel selections
100 | trials, channels = [], []
101 | for _ in range(1):
102 |
103 | sizeTr = np.random.randint(10, nTrials + 1)
104 | trials.append(list(np.random.choice(nTrials, size=sizeTr)))
105 |
106 | sizeCh = np.random.randint(2, nChannels + 1)
107 | channels.append(
108 | ["channel" + str(i + 1) for i in np.random.choice(nChannels, size=sizeCh, replace=False)]
109 | )
110 |
111 | # 1 random toilim
112 | toilims = []
113 | while len(toilims) < 1:
114 |
115 | toil = np.sort(np.random.rand(2)) * (toi_max - toi_min) + toi_min
116 | # at least min_len (250ms)
117 | if np.diff(toil) < min_len:
118 | continue
119 | else:
120 | toilims.append(toil)
121 |
122 | # combinatorics of all selection options
123 | # order matters to assign the selection dict keys!
124 | toilim_combinations = itertools.product(trials, channels, toilims)
125 |
126 | selections = []
127 | for comb in toilim_combinations:
128 |
129 | sel_dct = {}
130 | sel_dct["trials"] = comb[0]
131 | sel_dct["channel"] = comb[1]
132 | sel_dct["latency"] = comb[2]
133 | selections.append(sel_dct)
134 |
135 | return selections
136 |
137 |
138 | def teardown():
139 | """Cleanup to run at the end of a set of tests, typically at the end of a Test class."""
140 | # Close matplotlib plot windows:
141 | try:
142 | plt.close("all")
143 | except:
144 | pass
145 |
146 |
147 | def get_file_from_anywhere(possible_locations):
148 | """
149 | Helper function to get a file from a list of possible locations.
150 | Useful to run tests on different systems. If you do not have access
151 | to the ESI cluster, this allows you to still run tests on your local
152 | machine, all you need to do is copy the required files to your local
153 | machine and add the path to the list of possible locations.
154 |
155 | Parameters
156 | ----------
157 | possible_locations : list of str; each entry is expanded with ``os.path.expanduser``, so it is fine to give something like '~/file.txt'.
158 | """
159 | for loc in possible_locations:
160 | if os.path.isfile(expanduser(loc)):
161 | return loc
162 | return None
163 |
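164 | 
165 | # Minimal usage sketch (illustrative; both paths are hypothetical):
166 | #
167 | #     >>> fname = get_file_from_anywhere(["/cs/data/testfile.nwb", "~/testfile.nwb"])
168 | #     >>> fname is None  # True only if the file exists in neither location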
--------------------------------------------------------------------------------
/syncopy/tests/local_spy.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Simple script for testing Syncopy w/o pip-installing it
4 | #
5 |
6 | # Builtin/3rd party package imports
7 | import numpy as np
8 | import os
9 | import sys
10 |
11 | # Import package
12 | import syncopy as spy
13 |
14 | # Import artificial data generators
15 | from syncopy import synthdata
16 |
17 | # Prepare code to be executed using, e.g., iPython's `%run` magic command
18 | if __name__ == "__main__":
19 |
20 | nTrials = 20
21 |
22 | nSamples = 1000
23 | fs = 500
24 |
25 | trls = []
26 | AdjMat = np.zeros((2, 2))
27 | # coupling from 0 to 1
28 | AdjMat[0, 1] = 0.15
29 | alphas = [0.55, -0.8]
30 | adata = synthdata.ar2_network(nTrials, samplerate=fs, AdjMat=AdjMat, nSamples=nSamples, alphas=alphas)
31 | adata += synthdata.ar2_network(
32 | nTrials,
33 | AdjMat=np.zeros((2, 2)),
34 | samplerate=fs,
35 | nSamples=nSamples,
36 | alphas=[0.9, 0],
37 | )
38 |
39 | spec = spy.freqanalysis(adata, tapsmofrq=2, keeptrials=False)
40 | foi = np.linspace(40, 160, 25)
41 | coh = spy.connectivityanalysis(adata, method="coh", tapsmofrq=5)
42 |
43 | # show new plotting
44 | # adata.singlepanelplot(trials=12, toilim=[0, 0.35])
45 |
46 | # mtmfft spectrum
47 | # spec.singlepanelplot()
48 | # coh.singlepanelplot(channel_i=0, channel_j=1)
49 |
50 | specf2 = spy.freqanalysis(
51 | adata,
52 | tapsmofrq=2,
53 | keeptrials=False,
54 | foi=foi,
55 | output="fooof_peaks",
56 | fooof_opt={"max_n_peaks": 2},
57 | )
58 |
59 | # print("Start: Testing parallel computation of mtmfft")
60 | # spec4 = spy.freqanalysis(adata, tapsmofrq=2, keeptrials=True, foi=foi, parallel=True, output="pow")
61 | # print("End: Testing parallel computation of mtmfft")
62 |
63 | # spec.singlepanelplot()
64 | # specf.singlepanelplot()
65 | # specf2.singlepanelplot()
66 |
--------------------------------------------------------------------------------
/syncopy/tests/no_slurm.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Sabotage the local tox environment so that `sinfo` no longer works
3 | if [ -n "$NO_SLURM" ]; then
4 | echo "ofnis" >| $VIRTUAL_ENV/bin/sinfo && chmod a+x $VIRTUAL_ENV/bin/sinfo
5 | fi
6 |
--------------------------------------------------------------------------------
/syncopy/tests/run_tests.cmd:
--------------------------------------------------------------------------------
1 | @echo off
2 | for %%I in ("%cd%\..\..") do set "PYTHONPATH=%%~fI"
3 |
4 | set PYTEST_ADDOPTS="-v"
5 |
6 | if "%1" == "" goto usage
7 |
8 | for %%a in (%*) do (
9 | if "%%a" == "tox" (
10 | tox
11 | goto end
12 | )
13 | if "%%a" == "pytest" (
14 | pytest
15 | goto end
16 | ) else (goto usage)
17 | )
18 |
19 | :end
20 | exit /B 1
21 |
22 | :usage
23 | echo "Run SyNCoPy's testing pipeline on Windows"
24 | echo " "
25 | echo "Arguments:"
26 | echo " pytest perform testing using pytest in current user environment"
27 | echo " tox use tox to set up a new virtual environment (as defined in tox.ini)"
28 | echo " and run tests within this newly created env"
29 | echo " "
30 | echo "Example: run_tests.cmd pytest "
31 |
--------------------------------------------------------------------------------
/syncopy/tests/run_tests.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Some quick shortcuts to ease running Syncopy's testing pipeline
3 |
4 | # First and foremost, check if `srun` is available
5 | _useSLURM=$(command -v srun)
6 |
7 | # Stuff only relevant in here
8 | _self=$(basename "$BASH_SOURCE")
9 | _selfie="${_self%.*}"
10 | _ppname="<$_selfie>"
11 |
12 | # Brief help message explaining script usage
13 | usage()
14 | {
15 | echo "
16 | usage: $_selfie COMMAND
17 |
18 | Run Syncopy's testing pipeline via SLURM
19 |
20 | Arguments:
21 | COMMAND
22 | pytest perform testing using pytest in current user environment
23 | (if SLURM is available, tests are executed via `srun`)
24 | full (OPTIONAL) if provided, an exhaustive test-run is conducted
25 | including, e.g., all selection permutations etc. Default: off
26 | tox use tox to set up a new virtual environment (as defined in tox.ini)
27 | and run tests within this newly created env
28 | -h or --help show this help message and exit
29 | Example:
30 | $_selfie pytest
31 | $_selfie pytest full
32 | "
33 | }
34 |
35 | # Running this script w/no arguments displays the above help message
36 | if [ "$1" == "" ]; then
37 | usage
38 | fi
39 |
40 | # Set up "global" pytest options for running test-suite (coverage is only done in local pytest runs)
41 | export PYTEST_ADDOPTS="--color=yes --tb=short --verbose"
42 |
43 | # The while construction allows parsing of multiple positional/optional args (future-proofing...)
44 | while [ "$1" != "" ]; do
45 | case "$1" in
46 | pytest)
47 | if [ "$2" == "full" ]; then
48 | fulltests="--full"
49 | else
50 | fulltests=""
51 | fi
52 | shift
53 | export PYTHONPATH=$(cd ../../ && pwd)
54 | if [ $_useSLURM ]; then
55 | CMD="srun -p DEV --mem=8000m -c 4 pytest $fulltests"
56 | else
57 | PYTEST_ADDOPTS="$PYTEST_ADDOPTS --cov=../../syncopy --cov-config=../../.coveragerc"
58 | export PYTEST_ADDOPTS
59 | CMD="pytest $fulltests"
60 | fi
61 | echo ">>>"
62 | echo ">>> Running $CMD $PYTEST_ADDOPTS"
63 | echo ">>>"
64 | ${CMD}
65 | ;;
66 | tox)
67 | shift
68 | if [ $_useSLURM ]; then
69 | CMD="srun -p DEV --mem=8000m -c 4 tox"
70 | else
71 | CMD="tox"
72 | fi
73 | echo ">>>"
74 | echo ">>> Running $CMD "
75 | echo ">>>"
76 | ${CMD}
77 | ;;
78 | -h | --help)
79 | shift
80 | usage
81 | ;;
82 | *)
83 | echo "$_ppname invalid argument '$1'"
84 | shift
85 | ;;
86 | esac
87 | done
88 |
--------------------------------------------------------------------------------
/syncopy/tests/test_cfg.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Test cfg structure to replay frontend calls
4 | #
5 |
6 | import pytest
7 | import numpy as np
8 | import inspect
9 | import tempfile
10 | import os
11 | import dask.distributed as dd
12 |
13 | # Local imports
14 | import syncopy as spy
15 |
16 | from syncopy import synthdata
17 | from syncopy.shared.tools import StructDict
18 |
19 |
20 | availableFrontend_cfgs = {
21 | "freqanalysis": {"method": "mtmconvol", "t_ftimwin": 0.1, "foi": np.arange(1, 60)},
22 | "preprocessing": {"freq": 10, "filter_class": "firws", "filter_type": "hp"},
23 | "resampledata": {"resamplefs": 125, "lpfreq": 60},
24 | "connectivityanalysis": {"method": "coh", "tapsmofrq": 5},
25 | "selectdata": {"trials": np.array([1, 7, 3]), "channel": [np.int64(2), 0]},
26 | }
27 |
28 |
29 | class TestCfg:
30 |
31 | nSamples = 100
32 | nChannels = 3
33 | nTrials = 10
34 | fs = 200
35 | fNy = fs / 2
36 |
37 | # -- use flat white noise as test data --
38 |
39 | adata = synthdata.white_noise(
40 | nTrials=nTrials, nSamples=nSamples, nChannels=nChannels, samplerate=fs, seed=42
41 | )
42 |
43 | # for toi tests, -1s offset
44 | time_span = [-0.9, -0.6]
45 | flow, fhigh = 0.3 * fNy, 0.4 * fNy
46 |
47 | def test_single_frontends(self):
48 |
49 | for frontend in availableFrontend_cfgs.keys():
50 |
51 | # unwrap cfg into keywords
52 | res = getattr(spy, frontend)(self.adata, **availableFrontend_cfgs[frontend])
53 | # now replay with cfg from preceding frontend call
54 | res2 = getattr(spy, frontend)(self.adata, res.cfg)
55 |
56 | # same results
57 | assert np.allclose(res.data[:], res2.data[:])
58 | assert res.cfg == res2.cfg
59 |
60 | # check that it's not just the defaults (mtmfft)
61 | if frontend == "freqanalysis":
62 | res3 = getattr(spy, frontend)(self.adata)
63 | assert res.data.shape != res3.data.shape
64 | assert res.cfg != res3.cfg
65 |
66 | def test_io(self):
67 |
68 | for frontend in availableFrontend_cfgs.keys():
69 |
70 | # unwrap cfg into keywords
71 | res = getattr(spy, frontend)(self.adata, **availableFrontend_cfgs[frontend])
72 | # make a copy
73 | cfg = StructDict(res.cfg)
74 |
75 | # test saving and loading
76 | with tempfile.TemporaryDirectory() as tdir:
77 | fname = os.path.join(tdir, "res")
78 | res.save(container=fname)
79 |
80 | res = spy.load(fname)
81 | assert res.cfg == cfg
82 |
83 | # now replay with cfg from preceding frontend call
84 | res2 = getattr(spy, frontend)(self.adata, res.cfg)
85 | # same results
86 | assert np.allclose(res.data[:], res2.data[:])
87 | assert res.cfg == res2.cfg
88 |
89 | del res, res2
90 |
91 | def test_selection(self):
92 |
93 | select = {"latency": self.time_span, "trials": [1, 2, 3], "channel": [2, 0]}
94 | for frontend in availableFrontend_cfgs.keys():
95 | # select kw for selectdata makes no direct sense
96 | if frontend == "selectdata":
97 | continue
98 | res = getattr(spy, frontend)(self.adata, cfg=availableFrontend_cfgs[frontend], select=select)
99 |
100 | # now replay with cfg from preceding frontend call
101 | res2 = getattr(spy, frontend)(self.adata, res.cfg)
102 |
103 | # same results
104 | assert "select" in res.cfg[frontend]
105 | assert "select" in res2.cfg[frontend]
106 | assert np.allclose(res.data[:], res2.data[:])
107 | assert res.cfg == res2.cfg
108 |
109 | def test_chaining_frontends(self):
110 |
111 | # only preprocessing makes sense to chain atm
112 | res_pp = spy.preprocessing(self.adata, cfg=availableFrontend_cfgs["preprocessing"])
113 |
114 | for frontend in availableFrontend_cfgs.keys():
115 | res = getattr(spy, frontend)(res_pp, cfg=availableFrontend_cfgs[frontend])
116 |
117 | # now replay with cfg from preceding frontend calls
118 | # note we can use the final results `res.cfg` for both calls!
119 | res_pp2 = spy.preprocessing(self.adata, res.cfg)
120 | res2 = getattr(spy, frontend)(res_pp2, res.cfg)
121 |
122 | # same results
123 | assert np.allclose(res.data[:], res2.data[:])
124 | assert res.cfg == res2.cfg
125 |
126 | def test_chaining_frontends_with_fooof_types(self):
127 |
128 | # only preprocessing makes sense to chain atm
129 | res_pp = spy.preprocessing(self.adata, cfg=availableFrontend_cfgs["preprocessing"])
130 |
131 | frontend = "freqanalysis"
132 | frontend_cfg = {"method": "mtmfft", "output": "fooof", "foilim": [0.5, 100.0]}
133 |
134 | res = getattr(spy, frontend)(res_pp, cfg=frontend_cfg)
135 |
136 | # now replay with cfg from preceding frontend calls
137 | # note we can use the final results `res.cfg` for both calls!
138 | res_pp2 = spy.preprocessing(self.adata, res.cfg)
139 | res2 = getattr(spy, frontend)(res_pp2, res.cfg)
140 |
141 | # same results
142 | assert np.allclose(res.data[:], res2.data[:])
143 | assert res.cfg == res2.cfg
144 |
145 | def test_parallel(self, testcluster):
146 |
147 | client = dd.Client(testcluster)
148 | all_tests = [
149 | attr
150 | for attr in self.__dir__()
151 | if (inspect.ismethod(getattr(self, attr)) and "parallel" not in attr)
152 | ]
153 |
154 | for test_name in all_tests:
155 | test_method = getattr(self, test_name)
156 | test_method()
157 | client.close()
158 |
159 |
160 | if __name__ == "__main__":
161 | T1 = TestCfg()
162 |
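163 | 
164 | # The replay pattern under test, in short (illustrative):
165 | #
166 | #     >>> res = spy.freqanalysis(adata, **availableFrontend_cfgs["freqanalysis"])
167 | #     >>> res2 = spy.freqanalysis(adata, res.cfg)  # replays the exact same call
168 | #     >>> assert res.cfg == res2.cfg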
--------------------------------------------------------------------------------
/syncopy/tests/test_concat.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Test spy.concat function
4 | #
5 |
6 | import pytest
7 | import numpy as np
8 |
9 | # Local imports
10 | import syncopy as spy
11 | from syncopy.shared.errors import SPYTypeError, SPYValueError
12 |
13 |
14 | class TestConcat:
15 |
16 | nTrials = 10
17 | nSamples = 100
18 | nChannels = 2
19 |
20 | nFreq = 5
21 | nTaper = 4
22 |
23 | def test_ad_concat(self):
24 |
25 | arr = np.zeros((self.nSamples, self.nChannels))
26 | adata = spy.AnalogData(data=[arr for _ in range(self.nTrials)], samplerate=10)
27 |
28 | # create 3 channel 2nd data object
29 |
30 | adata2 = spy.AnalogData(
31 | data=[np.zeros((self.nSamples, 3)) for _ in range(self.nTrials)],
32 | samplerate=10,
33 | )
34 |
35 | res = spy.concat(adata, adata2)
36 |
37 | assert isinstance(res, spy.AnalogData)
38 | assert len(res.trials) == len(adata.trials)
39 | assert len(res.channel) == len(adata.channel) + len(adata2.channel)
40 | # check total size
41 | assert res.data.size == adata.data.size + 3 * self.nSamples * self.nTrials
42 |
43 | def test_sd_concat(self):
44 |
45 | # -- SpectralData with non-standard dimord --
46 |
47 | arr = np.zeros((self.nSamples, self.nChannels, self.nTaper, self.nFreq))
48 | sdata = spy.SpectralData(
49 | data=[arr for _ in range(self.nTrials)],
50 | samplerate=10,
51 | dimord=["time", "channel", "taper", "freq"],
52 | )
53 |
54 | # create 3 channel 2nd data object
55 |
56 | arr = np.zeros((self.nSamples, 3, self.nTaper, self.nFreq))
57 | sdata2 = spy.SpectralData(
58 | data=[arr for _ in range(self.nTrials)],
59 | samplerate=10,
60 | dimord=["time", "channel", "taper", "freq"],
61 | )
62 |
63 | res = spy.concat(sdata, sdata2)
64 |
65 | assert isinstance(res, spy.SpectralData)
66 | assert len(res.trials) == len(sdata.trials)
67 | assert len(res.channel) == len(sdata.channel) + len(sdata2.channel)
68 | # check total size
69 | assert res.data.size == sdata.data.size + 3 * self.nSamples * self.nTrials * self.nTaper * self.nFreq
70 |
71 | def test_exceptions(self):
72 |
73 | # non matching data types
74 | adata = spy.AnalogData(data=np.zeros((10, 2)), samplerate=2)
75 | sdata = spy.SpectralData(data=np.zeros((10, 2, 2, 2)), samplerate=2)
76 |
77 | with pytest.raises(SPYValueError, match="expected objects with equal dimensional layout"):
78 | spy.concat(adata, sdata)
79 |
80 | # non matching dimord
81 | adata2 = spy.AnalogData(data=np.zeros((10, 2)), samplerate=2, dimord=["channel", "time"])
82 |
83 | with pytest.raises(SPYValueError, match="expected objects with equal dimensional layout"):
84 | spy.concat(adata, adata2)
85 |
86 | # dim not in dimord
87 | with pytest.raises(SPYValueError, match="object which has a `sth` dimension"):
88 | spy.concat(adata, adata, dim="sth")
89 |
90 | # only channel supported atm
91 | with pytest.raises(NotImplementedError, match="Only `channel`"):
92 | spy.concat(adata, adata, dim="time")
93 |
94 | # objects don't have the same size along remaining axes
95 | adata3 = spy.AnalogData(data=np.zeros((12, 2)), samplerate=3)
96 | with pytest.raises(SPYValueError, match="matching shapes"):
97 | spy.concat(adata, adata3, dim="channel")
98 |
99 |
100 | if __name__ == "__main__":
101 |
102 | T1 = TestConcat()
103 |
--------------------------------------------------------------------------------
/syncopy/tests/test_datatype_util.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Test proper functionality of Syncopy's `ContinuousData` class + subclasses
4 | #
5 |
6 | # Builtin/3rd party package imports
7 | import os
8 | import tempfile
9 |
10 | # Local imports
11 | from syncopy.datatype.util import get_dir_size
12 |
13 |
14 | class TestDirSize:
15 | def test_dirsize(self):
16 | with tempfile.TemporaryDirectory() as tdir:
17 | fname = "tmpfile"
18 | for file_idx in range(20):
19 | tf = os.path.join(tdir, fname + str(file_idx))
20 | with open(tf, "w") as f:
21 | f.write(f"This is a dummy file {file_idx}.")
22 | dir_size_byte, num_files = get_dir_size(tdir, out="byte")
23 | assert num_files == 20
24 | assert dir_size_byte > 200
25 | assert dir_size_byte < 2000
26 | assert dir_size_byte == 470 # 10 files x 23 bytes (idx 0-9) + 10 files x 24 bytes (idx 10-19)
27 | dir_size_gb, num_files = get_dir_size(tdir, out="GB")
28 | assert dir_size_gb < 1e-6
29 |
30 |
31 | if __name__ == "__main__":
32 |
33 | T1 = TestDirSize()
34 |
--------------------------------------------------------------------------------
/syncopy/tests/test_info.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Test .info property of BaseData
4 | #
5 |
6 | import pytest
7 | import numpy as np
8 | import tempfile
9 | import os
10 |
11 | # Local imports
12 | import syncopy as spy
13 | from syncopy.shared.tools import SerializableDict
14 | from syncopy.shared.errors import SPYTypeError, SPYError
15 |
16 |
17 | class TestInfo:
18 |
19 | # serializable dict
20 | ok_dict = {
21 | "sth": 4,
22 | "important": [1, 2],
23 | "to": {"v1": 2},
24 | "remember": "need more coffe",
25 | }
26 | # non-serializable dict
27 | ns_dict = {"sth": 4, "not_serializable": {"v1": range(2)}}
28 | # dict with non-serializable keys
29 | ns_dict2 = {range(2): "small_range", range(1000): "large_range"}
30 |
31 | # test setter
32 | def test_property(self):
33 |
34 | # as .info is a basedata property,
35 | # testing for one derived class should suffice
36 | adata = spy.AnalogData([np.ones((3, 1))], samplerate=1)
37 |
38 | # attach some aux. info
39 | adata.info = self.ok_dict
40 |
41 | # got converted into SerializableDict
42 | # so testing this makes sense
43 | assert isinstance(adata.info, SerializableDict)
44 | assert adata.info == self.ok_dict
45 |
46 | # that is not allowed (akin to cfg)
47 | with pytest.raises(SPYTypeError, match="expected dictionary-like"):
48 | adata.info = None
49 |
50 | # clear with empty dict
51 | adata.info = {}
52 | assert len(adata.info) == 0
53 | assert len(self.ok_dict) != 0
54 |
55 | # test we're catching non-serializable dictionary entries
56 | with pytest.raises(SPYError, match="expected serializable data type"):
57 | adata.info = self.ns_dict
58 |
59 | # test that we also catch non-serializable keys
60 | with pytest.raises(SPYError, match="expected serializable data type"):
61 | adata.info = self.ns_dict2
62 |
63 | # this works, data types get converted from NumPy to Python scalars
64 | adata.info["new-var"] = list(np.arange(3))
65 | assert np.allclose(adata.info["new-var"], np.arange(3))
66 |
67 | # test aux. info dict saving and loading
68 | def test_io(self):
69 | with tempfile.TemporaryDirectory() as tdir:
70 |
71 | fname = os.path.join(tdir, "dummy")
72 | dummy = spy.AnalogData([np.ones((3, 1))], samplerate=1)
73 |
74 | # attach some aux. info
75 | dummy.info = self.ok_dict
76 | spy.save(dummy, fname)
77 | del dummy
78 |
79 | dummy2 = spy.load(fname)
80 | assert dummy2.info == self.ok_dict
81 |
82 |
83 | if __name__ == "__main__":
84 | T1 = TestInfo()
85 |
--------------------------------------------------------------------------------
/syncopy/tests/test_logging.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Test logging.
4 | #
5 |
6 | import os
7 | import platform
8 |
9 | # Local imports
10 | import syncopy as spy
11 | from syncopy.shared.log import get_logger, get_parallel_logger
12 | from syncopy.shared.errors import SPYLog
13 |
14 |
15 | class TestLogging:
16 | def test_seq_logfile_exists(self):
17 | logfile = os.path.join(spy.__logdir__, "syncopy.log")
18 | assert os.path.isfile(logfile)
19 |
20 | def test_par_logfile_exists(self):
21 | par_logfile = os.path.join(spy.__logdir__, f"syncopy_{platform.node()}.log")
22 | assert os.path.isfile(par_logfile)
23 |
24 | def test_default_log_level_is_important(self):
25 | # Ensure the log level is at default (that user did not change SPYLOGLEVEL on test system)
26 | assert os.getenv("SPYLOGLEVEL", "IMPORTANT") == "IMPORTANT"
27 |
28 | logfile = os.path.join(spy.__logdir__, "syncopy.log")
29 | assert os.path.isfile(logfile)
30 | num_lines_initial = sum(
31 | 1 for line in open(logfile)
32 | ) # The log file gets appended, so it will most likely *not* be empty.
33 |
34 | # Log something with log level info and DEBUG, which should not affect the logfile.
35 | logger = get_logger()
36 | logger.info("I am adding an INFO level log entry.")
37 | SPYLog("I am adding a DEBUG level log entry.", loglevel="DEBUG")
38 |
39 | num_lines_after_info_debug = sum(1 for line in open(logfile))
40 |
41 | assert num_lines_initial == num_lines_after_info_debug
42 |
43 | # Now log something with log level WARNING
44 | SPYLog("I am adding a WARNING level log entry.", loglevel="WARNING")
45 |
46 | num_lines_after_warning = sum(1 for line in open(logfile))
47 | assert num_lines_after_warning > num_lines_after_info_debug
48 |
49 | def test_default_parallel_log_level_is_important(self):
50 | # Ensure the log level is at default (that user did not change SPYLOGLEVEL on test system)
51 | assert os.getenv("SPYLOGLEVEL", "IMPORTANT") == "IMPORTANT"
52 | assert os.getenv("SPYPARLOGLEVEL", "IMPORTANT") == "IMPORTANT"
53 |
54 | par_logfile = os.path.join(spy.__logdir__, f"syncopy_{platform.node()}.log")
55 | assert os.path.isfile(par_logfile)
56 | num_lines_initial = sum(
57 | 1 for line in open(par_logfile)
58 | ) # The log file gets appended, so it will most likely *not* be empty.
59 |
60 | # Log something with log level info and DEBUG, which should not affect the logfile.
61 | par_logger = get_parallel_logger()
62 | par_logger.info("I am adding an INFO level log entry.")
63 | par_logger.debug("I am adding a DEBUG level log entry.")
64 |
65 | num_lines_after_info_debug = sum(1 for line in open(par_logfile))
66 |
67 | assert num_lines_initial == num_lines_after_info_debug
68 |
69 | # Now log something with log levels IMPORTANT and WARNING
70 | par_logger.important("I am adding an IMPORTANT level log entry.")
71 | par_logger.warning("This is the last warning.")
72 |
73 | num_lines_after_warning = sum(1 for line in open(par_logfile))
74 | assert num_lines_after_warning > num_lines_after_info_debug
75 |
76 | def test_log_function_is_in_root_namespace_with_seq(self):
77 | """Tests sequential logger via spy.log function."""
78 | assert os.getenv("SPYLOGLEVEL", "IMPORTANT") == "IMPORTANT"
79 |
80 | logfile = os.path.join(spy.__logdir__, "syncopy.log")
81 | assert os.path.isfile(logfile)
82 | num_lines_initial = sum(
83 | 1 for line in open(logfile)
84 | ) # The log file gets appended, so it will most likely *not* be empty.
85 |
86 | # Log something with log level info and DEBUG, which should not affect the logfile.
87 | spy.log("I am adding an INFO level log entry.", level="INFO")
88 |
89 | num_lines_after_info_debug = sum(1 for line in open(logfile))
90 | assert num_lines_initial == num_lines_after_info_debug
91 |
92 | # Now log something with log level IMPORTANT
93 | spy.log("I am adding an IMPORTANT level log entry.", level="IMPORTANT", par=False)
94 | spy.log("This is the last warning.", level="IMPORTANT")
95 |
96 | num_lines_after_warning = sum(1 for line in open(logfile))
97 | assert num_lines_after_warning > num_lines_after_info_debug
98 |
99 | def test_log_function_is_in_root_namespace_with_par(self):
100 | """Tests parallel logger via spy.log function."""
101 | assert os.getenv("SPYPARLOGLEVEL", "IMPORTANT") == "IMPORTANT"
102 |
103 | par_logfile = os.path.join(spy.__logdir__, f"syncopy_{platform.node()}.log")
104 | assert os.path.isfile(par_logfile)
105 | num_lines_initial = sum(
106 | 1 for line in open(par_logfile)
107 | ) # The log file gets appended, so it will most likely *not* be empty.
108 |
109 | # Log something with log level info and DEBUG, which should not affect the logfile.
110 | spy.log("I am adding an INFO level log entry.", level="INFO", par=True)
111 |
112 | num_lines_after_info_debug = sum(1 for line in open(par_logfile))
113 | assert num_lines_initial == num_lines_after_info_debug
114 |
115 | # Now log something with log level IMPORTANT
116 | spy.log("I am adding an IMPORTANT level log entry.", level="IMPORTANT", par=True)
117 | spy.log("This is the last warning.", level="IMPORTANT", par=True)
118 |
119 | num_lines_after_warning = sum(1 for line in open(par_logfile))
120 | assert num_lines_after_warning > num_lines_after_info_debug
121 |
--------------------------------------------------------------------------------
/syncopy/tests/test_packagesetup.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Test if Syncopy's basic import setup/tmp storage initialization works as intended
4 | #
5 |
6 | # Builtin/3rd party package imports
7 | import os
8 | import sys
9 | import shutil
10 | import time
11 | import tempfile
12 | import importlib
13 | import subprocess
14 | import pytest
15 | from glob import glob
16 |
17 | # Local imports
18 | import syncopy
19 |
20 | # Decorator to decide whether or not to run dask-related tests
21 | skip_in_ghactions = pytest.mark.skipif(
22 | "GITHUB_ACTIONS" in os.environ.keys(), reason="Do not execute by GitHub Actions"
23 | )
24 |
25 |
26 | # check if folder creation in `__storage__` works as expected
27 | def test_storage_access():
28 | dirNames = [syncopy.__storage__, "first", "second", "third", "fourth"]
29 | folderCascade = os.path.join(*dirNames)
30 | os.makedirs(folderCascade)
31 | shutil.rmtree(folderCascade)
32 | time.sleep(1)
33 |
34 |
35 | # check if `SPYTMPDIR` is respected
36 | def test_spytmpdir():
37 | tmpDir = os.path.join(syncopy.__storage__, "__testStorage__")
38 | os.environ["SPYTMPDIR"] = tmpDir
39 | importlib.reload(syncopy)
40 | assert syncopy.__storage__ == tmpDir
41 | shutil.rmtree(tmpDir, ignore_errors=True)
42 | del os.environ["SPYTMPDIR"]
43 | time.sleep(1)
44 |
45 |
46 | # check if `cleanup` does what it's supposed to do
47 | # @skip_in_ghactions
48 | def test_cleanup():
49 | # spawn new Python instance, which creates and saves an `AnalogData` object
50 | # in custom $SPYTMPDIR; force-kill the process after a few seconds preventing
51 | # Syncopy from cleaning up its temp storage folder
52 |
53 | # this function assumes it runs in the root directory of the repository;
54 | # if that is not the case, we have to move there
55 |
56 | cdir = os.getcwd()
57 | while "syncopy" in cdir:
58 | head, tail = os.path.split(cdir)
59 | if not "syncopy" in head:
60 | root_dir = os.path.join(head, tail)
61 | os.chdir(root_dir)
62 | break
63 | cdir = head
64 | # check that we are not entirely somewhere else
65 | else:
66 | assert False
67 |
68 | tmpDir = tempfile.mkdtemp()
69 |
70 | os.environ["SPYTMPDIR"] = tmpDir
71 | commandStr = (
72 | "import os; "
73 | + "import time; "
74 | + "import numpy as np; "
75 | + "import syncopy as spy; "
76 | + "dummy = spy.AnalogData(data=np.ones((10,10)), samplerate=1); "
77 | + "time.sleep(100)"
78 | )
79 | process = subprocess.Popen([sys.executable, "-c", commandStr])
80 | time.sleep(12)
81 | process.kill()
82 |
83 | # get inventory of external Syncopy instance's temp storage
84 | num_garbage_before = len(glob(os.path.join(tmpDir, "*.analog")))
85 | assert num_garbage_before >= 0
86 |
87 | # launch 2nd external instance with same $SPYTMPDIR, create 2nd `AnalogData`
88 | # object, run `cleanup` and keep instance alive in background (for max. 100s)
89 | commandStr = (
90 | "import time; "
91 | + "import syncopy as spy; "
92 | + "import numpy as np; "
93 | + "dummy = spy.AnalogData(data=np.ones((10,10)), samplerate=1); "
94 | + "time.sleep(5)"
95 | + "spy.cleanup(older_than=0, interactive=False, only_current_session=True); "
96 | + "time.sleep(100)"
97 | )
98 | process2 = subprocess.Popen(
99 | [sys.executable, "-c", commandStr],
100 | stdout=subprocess.PIPE,
101 | stderr=subprocess.PIPE,
102 | text=True,
103 | )
104 | time.sleep(12)
105 |
106 | num_garbage_after = len(glob(os.path.join(tmpDir, "*.analog")))
107 |
108 | # ensure `cleanup` call removed first instance's garbage but 2nd `AnalogData`
109 | # belonging to 2nd instance launched above is unharmed
110 | assert num_garbage_after == num_garbage_before
111 |
112 | # now kill 2nd instance and wipe `tmpDir`
113 | process2.kill()
114 | time.sleep(1)
115 |
116 | del os.environ["SPYTMPDIR"]
117 | time.sleep(1)
118 | shutil.rmtree(tmpDir)
119 |
--------------------------------------------------------------------------------
/syncopy/tests/test_spytools.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Ensure tooling functions shared across the package work as intended
4 | #
5 |
6 | # Builtin/3rd party package imports
7 | import numpy as np
8 | import pytest
9 |
10 | # Local imports
11 | from syncopy.shared.tools import best_match
12 | from syncopy.shared.errors import SPYValueError
13 |
14 |
15 | class TestBestMatch:
16 |
17 | # Source-arrays with integer elements
18 | intSource = np.arange(10)
19 | randIntSource = np.random.choice(intSource, size=intSource.size, replace=False)
20 |
21 | # Source-arrays with floating point elements
22 | floatSource = np.array([1.5, 1.5, 2.2, 6.2, 8.8])
23 | randFloatSource = np.random.choice(floatSource, size=floatSource.size, replace=False)
24 |
25 | # Selections defined by ordered/unordered int/float arrays
26 | intSelection = intSource[:4]
27 | randIntSelection = np.random.choice(intSelection, size=intSelection.size, replace=False)
28 | floatSelection = np.array([1.9, 9.0, 1.0, -0.4, 1.2, 0.2, 9.3])
29 | sortFloatSelection = np.sort(floatSelection)
30 |
31 | def test_intsource(self):
32 |
33 | for source in [self.intSource, self.randIntSource]:
34 | for selection in [
35 | self.intSelection,
36 | self.randIntSelection,
37 | self.floatSelection,
38 | self.sortFloatSelection,
39 | ]:
40 | expectedVal = np.round(selection)
41 | expectedIdx = np.array([np.where(source == elem)[0][0] for elem in expectedVal])
42 | val, idx = best_match(source, selection)
43 | assert np.array_equal(val, expectedVal)
44 | assert np.array_equal(idx, expectedIdx)
45 |
46 | val, idx = best_match(source, selection, squash_duplicates=True)
47 | _, sidx = np.unique(expectedVal, return_index=True)
48 | sidx.sort()
49 | assert np.array_equal(val, expectedVal[sidx])
50 | assert np.array_equal(idx, expectedIdx[sidx])
51 |
52 | with pytest.raises(SPYValueError):
53 | best_match(source, selection, tol=1e-6)
54 |
55 | val, idx = best_match(source, [selection.min(), selection.max()], span=True)
56 | expectedVal = np.array(
57 | [elem for elem in source if selection.min() <= elem <= selection.max()]
58 | )
59 | expectedIdx = np.array([np.where(source == elem)[0][0] for elem in expectedVal])
60 |
61 | def test_floatsource(self):
62 | for source in [self.floatSource, self.randFloatSource]:
63 | for selection in [
64 | self.intSelection,
65 | self.randIntSelection,
66 | self.floatSelection,
67 | self.sortFloatSelection,
68 | ]:
69 | expectedVal = np.array([source[np.argmin(np.abs(source - elem))] for elem in selection])
70 | expectedIdx = np.array([np.where(source == elem)[0][0] for elem in expectedVal])
71 | val, idx = best_match(source, selection)
72 | assert np.array_equal(val, expectedVal)
73 | assert np.array_equal(idx, expectedIdx)
74 |
75 | val, idx = best_match(source, selection, squash_duplicates=True)
76 | _, sidx = np.unique(expectedVal, return_index=True)
77 | sidx.sort()
78 | assert np.array_equal(val, expectedVal[sidx])
79 | assert np.array_equal(idx, expectedIdx[sidx])
80 |
81 | with pytest.raises(SPYValueError):
82 | best_match(source, selection, tol=1e-6)
83 |
84 | val, idx = best_match(source, [selection.min(), selection.max()], span=True)
85 | expectedVal = np.array(
86 | [elem for elem in source if selection.min() <= elem <= selection.max()]
87 | )
88 | expectedIdx = np.array([np.where(source == elem)[0][0] for elem in expectedVal])
89 |
--------------------------------------------------------------------------------
/syncopy_m1macos.yml:
--------------------------------------------------------------------------------
1 | name: syncopy
2 | channels:
3 | - defaults
4 | - conda-forge
5 | dependencies:
6 | - python >= 3.10, < 3.11
7 | - dask
8 | - distributed
9 | - dask-jobqueue
10 | - h5py
11 | - numpy
12 | - scipy
13 | - matplotlib
14 | - tqdm
15 | - natsort
16 | - pip
17 | - psutil
18 | - tqdm
19 | - fooof >= 1.0
20 | - bokeh
21 | - ipdb
22 | - memory_profiler
23 | - pylint
24 | - python-graphviz
25 | - pytest-cov
26 | - ruamel.yaml
27 | - setuptools_scm
28 | - pip
29 |
--------------------------------------------------------------------------------