├── .codecov.yml ├── .git_archival.txt ├── .gitattributes ├── .github ├── CODE_OF_CONDUCT.md ├── ISSUE_TEMPLATE │ ├── bug_report.yml │ ├── config.yml │ ├── documentation.yml │ └── feature_request.yml ├── PULL_REQUEST_TEMPLATE │ └── release.md ├── dependabot.yml ├── pull_request_template.md └── workflows │ ├── check_sphinx_links.yml │ ├── codespell.yml │ ├── deploy_release.yml │ ├── project_action.yml │ ├── ruff.yml │ ├── run_all_tests.yml │ ├── run_coverage.yml │ ├── run_hdmf_zarr_tests.yml │ ├── run_pynwb_tests.yml │ └── run_tests.yml ├── .gitignore ├── .gitmodules ├── .mailmap ├── .pre-commit-config.yaml ├── .readthedocs.yaml ├── CHANGELOG.md ├── Legal.txt ├── README.rst ├── docs ├── CONTRIBUTING.rst ├── Makefile ├── gallery │ ├── README.txt │ ├── example_dynamic_term_set.yaml │ ├── example_term_set.yaml │ ├── multicontainerinterface.py │ ├── plot_aligneddynamictable.py │ ├── plot_dynamictable_howto.py │ ├── plot_dynamictable_tutorial.py │ ├── plot_external_resources.py │ ├── plot_generic_data_chunk_tutorial.py │ ├── plot_term_set.py │ └── schemasheets │ │ ├── classes.tsv │ │ ├── enums.tsv │ │ ├── prefixes.tsv │ │ ├── schema.tsv │ │ └── slots.tsv ├── make.bat └── source │ ├── _static │ └── theme_overrides.css │ ├── api_docs.rst │ ├── building_api.rst │ ├── conf.py │ ├── contributing.rst │ ├── export.rst │ ├── extensions.rst │ ├── figures │ ├── gallery_thumbnail_aligneddynamictable.png │ ├── gallery_thumbnail_dynamictable.png │ ├── gallery_thumbnail_externalresources.png │ ├── gallery_thumbnail_generic_data_chunk_tutorial.png │ ├── gallery_thumbnail_multicontainerinterface.png │ ├── gallery_thumbnail_termset.png │ ├── gallery_thumbnails.pptx │ ├── pynwb_package_overview.pptx │ ├── software_architecture.pdf │ ├── software_architecture.png │ ├── software_architecture.pptx │ ├── software_architecture_buildmanager.pdf │ ├── software_architecture_buildmanager.png │ ├── software_architecture_buildmanager.pptx │ ├── software_architecture_concepts.pdf │ ├── software_architecture_concepts.png │ ├── software_architecture_concepts.pptx │ ├── software_architecture_design_choices.pdf │ ├── software_architecture_design_choices.png │ ├── software_architecture_design_choices.pptx │ ├── software_architecture_hdmfio.pdf │ ├── software_architecture_hdmfio.png │ ├── software_architecture_hdmfio.pptx │ ├── software_architecture_mainconcepts.pdf │ ├── software_architecture_mainconcepts.png │ └── software_architecture_mainconcepts.pptx │ ├── hdmf_logo-180x180.png │ ├── hdmf_logo.pdf │ ├── hdmf_logo.png │ ├── index.rst │ ├── install_developers.rst │ ├── install_users.rst │ ├── legal.rst │ ├── make_a_release.rst │ ├── make_roundtrip_test.rst │ ├── overview_citing.rst │ ├── overview_intro.rst │ ├── overview_software_architecture.rst │ ├── software_process.rst │ ├── spec_language_support.rst │ └── validation.rst ├── environment-ros3.yml ├── license.txt ├── pyproject.toml ├── scripts └── check_py_support.py ├── src └── hdmf │ ├── __init__.py │ ├── _due.py │ ├── backends │ ├── __init__.py │ ├── errors.py │ ├── hdf5 │ │ ├── __init__.py │ │ ├── h5_utils.py │ │ └── h5tools.py │ ├── io.py │ ├── utils.py │ └── warnings.py │ ├── build │ ├── __init__.py │ ├── builders.py │ ├── classgenerator.py │ ├── errors.py │ ├── manager.py │ ├── objectmapper.py │ └── warnings.py │ ├── common │ ├── __init__.py │ ├── alignedtable.py │ ├── hierarchicaltable.py │ ├── io │ │ ├── __init__.py │ │ ├── alignedtable.py │ │ ├── multi.py │ │ ├── resources.py │ │ └── table.py │ ├── multi.py │ ├── resources.py │ ├── sparse.py │ └── table.py │ ├── 
container.py │ ├── data_utils.py │ ├── monitor.py │ ├── query.py │ ├── spec │ ├── __init__.py │ ├── catalog.py │ ├── namespace.py │ ├── spec.py │ └── write.py │ ├── term_set.py │ ├── testing │ ├── __init__.py │ ├── testcase.py │ ├── utils.py │ └── validate_spec.py │ ├── utils.py │ └── validate │ ├── __init__.py │ ├── errors.py │ └── validator.py ├── test_gallery.py ├── tests ├── __init__.py └── unit │ ├── __init__.py │ ├── back_compat_tests │ ├── 1.0.5.h5 │ ├── __init__.py │ └── test_1_1_0.py │ ├── build_tests │ ├── __init__.py │ ├── mapper_tests │ │ ├── __init__.py │ │ ├── test_build.py │ │ ├── test_build_datetime.py │ │ └── test_build_quantity.py │ ├── test_builder.py │ ├── test_classgenerator.py │ ├── test_convert_dtype.py │ ├── test_io_manager.py │ ├── test_io_map.py │ └── test_io_map_data.py │ ├── common │ ├── __init__.py │ ├── test_alignedtable.py │ ├── test_common.py │ ├── test_common_io.py │ ├── test_generate_table.py │ ├── test_linkedtables.py │ ├── test_multi.py │ ├── test_resources.py │ ├── test_sparse.py │ └── test_table.py │ ├── example_dynamic_term_set.yaml │ ├── example_test_term_set.yaml │ ├── example_test_term_set2.yaml │ ├── hdmf_config.yaml │ ├── hdmf_config2.yaml │ ├── helpers │ ├── __init__.py │ └── utils.py │ ├── spec_tests │ ├── __init__.py │ ├── test-ext.base.yaml │ ├── test-ext.namespace.yaml │ ├── test.base.yaml │ ├── test.namespace.yaml │ ├── test_attribute_spec.py │ ├── test_dataset_spec.py │ ├── test_dtype_spec.py │ ├── test_group_spec.py │ ├── test_link_spec.py │ ├── test_load_namespace.py │ ├── test_ref_spec.py │ ├── test_spec_catalog.py │ └── test_spec_write.py │ ├── test_container.py │ ├── test_io_hdf5.py │ ├── test_io_hdf5_h5tools.py │ ├── test_io_hdf5_streaming.py │ ├── test_monitor.py │ ├── test_multicontainerinterface.py │ ├── test_table.py │ ├── test_term_set.py │ ├── test_term_set_input │ └── schemasheets │ │ ├── classes.tsv │ │ ├── enums.tsv │ │ ├── nwb_static_enums.yaml │ │ ├── prefixes.tsv │ │ ├── schema.tsv │ │ └── slots.tsv │ ├── utils_test │ ├── __init__.py │ ├── test_core_DataChunk.py │ ├── test_core_DataChunkIterator.py │ ├── test_core_DataIO.py │ ├── test_core_GenericDataChunkIterator.py │ ├── test_core_ShapeValidator.py │ ├── test_data_utils.py │ ├── test_docval.py │ ├── test_labelleddict.py │ └── test_utils.py │ └── validator_tests │ ├── __init__.py │ ├── test_errors.py │ └── test_validate.py └── tox.ini /.codecov.yml: -------------------------------------------------------------------------------- 1 | coverage: 2 | status: 3 | project: 4 | default: 5 | # Don't allow overall project coverage to be dropped more than 6 | # 2% 7 | threshold: 2 8 | patch: 9 | default: 10 | # 75% of the changed code must be covered by tests 11 | threshold: 25 12 | only_pulls: true 13 | -------------------------------------------------------------------------------- /.git_archival.txt: -------------------------------------------------------------------------------- 1 | node: 26acff54aad825edf377dd5df9424643df875e2d 2 | node-date: 2025-05-28T11:49:47-07:00 3 | describe-name: 4.1.0 4 | ref-names: HEAD -> dev, tag: latest, tag: 4.1.0 5 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | .git_archival.txt export-subst 2 | -------------------------------------------------------------------------------- /.github/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant 
Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. 6 | 7 | ## Our Standards 8 | 9 | Examples of behavior that contributes to creating a positive environment include: 10 | 11 | * Using welcoming and inclusive language 12 | * Being respectful of differing viewpoints and experiences 13 | * Gracefully accepting constructive criticism 14 | * Focusing on what is best for the community 15 | * Showing empathy towards other community members 16 | 17 | Examples of unacceptable behavior by participants include: 18 | 19 | * The use of sexualized language or imagery and unwelcome sexual attention or advances 20 | * Trolling, insulting/derogatory comments, and personal or political attacks 21 | * Public or private harassment 22 | * Publishing others’ private information, such as a physical or electronic address, without explicit permission 23 | * Other conduct which could reasonably be considered inappropriate in a professional setting 24 | 25 | ## Our Responsibilities 26 | 27 | Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. 28 | 29 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 30 | 31 | ## Scope 32 | 33 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. 34 | 35 | ## Enforcement 36 | 37 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. 38 | 39 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project’s leadership. 
40 | 41 | ## Attribution 42 | 43 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] 44 | 45 | [homepage]: http://contributor-covenant.org 46 | [version]: http://contributor-covenant.org/version/1/4/ 47 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | name: Bug Report 2 | description: File a bug report. 3 | title: "[Bug]: " 4 | labels: ["bug"] 5 | 6 | body: 7 | - type: markdown 8 | attributes: 9 | value: | 10 | # Thanks for taking the time to fill out this bug report! 11 | ### The following information will help us resolve your issue. 12 | - type: textarea 13 | id: what-happened 14 | attributes: 15 | label: What happened? 16 | description: Also tell us, what did you expect to happen? 17 | validations: 18 | required: true 19 | - type: textarea 20 | id: reproduce 21 | attributes: 22 | label: Steps to Reproduce 23 | description: | 24 | Please copy and paste the code you were trying to run that caused the error. 25 | 26 | Feel free to include as little or as much as you think is relevant. This section will be automatically formatted into code, so no need for backticks. 27 | render: python 28 | validations: 29 | required: true 30 | - type: textarea 31 | id: traceback 32 | attributes: 33 | label: Traceback 34 | description: | 35 | Please copy and paste the full traceback produced by the error. 36 | 37 | This section will be automatically formatted into code, so no need for backticks. 38 | render: pytb 39 | - type: dropdown 40 | id: os 41 | attributes: 42 | label: Operating System 43 | options: 44 | - macOS 45 | - Windows 46 | - Linux 47 | validations: 48 | required: true 49 | - type: dropdown 50 | id: python_version 51 | attributes: 52 | label: Python Version 53 | options: 54 | - "3.13" 55 | - "3.12" 56 | - "3.11" 57 | - "3.10" 58 | - "3.9" 59 | - "newer" 60 | validations: 61 | required: true 62 | - type: textarea 63 | id: package_versions 64 | attributes: 65 | label: Package Versions 66 | description: | 67 | Please share your currently installed Python package versions by calling `pip freeze > environment_for_issue.txt` and uploading the text file along with this issue. 68 | 69 | This helps us determine if there are any secondary or tertiary issues caused by other dependencies. 70 | 71 | You can attach images or log files by clicking this area to highlight it and then dragging files in. 72 | If GitHub upload is not working, you can also copy and paste the output into this section. 73 | - type: markdown 74 | attributes: 75 | value: | 76 | By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/hdmf-dev/hdmf/blob/dev/.github/CODE_OF_CONDUCT.md). 77 | 78 | Before submitting this issue, please review the [Contributing Guide](https://github.com/hdmf-dev/hdmf/blob/dev/docs/CONTRIBUTING.rst). 79 | 80 | Please also ensure that this issue has not already been [reported](https://github.com/hdmf-dev/hdmf/issues). Thank you! 81 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: true 2 | contact_links: 3 | - name: Read the Documentation 4 | url: https://hdmf.readthedocs.io/en/stable/ 5 | about: Check out the full documentation. 
6 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/documentation.yml: -------------------------------------------------------------------------------- 1 | name: Documentation 2 | description: Is the documentation of something missing, unclear, or lacking? This is the place. 3 | title: "[Documentation]: " 4 | labels: "documentation" 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: | 9 | ## Thank you for your suggestion! 10 | 11 | We welcome any ideas about how to make **HDMF** better for the community. 12 | 13 | Please keep in mind that new or improved documentation may not get implemented immediately. 14 | - type: textarea 15 | id: summary 16 | attributes: 17 | label: What would you like changed or added to the documentation and why? 18 | description: Do you have any suggestions for the documents? 19 | validations: 20 | required: true 21 | - type: dropdown 22 | id: interest 23 | attributes: 24 | label: Do you have any interest in helping write or edit the documentation? 25 | description: | 26 | We appreciate any help you can offer! 27 | 28 | For information on how to contribute, please refer to our [contributing guidelines](https://github.com/hdmf-dev/hdmf/blob/dev/docs/CONTRIBUTING.rst). 29 | options: 30 | - Yes. 31 | - Yes, but I would need guidance. 32 | - No. 33 | validations: 34 | required: true 35 | - type: markdown 36 | attributes: 37 | value: | 38 | By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/hdmf-dev/hdmf/blob/dev/.github/CODE_OF_CONDUCT.md). 39 | 40 | Before submitting this issue, please review the [Contributing Guide](https://github.com/hdmf-dev/hdmf/blob/dev/docs/CONTRIBUTING.rst). 41 | 42 | Please also ensure that this issue has not already been [reported](https://github.com/hdmf-dev/hdmf/issues). Thank you! 43 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | name: Feature Request 2 | description: Suggest an idea for a brand new feature, or a change to an existing one. 3 | title: "[Feature]: " 4 | labels: ["enhancement"] 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: | 9 | ## Thank you for your suggestion! 10 | 11 | We welcome any ideas about how to make **HDMF** better for the community. 12 | 13 | Please keep in mind that new features may not get implemented immediately. 14 | - type: textarea 15 | id: summary 16 | attributes: 17 | label: What would you like to see added to HDMF? 18 | description: | 19 | What are you trying to achieve with **HDMF**? 20 | 21 | Is this a more convenient way to do something that is already possible, or is a workaround currently unfeasible? 22 | 23 | If the change is related to a problem, please provide a clear and concise description of the problem. 24 | validations: 25 | required: true 26 | - type: textarea 27 | id: solution 28 | attributes: 29 | label: What solution would you like? 30 | description: | 31 | A clear and concise description of what you want to happen. 32 | 33 | Describe alternative solutions you have considered. 34 | validations: 35 | required: true 36 | - type: dropdown 37 | id: interest 38 | attributes: 39 | label: Do you have any interest in helping implement the feature? 40 | description: | 41 | We appreciate any help you can offer! 
42 | 43 | For information on how to contribute, please refer to our [contributing guidelines](https://github.com/hdmf-dev/hdmf/blob/dev/docs/CONTRIBUTING.rst). 44 | options: 45 | - Yes. 46 | - Yes, but I would need guidance. 47 | - No. 48 | validations: 49 | required: true 50 | - type: markdown 51 | attributes: 52 | value: | 53 | By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/hdmf-dev/hdmf/blob/dev/.github/CODE_OF_CONDUCT.md). 54 | 55 | Before submitting this issue, please review the [Contributing Guide](https://github.com/hdmf-dev/hdmf/blob/dev/docs/CONTRIBUTING.rst). 56 | 57 | Please also ensure that this issue has not already been [reported](https://github.com/hdmf-dev/hdmf/issues). Thank you! 58 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE/release.md: -------------------------------------------------------------------------------- 1 | Prepare for release of HDMF [version] 2 | 3 | ### Before merging: 4 | - [ ] Make sure all PRs to be included in this release have been merged to `dev`. 5 | - [ ] Major and minor releases: Update dependency ranges in `pyproject.toml` as needed. 6 | - [ ] Check legal file dates and information in `Legal.txt`, `license.txt`, `README.rst`, `docs/source/conf.py`, 7 | and any other locations as needed 8 | - [ ] Update `pyproject.toml` as needed 9 | - [ ] Update `README.rst` as needed 10 | - [ ] Update `src/hdmf/common/hdmf-common-schema` submodule as needed. Check the version number and commit SHA 11 | manually. Make sure we are using the latest release and not the latest commit on the `main` branch. 12 | - [ ] Update changelog (set release date) in `CHANGELOG.md` and any other docs as needed 13 | - [ ] Run tests locally including gallery tests, and inspect all warnings and outputs 14 | (`pytest && python test_gallery.py`). Try to remove all warnings. 15 | - [ ] Run PyNWB tests locally including gallery and validation tests, and inspect all warnings and outputs 16 | (`cd pynwb; git checkout dev; git pull; python test.py -v > out.txt 2>&1`) 17 | - [ ] Run HDMF-Zarr tests locally including gallery and validation tests, and inspect all warnings and outputs 18 | (`cd hdmf-zarr; git checkout dev; git pull; pytest && python test_gallery.py`) 19 | - [ ] Test docs locally and inspect all warnings and outputs `cd docs; make clean && make html` 20 | - [ ] After pushing this branch to GitHub, manually trigger the "Run all tests" GitHub Actions workflow on this 21 | branch by going to https://github.com/hdmf-dev/hdmf/actions/workflows/run_all_tests.yml, selecting 22 | "Run workflow" on the right, selecting this branch, and clicking "Run workflow". Make sure all tests pass. 23 | - [ ] Check that the readthedocs build for this PR succeeds (see the PR check) 24 | 25 | ### After merging: 26 | 1. Create release by following steps in `docs/source/make_a_release.rst` or use alias `git pypi-release [tag]` if set up 27 | 2. After the CI bot creates the new release (wait ~10 min), update the release notes on the 28 | [GitHub releases page](https://github.com/hdmf-dev/hdmf/releases) with the changelog 29 | 3. Check that the readthedocs "stable" build runs and succeeds 30 | 4. 
Either monitor [conda-forge/hdmf-feedstock](https://github.com/conda-forge/hdmf-feedstock) for the 31 | regro-cf-autotick-bot bot to create a PR updating the version of HDMF to the latest PyPI release, usually within 32 | 24 hours of release, or manually create a PR updating `recipe/meta.yaml` with the latest version number 33 | and SHA256 retrieved from PyPI > HDMF > Download Files > View hashes for the `.tar.gz` file. Re-render and 34 | update the dependencies as needed. 35 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | # Check for updates to GitHub Actions every week 7 | interval: "weekly" 8 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | ## Motivation 2 | 3 | What was the reasoning behind this change? Please explain the changes briefly. 4 | 5 | ## How to test the behavior? 6 | ``` 7 | Show how to reproduce the new behavior (can be a bug fix or a new feature) 8 | ``` 9 | 10 | ## Checklist 11 | 12 | - [ ] Did you update `CHANGELOG.md` with your changes? 13 | - [ ] Does the PR clearly describe the problem and the solution? 14 | - [ ] Have you reviewed our [Contributing Guide](https://github.com/hdmf-dev/hdmf/blob/dev/docs/CONTRIBUTING.rst)? 15 | - [ ] Does the PR use "Fix #XXX" notation to tell GitHub to close the relevant issue numbered XXX when the PR is merged? 16 | -------------------------------------------------------------------------------- /.github/workflows/check_sphinx_links.yml: -------------------------------------------------------------------------------- 1 | name: Check Sphinx links 2 | on: 3 | pull_request: 4 | schedule: 5 | - cron: '0 5 * * *' # once per day at midnight ET 6 | workflow_dispatch: 7 | 8 | jobs: 9 | check-sphinx-links: 10 | runs-on: ubuntu-latest 11 | concurrency: 12 | group: ${{ github.workflow }}-${{ github.ref }} 13 | cancel-in-progress: true 14 | steps: 15 | - name: Checkout repo with submodules 16 | uses: actions/checkout@v4 17 | with: 18 | submodules: 'recursive' 19 | fetch-depth: 0 # tags are required to determine the version 20 | 21 | - name: Set up Python 22 | uses: actions/setup-python@v5 23 | with: 24 | python-version: '3.12' # TODO: Update to 3.13 when linkml and its deps support 3.13 25 | 26 | - name: Install Sphinx dependencies and package 27 | run: | 28 | python -m pip install --upgrade pip 29 | python -m pip install ".[all]" 30 | 31 | - name: Check Sphinx internal and external links 32 | run: sphinx-build -W -b linkcheck ./docs/source ./test_build 33 | -------------------------------------------------------------------------------- /.github/workflows/codespell.yml: -------------------------------------------------------------------------------- 1 | name: Codespell 2 | on: 3 | pull_request: 4 | workflow_dispatch: 5 | 6 | jobs: 7 | codespell: 8 | name: Check for spelling errors 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Checkout repo 12 | uses: actions/checkout@v4 13 | - name: Codespell 14 | uses: codespell-project/actions-codespell@v2 15 | -------------------------------------------------------------------------------- /.github/workflows/deploy_release.yml: -------------------------------------------------------------------------------- 
1 | name: Deploy release 2 | on: 3 | push: 4 | tags: # run only on new tags that follow semver MAJOR.MINOR.PATCH 5 | - '[0-9]+.[0-9]+.[0-9]+' 6 | 7 | jobs: 8 | deploy-release: 9 | name: Deploy release from tag 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Checkout repo with submodules 13 | uses: actions/checkout@v4 14 | with: 15 | submodules: 'recursive' 16 | fetch-depth: 0 # tags are required for versioneer to determine the version 17 | 18 | - name: Set up Python 19 | uses: actions/setup-python@v5 20 | with: 21 | python-version: '3.13' 22 | 23 | - name: Install build dependencies 24 | run: | 25 | python -m pip install --upgrade pip 26 | python -m pip install tox 27 | python -m pip list 28 | 29 | - name: Run tox tests 30 | run: | 31 | tox -e py313-upgraded 32 | 33 | - name: Build wheel and source distribution 34 | run: | 35 | tox -e build 36 | ls -1 dist 37 | 38 | - name: Test installation from a wheel 39 | run: | 40 | tox -e wheelinstall --installpkg dist/*-none-any.whl 41 | 42 | - name: Test installation from a source distribution 43 | run: | 44 | tox -e wheelinstall --installpkg dist/*.tar.gz 45 | 46 | - name: Upload wheel and source distributions to PyPI 47 | run: | 48 | python -m pip install twine 49 | ls -1 dist 50 | # twine upload --repository-url https://test.pypi.org/legacy/ -u ${{ secrets.BOT_PYPI_USER }} -p ${{ secrets.BOT_TEST_PYPI_PASSWORD }} --skip-existing dist/* 51 | twine upload -u ${{ secrets.BOT_PYPI_USER }} -p ${{ secrets.BOT_PYPI_PASSWORD }} --skip-existing dist/* 52 | 53 | - name: Publish wheel and source distributions as a GitHub release 54 | run: | 55 | python -m pip install "githubrelease>=1.5.9" 56 | githubrelease --github-token ${{ secrets.BOT_GITHUB_TOKEN }} release hdmf-dev/hdmf \ 57 | create ${{ github.ref_name }} --name ${{ github.ref_name }} \ 58 | --publish dist/* 59 | -------------------------------------------------------------------------------- /.github/workflows/project_action.yml: -------------------------------------------------------------------------------- 1 | name: Add issues to Development Project Board 2 | 3 | on: 4 | issues: 5 | types: 6 | - opened 7 | 8 | jobs: 9 | add-to-project: 10 | name: Add issue to project 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: GitHub App token 14 | id: generate_token 15 | uses: actions/create-github-app-token@v2 16 | with: 17 | app-id: ${{ secrets.APP_ID }} 18 | private-key: ${{ secrets.APP_PEM }} 19 | 20 | - name: Add to Developer Board 21 | env: 22 | TOKEN: ${{ steps.generate_token.outputs.token }} 23 | uses: actions/add-to-project@v1.0.2 24 | with: 25 | project-url: https://github.com/orgs/hdmf-dev/projects/7 26 | github-token: ${{ env.TOKEN }} 27 | 28 | - name: Add to Community Board 29 | env: 30 | TOKEN: ${{ steps.generate_token.outputs.token }} 31 | uses: actions/add-to-project@v1.0.2 32 | with: 33 | project-url: https://github.com/orgs/hdmf-dev/projects/8 34 | github-token: ${{ env.TOKEN }} 35 | -------------------------------------------------------------------------------- /.github/workflows/ruff.yml: -------------------------------------------------------------------------------- 1 | name: Ruff 2 | on: pull_request 3 | 4 | jobs: 5 | ruff: 6 | runs-on: ubuntu-latest 7 | steps: 8 | - name: Checkout repo 9 | uses: actions/checkout@v4 10 | - name: Run ruff 11 | uses: astral-sh/ruff-action@v3 12 | -------------------------------------------------------------------------------- /.github/workflows/run_coverage.yml: -------------------------------------------------------------------------------- 
1 | name: Run code coverage 2 | on: 3 | push: 4 | branches: 5 | - dev 6 | tags-ignore: # exclude tags created by "ci_addons publish_github_release" 7 | - 'latest' 8 | - 'latest-tmp' 9 | pull_request: 10 | workflow_dispatch: 11 | 12 | jobs: 13 | run-coverage: 14 | name: ${{ matrix.os }}, opt reqs ${{ matrix.opt_req }} 15 | runs-on: ${{ matrix.os }} 16 | # TODO handle forks 17 | # run pipeline on either a push event or a PR event on a fork 18 | # if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name 19 | defaults: 20 | run: 21 | shell: bash 22 | concurrency: 23 | group: ${{ github.workflow }}-${{ github.ref }}-${{ matrix.os }}-${{ matrix.opt_req }} 24 | cancel-in-progress: true 25 | strategy: 26 | matrix: 27 | include: 28 | - { os: ubuntu-latest , opt_req: true } 29 | - { os: ubuntu-latest , opt_req: false } 30 | - { os: windows-latest, opt_req: false } 31 | - { os: macos-latest , opt_req: false } 32 | env: # used by codecov-action 33 | OS: ${{ matrix.os }} 34 | PYTHON: '3.12' # TODO: Update to 3.13 when linkml and its deps support 3.13 35 | steps: 36 | - name: Checkout repo with submodules 37 | uses: actions/checkout@v4 38 | with: 39 | submodules: 'recursive' 40 | fetch-depth: 0 # tags are required to determine the version 41 | 42 | - name: Set up Python 43 | uses: actions/setup-python@v5 44 | with: 45 | python-version: ${{ env.PYTHON }} 46 | 47 | - name: Upgrade pip 48 | run: | 49 | python -m pip install --upgrade pip 50 | 51 | - name: Install package 52 | if: ${{ ! matrix.opt_req }} 53 | run: | 54 | python -m pip install ".[test]" 55 | 56 | - name: Install package with optional dependencies 57 | if: ${{ matrix.opt_req }} 58 | run: | 59 | python -m pip install ".[test,tqdm,sparse,zarr,termset]" 60 | 61 | - name: Run tests and generate coverage report 62 | run: | 63 | # coverage is configured in pyproject.toml 64 | # codecov uploader requires xml format 65 | python -m pip list 66 | pytest --cov --cov-report=xml --cov-report=term 67 | 68 | - name: Upload coverage to Codecov 69 | uses: codecov/codecov-action@v5 70 | with: 71 | fail_ci_if_error: true 72 | files: ./coverage.xml 73 | env: 74 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 75 | 76 | run-ros3-coverage: 77 | name: ${{ matrix.name }} 78 | runs-on: ${{ matrix.os }} 79 | defaults: 80 | run: 81 | shell: bash -l {0} # necessary for conda 82 | concurrency: 83 | group: ${{ github.workflow }}-${{ github.ref }}-${{ matrix.name }} 84 | cancel-in-progress: true 85 | strategy: 86 | fail-fast: false 87 | matrix: 88 | include: 89 | - { name: linux-python3.13-ros3 , python-ver: "3.13", os: ubuntu-latest } 90 | steps: 91 | - name: Checkout repo with submodules 92 | uses: actions/checkout@v4 93 | with: 94 | submodules: 'recursive' 95 | fetch-depth: 0 # tags are required to determine the version 96 | 97 | - name: Set up Conda 98 | uses: conda-incubator/setup-miniconda@v3 99 | with: 100 | auto-update-conda: true 101 | activate-environment: ros3 102 | environment-file: environment-ros3.yml 103 | python-version: ${{ matrix.python-ver }} 104 | channels: conda-forge 105 | auto-activate-base: false 106 | 107 | - name: Install run dependencies 108 | run: | 109 | pip install . 
110 | pip list 111 | 112 | - name: Conda reporting 113 | run: | 114 | conda info 115 | conda config --show-sources 116 | conda list --show-channel-urls 117 | 118 | - name: Run ros3 tests # TODO include gallery tests after they are written 119 | run: | 120 | pytest --cov --cov-report=xml --cov-report=term tests/unit/test_io_hdf5_streaming.py 121 | 122 | - name: Upload coverage to Codecov 123 | uses: codecov/codecov-action@v5 124 | with: 125 | fail_ci_if_error: true 126 | files: ./coverage.xml 127 | env: 128 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 129 | -------------------------------------------------------------------------------- /.github/workflows/run_hdmf_zarr_tests.yml: -------------------------------------------------------------------------------- 1 | name: Run HDMF-Zarr tests 2 | on: 3 | pull_request: 4 | schedule: 5 | - cron: '0 5 * * *' # once per day at midnight ET 6 | workflow_dispatch: 7 | 8 | jobs: 9 | run-hdmf-zarr-tests: 10 | runs-on: ubuntu-latest 11 | concurrency: 12 | group: ${{ github.workflow }}-${{ github.ref }} 13 | cancel-in-progress: true 14 | steps: 15 | - name: Checkout repo with submodules 16 | uses: actions/checkout@v4 17 | with: 18 | submodules: 'recursive' 19 | fetch-depth: 0 # tags are required to determine the version 20 | 21 | - name: Set up Python 22 | uses: actions/setup-python@v5 23 | with: 24 | python-version: '3.13' 25 | 26 | - name: Update pip 27 | run: python -m pip install --upgrade pip 28 | 29 | - name: Clone HDMF-Zarr and install dev branch of HDMF 30 | run: | 31 | python -m pip list 32 | git clone https://github.com/hdmf-dev/hdmf-zarr.git 33 | cd hdmf-zarr 34 | python -m pip install ".[test]" # this will install a different version of hdmf from the current one 35 | cd .. 36 | python -m pip uninstall -y hdmf # uninstall the other version of hdmf 37 | python -m pip install . # reinstall current branch of hdmf 38 | python -m pip list 39 | 40 | - name: Run HDMF-Zarr tests on HDMF-Zarr dev branch 41 | run: | 42 | cd hdmf-zarr 43 | pytest -v 44 | -------------------------------------------------------------------------------- /.github/workflows/run_pynwb_tests.yml: -------------------------------------------------------------------------------- 1 | name: Run PyNWB tests 2 | on: 3 | pull_request: 4 | schedule: 5 | - cron: '0 5 * * *' # once per day at midnight ET 6 | workflow_dispatch: 7 | 8 | jobs: 9 | run-pynwb-tests: 10 | runs-on: ubuntu-latest 11 | concurrency: 12 | group: ${{ github.workflow }}-${{ github.ref }} 13 | cancel-in-progress: true 14 | steps: 15 | - name: Checkout repo with submodules 16 | uses: actions/checkout@v4 17 | with: 18 | submodules: 'recursive' 19 | fetch-depth: 0 # tags are required to determine the version 20 | 21 | - name: Set up Python 22 | uses: actions/setup-python@v5 23 | with: 24 | python-version: '3.13' 25 | 26 | - name: Update pip 27 | run: python -m pip install --upgrade pip 28 | 29 | - name: Clone PyNWB and install dev branch of HDMF 30 | run: | 31 | python -m pip list 32 | git clone https://github.com/NeurodataWithoutBorders/pynwb.git --recurse-submodules 33 | cd pynwb 34 | python -m pip install -r requirements-dev.txt # do not install the pinned install requirements 35 | python -m pip install . # this will install a different version of hdmf from the current one 36 | cd .. 37 | python -m pip uninstall -y hdmf # uninstall the other version of hdmf 38 | python -m pip install . 
# reinstall current branch of hdmf 39 | python -m pip list 40 | 41 | - name: Run PyNWB tests on PyNWB dev branch 42 | run: | 43 | cd pynwb 44 | python test.py -v 45 | -------------------------------------------------------------------------------- /.github/workflows/run_tests.yml: -------------------------------------------------------------------------------- 1 | name: Run tests 2 | on: 3 | push: 4 | branches: 5 | - dev 6 | tags-ignore: # exclude tags created by "ci_addons publish_github_release" 7 | - 'latest' 8 | - 'latest-tmp' 9 | pull_request: 10 | 11 | jobs: 12 | run-tests: 13 | name: ${{ matrix.name }} 14 | runs-on: ${{ matrix.os }} 15 | defaults: 16 | run: 17 | shell: bash 18 | concurrency: 19 | group: ${{ github.workflow }}-${{ github.ref }}-${{ matrix.name }} 20 | cancel-in-progress: true 21 | strategy: 22 | fail-fast: false 23 | matrix: 24 | include: 25 | # NOTE config below with "upload-wheels: true" specifies that wheels should be uploaded as an artifact 26 | - { name: linux-python3.9-minimum , test-tox-env: pytest-py39-minimum , python-ver: "3.9" , os: ubuntu-latest } 27 | - { name: linux-python3.13-upgraded , test-tox-env: pytest-py313-upgraded , python-ver: "3.13", os: ubuntu-latest } 28 | - { name: linux-python3.13-upgraded-optional , test-tox-env: pytest-py313-upgraded-optional , python-ver: "3.13", os: ubuntu-latest , upload-wheels: true } 29 | - { name: windows-python3.9-minimum , test-tox-env: pytest-py39-minimum , python-ver: "3.9" , os: windows-latest } 30 | - { name: windows-python3.13-upgraded-optional , test-tox-env: pytest-py313-upgraded-optional , python-ver: "3.13", os: windows-latest } 31 | - { name: macos-python3.9-minimum , test-tox-env: pytest-py39-minimum , python-ver: "3.9" , os: macos-13 } 32 | - { name: macos-python3.13-upgraded-optional , test-tox-env: pytest-py313-upgraded-optional , python-ver: "3.13", os: macos-latest } 33 | steps: 34 | - name: Checkout repo with submodules 35 | uses: actions/checkout@v4 36 | with: 37 | submodules: 'recursive' 38 | fetch-depth: 0 # tags are required to determine the version 39 | 40 | - name: Set up Python 41 | uses: actions/setup-python@v5 42 | with: 43 | python-version: ${{ matrix.python-ver }} 44 | 45 | - name: Install build dependencies 46 | run: | 47 | python -m pip install --upgrade pip 48 | python -m pip install tox 49 | python -m pip list 50 | 51 | - name: Run tox tests 52 | run: | 53 | tox -e ${{ matrix.test-tox-env }} 54 | 55 | - name: Build wheel and source distribution 56 | run: | 57 | tox -e build 58 | ls -1 dist 59 | 60 | - name: Test installation from a wheel 61 | run: | 62 | tox -e wheelinstall --installpkg dist/*-none-any.whl 63 | 64 | - name: Test installation from a source distribution 65 | run: | 66 | tox -e wheelinstall --installpkg dist/*.tar.gz 67 | 68 | - name: Upload distribution as a workspace artifact 69 | if: ${{ matrix.upload-wheels }} 70 | uses: actions/upload-artifact@v4 71 | with: 72 | name: distributions 73 | path: dist 74 | 75 | run-gallery-tests: 76 | name: ${{ matrix.name }} 77 | runs-on: ${{ matrix.os }} 78 | defaults: 79 | run: 80 | shell: bash 81 | concurrency: 82 | group: ${{ github.workflow }}-${{ github.ref }}-${{ matrix.name }} 83 | cancel-in-progress: true 84 | strategy: 85 | fail-fast: false 86 | matrix: 87 | include: 88 | # TODO: Update to 3.13 when linkml and its deps support 3.13 89 | - { name: linux-gallery-python3.9-minimum , test-tox-env: gallery-py39-minimum , python-ver: "3.9" , os: ubuntu-latest } 90 | - { name: linux-gallery-python3.12-upgraded-optional , 
test-tox-env: gallery-py312-upgraded-optional , python-ver: "3.12", os: ubuntu-latest } 91 | - { name: windows-gallery-python3.9-minimum , test-tox-env: gallery-py39-minimum , python-ver: "3.9" , os: windows-latest } 92 | - { name: windows-gallery-python3.12-upgraded-optional , test-tox-env: gallery-py312-upgraded-optional , python-ver: "3.12", os: windows-latest } 93 | steps: 94 | - name: Checkout repo with submodules 95 | uses: actions/checkout@v4 96 | with: 97 | submodules: 'recursive' 98 | fetch-depth: 0 # tags are required to determine the version 99 | 100 | - name: Set up Python 101 | uses: actions/setup-python@v5 102 | with: 103 | python-version: ${{ matrix.python-ver }} 104 | 105 | - name: Install build dependencies 106 | run: | 107 | python -m pip install --upgrade pip 108 | python -m pip install tox 109 | python -m pip list 110 | 111 | - name: Run tox tests 112 | run: | 113 | tox -e ${{ matrix.test-tox-env }} 114 | 115 | deploy-dev: 116 | name: Deploy pre-release from dev 117 | needs: [run-tests, run-gallery-tests] 118 | if: ${{ github.event_name == 'push' }} 119 | runs-on: ubuntu-latest 120 | concurrency: 121 | group: ${{ github.workflow }}-${{ github.ref }} 122 | cancel-in-progress: true 123 | steps: 124 | - name: Checkout repo with submodules 125 | uses: actions/checkout@v4 126 | with: 127 | submodules: 'recursive' 128 | fetch-depth: 0 # tags are required to determine the version 129 | 130 | - name: Set up Python 131 | uses: actions/setup-python@v5 132 | with: 133 | python-version: '3.13' 134 | 135 | - name: Download wheel and source distributions from artifact 136 | uses: actions/download-artifact@v4 137 | with: 138 | name: distributions 139 | path: dist 140 | 141 | - name: Publish wheel and source distributions as a GitHub release 142 | run: | 143 | python -m pip install --upgrade pip 144 | python -m pip install scikit-ci-addons 145 | ci_addons publish_github_release hdmf-dev/hdmf \ 146 | --prerelease-packages "dist/*" \ 147 | --prerelease-sha $GITHUB_SHA \ 148 | --prerelease-packages-clear-pattern "*" \ 149 | --prerelease-packages-keep-pattern "*dev*" \ 150 | --token ${{ secrets.BOT_GITHUB_TOKEN }} \ 151 | --re-upload 152 | 153 | run-ros3-tests: 154 | name: ${{ matrix.name }} 155 | runs-on: ${{ matrix.os }} 156 | defaults: 157 | run: 158 | shell: bash -l {0} # necessary for conda 159 | concurrency: 160 | group: ${{ github.workflow }}-${{ github.ref }}-${{ matrix.name }} 161 | cancel-in-progress: true 162 | strategy: 163 | fail-fast: false 164 | matrix: 165 | include: 166 | - { name: linux-python3.13-ros3 , python-ver: "3.13", os: ubuntu-latest } 167 | steps: 168 | - name: Checkout repo with submodules 169 | uses: actions/checkout@v4 170 | with: 171 | submodules: 'recursive' 172 | fetch-depth: 0 # tags are required to determine the version 173 | 174 | - name: Set up Conda 175 | uses: conda-incubator/setup-miniconda@v3 176 | with: 177 | auto-update-conda: true 178 | activate-environment: ros3 179 | environment-file: environment-ros3.yml 180 | python-version: ${{ matrix.python-ver }} 181 | channels: conda-forge 182 | auto-activate-base: false 183 | 184 | - name: Install run dependencies 185 | run: | 186 | pip install . 
187 | pip list 188 | 189 | - name: Conda reporting 190 | run: | 191 | conda info 192 | conda config --show-sources 193 | conda list --show-channel-urls 194 | 195 | - name: Run ros3 tests # TODO include gallery tests after they are written 196 | run: | 197 | pytest tests/unit/test_io_hdf5_streaming.py 198 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ### VisualStudioCode ### 2 | .vscode/* 3 | !.vscode/settings.json 4 | !.vscode/tasks.json 5 | !.vscode/extensions.json 6 | .history 7 | 8 | ### HDMF ### 9 | 10 | # Auto-generated apidocs RST files 11 | /docs/source/gen_modules/ 12 | /docs/source/hdmf*.rst 13 | /docs/source/sg_execution_times.rst 14 | /docs/gallery/*.hdf5 15 | /docs/gallery/*.sqlite 16 | /docs/gallery/expanded_example_dynamic_term_set.yaml 17 | /docs/gallery/schemasheets/nwb_static_enums.yaml 18 | 19 | # Auto-generated files after running tutorials 20 | mylab.*.yaml 21 | *.npy 22 | manifest.json 23 | 24 | # Auto-generated tutorials 25 | /docs/source/tutorials/ 26 | 27 | ### Python ### 28 | # Byte-compiled / optimized / DLL files 29 | __pycache__/ 30 | *.py[cod] 31 | *$py.class 32 | 33 | # Sphinx documentation 34 | /docs/_build/ 35 | /docs/build/ 36 | 37 | 38 | # setuptools 39 | /build/ 40 | /dist/ 41 | *.egg-info 42 | 43 | # Jupyter Notebook 44 | .ipynb_checkpoints 45 | 46 | #PyCharm 47 | .idea/ 48 | 49 | # shell scripts (e.g. used with fswatch) 50 | *.sh 51 | 52 | /docs/tmpl.conf.py 53 | /docs/tmpl.index.rst 54 | 55 | # coverage output 56 | /tests/coverage/htmlcov 57 | .coverage* 58 | coverage.xml 59 | 60 | # duecredit output 61 | .duecredit.p 62 | 63 | # tox 64 | .tox 65 | 66 | # vscode 67 | .vscode/ 68 | 69 | #mypy 70 | .mypy_cache/ 71 | 72 | #DS_Store 73 | .DS_Store 74 | 75 | #sqlite 76 | .sqlite 77 | 78 | # Version 79 | _version.py 80 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "src/hdmf/common/hdmf-common-schema"] 2 | path = src/hdmf/common/hdmf-common-schema 3 | url = https://github.com/hdmf-dev/hdmf-common-schema.git 4 | -------------------------------------------------------------------------------- /.mailmap: -------------------------------------------------------------------------------- 1 | # Mailmap is used by git to map author/committer names and/or E-Mail addresses 2 | # See https://git-scm.com/docs/gitmailmap for details 3 | Ben Dichter 4 | Cody Baker <51133164+CodyCBakerPhD@users.noreply.github.com> 5 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # NOTE: run `pre-commit autoupdate` to update hooks to latest version 2 | repos: 3 | - repo: https://github.com/pre-commit/pre-commit-hooks 4 | rev: v5.0.0 5 | hooks: 6 | - id: check-yaml 7 | - id: end-of-file-fixer 8 | - id: trailing-whitespace 9 | - id: check-added-large-files 10 | - id: check-json 11 | - id: check-toml 12 | - id: name-tests-test 13 | args: [--pytest-test-first] 14 | exclude: ^tests/unit/helpers/ 15 | - id: check-docstring-first 16 | # - repo: https://github.com/psf/black 17 | # rev: 23.3.0 18 | # hooks: 19 | # - id: black 20 | - repo: https://github.com/astral-sh/ruff-pre-commit 21 | rev: v0.11.11 22 | hooks: 23 | - id: ruff 24 | # - repo: 
https://github.com/econchick/interrogate 25 | # rev: 1.5.0 26 | # hooks: 27 | # - id: interrogate 28 | - repo: https://github.com/codespell-project/codespell 29 | rev: v2.4.1 30 | hooks: 31 | - id: codespell 32 | additional_dependencies: 33 | - tomli 34 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | build: 9 | os: ubuntu-24.04 10 | tools: 11 | python: '3.12' # TODO: Update to 3.13 when linkml and its deps support 3.13 12 | 13 | # Build documentation in the docs/ directory with Sphinx 14 | sphinx: 15 | configuration: docs/source/conf.py 16 | 17 | # Build documentation with MkDocs 18 | #mkdocs: 19 | # configuration: mkdocs.yml 20 | 21 | # Optionally build your docs in additional formats such as PDF and ePub 22 | formats: all 23 | 24 | # Optionally set the version of Python and requirements required to build your docs 25 | python: 26 | install: 27 | - path: .[docs,tqdm,sparse,zarr,termset] # path to the package relative to the root 28 | 29 | # Optionally include all submodules 30 | submodules: 31 | include: all 32 | recursive: true 33 | -------------------------------------------------------------------------------- /Legal.txt: -------------------------------------------------------------------------------- 1 | “hdmf” Copyright (c) 2017-2025, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from the U.S. Dept. of Energy). All rights reserved. 2 | 3 | If you have questions about your rights to use or distribute this software, please contact Berkeley Lab's Innovation & Partnerships Office at IPO@lbl.gov. 4 | 5 | NOTICE. This Software was developed under funding from the U.S. Department of Energy and the U.S. Government consequently retains certain rights. As such, the U.S. Government has been granted for itself and others acting on its behalf a paid-up, nonexclusive, irrevocable, worldwide license in the Software to reproduce, distribute copies to the public, prepare derivative works, and perform publicly and display publicly, and to permit other to do so. 6 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ======================================== 2 | The Hierarchical Data Modeling Framework 3 | ======================================== 4 | 5 | The Hierarchical Data Modeling Framework, or *HDMF*, is a Python package for working with hierarchical data. 6 | It provides APIs for specifying data models, reading and writing data to different storage backends, and 7 | representing data with Python objects. 8 | 9 | Documentation of HDMF can be found at https://hdmf.readthedocs.io. 10 | 11 | Latest Release 12 | ============== 13 | 14 | .. image:: https://badge.fury.io/py/hdmf.svg 15 | :target: https://badge.fury.io/py/hdmf 16 | 17 | .. image:: https://anaconda.org/conda-forge/hdmf/badges/version.svg 18 | :target: https://anaconda.org/conda-forge/hdmf 19 | 20 | 21 | Overall Health 22 | ============== 23 | 24 | .. 
image:: https://github.com/hdmf-dev/hdmf/actions/workflows/run_coverage.yml/badge.svg 25 | :target: https://github.com/hdmf-dev/hdmf/actions/workflows/run_coverage.yml 26 | 27 | .. image:: https://github.com/hdmf-dev/hdmf/actions/workflows/run_tests.yml/badge.svg 28 | :target: https://github.com/hdmf-dev/hdmf/actions/workflows/run_tests.yml 29 | 30 | .. image:: https://github.com/hdmf-dev/hdmf/actions/workflows/codespell.yml/badge.svg 31 | :target: https://github.com/hdmf-dev/hdmf/actions/workflows/codespell.yml 32 | 33 | .. image:: https://github.com/hdmf-dev/hdmf/actions/workflows/ruff.yml/badge.svg 34 | :target: https://github.com/hdmf-dev/hdmf/actions/workflows/ruff.yml 35 | 36 | .. image:: https://github.com/hdmf-dev/hdmf/actions/workflows/check_sphinx_links.yml/badge.svg 37 | :target: https://github.com/hdmf-dev/hdmf/actions/workflows/check_sphinx_links.yml 38 | 39 | .. image:: https://github.com/hdmf-dev/hdmf/actions/workflows/run_pynwb_tests.yml/badge.svg 40 | :target: https://github.com/hdmf-dev/hdmf/actions/workflows/run_pynwb_tests.yml 41 | 42 | .. image:: https://github.com/hdmf-dev/hdmf/actions/workflows/run_hdmf_zarr_tests.yml/badge.svg 43 | :target: https://github.com/hdmf-dev/hdmf/actions/workflows/run_hdmf_zarr_tests.yml 44 | 45 | .. image:: https://github.com/hdmf-dev/hdmf/actions/workflows/run_all_tests.yml/badge.svg 46 | :target: https://github.com/hdmf-dev/hdmf/actions/workflows/run_all_tests.yml 47 | 48 | .. image:: https://github.com/hdmf-dev/hdmf/actions/workflows/deploy_release.yml/badge.svg 49 | :target: https://github.com/hdmf-dev/hdmf/actions/workflows/deploy_release.yml 50 | 51 | .. image:: https://codecov.io/gh/hdmf-dev/hdmf/branch/dev/graph/badge.svg 52 | :target: https://codecov.io/gh/hdmf-dev/hdmf 53 | 54 | .. image:: https://readthedocs.org/projects/hdmf/badge/?version=stable 55 | :target: https://hdmf.readthedocs.io/en/stable/?badge=stable 56 | :alt: Documentation Status 57 | 58 | Installation 59 | ============ 60 | 61 | See the `HDMF documentation `_. 62 | 63 | Code of Conduct 64 | =============== 65 | 66 | This project and everyone participating in it is governed by our `code of conduct guidelines `_. By participating, you are expected to uphold this code. 67 | 68 | Contributing 69 | ============ 70 | 71 | For details on how to contribute to HDMF see our `contribution guidelines `_. 72 | 73 | Citing HDMF 74 | =========== 75 | 76 | * **Manuscript:** 77 | 78 | .. code-block:: bibtex 79 | 80 | @INPROCEEDINGS{9005648, 81 | author={A. J. {Tritt} and O. {Rübel} and B. {Dichter} and R. {Ly} and D. {Kang} and E. F. {Chang} and L. M. {Frank} and K. {Bouchard}}, 82 | booktitle={2019 IEEE International Conference on Big Data (Big Data)}, 83 | title={HDMF: Hierarchical Data Modeling Framework for Modern Science Data Standards}, 84 | year={2019}, 85 | volume={}, 86 | number={}, 87 | pages={165-179}, 88 | doi={10.1109/BigData47090.2019.9005648}, 89 | note={}} 90 | 91 | * **RRID:** (Hierarchical Data Modeling Framework, RRID:SCR_021303) 92 | 93 | 94 | LICENSE 95 | ======= 96 | 97 | "hdmf" Copyright (c) 2017-2025, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from the U.S. Dept. of Energy). All rights reserved. 
98 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 99 | 100 | (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 101 | 102 | (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 103 | 104 | (3) Neither the name of the University of California, Lawrence Berkeley National Laboratory, U.S. Dept. of Energy nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 105 | 106 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 107 | 108 | You are under no obligation whatsoever to provide any bug fixes, patches, or upgrades to the features, functionality or performance of the source code ("Enhancements") to anyone; however, if you choose to make your Enhancements available either publicly, or directly to Lawrence Berkeley National Laboratory, without imposing a separate written license agreement for such Enhancements, then you hereby grant the following license: a non-exclusive, royalty-free perpetual license to install, use, modify, prepare derivative works, incorporate into other computer software, distribute, and sublicense such enhancements or derivative works thereof, in binary and source code form. 109 | 110 | COPYRIGHT 111 | ========= 112 | 113 | "hdmf" Copyright (c) 2017-2025, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from the U.S. Dept. of Energy). All rights reserved. 114 | If you have questions about your rights to use or distribute this software, please contact Berkeley Lab's Innovation & Partnerships Office at IPO@lbl.gov. 115 | 116 | NOTICE. This Software was developed under funding from the U.S. Department of Energy and the U.S. Government consequently retains certain rights. As such, the U.S. Government has been granted for itself and others acting on its behalf a paid-up, nonexclusive, irrevocable, worldwide license in the Software to reproduce, distribute copies to the public, prepare derivative works, and perform publicly and display publicly, and to permit other to do so. 117 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 
5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXAPIDOC = sphinx-apidoc 8 | PAPER = 9 | BUILDDIR = _build 10 | SRCDIR = ../src 11 | RSTDIR = source 12 | GALLERYDIR = gallery 13 | PKGNAME = hdmf 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) $(RSTDIR) 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " text to make text files" 38 | @echo " man to make manual pages" 39 | @echo " texinfo to make Texinfo files" 40 | @echo " info to make Texinfo files and run them through makeinfo" 41 | @echo " gettext to make PO message catalogs" 42 | @echo " changes to make an overview of all changed/added/deprecated items" 43 | @echo " linkcheck to check all external links for integrity" 44 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 45 | @echo " clean to clean all documents built by Sphinx in _build" 46 | @echo " apidoc to build RST from source code" 47 | 48 | clean: 49 | -rm -rf $(BUILDDIR)/* $(RSTDIR)/$(PKGNAME)*.rst $(GALLERYDIR)/*.hdf5 $(GALLERYDIR)/*.sqlite $(RSTDIR)/tutorials 50 | 51 | html: 52 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 53 | @echo 54 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 55 | 56 | dirhtml: 57 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 58 | @echo 59 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 60 | 61 | singlehtml: 62 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 63 | @echo 64 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 65 | 66 | pickle: 67 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 68 | @echo 69 | @echo "Build finished; now you can process the pickle files." 70 | 71 | json: 72 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 73 | @echo 74 | @echo "Build finished; now you can process the JSON files." 75 | 76 | htmlhelp: 77 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 78 | @echo 79 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 80 | ".hhp project file in $(BUILDDIR)/htmlhelp." 
81 | 82 | qthelp: 83 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 84 | @echo 85 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 86 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 87 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/sample.qhcp" 88 | @echo "To view the help file:" 89 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/sample.qhc" 90 | 91 | devhelp: 92 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 93 | @echo 94 | @echo "Build finished." 95 | @echo "To view the help file:" 96 | @echo "# mkdir -p $$HOME/.local/share/devhelp/sample" 97 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/sample" 98 | @echo "# devhelp" 99 | 100 | epub: 101 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 102 | @echo 103 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 104 | 105 | latex: 106 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 107 | @echo 108 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 109 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 110 | "(use \`make latexpdf' here to do that automatically)." 111 | 112 | latexpdf: 113 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 114 | @echo "Running LaTeX files through pdflatex..." 115 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 116 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 117 | 118 | text: 119 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 120 | @echo 121 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 122 | 123 | man: 124 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 125 | @echo 126 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 127 | 128 | texinfo: 129 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 130 | @echo 131 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 132 | @echo "Run \`make' in that directory to run these through makeinfo" \ 133 | "(use \`make info' here to do that automatically)." 134 | 135 | info: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo "Running Texinfo files through makeinfo..." 138 | make -C $(BUILDDIR)/texinfo info 139 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 140 | 141 | gettext: 142 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 143 | @echo 144 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 145 | 146 | changes: 147 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 148 | @echo 149 | @echo "The overview file is in $(BUILDDIR)/changes." 150 | 151 | linkcheck: 152 | $(SPHINXBUILD) -W -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 153 | @echo 154 | @echo "Link check complete; look for any errors in the above output " \ 155 | "or in $(BUILDDIR)/linkcheck/output.txt." 156 | 157 | doctest: 158 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 159 | @echo "Testing of doctests in the sources finished, look at the " \ 160 | "results in $(BUILDDIR)/doctest/output.txt." 161 | 162 | apidoc: 163 | $(SPHINXAPIDOC) -f -e --no-toc -o $(RSTDIR) $(SRCDIR) 164 | @echo "Build rst docs from source code." 165 | -------------------------------------------------------------------------------- /docs/gallery/README.txt: -------------------------------------------------------------------------------- 1 | 2 | 3 | .. 
_tutorials: 4 | 5 | 6 | Tutorials 7 | ========= 8 | -------------------------------------------------------------------------------- /docs/gallery/example_dynamic_term_set.yaml: -------------------------------------------------------------------------------- 1 | id: https://w3id.org/linkml/examples/nwb_dynamic_enums 2 | title: dynamic enums example 3 | name: nwb_dynamic_enums 4 | description: this schema demonstrates the use of dynamic enums 5 | 6 | prefixes: 7 | linkml: https://w3id.org/linkml/ 8 | CL: http://purl.obolibrary.org/obo/CL_ 9 | 10 | imports: 11 | - linkml:types 12 | 13 | default_range: string 14 | 15 | # ======================== # 16 | # CLASSES # 17 | # ======================== # 18 | classes: 19 | BrainSample: 20 | slots: 21 | - cell_type 22 | 23 | # ======================== # 24 | # SLOTS # 25 | # ======================== # 26 | slots: 27 | cell_type: 28 | required: true 29 | range: NeuronTypeEnum 30 | 31 | # ======================== # 32 | # ENUMS # 33 | # ======================== # 34 | enums: 35 | NeuronTypeEnum: 36 | reachable_from: 37 | source_ontology: obo:cl 38 | source_nodes: 39 | - CL:0000540 ## neuron 40 | include_self: false 41 | relationship_types: 42 | - rdfs:subClassOf 43 | -------------------------------------------------------------------------------- /docs/gallery/example_term_set.yaml: -------------------------------------------------------------------------------- 1 | id: termset/species_example 2 | name: Species 3 | version: 0.0.1 4 | prefixes: 5 | NCBI_TAXON: https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=Info&id= 6 | imports: 7 | - linkml:types 8 | default_range: string 9 | 10 | enums: 11 | Species: 12 | permissible_values: 13 | Homo sapiens: 14 | description: the species is human 15 | meaning: NCBI_TAXON:9606 16 | Mus musculus: 17 | description: the species is a house mouse 18 | meaning: NCBI_TAXON:10090 19 | Ursus arctos horribilis: 20 | description: the species is a grizzly bear 21 | meaning: NCBI_TAXON:116960 22 | Myrmecophaga tridactyla: 23 | description: the species is an anteater 24 | meaning: NCBI_TAXON:71006 25 | -------------------------------------------------------------------------------- /docs/gallery/plot_generic_data_chunk_tutorial.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | .. _genericdci-tutorial: 4 | 5 | GenericDataChunkIterator Tutorial 6 | ================================== 7 | 8 | This is a tutorial for interacting with :py:class:`~hdmf.data_utils.GenericDataChunkIterator` objects. This tutorial 9 | is written for beginners and does not describe the full capabilities and nuances 10 | of the functionality. This tutorial is designed to give 11 | you basic familiarity with how :py:class:`~hdmf.data_utils.GenericDataChunkIterator` works and help you get started 12 | with creating a specific instance for your data format or API access pattern. 13 | 14 | Introduction 15 | ------------ 16 | The :py:class:`~hdmf.data_utils.GenericDataChunkIterator` class represents a semi-abstract 17 | version of a :py:class:`~hdmf.data_utils.AbstractDataChunkIterator` that automatically handles the selection 18 | of buffer regions 19 | and resolves communication of compatible chunk regions within a H5DataIO wrapper. It does not, 20 | however, know how data (values) or metadata (data type, full shape) ought to be directly 21 | accessed. 
This is intentional: the class remains fully agnostic to a range of indexing methods and
22 | format-independent APIs, rather than making strong assumptions about how data ranges are to be sliced.
23 | 
24 | Constructing a simple child class
25 | ---------------------------------
26 | We will begin with a simple example case of data access to a standard Numpy array.
27 | To create a :py:class:`~hdmf.data_utils.GenericDataChunkIterator` that accomplishes this,
28 | we begin by defining our child class.
29 | """
30 | 
31 | # sphinx_gallery_thumbnail_path = 'figures/gallery_thumbnail_generic_data_chunk_tutorial.png'
32 | import numpy as np
33 | 
34 | from hdmf.data_utils import GenericDataChunkIterator
35 | 
36 | 
37 | class NumpyArrayDataChunkIterator(GenericDataChunkIterator):
38 |     def __init__(self, array: np.ndarray, **kwargs):
39 |         self.array = array
40 |         super().__init__(**kwargs)
41 | 
42 |     def _get_data(self, selection):
43 |         return self.array[selection]
44 | 
45 |     def _get_maxshape(self):
46 |         return self.array.shape
47 | 
48 |     def _get_dtype(self):
49 |         return self.array.dtype
50 | 
51 | 
52 | # Instantiate this class on an array to allow iteration over buffers,
53 | my_array = np.random.randint(low=0, high=10, size=(12, 6), dtype="int16")
54 | my_custom_iterator = NumpyArrayDataChunkIterator(array=my_array)
55 | 
56 | # and this iterator now behaves as a standard Python generator (i.e., it can only be exhausted once)
57 | # that returns DataChunk objects for each buffer.
58 | for buffer in my_custom_iterator:
59 |     print(buffer.data)
60 | 
61 | ###############################################################################
62 | # Intended use for advanced data I/O
63 | # ----------------------------------
64 | # Of course, the real use case for this class is when the amount of data stored on a
65 | # hard drive is larger than what can be read into RAM. Hence, the goal is to read only an amount of
66 | # data whose size in gigabytes (GB) is at or below the `buffer_gb` argument (which defaults to 1 GB).
67 | 
68 | # This design can be seen if we increase the amount of data in our example code.
69 | my_array = np.random.randint(low=0, high=10, size=(20000, 5000), dtype="int32")
70 | my_custom_iterator = NumpyArrayDataChunkIterator(array=my_array, buffer_gb=0.2)
71 | 
72 | for j, buffer in enumerate(my_custom_iterator, start=1):
73 |     print(f"Buffer number {j} returns data from selection: {buffer.selection}")
74 | 
75 | ###############################################################################
76 | # .. note::
77 | #    Technically, in this example the total data is still fully loaded into RAM from the initial Numpy array.
78 | #    A more accurate use case would be achieved by writing ``my_array`` to a temporary file on your system
79 | #    and loading it back with ``np.memmap``, a subtype of Numpy array that does not immediately load the data (see the short sketch near the end of this tutorial).
80 | 
81 | ###############################################################################
82 | # Writing to an HDF5 file with full control of shape arguments
83 | # ------------------------------------------------------------
84 | # The true intention of returning data selections of this form, wrapped in a DataChunk object,
85 | # is to write these piecewise to an HDF5 dataset.
86 | 
87 | # This is where the importance of the underlying `chunk_shape` comes in, and why it is critical to performance
88 | # that it perfectly subsets the `buffer_shape`.
89 | import h5py
90 | 
91 | maxshape = (20000, 5000)
92 | buffer_shape = (10000, 2500)
93 | chunk_shape = (1000, 250)
94 | 
95 | my_array = np.random.randint(low=0, high=10, size=maxshape, dtype="int32")
96 | my_custom_iterator = NumpyArrayDataChunkIterator(array=my_array, buffer_shape=buffer_shape, chunk_shape=chunk_shape)
97 | out_file = "my_temporary_test_file.hdf5"
98 | with h5py.File(name=out_file, mode="w") as f:
99 |     dset = f.create_dataset(name="test", shape=maxshape, dtype="int32", chunks=my_custom_iterator.chunk_shape)
100 |     for buffer in my_custom_iterator:
101 |         dset[buffer.selection] = buffer.data
102 | # Remember to remove the temporary file after running this and exploring the contents!
103 | 
104 | ###############################################################################
105 | # .. note::
106 | #    Here we explicitly set the `chunks` value in the HDF5 dataset object; however, a nice part of the design of this
107 | #    iterator is that when wrapped in a ``hdmf.backends.hdf5.h5_utils.H5DataIO`` that is called within a
108 | #    ``hdmf.backends.hdf5.h5tools.HDF5IO`` context with a corresponding ``hdmf.container.Container``, these details
109 | #    will be handled automatically.
110 | 
111 | ###############################################################################
112 | # .. note::
113 | #    There is some overlap here in nomenclature between HDMF and HDF5. The term *chunk* in both
114 | #    HDMF and HDF5 refers to a subset of a dataset; however, in HDF5 a chunk is a piece of a dataset on disk,
115 | #    whereas in the context of :py:class:`~hdmf.data_utils.DataChunk` iteration it is a block of data in memory.
116 | #    As such, the
117 | #    requirements on the shape and size of chunks are different. In HDF5 these chunks are pieces
118 | #    of a dataset that get compressed and cached together, and they should usually be small in size for
119 | #    optimal performance (typically 1 MB or less). In contrast, a :py:class:`~hdmf.data_utils.DataChunk` in
120 | #    HDMF acts as a block of data for writing data to a dataset, and spans multiple HDF5 chunks to improve performance.
121 | #    This is achieved by avoiding repeated
122 | #    updates to the same ``Chunk`` in the HDF5 file: :py:class:`~hdmf.data_utils.DataChunk` objects for write
123 | #    should align with ``Chunks`` in the HDF5 file, i.e., the ``DataChunk.selection``
124 | #    should fully cover one or more ``Chunks`` in the HDF5 file so that the same
125 | #    ``Chunks`` are not written repeatedly. This is what the `buffer` of the :py:class:`~hdmf.data_utils.GenericDataChunkIterator`
126 | #    does, which upon each iteration returns a single
127 | #    :py:class:`~hdmf.data_utils.DataChunk` object (by default up to 1 GB) that perfectly spans many HDF5 chunks
128 | #    (by default < 1 MB) to help reduce the number of small I/O operations
129 | #    and help improve performance. In practice, the `buffer` should usually be even larger than the default, i.e.,
130 | #    as much free RAM as can be safely used.
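###############################################################################
# The following is a small, optional sketch of the ``np.memmap`` use case mentioned
# in the earlier note. It is not part of the original tutorial, and the file name
# used here is hypothetical; any writable path will do. The data are written to
# disk once and then only read into RAM as each buffer is requested.
memmap_path = "my_temporary_memmap.dat"  # hypothetical file name, for illustration only

# Write some example data to a memory-mapped file on disk.
on_disk = np.memmap(memmap_path, dtype="int16", mode="w+", shape=(2000, 500))
on_disk[:] = np.random.randint(low=0, high=10, size=(2000, 500), dtype="int16")
on_disk.flush()

# Re-open the file lazily and iterate over it with the same child class defined above.
lazy_array = np.memmap(memmap_path, dtype="int16", mode="r", shape=(2000, 500))
lazy_iterator = NumpyArrayDataChunkIterator(array=lazy_array)
for buffer in lazy_iterator:
    print(buffer.selection)  # the data for each buffer are read from disk only on access
# Remember to remove the temporary memmap file after running this!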
131 | 132 | ############################################################################### 133 | # Remove the test file 134 | import os 135 | if os.path.exists(out_file): 136 | os.remove(out_file) 137 | -------------------------------------------------------------------------------- /docs/gallery/schemasheets/classes.tsv: -------------------------------------------------------------------------------- 1 | class slot 2 | > class slot 3 | BrainSample cell_type 4 | -------------------------------------------------------------------------------- /docs/gallery/schemasheets/enums.tsv: -------------------------------------------------------------------------------- 1 | valueset value mapping description 2 | > enum permissible_value meaning description 3 | NeuronOrGlialCellTypeEnum Enumeration to capture various cell types found in the brain. 4 | NeuronOrGlialCellTypeEnum PYRAMIDAL_NEURON CL:0000598 Neurons with a pyramidal shaped cell body (soma) and two distinct dendritic trees. 5 | NeuronOrGlialCellTypeEnum INTERNEURON CL:0000099 Neurons whose axons (and dendrites) are limited to a single brain area. 6 | NeuronOrGlialCellTypeEnum MOTOR_NEURON CL:0000100 Neurons whose cell body is located in the motor cortex, brainstem or the spinal cord, and whose axon (fiber) projects to the spinal cord or outside of the spinal cord to directly or indirectly control effector organs, mainly muscles and glands. 7 | NeuronOrGlialCellTypeEnum ASTROCYTE CL:0000127 Characteristic star-shaped glial cells in the brain and spinal cord. 8 | NeuronOrGlialCellTypeEnum OLIGODENDROCYTE CL:0000128 Type of neuroglia whose main functions are to provide support and insulation to axons within the central nervous system (CNS) of jawed vertebrates. 9 | NeuronOrGlialCellTypeEnum MICROGLIAL_CELL CL:0000129 Microglia are the resident immune cells of the brain and constantly patrol the cerebral microenvironment to respond to pathogens and damage. 
10 | -------------------------------------------------------------------------------- /docs/gallery/schemasheets/prefixes.tsv: -------------------------------------------------------------------------------- 1 | prefix URI 2 | > prefix prefix_reference 3 | linkml https://w3id.org/linkml/ 4 | CL http://purl.obolibrary.org/obo/CL_ 5 | -------------------------------------------------------------------------------- /docs/gallery/schemasheets/schema.tsv: -------------------------------------------------------------------------------- 1 | schema uri title description 2 | > schema id title description 3 | nwb_static_enums https://w3id.org/linkml/examples/nwb_static_enums static enums example this schema demonstrates the use of static enums 4 | -------------------------------------------------------------------------------- /docs/gallery/schemasheets/slots.tsv: -------------------------------------------------------------------------------- 1 | term required 2 | > slot required 3 | cell_type TRUE 4 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | if "%SPHINXAPIDOC%" == "" ( 9 | set SPHINXAPIDOC=sphinx-apidoc 10 | ) 11 | set BUILDDIR=_build 12 | set RSTDIR=source 13 | set SRCDIR=../src 14 | set GALLERYDIR=gallery 15 | set PKGNAME=hdmf 16 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% %RSTDIR% 17 | set I18NSPHINXOPTS=%SPHINXOPTS% . 18 | if NOT "%PAPER%" == "" ( 19 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 20 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 21 | ) 22 | 23 | if "%1" == "" goto help 24 | 25 | if "%1" == "help" ( 26 | :help 27 | echo.Please use `make ^` where ^ is one of 28 | echo. html to make standalone HTML files 29 | echo. dirhtml to make HTML files named index.html in directories 30 | echo. singlehtml to make a single large HTML file 31 | echo. pickle to make pickle files 32 | echo. json to make JSON files 33 | echo. htmlhelp to make HTML files and a HTML help project 34 | echo. qthelp to make HTML files and a qthelp project 35 | echo. devhelp to make HTML files and a Devhelp project 36 | echo. epub to make an epub 37 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 38 | echo. text to make text files 39 | echo. man to make manual pages 40 | echo. texinfo to make Texinfo files 41 | echo. gettext to make PO message catalogs 42 | echo. changes to make an overview over all changed/added/deprecated items 43 | echo. linkcheck to check all external links for integrity 44 | echo. doctest to run all doctests embedded in the documentation if enabled 45 | echo. clean to clean all documents built by Sphinx in _build 46 | echo. apidoc to build RST from source code" 47 | goto end 48 | ) 49 | 50 | if "%1" == "clean" ( 51 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 52 | del /q /s %BUILDDIR%\* 53 | del /q %RSTDIR%\%PKGNAME%*.rst 54 | del /q %GALLERYDIR%\*.hdf5 55 | del /q %GALLERYDIR%\*.sqlite 56 | rmdir /q /s %RSTDIR%\tutorials 57 | goto end 58 | ) 59 | 60 | if "%1" == "html" ( 61 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 62 | if errorlevel 1 exit /b 1 63 | echo. 64 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 
65 | goto end 66 | ) 67 | 68 | if "%1" == "dirhtml" ( 69 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 70 | if errorlevel 1 exit /b 1 71 | echo. 72 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 73 | goto end 74 | ) 75 | 76 | if "%1" == "singlehtml" ( 77 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 78 | if errorlevel 1 exit /b 1 79 | echo. 80 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 81 | goto end 82 | ) 83 | 84 | if "%1" == "pickle" ( 85 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 86 | if errorlevel 1 exit /b 1 87 | echo. 88 | echo.Build finished; now you can process the pickle files. 89 | goto end 90 | ) 91 | 92 | if "%1" == "json" ( 93 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 94 | if errorlevel 1 exit /b 1 95 | echo. 96 | echo.Build finished; now you can process the JSON files. 97 | goto end 98 | ) 99 | 100 | if "%1" == "htmlhelp" ( 101 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 102 | if errorlevel 1 exit /b 1 103 | echo. 104 | echo.Build finished; now you can run HTML Help Workshop with the ^ 105 | .hhp project file in %BUILDDIR%/htmlhelp. 106 | goto end 107 | ) 108 | 109 | if "%1" == "qthelp" ( 110 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 111 | if errorlevel 1 exit /b 1 112 | echo. 113 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 114 | .qhcp project file in %BUILDDIR%/qthelp, like this: 115 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\sample.qhcp 116 | echo.To view the help file: 117 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\sample.ghc 118 | goto end 119 | ) 120 | 121 | if "%1" == "devhelp" ( 122 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 123 | if errorlevel 1 exit /b 1 124 | echo. 125 | echo.Build finished. 126 | goto end 127 | ) 128 | 129 | if "%1" == "epub" ( 130 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 131 | if errorlevel 1 exit /b 1 132 | echo. 133 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 134 | goto end 135 | ) 136 | 137 | if "%1" == "latex" ( 138 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 139 | if errorlevel 1 exit /b 1 140 | echo. 141 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 142 | goto end 143 | ) 144 | 145 | if "%1" == "text" ( 146 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 147 | if errorlevel 1 exit /b 1 148 | echo. 149 | echo.Build finished. The text files are in %BUILDDIR%/text. 150 | goto end 151 | ) 152 | 153 | if "%1" == "man" ( 154 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 155 | if errorlevel 1 exit /b 1 156 | echo. 157 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 158 | goto end 159 | ) 160 | 161 | if "%1" == "texinfo" ( 162 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 163 | if errorlevel 1 exit /b 1 164 | echo. 165 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 166 | goto end 167 | ) 168 | 169 | if "%1" == "gettext" ( 170 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 171 | if errorlevel 1 exit /b 1 172 | echo. 173 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 174 | goto end 175 | ) 176 | 177 | if "%1" == "changes" ( 178 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 179 | if errorlevel 1 exit /b 1 180 | echo. 181 | echo.The overview file is in %BUILDDIR%/changes. 
182 | goto end 183 | ) 184 | 185 | if "%1" == "linkcheck" ( 186 | %SPHINXBUILD% -W -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 187 | if errorlevel 1 exit /b 1 188 | echo. 189 | echo.Link check complete; look for any errors in the above output ^ 190 | or in %BUILDDIR%/linkcheck/output.txt. 191 | goto end 192 | ) 193 | 194 | if "%1" == "doctest" ( 195 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 196 | if errorlevel 1 exit /b 1 197 | echo. 198 | echo.Testing of doctests in the sources finished, look at the ^ 199 | results in %BUILDDIR%/doctest/output.txt. 200 | goto end 201 | ) 202 | 203 | if "%1" == "apidoc" ( 204 | %SPHINXAPIDOC% -f -e --no-toc -o %RSTDIR% %SRCDIR% 205 | if errorlevel 1 exit /b 1 206 | echo. 207 | echo.Build rst docs from source code. 208 | goto end 209 | ) 210 | 211 | :end 212 | -------------------------------------------------------------------------------- /docs/source/_static/theme_overrides.css: -------------------------------------------------------------------------------- 1 | .wy-side-nav-search .wy-dropdown>a.icon img.logo, .wy-side-nav-search>a.icon img.logo { 2 | width: 150px; 3 | } 4 | 5 | .wy-nav-content { 6 | max-width: 1000px !important; 7 | } 8 | 9 | button.copybtn { 10 | height:25px; 11 | width:25px; 12 | opacity: 0.5; 13 | padding: 0; 14 | border: none; 15 | background: none; 16 | } 17 | -------------------------------------------------------------------------------- /docs/source/api_docs.rst: -------------------------------------------------------------------------------- 1 | .. _api_docs: 2 | 3 | ================== 4 | API Documentation 5 | ================== 6 | 7 | .. toctree:: 8 | :maxdepth: 2 9 | :caption: HDMF Modules 10 | 11 | Common data types 12 | Base container classes 13 | Build layer 14 | Specification layer 15 | I/O layer 16 | Data I/O utilities 17 | Development utilities 18 | Validation utilities 19 | Testing utilities 20 | Full list of HDMF package contents 21 | 22 | 23 | :ref:`modindex` 24 | -------------------------------------------------------------------------------- /docs/source/building_api.rst: -------------------------------------------------------------------------------- 1 | Building API Classes 2 | ==================== 3 | 4 | After you have written an extension, you will need a Pythonic way to interact with the data model. To do this, 5 | you will need to write some classes that represent the data you defined in your specification extensions. 6 | 7 | The :py:mod:`hdmf.container` module defines two base classes that represent the primitive structures supported by 8 | the schema. :py:class:`~hdmf.container.Data` represents datasets and :py:class:`~hdmf.container.Container` 9 | represents groups. See the classes in the `:py:mod:hdmf.common` package for examples. 10 | 11 | The register_class function/decorator 12 | ------------------------------------- 13 | 14 | When defining a class that represents a *data_type* (i.e. anything that has a *data_type_def*) 15 | from your extension, you can tell HDMF which *data_type* it represents using the function 16 | :py:func:`~hdmf.common.register_class`. This class can be called on its own, or used as a class decorator. The 17 | first argument should be the *data_type* and the second argument should be the *namespace* name. 18 | 19 | The following example demonstrates how to register a class as the Python class representation of the 20 | *data_type* "MyContainer" from the *namespace* "my_ns". 
The namespace must be loaded prior to the below code using 21 | the :py:func:`~hdmf.common.load_namespaces` function. 22 | 23 | .. code-block:: python 24 | 25 | from hdmf.common import register_class 26 | from hdmf.container import Container 27 | 28 | class MyContainer(Container): 29 | ... 30 | 31 | register_class(data_type='MyContainer', namespace='my_ns', container_cls=MyContainer) 32 | 33 | 34 | Alternatively, you can use :py:func:`~hdmf.common.register_class` as a decorator. 35 | 36 | .. code-block:: python 37 | 38 | from hdmf.common import register_class 39 | from hdmf.container import Container 40 | 41 | @type_map.register_class('MyContainer', 'my_ns') 42 | class MyContainer(Container): 43 | ... 44 | 45 | :py:func:`~hdmf.common.register_class` is used with :py:class:`~hdmf.container.Data` the same way it is used with 46 | :py:class:`~hdmf.container.Container`. 47 | -------------------------------------------------------------------------------- /docs/source/contributing.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CONTRIBUTING.rst 2 | -------------------------------------------------------------------------------- /docs/source/export.rst: -------------------------------------------------------------------------------- 1 | Export 2 | ====== 3 | 4 | Export is a new feature in HDMF 2.0. You can use export to take a container that was read from a file and write it to 5 | a different file, with or without modifications to the container in memory. 6 | The in-memory container being exported will be written to the exported file as if it was never read from a file. 7 | 8 | To export a container, first read the container from a file, then create a new 9 | :py:class:`~hdmf.backends.hdf5.h5tools.HDF5IO` object for exporting the data, then call 10 | :py:meth:`~hdmf.backends.hdf5.h5tools.HDF5IO.export` on the 11 | :py:class:`~hdmf.backends.hdf5.h5tools.HDF5IO` object, passing in the IO object used to read the container 12 | and optionally, the container itself, which may be modified in memory between reading and exporting. 13 | 14 | For example: 15 | 16 | .. code-block:: python 17 | 18 | with HDF5IO(self.read_path, manager=manager, mode='r') as read_io: 19 | with HDF5IO(self.export_path, mode='w') as export_io: 20 | export_io.export(src_io=read_io) 21 | 22 | FAQ 23 | --- 24 | 25 | Can I read a container from disk, modify it, and then export the modified container? 26 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 27 | Yes, you can export the in-memory container after modifying it in memory. The modifications will appear in the exported 28 | file and not the read file. 29 | 30 | - If the modifications are removals or additions of containers, then no special action must be taken, as long as the 31 | container hierarchy is updated correspondingly. 32 | - If the modifications are changes to attributes, then 33 | :py:meth:`Container.set_modified() ` must be called 34 | on the container before exporting. 35 | 36 | .. code-block:: python 37 | 38 | with HDF5IO(self.read_path, manager=manager, mode='r') as read_io: 39 | container = read_io.read() 40 | # ... # modify container 41 | container.set_modified() # this may be necessary if the modifications are changes to attributes 42 | with HDF5IO(self.export_path, mode='w') as export_io: 43 | export_io.export(src_io=read_io, container=container) 44 | 45 | .. 
note::
46 | 
47 |    Modifications to :py:class:`h5py.Dataset ` objects act *directly* on the read file on disk.
48 |    Changes are applied immediately and do not require exporting or writing the file. If you want to modify a dataset
49 |    only in the new file, then you should replace the whole object with a new array holding the modified data. To
50 |    prevent unintentional changes to the source file, the source file should be opened with ``mode='r'``.
51 | 
52 | Can I export a newly instantiated container?
53 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
54 | No, you can only export containers that have been read from a file. The ``src_io`` argument is required in
55 | :py:meth:`HDMFIO.export `.
56 | 
57 | Can I read a container from disk and export only part of the container?
58 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
59 | It depends. You can only export the root container from a file. To export the root container without certain other
60 | sub-containers in the hierarchy, you can remove those other containers before exporting. However, you cannot export
61 | only a sub-container of the container hierarchy.
62 | 
63 | Can I write a newly instantiated container to two different files?
64 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
65 | HDMF does not allow you to write a container that was not read from a file to two different files. For example, if you
66 | instantiate container A and write it to file 1 and then try to write it to file 2, an error will be raised. However, you
67 | can read container A from file 1 and then export it to file 2, with or without modifications to container A in
68 | memory.
69 | 
70 | What happens to links when I export?
71 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
72 | The exported file will not contain any links to the original file.
73 | 
74 | All links (such as internal links (i.e., HDF5 soft links) and links to other files (i.e., HDF5 external links))
75 | will be preserved in the exported file.
76 | 
77 | If a link to an :py:class:`h5py.Dataset ` in another file is added to the in-memory container after
78 | reading it from the file and then exported, then by default, the export process will create an external link to the
79 | existing :py:class:`h5py.Dataset ` object. To instead copy the data from the
80 | :py:class:`h5py.Dataset ` in another
81 | file to the exported file, pass the keyword argument ``write_args={'link_data': False}`` to
82 | :py:meth:`HDF5IO.export `. This is similar to passing the keyword argument
83 | ``link_data=False`` to :py:meth:`HDF5IO.write ` when writing a file with a
84 | copy of externally linked datasets.
85 | 
86 | What happens to references when I export?
87 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
88 | References will be preserved in the exported file.
89 | NOTE: Exporting a file involves loading into memory all datasets that contain references and attributes that are
90 | references. The HDF5 reference IDs within an exported file may differ from the reference IDs in the original file.
91 | 
92 | What happens to object IDs when I export?
93 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
94 | After exporting a container, the object IDs of the container and its child containers will be identical to the object
95 | IDs of the read container and its child containers.
The object ID of a container uniquely identifies the container 96 | within a file, but should *not* be used to distinguish between two different files. 97 | 98 | If you would like all object IDs to change on export, then first call the method 99 | :py:meth:`generate_new_id ` on the root container to generate 100 | a new set of IDs for the root container and all of its children, recursively. Then export the container with its 101 | new IDs. Note: calling the :py:meth:`generate_new_id ` method 102 | changes the object IDs of the containers in memory. These changes are not reflected in the original file from 103 | which the containers were read unless the :py:meth:`HDF5IO.write ` 104 | method is subsequently called. 105 | 106 | .. code-block:: python 107 | 108 | with HDF5IO(self.read_path, manager=manager, mode='r') as read_io: 109 | container = read_io.read() 110 | container.generate_new_id() 111 | with HDF5IO(self.export_path, mode='w') as export_io: 112 | export_io.export(src_io=read_io, container=container) 113 | -------------------------------------------------------------------------------- /docs/source/figures/gallery_thumbnail_aligneddynamictable.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/gallery_thumbnail_aligneddynamictable.png -------------------------------------------------------------------------------- /docs/source/figures/gallery_thumbnail_dynamictable.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/gallery_thumbnail_dynamictable.png -------------------------------------------------------------------------------- /docs/source/figures/gallery_thumbnail_externalresources.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/gallery_thumbnail_externalresources.png -------------------------------------------------------------------------------- /docs/source/figures/gallery_thumbnail_generic_data_chunk_tutorial.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/gallery_thumbnail_generic_data_chunk_tutorial.png -------------------------------------------------------------------------------- /docs/source/figures/gallery_thumbnail_multicontainerinterface.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/gallery_thumbnail_multicontainerinterface.png -------------------------------------------------------------------------------- /docs/source/figures/gallery_thumbnail_termset.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/gallery_thumbnail_termset.png -------------------------------------------------------------------------------- /docs/source/figures/gallery_thumbnails.pptx: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/gallery_thumbnails.pptx -------------------------------------------------------------------------------- /docs/source/figures/pynwb_package_overview.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/pynwb_package_overview.pptx -------------------------------------------------------------------------------- /docs/source/figures/software_architecture.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/software_architecture.pdf -------------------------------------------------------------------------------- /docs/source/figures/software_architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/software_architecture.png -------------------------------------------------------------------------------- /docs/source/figures/software_architecture.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/software_architecture.pptx -------------------------------------------------------------------------------- /docs/source/figures/software_architecture_buildmanager.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/software_architecture_buildmanager.pdf -------------------------------------------------------------------------------- /docs/source/figures/software_architecture_buildmanager.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/software_architecture_buildmanager.png -------------------------------------------------------------------------------- /docs/source/figures/software_architecture_buildmanager.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/software_architecture_buildmanager.pptx -------------------------------------------------------------------------------- /docs/source/figures/software_architecture_concepts.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/software_architecture_concepts.pdf -------------------------------------------------------------------------------- /docs/source/figures/software_architecture_concepts.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/software_architecture_concepts.png -------------------------------------------------------------------------------- /docs/source/figures/software_architecture_concepts.pptx: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/software_architecture_concepts.pptx -------------------------------------------------------------------------------- /docs/source/figures/software_architecture_design_choices.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/software_architecture_design_choices.pdf -------------------------------------------------------------------------------- /docs/source/figures/software_architecture_design_choices.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/software_architecture_design_choices.png -------------------------------------------------------------------------------- /docs/source/figures/software_architecture_design_choices.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/software_architecture_design_choices.pptx -------------------------------------------------------------------------------- /docs/source/figures/software_architecture_hdmfio.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/software_architecture_hdmfio.pdf -------------------------------------------------------------------------------- /docs/source/figures/software_architecture_hdmfio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/software_architecture_hdmfio.png -------------------------------------------------------------------------------- /docs/source/figures/software_architecture_hdmfio.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/software_architecture_hdmfio.pptx -------------------------------------------------------------------------------- /docs/source/figures/software_architecture_mainconcepts.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/software_architecture_mainconcepts.pdf -------------------------------------------------------------------------------- /docs/source/figures/software_architecture_mainconcepts.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/software_architecture_mainconcepts.png -------------------------------------------------------------------------------- /docs/source/figures/software_architecture_mainconcepts.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/figures/software_architecture_mainconcepts.pptx 
-------------------------------------------------------------------------------- /docs/source/hdmf_logo-180x180.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/hdmf_logo-180x180.png -------------------------------------------------------------------------------- /docs/source/hdmf_logo.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/hdmf_logo.pdf -------------------------------------------------------------------------------- /docs/source/hdmf_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/docs/source/hdmf_logo.png -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | The Hierarchical Data Modeling Framework 2 | ======================================== 3 | 4 | HDMF is a Python package for working with standardizing, reading, and writing hierarchical object data. 5 | 6 | HDMF is a by-product of the `Neurodata Without Borders (NWB) `_ project. 7 | The goal of NWB was to enable collaborative science within the neurophysiology and systems neuroscience communities 8 | through data standardization. The team of neuroscientists and software developers involved with NWB 9 | recognize that adoption of a unified data format is an important step toward breaking down the barriers to 10 | data sharing in neuroscience. HDMF was central to the NWB development efforts, and has since been split off 11 | with the intention of providing it as an open-source tool for other scientific communities. 12 | 13 | If you use HDMF in your research, please use the following citation: 14 | 15 | A. J. Tritt et al., "HDMF: Hierarchical Data Modeling Framework for Modern Science Data Standards," 16 | 2019 IEEE International Conference on Big Data (Big Data), Los Angeles, CA, USA, 2019, pp. 165-179, 17 | doi: 10.1109/BigData47090.2019.9005648. 18 | 19 | 20 | .. toctree:: 21 | :hidden: 22 | :maxdepth: 2 23 | :caption: Getting Started 24 | 25 | install_users 26 | tutorials/index 27 | 28 | .. toctree:: 29 | :hidden: 30 | :maxdepth: 2 31 | :caption: Overview 32 | 33 | overview_intro 34 | overview_software_architecture 35 | overview_citing 36 | 37 | 38 | .. toctree:: 39 | :hidden: 40 | :maxdepth: 2 41 | :caption: Resources 42 | 43 | api_docs 44 | extensions 45 | building_api 46 | export 47 | validation 48 | spec_language_support 49 | 50 | .. toctree:: 51 | :hidden: 52 | :caption: For Developers 53 | 54 | install_developers 55 | contributing 56 | make_roundtrip_test 57 | software_process 58 | 59 | .. toctree:: 60 | :hidden: 61 | :maxdepth: 2 62 | :caption: For Maintainers 63 | 64 | make_a_release 65 | 66 | .. toctree:: 67 | :hidden: 68 | :maxdepth: 2 69 | :caption: Legal 70 | 71 | legal 72 | -------------------------------------------------------------------------------- /docs/source/install_developers.rst: -------------------------------------------------------------------------------- 1 | .. 
_install_developers:
2 | 
3 | ------------------------------
4 | Installing HDMF for Developers
5 | ------------------------------
6 | 
7 | 
8 | Set up a virtual environment
9 | ----------------------------
10 | 
11 | For development, we recommend installing HDMF in a virtual environment in editable mode. You can use
12 | the venv_ tool that comes packaged with Python to create a new virtual environment. Or you can use the
13 | `conda package and environment management system`_ for managing virtual environments.
14 | 
15 | .. _venv: https://docs.python.org/3/library/venv.html
16 | .. _conda package and environment management system: https://conda.io/projects/conda/en/latest/index.html
17 | 
18 | 
19 | Option 1: Using venv
20 | ^^^^^^^^^^^^^^^^^^^^
21 | 
22 | First, create a new virtual environment using the ``venv`` tool. This
23 | virtual environment will be stored in a new directory called ``"hdmf-env"`` in the current directory.
24 | 
25 | .. code:: bash
26 | 
27 |    python -m venv hdmf-env
28 | 
29 | On macOS or Linux, run the following to activate your new virtual environment:
30 | 
31 | .. code:: bash
32 | 
33 |    source hdmf-env/bin/activate
34 | 
35 | On Windows, run the following to activate your new virtual environment:
36 | 
37 | .. code:: batch
38 | 
39 |    hdmf-env\Scripts\activate
40 | 
41 | This virtual environment is a space where you can install Python packages that are isolated from other virtual
42 | environments. This is especially useful when working on multiple Python projects that have different package
43 | requirements and for testing Python code with different sets of installed packages or versions of Python.
44 | 
45 | Activate your newly created virtual environment using the above command whenever you want to work on HDMF. You can also
46 | deactivate it using the ``deactivate`` command to return to the base environment. And you can delete the virtual
47 | environment by deleting the directory that was created.
48 | 
49 | 
50 | Option 2: Using conda
51 | ^^^^^^^^^^^^^^^^^^^^^
52 | 
53 | The `conda package and environment management system`_ is an alternate way of managing virtual environments.
54 | First, install Anaconda_ to install the ``conda`` tool. Then create and
55 | activate a new virtual environment called ``"hdmf-env"`` with Python 3.13 installed.
56 | 
57 | .. code:: bash
58 | 
59 |    conda create --name hdmf-env python=3.13
60 |    conda activate hdmf-env
61 | 
62 | Similar to a virtual environment created with ``venv``, a conda environment
63 | is a space where you can install Python packages that are isolated from other virtual
64 | environments. In general, you should use ``conda install`` instead of ``pip install`` to install packages
65 | in a conda environment.
66 | 
67 | Activate your newly created virtual environment using the above command whenever you want to work on HDMF. You can also
68 | deactivate it using the ``conda deactivate`` command to return to the base environment. And you can delete the virtual
69 | environment by using the ``conda remove --name hdmf-env --all`` command.
70 | 
71 | .. note::
72 | 
73 |    For advanced users, we recommend using Mambaforge_, a faster version of the conda package manager
74 |    that includes conda-forge as a default channel.
75 | 
76 | .. _Anaconda: https://www.anaconda.com/download
77 | ..
_Mambaforge: https://github.com/conda-forge/miniforge 78 | 79 | Install from GitHub 80 | ------------------- 81 | 82 | After you have created and activated a virtual environment, clone the HDMF git repository from GitHub, install the 83 | package requirements using the pip_ Python package manager, and install HDMF in editable mode. 84 | 85 | .. _pip: https://pip.pypa.io/en/stable/ 86 | 87 | .. code:: bash 88 | 89 | git clone --recurse-submodules https://github.com/hdmf-dev/hdmf.git 90 | cd hdmf 91 | pip install -e ".[all]" 92 | 93 | .. note:: 94 | 95 | When using ``conda``, you may use ``pip install`` to install dependencies as shown above; however, it is generally 96 | recommended that dependencies should be installed via ``conda install``. 97 | 98 | 99 | Run tests 100 | --------- 101 | 102 | You can run the full test suite by running: 103 | 104 | .. code:: bash 105 | 106 | pytest 107 | 108 | This will run all the tests and compute the test coverage. The coverage report can be found in ``/htmlcov``. 109 | You can also run a specific test module or class, or you can configure ``pytest`` to start the 110 | Python debugger (PDB) prompt on an error, e.g., 111 | 112 | .. code:: bash 113 | 114 | pytest tests/unit/test_container.py # run all tests in the module 115 | pytest tests/unit/test_container.py::TestContainer # run all tests in this class 116 | pytest tests/unit/test_container.py::TestContainer::test_constructor # run this test method 117 | pytest --pdb tests/unit/test_container.py # start pdb on error 118 | 119 | 120 | You can run tests across multiple Python versions using the tox_ automated testing tool. Running ``tox`` will 121 | create a virtual environment, install dependencies, and run the test suite for different versions of Python. 122 | This can take some time to run. 123 | 124 | .. _pytest: https://docs.pytest.org/ 125 | .. _tox: https://tox.wiki/en/latest/ 126 | 127 | .. code:: bash 128 | 129 | tox 130 | 131 | You can also test that the Sphinx Gallery files run without warnings or errors by running: 132 | 133 | .. code:: bash 134 | 135 | python test_gallery.py 136 | 137 | 138 | Install latest pre-release 139 | -------------------------- 140 | 141 | To try out the latest features or set up continuous integration of your own project against the 142 | latest version of HDMF, install the latest release from GitHub. 143 | 144 | .. code:: bash 145 | 146 | pip install -U hdmf --find-links https://github.com/hdmf-dev/hdmf/releases/tag/latest --no-index 147 | -------------------------------------------------------------------------------- /docs/source/install_users.rst: -------------------------------------------------------------------------------- 1 | .. _install_users: 2 | 3 | --------------- 4 | Installing HDMF 5 | --------------- 6 | 7 | HDMF requires having Python 3.9-3.13 installed. If you don't have Python installed and want the simplest way to 8 | get started, we recommend you install and use the `Anaconda Distribution`_. It includes Python, NumPy, and many other 9 | commonly used packages for scientific computing and data science. 10 | 11 | HDMF can be installed with ``pip``, ``conda``, or from source. HDMF works on Windows, macOS, and Linux. 12 | 13 | Option 1: Using pip 14 | ------------------- 15 | 16 | If you are a beginner to programming in Python and using Python tools, we recommend that you install HDMF by running 17 | the following command in a terminal or command prompt: 18 | 19 | .. 
code:: 20 | 21 | pip install hdmf 22 | 23 | Option 2: Using conda 24 | --------------------- 25 | 26 | You can also install HDMF using ``conda`` by running the following command in a terminal or command prompt: 27 | 28 | .. code:: 29 | 30 | conda install -c conda-forge hdmf 31 | 32 | .. _Anaconda Distribution: https://www.anaconda.com/download 33 | -------------------------------------------------------------------------------- /docs/source/legal.rst: -------------------------------------------------------------------------------- 1 | .. _copyright: 2 | 3 | --------------- 4 | Copyright 5 | --------------- 6 | 7 | .. include:: ../../Legal.txt 8 | 9 | .. _license: 10 | 11 | --------------- 12 | License 13 | --------------- 14 | 15 | .. include:: ../../license.txt 16 | -------------------------------------------------------------------------------- /docs/source/make_roundtrip_test.rst: -------------------------------------------------------------------------------- 1 | ============================ 2 | How to Make a Roundtrip Test 3 | ============================ 4 | 5 | The HDMF test suite has tools for easily doing round-trip tests of container classes. These 6 | tools exist in the :py:mod:`hdmf.testing` module. Round-trip tests exist for the container classes in the 7 | :py:mod:`hdmf.common` module. We recommend you write any additional round-trip tests in 8 | the ``tests/unit/common`` subdirectory of the Git repository. 9 | 10 | For executing your new tests, we recommend using the `test.py` script in the top of the Git 11 | repository. Roundtrip tests will get executed as part of the full test suite, which can be executed 12 | with the following command:: 13 | 14 | $ python test.py 15 | 16 | The roundtrip test will generate a new HDMF file with the name ``test_.h5`` where ``CLASS_NAME`` is 17 | the class name of the container class you are roundtripping. The test 18 | will write an HDMF file with an instance of the container to disk, read this instance back in, and compare it 19 | to the instance that was used for writing to disk. Once the test is complete, the HDMF file will be deleted. 20 | You can keep the HDMF file around after the test completes by setting the environment variable ``CLEAN_HDMF`` 21 | to ``0``, ``false``, ``False``, or ``FALSE``. Setting ``CLEAN_HDMF`` to any value not listed here will 22 | cause the roundtrip HDMF file to be deleted once the test has completed 23 | 24 | Before writing tests, we also suggest you familiarize yourself with the 25 | :ref:`software architecture ` of HDMF. 26 | 27 | ------------------------ 28 | ``H5RoundTripMixin`` 29 | ------------------------ 30 | 31 | To write a roundtrip test, you will need to subclass the 32 | :py:class:`~hdmf.testing.testcase.H5RoundTripMixin` class and the 33 | :py:class:`~hdmf.testing.testcase.TestCase` class, in that order, and override some of the instance methods of the 34 | :py:class:`~hdmf.testing.testcase.H5RoundTripMixin` class to test the process of going from in-memory Python object 35 | to data stored on disk and back. 36 | 37 | ################## 38 | ``setUpContainer`` 39 | ################## 40 | 41 | To configure the test for a particular container class, you need to override the 42 | :py:meth:`~hdmf.testing.testcase.H5RoundTripMixin.setUpContainer` method. This method should take no arguments, and 43 | return an instance of the container class you are testing. 44 | 45 | Here is an example using a :py:class:`~hdmf.common.sparse.CSRMatrix`: 46 | 47 | .. 
code-block:: python 48 | 49 | from hdmf.common import CSRMatrix 50 | from hdmf.testing import TestCase, H5RoundTripMixin 51 | import numpy as np 52 | 53 | class TestCSRMatrixRoundTrip(H5RoundTripMixin, TestCase): 54 | 55 | def setUpContainer(self): 56 | data = np.array([1, 2, 3, 4, 5, 6]) 57 | indices = np.array([0, 2, 2, 0, 1, 2]) 58 | indptr = np.array([0, 2, 3, 6]) 59 | return CSRMatrix(data, indices, indptr, (3, 3)) 60 | -------------------------------------------------------------------------------- /docs/source/overview_citing.rst: -------------------------------------------------------------------------------- 1 | .. _citing: 2 | 3 | Citing HDMF 4 | ============ 5 | 6 | BibTeX entry 7 | ------------ 8 | 9 | If you use HDMF in your research, please use the following citation: 10 | 11 | .. code-block:: bibtex 12 | 13 | @INPROCEEDINGS{9005648, 14 | author={A. J. {Tritt} and O. {Rübel} and B. {Dichter} and R. {Ly} and D. {Kang} and E. F. {Chang} and L. M. {Frank} and K. {Bouchard}}, 15 | booktitle={2019 IEEE International Conference on Big Data (Big Data)}, 16 | title={HDMF: Hierarchical Data Modeling Framework for Modern Science Data Standards}, 17 | year={2019}, 18 | volume={}, 19 | number={}, 20 | pages={165-179}, 21 | doi={10.1109/BigData47090.2019.9005648}} 22 | 23 | Using RRID 24 | ---------- 25 | 26 | * **RRID:** (Hierarchical Data Modeling Framework, RRID:SCR_021303) 27 | 28 | Using duecredit 29 | ----------------- 30 | 31 | Citations can be generated using duecredit_. To install duecredit, run ``pip install duecredit``. 32 | 33 | You can obtain a list of citations for your Python script, e.g., ``yourscript.py``, using: 34 | 35 | .. code-block:: bash 36 | 37 | cd /path/to/your/module 38 | python -m duecredit yourscript.py 39 | 40 | Alternatively, you can set the environment variable ``DUECREDIT_ENABLE=yes`` 41 | 42 | .. code-block:: bash 43 | 44 | DUECREDIT-ENABLE=yes python yourscript.py 45 | 46 | Citations will be saved in a hidden file (``.duecredit.p``) in the current directory. You can then use the duecredit_ 47 | command line tool to export the citations to different formats. For example, you can display your citations in 48 | BibTeX format using: 49 | 50 | .. code-block:: bash 51 | 52 | duecredit summary --format=bibtex 53 | 54 | For more information on using duecredit, please consult its `homepage `_. 55 | 56 | .. _duecredit: https://github.com/duecredit/duecredit 57 | -------------------------------------------------------------------------------- /docs/source/overview_intro.rst: -------------------------------------------------------------------------------- 1 | .. _overview: 2 | 3 | Introduction 4 | ============ 5 | 6 | HDMF provides a high-level Python API for specifying, reading, writing and manipulating hierarchical object data. 7 | This section provides a broad overview of the software architecture of HDMF (see Section :ref:`software-architecture`) 8 | and its functionality. 9 | -------------------------------------------------------------------------------- /docs/source/software_process.rst: -------------------------------------------------------------------------------- 1 | .. _software_process: 2 | 3 | ================ 4 | Software Process 5 | ================ 6 | 7 | ---------------------- 8 | Continuous Integration 9 | ---------------------- 10 | 11 | HDMF is tested against Ubuntu, macOS, and Windows operating systems. 12 | The project has both unit and integration tests. 13 | Tests run on `GitHub Actions`_. 
14 | 15 | Each time a PR is created or updated, the project is built, packaged, and tested on all supported operating systems 16 | and python distributions. That way, as a contributor, you know if you introduced regressions or coding style 17 | inconsistencies. 18 | 19 | There are badges in the README_ file which shows the current condition of the dev branch. 20 | 21 | .. _GitHub Actions: https://github.com/hdmf-dev/hdmf/actions 22 | .. _README: https://github.com/hdmf-dev/hdmf/blob/dev/README.rst 23 | 24 | 25 | -------- 26 | Coverage 27 | -------- 28 | 29 | Code coverage is computed and reported using the coverage_ tool. There are two coverage-related badges in the README_ 30 | file. One shows the status of the `GitHub Action workflow`_ which runs the coverage_ tool and uploads the report to 31 | codecov_, and the other badge shows the percentage coverage reported from codecov_. A detailed report can be found on 32 | codecov_, which shows line by line which lines are covered by the tests. 33 | 34 | .. _coverage: https://coverage.readthedocs.io 35 | .. _GitHub Action workflow: https://github.com/hdmf-dev/hdmf/actions?query=workflow%3A%22Run+coverage%22 36 | .. _codecov: https://app.codecov.io/gh/hdmf-dev/hdmf/tree/dev/src/hdmf 37 | 38 | .. _software_process_requirement_specifications: 39 | 40 | ------------------------- 41 | Installation Requirements 42 | ------------------------- 43 | 44 | pyproject.toml_ contains a list of package dependencies and their version ranges allowed for 45 | running HDMF. As a library, upper bound version constraints create more harm than good in the long term (see this 46 | `blog post`_) so we avoid setting upper bounds on requirements. 47 | 48 | When setting lower bounds, make sure to specify the lower bounds in the ``[project] dependencies`` key and 49 | ``[project.optional-dependencies] min-reqs`` key in pyproject.toml_. 50 | The latter is used in automated testing to ensure that the package runs 51 | correctly using the minimum versions of dependencies. 52 | 53 | Minimum requirements should be updated manually if a new feature or bug fix is added in a dependency that is required 54 | for proper running of HDMF. Minimum requirements should also be updated if a user requests that HDMF be installable 55 | with an older version of a dependency, all tests pass using the older version, and there is no valid reason for the 56 | minimum version to be as high as it is. 57 | 58 | .. _pyproject.toml: https://github.com/hdmf-dev/hdmf/blob/dev/pyproject.toml 59 | .. _blog post: https://iscinumpy.dev/post/bound-version-constraints/ 60 | 61 | -------------------- 62 | Testing Requirements 63 | -------------------- 64 | 65 | pyproject.toml_ contains the optional dependency group "test" with testing requirements. 66 | 67 | See tox.ini_ and the GitHub Actions workflows for how different testing environments are 68 | defined using the optional dependency groups. 69 | 70 | environment-ros3.yml_ lists the dependencies used to test ROS3 streaming in HDMF which 71 | can only be done in a Conda environment. 72 | 73 | .. _tox.ini: https://github.com/hdmf-dev/hdmf/blob/dev/tox.ini 74 | .. _environment-ros3.yml: https://github.com/hdmf-dev/hdmf/blob/dev/environment-ros3.yml 75 | 76 | -------------------------- 77 | Documentation Requirements 78 | -------------------------- 79 | 80 | pyproject.toml_ contains the optional dependency group "docs" with documentation requirements. 
81 | This dependency group is used by ReadTheDocs_ to initialize the local environment for Sphinx to run 82 | (see .readthedocs.yaml_). 83 | 84 | .. _ReadTheDocs: https://readthedocs.org/projects/hdmf/ 85 | .. _.readthedocs.yaml: https://github.com/hdmf-dev/hdmf/blob/dev/.readthedocs.yaml 86 | 87 | ------------------------- 88 | Versioning and Releasing 89 | ------------------------- 90 | 91 | HDMF uses setuptools_scm_ for versioning source and wheel distributions. `setuptools_scm` creates a semi-unique release 92 | name for the wheels that are created based on git tags. 93 | After all the tests pass, the "Deploy release" GitHub Actions workflow 94 | creates both a wheel (``\*.whl``) and source distribution (``\*.tar.gz``) for Python 3 95 | and uploads them back to GitHub as a release_. 96 | 97 | It is important to note that GitHub automatically generates source code archives in ``.zip`` and ``.tar.gz`` formats and 98 | attaches those files to all releases as an asset. These files currently do not contain the submodules within HDMF and 99 | thus do not serve as a complete installation. For a complete source code archive, use the source distribution generated 100 | by GitHub Actions, typically named ``hdmf-{version}.tar.gz``. 101 | 102 | .. _setuptools_scm: https://github.com/pypa/setuptools_scm 103 | .. _release: https://github.com/hdmf-dev/hdmf/releases 104 | -------------------------------------------------------------------------------- /docs/source/spec_language_support.rst: -------------------------------------------------------------------------------- 1 | 2 | .. _spec_language_support: 3 | 4 | =========================================== 5 | Support for the HDMF Specification Language 6 | =========================================== 7 | 8 | The HDMF API provides nearly full support for all features of the `HDMF Specification Language`_ 9 | version 3.0.0, except for the following: 10 | 11 | 1. Attributes containing multiple references (see `#833`_) 12 | 2. Certain text and integer values for quantity (see `#423`_, `#531`_) 13 | 3. Datasets that do not have a data_type_inc/data_type_def and contain either a reference dtype or a compound dtype (see `#737`_) 14 | 4. Passing dataset dtype and shape from parent data type to child data type (see `#320`_) 15 | 16 | .. _HDMF Specification Language: https://hdmf-schema-language.readthedocs.io 17 | .. _#833: https://github.com/hdmf-dev/hdmf/issues/833 18 | .. _#423: https://github.com/hdmf-dev/hdmf/issues/423 19 | .. _#531: https://github.com/hdmf-dev/hdmf/issues/531 20 | .. _#737: https://github.com/hdmf-dev/hdmf/issues/737 21 | .. _#320: https://github.com/hdmf-dev/hdmf/issues/320 22 | -------------------------------------------------------------------------------- /docs/source/validation.rst: -------------------------------------------------------------------------------- 1 | .. _validating: 2 | 3 | Validating HDMF Data 4 | ==================== 5 | 6 | Validation of NWB files is available through ``pynwb``. See the `PyNWB documentation 7 | `_ for more information. 8 | 9 | -------- 10 | 11 | .. note:: 12 | 13 | A simple interface for validating HDMF structured data through the command line like for PyNWB files is not yet 14 | implemented. If you would like this functionality to be available through :py:mod:`~hdmf`, then please upvote 15 | `this issue `_. 16 | 17 | .. 18 | Validating HDMF structured data is handled by a command-line tool available in :py:mod:`~hdmf`. 19 | The validator can be invoked like so: 20 | 21 | .. 
code-block:: bash 22 | 23 | python -m hdmf.validate -p namespace.yaml test.h5 24 | 25 | This will validate the file ``test.h5`` against the specification in ``namespace.yaml``. 26 | -------------------------------------------------------------------------------- /environment-ros3.yml: -------------------------------------------------------------------------------- 1 | # environment file used to test HDMF with ROS3 support 2 | name: ros3 3 | channels: 4 | - conda-forge 5 | - defaults 6 | dependencies: 7 | - python==3.13 8 | - h5py==3.12.1 9 | - matplotlib==3.9.2 10 | - numpy==2.2.1 11 | - pandas==2.2.3 12 | - python-dateutil==2.9.0.post0 13 | - pytest==8.3.4 14 | - pytest-cov==6.0.0 15 | -------------------------------------------------------------------------------- /license.txt: -------------------------------------------------------------------------------- 1 | “hdmf” Copyright (c) 2017-2025, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from the U.S. Dept. of Energy). All rights reserved. 2 | 3 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 4 | 5 | (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 6 | 7 | (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 8 | 9 | (3) Neither the name of the University of California, Lawrence Berkeley National Laboratory, U.S. Dept. of Energy nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 10 | 11 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 12 | 13 | You are under no obligation whatsoever to provide any bug fixes, patches, or upgrades to the features, functionality or performance of the source code ("Enhancements") to anyone; however, if you choose to make your Enhancements available either publicly, or directly to Lawrence Berkeley National Laboratory, without imposing a separate written license agreement for such Enhancements, then you hereby grant the following license: a non-exclusive, royalty-free perpetual license to install, use, modify, prepare derivative works, incorporate into other computer software, distribute, and sublicense such enhancements or derivative works thereof, in binary and source code form. 
14 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling", "hatch-vcs"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "hdmf" 7 | authors = [ 8 | { name="Ryan Ly", email="rly@lbl.gov" }, 9 | { name="Andrew Tritt", email="ajtritt@lbl.gov" }, 10 | { name="Oliver Ruebel", email="oruebel@lbl.gov" }, 11 | { name="Ben Dichter", email="ben.dichter@gmail.com" }, 12 | { name="Matthew Avaylon", email="mavaylon@lbl.gov" }, 13 | ] 14 | description = "A hierarchical data modeling framework for modern science data standards" 15 | readme = "README.rst" 16 | requires-python = ">=3.9" 17 | license = "BSD-3-Clause" 18 | classifiers = [ 19 | "Programming Language :: Python", 20 | "Programming Language :: Python :: 3.9", 21 | "Programming Language :: Python :: 3.10", 22 | "Programming Language :: Python :: 3.11", 23 | "Programming Language :: Python :: 3.12", 24 | "Programming Language :: Python :: 3.13", 25 | "License :: OSI Approved :: BSD License", 26 | "Development Status :: 5 - Production/Stable", 27 | "Operating System :: OS Independent", 28 | "Intended Audience :: Developers", 29 | "Intended Audience :: Science/Research", 30 | "Topic :: Scientific/Engineering :: Medical Science Apps.", 31 | ] 32 | # make sure to update min-reqs dependencies below when these lower bounds change 33 | dependencies = [ 34 | "h5py>=3.1.0", 35 | "jsonschema>=3.2.0", 36 | 'numpy>=1.19.3', 37 | "pandas>=1.2.0", 38 | "ruamel.yaml>=0.16", 39 | ] 40 | dynamic = ["version"] 41 | 42 | [project.optional-dependencies] 43 | # make sure to update min-reqs dependencies below when these lower bounds change 44 | tqdm = ["tqdm>=4.41.0"] 45 | zarr = [ 46 | "zarr>=2.12.0,<3", 47 | "numcodecs<0.16.0", # numcodecs 0.16.0 is not compatible with zarr<3 48 | ] 49 | sparse = ["scipy>=1.7"] 50 | termset = [ 51 | "linkml-runtime>=1.5.5", 52 | "schemasheets>=0.4.0rc1", 53 | "oaklib>=0.5.12", 54 | "pyyaml>=6.0.1", 55 | ] 56 | 57 | # development dependencies 58 | test = [ 59 | "codespell", 60 | "pre-commit", 61 | "pytest", 62 | "pytest-cov", 63 | "python-dateutil", 64 | "ruff", 65 | "tox", 66 | ] 67 | 68 | # documentation dependencies 69 | docs = [ 70 | "matplotlib", 71 | "sphinx>=4", # improved support for docutils>=0.17 72 | "sphinx_rtd_theme>=1", # <1 does not work with docutils>=0.17 73 | "sphinx-gallery", 74 | "sphinx-copybutton", 75 | ] 76 | 77 | # all possible dependencies 78 | all = ["hdmf[tqdm,zarr,sparse,termset,test,docs]"] 79 | 80 | # minimum requirements of project dependencies for testing (see .github/workflows/run_all_tests.yml) 81 | min-reqs = [ 82 | "h5py==3.1.0", 83 | "jsonschema==3.2.0", 84 | "numpy==1.19.3", 85 | "pandas==1.2.0", 86 | "ruamel.yaml==0.16.0", 87 | "scipy==1.7.0", 88 | "tqdm==4.41.0", 89 | "zarr==2.12.0", 90 | ] 91 | 92 | [project.urls] 93 | "Homepage" = "https://github.com/hdmf-dev/hdmf" 94 | "Bug Tracker" = "https://github.com/hdmf-dev/hdmf/issues" 95 | 96 | [project.scripts] 97 | validate_hdmf_spec = "hdmf.testing.validate_spec:main" 98 | 99 | [tool.hatch.version] 100 | source = "vcs" 101 | 102 | [tool.hatch.build.hooks.vcs] 103 | # this file is created/updated when the package is installed and used in 104 | # src/hdmf/__init__.py to set `hdmf.__version__` 105 | version-file = "src/hdmf/_version.py" 106 | 107 | [tool.hatch.build.targets.sdist] 108 | exclude = [ 109 | ".git*", 110 | ".codecov.yml", 111 | ".readthedocs.yaml", 
112 | ".mailmap", 113 | ".pre-commit-config.yaml", 114 | ] 115 | 116 | [tool.hatch.build.targets.wheel] 117 | packages = ["src/hdmf"] 118 | exclude = [ 119 | ".git*", 120 | ".codecov.yml", 121 | ".readthedocs.yaml", 122 | ".mailmap", 123 | ".pre-commit-config.yaml", 124 | ] 125 | 126 | # [tool.mypy] 127 | # no_incremental = true # needed b/c mypy and ruamel.yaml do not play nice. https://github.com/python/mypy/issues/12664 128 | 129 | # [tool.interrogate] 130 | # fail-under = 95 131 | # verbose = 1 132 | 133 | [tool.pytest.ini_options] 134 | norecursedirs = "tests/unit/helpers" 135 | 136 | [tool.codespell] 137 | skip = "htmlcov,.git,.mypy_cache,.pytest_cache,.coverage,*.pdf,*.svg,venvs,.tox,hdmf-common-schema,./docs/_build/*,*.ipynb" 138 | ignore-words-list = "datas,assertIn" 139 | 140 | [tool.coverage.run] 141 | branch = true 142 | source = ["hdmf"] 143 | 144 | [tool.coverage.report] 145 | exclude_lines = [ 146 | "pragma: no cover", 147 | "@abstract" 148 | ] 149 | omit = [ 150 | "*/hdmf/_due.py", 151 | "*/hdmf/testing/*", 152 | ] 153 | 154 | # [tool.black] 155 | # line-length = 120 156 | # preview = true 157 | # exclude = ".git|.mypy_cache|.tox|.venv|venv|.ipynb_checkpoints|_build/|dist/|__pypackages__|.ipynb" 158 | # force-exclude = "src/hdmf/common/hdmf-common-schema|docs/gallery" 159 | 160 | [tool.ruff] 161 | lint.select = ["E", "F", "T100", "T201", "T203", "C901"] 162 | exclude = [ 163 | "src/hdmf/common/hdmf-common-schema", 164 | "docs/source/conf.py", 165 | "src/hdmf/_due.py", 166 | "docs/source/tutorials/", 167 | "docs/_build/", 168 | "scripts/" 169 | ] 170 | line-length = 120 171 | 172 | [tool.ruff.lint.per-file-ignores] 173 | "docs/gallery/*" = ["E402", "T201"] 174 | "src/*/__init__.py" = ["F401"] 175 | "test_gallery.py" = ["T201"] 176 | 177 | [tool.ruff.lint.mccabe] 178 | max-complexity = 17 179 | -------------------------------------------------------------------------------- /src/hdmf/__init__.py: -------------------------------------------------------------------------------- 1 | from . import query 2 | from .backends.hdf5.h5_utils import H5Dataset 3 | from .container import Container, Data, HERDManager 4 | from .utils import docval, getargs 5 | from .term_set import TermSet, TermSetWrapper, TypeConfigurator 6 | 7 | 8 | try: 9 | # see https://effigies.gitlab.io/posts/python-packaging-2023/ 10 | from ._version import __version__ 11 | except ImportError: # pragma: no cover 12 | # this is a relatively slower method for getting the version string 13 | from importlib.metadata import version # noqa: E402 14 | 15 | __version__ = version("hdmf") 16 | del version 17 | 18 | 19 | from ._due import BibTeX, due # noqa: E402 20 | 21 | due.cite( 22 | BibTeX(""" 23 | @INPROCEEDINGS{9005648, 24 | author={A. J. {Tritt} and O. {Rübel} and B. {Dichter} and R. {Ly} and D. {Kang} and E. F. {Chang} and L. M. {Frank} and K. 
{Bouchard}}, 25 | booktitle={2019 IEEE International Conference on Big Data (Big Data)}, 26 | title={HDMF: Hierarchical Data Modeling Framework for Modern Science Data Standards}, 27 | year={2019}, 28 | volume={}, 29 | number={}, 30 | pages={165-179}, 31 | doi={10.1109/BigData47090.2019.9005648}} 32 | """), # noqa: E501 33 | description="HDMF: Hierarchical Data Modeling Framework for Modern Science Data Standards", 34 | path="hdmf/", 35 | version=__version__, 36 | cite_module=True, 37 | ) 38 | del due, BibTeX 39 | -------------------------------------------------------------------------------- /src/hdmf/_due.py: -------------------------------------------------------------------------------- 1 | # emacs: at the end of the file 2 | # ex: set sts=4 ts=4 sw=4 et: 3 | # ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### # 4 | """ 5 | 6 | Stub file for a guaranteed safe import of duecredit constructs: if duecredit 7 | is not available. 8 | 9 | To use it, place it into your project codebase to be imported, e.g. copy as 10 | 11 | cp stub.py /path/tomodule/module/due.py 12 | 13 | Note that it might be better to avoid naming it duecredit.py to avoid shadowing 14 | installed duecredit. 15 | 16 | Then use in your code as 17 | 18 | from .due import due, Doi, BibTeX, Text 19 | 20 | See https://github.com/duecredit/duecredit/blob/master/README.md for examples. 21 | 22 | Origin: Originally a part of the duecredit 23 | Copyright: 2015-2021 DueCredit developers 24 | License: BSD-2 25 | """ 26 | 27 | __version__ = "0.0.9" 28 | 29 | 30 | class InactiveDueCreditCollector(object): 31 | """Just a stub at the Collector which would not do anything""" 32 | 33 | def _donothing(self, *args, **kwargs): 34 | """Perform no good and no bad""" 35 | pass 36 | 37 | def dcite(self, *args, **kwargs): 38 | """If I could cite I would""" 39 | 40 | def nondecorating_decorator(func): 41 | return func 42 | 43 | return nondecorating_decorator 44 | 45 | active = False 46 | activate = add = cite = dump = load = _donothing 47 | 48 | def __repr__(self): 49 | return self.__class__.__name__ + "()" 50 | 51 | 52 | def _donothing_func(*args, **kwargs): 53 | """Perform no good and no bad""" 54 | pass 55 | 56 | 57 | try: 58 | from duecredit import due, BibTeX, Doi, Url, Text # lgtm [py/unused-import] 59 | 60 | if "due" in locals() and not hasattr(due, "cite"): 61 | raise RuntimeError("Imported due lacks .cite. DueCredit is now disabled") 62 | except Exception as e: 63 | if not isinstance(e, ImportError): 64 | import logging 65 | 66 | logging.getLogger("duecredit").error("Failed to import duecredit due to %s" % str(e)) 67 | # Initiate due stub 68 | due = InactiveDueCreditCollector() 69 | BibTeX = Doi = Url = Text = _donothing_func 70 | 71 | # Emacs mode definitions 72 | # Local Variables: 73 | # mode: python 74 | # py-indent-offset: 4 75 | # tab-width: 4 76 | # indent-tabs-mode: nil 77 | # End: 78 | -------------------------------------------------------------------------------- /src/hdmf/backends/__init__.py: -------------------------------------------------------------------------------- 1 | from . 
import hdf5 2 | -------------------------------------------------------------------------------- /src/hdmf/backends/errors.py: -------------------------------------------------------------------------------- 1 | """Module for I/O backend errors""" 2 | 3 | 4 | class UnsupportedOperation(ValueError): 5 | pass 6 | -------------------------------------------------------------------------------- /src/hdmf/backends/hdf5/__init__.py: -------------------------------------------------------------------------------- 1 | from . import h5_utils, h5tools 2 | from .h5_utils import H5DataIO 3 | from .h5tools import HDF5IO, H5SpecWriter, H5SpecReader 4 | -------------------------------------------------------------------------------- /src/hdmf/backends/utils.py: -------------------------------------------------------------------------------- 1 | """Module with utility functions and classes used for implementation of I/O backends""" 2 | import os 3 | from ..spec import NamespaceCatalog, GroupSpec, NamespaceBuilder 4 | from ..utils import docval, popargs 5 | 6 | 7 | class WriteStatusTracker(dict): 8 | """ 9 | Helper class used for tracking the write status of builders. I.e., to track whether a 10 | builder has been written or not. 11 | """ 12 | def __init__(self): 13 | pass 14 | 15 | def __builderhash(self, builder): 16 | """Return the ID of a builder for use as a unique hash.""" 17 | # NOTE: id may not be sufficient if builders are created inline in the function call, in which 18 | # case the id is the id of the functions parameter, so it can be the same for different 19 | # builders. This should typically only happen in unit testing, but just to be safe. 20 | return str(id(builder)) + "_" + str(builder.name) 21 | 22 | def set_written(self, builder): 23 | """ 24 | Mark this builder as written. 25 | 26 | :param builder: Builder object to be marked as written 27 | :type builder: Builder 28 | """ 29 | # currently all values in self._written_builders are True, so this could be a set but is a dict for 30 | # future flexibility 31 | builder_id = self.__builderhash(builder) 32 | self[builder_id] = True 33 | 34 | def get_written(self, builder): 35 | """Return True if this builder has been written to (or read from) disk by this IO object, False otherwise. 
36 | 37 | :param builder: Builder object to get the written flag for 38 | :type builder: Builder 39 | 40 | :return: True if the builder is found in self._written_builders using the builder ID, False otherwise 41 | """ 42 | builder_id = self.__builderhash(builder) 43 | return self.get(builder_id, False) 44 | 45 | 46 | class NamespaceToBuilderHelper(object): 47 | """Helper class used in HDF5IO (and possibly elsewhere) to convert a namespace to a builder for I/O""" 48 | 49 | @classmethod 50 | @docval({'name': 'ns_catalog', 'type': NamespaceCatalog, 'doc': 'the namespace catalog with the specs'}, 51 | {'name': 'namespace', 'type': str, 'doc': 'the name of the namespace to be converted to a builder'}, 52 | rtype=NamespaceBuilder) 53 | def convert_namespace(cls, **kwargs): 54 | """Convert a namespace to a builder""" 55 | ns_catalog, namespace = popargs('ns_catalog', 'namespace', kwargs) 56 | ns = ns_catalog.get_namespace(namespace) 57 | builder = NamespaceBuilder(ns.doc, ns.name, 58 | full_name=ns.full_name, 59 | version=ns.version, 60 | author=ns.author, 61 | contact=ns.contact) 62 | for elem in ns.schema: 63 | if 'namespace' in elem: 64 | inc_ns = elem['namespace'] 65 | builder.include_namespace(inc_ns) 66 | else: 67 | source = elem['source'] 68 | for dt in ns_catalog.get_types(source): 69 | spec = ns_catalog.get_spec(namespace, dt) 70 | if spec.parent is not None: 71 | continue 72 | h5_source = cls.get_source_name(source) 73 | spec = cls.__copy_spec(spec) 74 | builder.add_spec(h5_source, spec) 75 | return builder 76 | 77 | @classmethod 78 | @docval({'name': 'source', 'type': str, 'doc': "source path"}) 79 | def get_source_name(self, source): 80 | return os.path.splitext(source)[0] 81 | 82 | @classmethod 83 | def __copy_spec(cls, spec): 84 | kwargs = dict() 85 | kwargs['attributes'] = cls.__get_new_specs(spec.attributes, spec) 86 | to_copy = ['doc', 'name', 'default_name', 'linkable', 'quantity', spec.inc_key(), spec.def_key()] 87 | if isinstance(spec, GroupSpec): 88 | kwargs['datasets'] = cls.__get_new_specs(spec.datasets, spec) 89 | kwargs['groups'] = cls.__get_new_specs(spec.groups, spec) 90 | kwargs['links'] = cls.__get_new_specs(spec.links, spec) 91 | else: 92 | to_copy.append('dtype') 93 | to_copy.append('shape') 94 | to_copy.append('dims') 95 | for key in to_copy: 96 | val = getattr(spec, key) 97 | if val is not None: 98 | kwargs[key] = val 99 | ret = spec.build_spec(kwargs) 100 | return ret 101 | 102 | @classmethod 103 | def __get_new_specs(cls, subspecs, spec): 104 | ret = list() 105 | for subspec in subspecs: 106 | if not spec.is_inherited_spec(subspec) or spec.is_overridden_spec(subspec): 107 | ret.append(subspec) 108 | return ret 109 | -------------------------------------------------------------------------------- /src/hdmf/backends/warnings.py: -------------------------------------------------------------------------------- 1 | class BrokenLinkWarning(UserWarning): 2 | """ 3 | Raised when a group has a key with a None value. 
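    In practice, a None value for a key typically corresponds to a broken link, i.e., a link whose target is missing from the file.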
4 | """ 5 | pass 6 | -------------------------------------------------------------------------------- /src/hdmf/build/__init__.py: -------------------------------------------------------------------------------- 1 | from .builders import Builder, DatasetBuilder, GroupBuilder, LinkBuilder, ReferenceBuilder 2 | from .classgenerator import CustomClassGenerator, MCIClassGenerator 3 | from .errors import (BuildError, OrphanContainerBuildError, ReferenceTargetNotBuiltError, ContainerConfigurationError, 4 | ConstructError) 5 | from .manager import BuildManager, TypeMap 6 | from .objectmapper import ObjectMapper 7 | from .warnings import (BuildWarning, MissingRequiredBuildWarning, DtypeConversionWarning, IncorrectQuantityBuildWarning, 8 | MissingRequiredWarning, OrphanContainerWarning) 9 | -------------------------------------------------------------------------------- /src/hdmf/build/errors.py: -------------------------------------------------------------------------------- 1 | """Module for build error definitions""" 2 | from .builders import Builder 3 | from ..container import AbstractContainer 4 | from ..utils import docval, getargs 5 | 6 | 7 | class BuildError(Exception): 8 | """Error raised when building a container into a builder.""" 9 | 10 | @docval({'name': 'builder', 'type': Builder, 'doc': 'the builder that cannot be built'}, 11 | {'name': 'reason', 'type': str, 'doc': 'the reason for the error'}) 12 | def __init__(self, **kwargs): 13 | self.__builder = getargs('builder', kwargs) 14 | self.__reason = getargs('reason', kwargs) 15 | self.__message = "%s (%s): %s" % (self.__builder.name, self.__builder.path, self.__reason) 16 | super().__init__(self.__message) 17 | 18 | 19 | class OrphanContainerBuildError(BuildError): 20 | 21 | @docval({'name': 'builder', 'type': Builder, 'doc': 'the builder containing the broken link'}, 22 | {'name': 'container', 'type': AbstractContainer, 'doc': 'the container that has no parent'}) 23 | def __init__(self, **kwargs): 24 | builder = getargs('builder', kwargs) 25 | self.__container = getargs('container', kwargs) 26 | reason = ("Linked %s '%s' has no parent. Remove the link or ensure the linked container is added properly." 
27 | % (self.__container.__class__.__name__, self.__container.name)) 28 | super().__init__(builder=builder, reason=reason) 29 | 30 | 31 | class ReferenceTargetNotBuiltError(BuildError): 32 | 33 | @docval({'name': 'builder', 'type': Builder, 'doc': 'the builder containing the reference that cannot be found'}, 34 | {'name': 'container', 'type': AbstractContainer, 'doc': 'the container that is not built yet'}) 35 | def __init__(self, **kwargs): 36 | builder = getargs('builder', kwargs) 37 | self.__container = getargs('container', kwargs) 38 | reason = ("Could not find already-built Builder for %s '%s' in BuildManager" 39 | % (self.__container.__class__.__name__, self.__container.name)) 40 | super().__init__(builder=builder, reason=reason) 41 | 42 | 43 | class ContainerConfigurationError(Exception): 44 | """Error raised when the container class is improperly configured.""" 45 | pass 46 | 47 | 48 | class ConstructError(Exception): 49 | """Error raised when constructing a container from a builder.""" 50 | -------------------------------------------------------------------------------- /src/hdmf/build/warnings.py: -------------------------------------------------------------------------------- 1 | """Module for build warnings""" 2 | 3 | 4 | class BuildWarning(UserWarning): 5 | """ 6 | Base class for warnings that are raised during the building of a container. 7 | """ 8 | pass 9 | 10 | 11 | class IncorrectQuantityBuildWarning(BuildWarning): 12 | """ 13 | Raised when a container field contains a number of groups/datasets/links that is not allowed by the spec. 14 | """ 15 | pass 16 | 17 | 18 | class IncorrectDatasetShapeBuildWarning(BuildWarning): 19 | """ 20 | Raised when a dataset has a shape that is not allowed by the spec. 21 | """ 22 | pass 23 | 24 | 25 | class MissingRequiredBuildWarning(BuildWarning): 26 | """ 27 | Raised when a required field is missing. 28 | """ 29 | pass 30 | 31 | 32 | class MissingRequiredWarning(MissingRequiredBuildWarning): 33 | """ 34 | Raised when a required field is missing. 35 | """ 36 | pass 37 | 38 | 39 | class OrphanContainerWarning(BuildWarning): 40 | """ 41 | Raised when a container is built without a parent. 42 | """ 43 | pass 44 | 45 | 46 | class DtypeConversionWarning(UserWarning): 47 | """ 48 | Raised when a value is converted to a different data type in order to match the specification. 49 | """ 50 | pass 51 | -------------------------------------------------------------------------------- /src/hdmf/common/io/__init__.py: -------------------------------------------------------------------------------- 1 | from . import multi 2 | from . import table 3 | from . import resources 4 | from . import alignedtable 5 | -------------------------------------------------------------------------------- /src/hdmf/common/io/alignedtable.py: -------------------------------------------------------------------------------- 1 | from .. import register_map 2 | from ..alignedtable import AlignedDynamicTable 3 | from .table import DynamicTableMap 4 | 5 | 6 | @register_map(AlignedDynamicTable) 7 | class AlignedDynamicTableMap(DynamicTableMap): 8 | """ 9 | Customize the mapping for AlignedDynamicTable 10 | """ 11 | def __init__(self, spec): 12 | super().__init__(spec) 13 | # By default the DynamicTables contained as sub-categories in the AlignedDynamicTable are mapped to 14 | # the 'dynamic_tables' class attribute. 
This renames the attribute to 'category_tables' 15 | self.map_spec('category_tables', spec.get_data_type('DynamicTable')) 16 | -------------------------------------------------------------------------------- /src/hdmf/common/io/multi.py: -------------------------------------------------------------------------------- 1 | from .. import register_map 2 | from ..multi import SimpleMultiContainer 3 | from ...build import ObjectMapper 4 | from ...container import Container, Data 5 | 6 | 7 | @register_map(SimpleMultiContainer) 8 | class SimpleMultiContainerMap(ObjectMapper): 9 | 10 | @ObjectMapper.object_attr('containers') 11 | def containers_attr(self, container, manager): 12 | return [c for c in container.containers.values() if isinstance(c, Container)] 13 | 14 | @ObjectMapper.constructor_arg('containers') 15 | def containers_carg(self, builder, manager): 16 | return [manager.construct(sub) for sub in builder.datasets.values() 17 | if manager.is_sub_data_type(sub, 'Data')] + \ 18 | [manager.construct(sub) for sub in builder.groups.values() 19 | if manager.is_sub_data_type(sub, 'Container')] 20 | 21 | @ObjectMapper.object_attr('datas') 22 | def datas_attr(self, container, manager): 23 | return [c for c in container.containers.values() if isinstance(c, Data)] 24 | -------------------------------------------------------------------------------- /src/hdmf/common/io/resources.py: -------------------------------------------------------------------------------- 1 | from .. import register_map 2 | from ..resources import HERD, KeyTable, FileTable, ObjectTable, ObjectKeyTable, EntityTable, EntityKeyTable 3 | from ...build import ObjectMapper 4 | 5 | 6 | @register_map(HERD) 7 | class HERDMap(ObjectMapper): 8 | 9 | def construct_helper(self, name, parent_builder, table_cls, manager): 10 | """Create a new instance of table_cls with data from parent_builder[name]. 11 | 12 | The DatasetBuilder for name is associated with data_type Data and container class Data, 13 | but users should use the more specific table_cls for these datasets. 
14 | """ 15 | parent = manager._get_proxy_builder(parent_builder) 16 | builder = parent_builder[name] 17 | src = builder.source 18 | oid = builder.attributes.get(self.spec.id_key()) 19 | kwargs = dict(name=builder.name, data=builder.data) 20 | return self.__new_container__(table_cls, src, parent, oid, **kwargs) 21 | 22 | @ObjectMapper.constructor_arg('keys') 23 | def keys(self, builder, manager): 24 | return self.construct_helper('keys', builder, KeyTable, manager) 25 | 26 | @ObjectMapper.constructor_arg('files') 27 | def files(self, builder, manager): 28 | return self.construct_helper('files', builder, FileTable, manager) 29 | 30 | @ObjectMapper.constructor_arg('entities') 31 | def entities(self, builder, manager): 32 | return self.construct_helper('entities', builder, EntityTable, manager) 33 | 34 | @ObjectMapper.constructor_arg('objects') 35 | def objects(self, builder, manager): 36 | return self.construct_helper('objects', builder, ObjectTable, manager) 37 | 38 | @ObjectMapper.constructor_arg('object_keys') 39 | def object_keys(self, builder, manager): 40 | return self.construct_helper('object_keys', builder, ObjectKeyTable, manager) 41 | 42 | @ObjectMapper.constructor_arg('entity_keys') 43 | def entity_keys(self, builder, manager): 44 | return self.construct_helper('entity_keys', builder, EntityKeyTable, manager) 45 | -------------------------------------------------------------------------------- /src/hdmf/common/io/table.py: -------------------------------------------------------------------------------- 1 | from .. import register_map 2 | from ..table import DynamicTable, VectorData, VectorIndex, DynamicTableRegion 3 | from ...build import ObjectMapper, BuildManager, CustomClassGenerator 4 | from ...spec import Spec 5 | from ...utils import docval, getargs 6 | 7 | 8 | @register_map(DynamicTable) 9 | class DynamicTableMap(ObjectMapper): 10 | 11 | def __init__(self, spec): 12 | super().__init__(spec) 13 | vector_data_spec = spec.get_data_type('VectorData') 14 | self.map_spec('columns', vector_data_spec) 15 | 16 | @ObjectMapper.object_attr('colnames') 17 | def attr_columns(self, container, manager): 18 | if all(not col for col in container.columns): 19 | return tuple() 20 | return container.colnames 21 | 22 | @docval({"name": "spec", "type": Spec, "doc": "the spec to get the attribute value for"}, 23 | {"name": "container", "type": DynamicTable, "doc": "the container to get the attribute value from"}, 24 | {"name": "manager", "type": BuildManager, "doc": "the BuildManager used for managing this build"}, 25 | returns='the value of the attribute') 26 | def get_attr_value(self, **kwargs): 27 | ''' Get the value of the attribute corresponding to this spec from the given container ''' 28 | spec, container, manager = getargs('spec', 'container', 'manager', kwargs) 29 | attr_value = super().get_attr_value(spec, container, manager) 30 | if attr_value is None and spec.name in container: 31 | if spec.data_type_inc == 'VectorData': 32 | attr_value = container[spec.name] 33 | if isinstance(attr_value, VectorIndex): 34 | attr_value = attr_value.target 35 | elif spec.data_type_inc == 'DynamicTableRegion': 36 | attr_value = container[spec.name] 37 | if isinstance(attr_value, VectorIndex): 38 | attr_value = attr_value.target 39 | if attr_value.table is None: 40 | msg = "empty or missing table for DynamicTableRegion '%s' in DynamicTable '%s'" % \ 41 | (attr_value.name, container.name) 42 | raise ValueError(msg) 43 | elif spec.data_type_inc == 'VectorIndex': 44 | attr_value = container[spec.name] 45 | 
return attr_value 46 | 47 | 48 | class DynamicTableGenerator(CustomClassGenerator): 49 | 50 | @classmethod 51 | def apply_generator_to_field(cls, field_spec, bases, type_map): 52 | """Return True if this is a DynamicTable and the field spec is a column.""" 53 | for b in bases: 54 | if issubclass(b, DynamicTable): 55 | break 56 | else: # return False if no base is a subclass of DynamicTable 57 | return False 58 | dtype = cls._get_type(field_spec, type_map) 59 | return isinstance(dtype, type) and issubclass(dtype, VectorData) 60 | 61 | @classmethod 62 | def process_field_spec(cls, classdict, docval_args, parent_cls, attr_name, not_inherited_fields, type_map, spec): 63 | """Add __columns__ to the classdict and update the docval args for the field spec with the given attribute name. 64 | :param classdict: The dict to update with __columns__. 65 | :param docval_args: The list of docval arguments. 66 | :param parent_cls: The parent class. 67 | :param attr_name: The attribute name of the field spec for the container class to generate. 68 | :param not_inherited_fields: Dictionary of fields not inherited from the parent class. 69 | :param type_map: The type map to use. 70 | :param spec: The spec for the container class to generate. 71 | """ 72 | if attr_name.endswith('_index'): # do not add index columns to __columns__ 73 | return 74 | field_spec = not_inherited_fields[attr_name] 75 | column_conf = dict( 76 | name=attr_name, 77 | description=field_spec['doc'], 78 | required=field_spec.required 79 | ) 80 | dtype = cls._get_type(field_spec, type_map) 81 | column_conf['class'] = dtype 82 | if issubclass(dtype, DynamicTableRegion): 83 | # the spec does not know which table this DTR points to 84 | # the user must specify the table attribute on the DTR after it is generated 85 | column_conf['table'] = True 86 | 87 | index_counter = 0 88 | index_name = attr_name 89 | while '{}_index'.format(index_name) in not_inherited_fields: # an index column exists for this column 90 | index_name = '{}_index'.format(index_name) 91 | index_counter += 1 92 | if index_counter == 1: 93 | column_conf['index'] = True 94 | elif index_counter > 1: 95 | column_conf['index'] = index_counter 96 | 97 | classdict.setdefault('__columns__', list()).append(column_conf) 98 | 99 | # do not add DynamicTable columns to init docval 100 | 101 | @classmethod 102 | def post_process(cls, classdict, bases, docval_args, spec): 103 | """Convert classdict['__columns__'] to tuple. 104 | :param classdict: The class dictionary. 105 | :param bases: The list of base classes. 106 | :param docval_args: The dict of docval arguments. 107 | :param spec: The spec for the container class to generate. 108 | """ 109 | # convert classdict['__columns__'] from list to tuple if present 110 | columns = classdict.get('__columns__') 111 | if columns is not None: 112 | classdict['__columns__'] = tuple(columns) 113 | 114 | @classmethod 115 | def _get_attrs_not_to_set_init(cls, classdict, parent_docval_args): 116 | # exclude columns from the args that are set in __init__ 117 | attrs_not_to_set = parent_docval_args.copy() 118 | if "__columns__" in classdict: 119 | column_names = [column_conf["name"] for column_conf in classdict["__columns__"]] 120 | attrs_not_to_set.update(column_names) 121 | return attrs_not_to_set 122 | -------------------------------------------------------------------------------- /src/hdmf/common/multi.py: -------------------------------------------------------------------------------- 1 | from . 
import register_class 2 | from ..container import Container, Data, MultiContainerInterface 3 | from ..utils import docval, popargs, AllowPositional 4 | 5 | 6 | @register_class('SimpleMultiContainer') 7 | class SimpleMultiContainer(MultiContainerInterface): 8 | 9 | __clsconf__ = { 10 | 'attr': 'containers', 11 | 'type': (Container, Data), 12 | 'add': 'add_container', 13 | 'get': 'get_container', 14 | } 15 | 16 | @docval({'name': 'name', 'type': str, 'doc': 'the name of this container'}, 17 | {'name': 'containers', 'type': (list, tuple), 'default': None, 18 | 'doc': 'the Container or Data objects in this file'}, 19 | allow_positional=AllowPositional.WARNING) 20 | def __init__(self, **kwargs): 21 | containers = popargs('containers', kwargs) 22 | super().__init__(**kwargs) 23 | self.containers = containers 24 | -------------------------------------------------------------------------------- /src/hdmf/common/sparse.py: -------------------------------------------------------------------------------- 1 | try: 2 | from scipy.sparse import csr_matrix 3 | SCIPY_INSTALLED = True 4 | except ImportError: 5 | SCIPY_INSTALLED = False 6 | class csr_matrix: # dummy class to prevent import errors 7 | pass 8 | 9 | from . import register_class 10 | from ..container import Container 11 | from ..utils import docval, popargs, to_uint_array, get_data_shape, AllowPositional 12 | 13 | 14 | @register_class('CSRMatrix') 15 | class CSRMatrix(Container): 16 | 17 | @docval({'name': 'data', 'type': (csr_matrix, 'array_data'), 18 | 'doc': 'the data to use for this CSRMatrix or CSR data array.' 19 | 'If passing CSR data array, *indices*, *indptr*, and *shape* must also be provided'}, 20 | {'name': 'indices', 'type': 'array_data', 'doc': 'CSR index array', 'default': None}, 21 | {'name': 'indptr', 'type': 'array_data', 'doc': 'CSR index pointer array', 'default': None}, 22 | {'name': 'shape', 'type': 'array_data', 'doc': 'the shape of the matrix', 'default': None}, 23 | {'name': 'name', 'type': str, 'doc': 'the name to use for this when storing', 'default': 'csr_matrix'}, 24 | allow_positional=AllowPositional.WARNING) 25 | def __init__(self, **kwargs): 26 | if not SCIPY_INSTALLED: 27 | raise ImportError( 28 | "scipy must be installed to use CSRMatrix. Please install scipy using `pip install scipy`." 29 | ) 30 | data, indices, indptr, shape = popargs('data', 'indices', 'indptr', 'shape', kwargs) 31 | super().__init__(**kwargs) 32 | if not isinstance(data, csr_matrix): 33 | temp_shape = get_data_shape(data) 34 | temp_ndim = len(temp_shape) 35 | if temp_ndim == 2: 36 | data = csr_matrix(data) 37 | elif temp_ndim == 1: 38 | if any(_ is None for _ in (indptr, indices, shape)): 39 | raise ValueError("Must specify 'indptr', 'indices', and 'shape' arguments when passing data array.") 40 | indptr = self.__check_arr(indptr, 'indptr') 41 | indices = self.__check_arr(indices, 'indices') 42 | shape = self.__check_arr(shape, 'shape') 43 | if len(shape) != 2: 44 | raise ValueError("'shape' argument must specify two and only two dimensions.") 45 | data = csr_matrix((data, indices, indptr), shape=shape) 46 | else: 47 | raise ValueError("'data' argument cannot be ndarray of dimensionality > 2.") 48 | # self.__data is a scipy.sparse.csr_matrix 49 | self.__data = data 50 | 51 | @staticmethod 52 | def __check_arr(ar, arg): 53 | try: 54 | ar = to_uint_array(ar) 55 | except ValueError as ve: 56 | raise ValueError("Cannot convert '%s' to an array of unsigned integers." 
% arg) from ve 57 | if ar.ndim != 1: 58 | raise ValueError("'%s' must be a 1D array of unsigned integers." % arg) 59 | return ar 60 | 61 | def __getattr__(self, val): 62 | # NOTE: this provides access to self.data, self.indices, self.indptr, self.shape 63 | attr = getattr(self.__data, val) 64 | if val in ('indices', 'indptr', 'shape'): # needed because sps.csr_matrix may contain int arrays for these 65 | attr = to_uint_array(attr) 66 | return attr 67 | 68 | def to_spmat(self): 69 | return self.__data 70 | -------------------------------------------------------------------------------- /src/hdmf/monitor.py: -------------------------------------------------------------------------------- 1 | from abc import ABCMeta, abstractmethod 2 | import warnings 3 | 4 | from .data_utils import AbstractDataChunkIterator, DataChunkIterator, DataChunk 5 | from .utils import docval, getargs 6 | 7 | warnings.warn( 8 | "The hdmf.monitor module is deprecated and will be removed in HDMF 5.0. If you are using this module, " 9 | "please copy this module to your codebase or raise an issue in the HDMF repository: " 10 | "https://github.com/hdmf-dev/hdmf/issues", 11 | DeprecationWarning, 12 | ) 13 | 14 | 15 | class NotYetExhausted(Exception): 16 | pass 17 | 18 | 19 | class DataChunkProcessor(AbstractDataChunkIterator, metaclass=ABCMeta): 20 | 21 | @docval({'name': 'data', 'type': DataChunkIterator, 'doc': 'the DataChunkIterator to analyze'}) 22 | def __init__(self, **kwargs): 23 | """Initialize the DataChunkIterator""" 24 | # Get the user parameters 25 | self.__dci = getargs('data', kwargs) 26 | 27 | def __next__(self): 28 | try: 29 | dc = self.__dci.__next__() 30 | except StopIteration as e: 31 | self.__done = True 32 | raise e 33 | self.process_data_chunk(dc) 34 | return dc 35 | 36 | def __iter__(self): 37 | return iter(self.__dci) 38 | 39 | def recommended_chunk_shape(self): 40 | return self.__dci.recommended_chunk_shape() 41 | 42 | def recommended_data_shape(self): 43 | return self.__dci.recommended_data_shape() 44 | 45 | def get_final_result(self, **kwargs): 46 | ''' Return the result of processing data fed by this DataChunkIterator ''' 47 | if not self.__done: 48 | raise NotYetExhausted() 49 | return self.compute_final_result() 50 | 51 | @abstractmethod 52 | @docval({'name': 'data_chunk', 'type': DataChunk, 'doc': 'a chunk to process'}) 53 | def process_data_chunk(self, **kwargs): 54 | ''' This method should take in a DataChunk, 55 | and process it. 
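        Implementations typically accumulate running statistics from each chunk; see NumSampleCounter below, which counts the total number of samples seen.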
56 | ''' 57 | pass 58 | 59 | @abstractmethod 60 | @docval(returns='the result of processing this stream') 61 | def compute_final_result(self, **kwargs): 62 | ''' Return the result of processing this stream 63 | Should raise NotYetExhaused exception 64 | ''' 65 | pass 66 | 67 | 68 | class NumSampleCounter(DataChunkProcessor): 69 | 70 | def __init__(self, **kwargs): 71 | super().__init__(**kwargs) 72 | self.__sample_count = 0 73 | 74 | @docval({'name': 'data_chunk', 'type': DataChunk, 'doc': 'a chunk to process'}) 75 | def process_data_chunk(self, **kwargs): 76 | dc = getargs('data_chunk', kwargs) 77 | self.__sample_count += len(dc) 78 | 79 | @docval(returns='the result of processing this stream') 80 | def compute_final_result(self, **kwargs): 81 | return self.__sample_count 82 | -------------------------------------------------------------------------------- /src/hdmf/query.py: -------------------------------------------------------------------------------- 1 | from abc import ABCMeta, abstractmethod 2 | 3 | import numpy as np 4 | 5 | from .utils import ExtenderMeta, docval_macro, docval, getargs 6 | 7 | 8 | @docval_macro('array_data') 9 | class HDMFDataset(metaclass=ExtenderMeta): 10 | def __evaluate_key(self, key): 11 | if isinstance(key, tuple) and len(key) == 0: 12 | return key 13 | if isinstance(key, (tuple, list, np.ndarray)): 14 | return list(map(self.__evaluate_key, key)) 15 | else: 16 | return key 17 | 18 | def __getitem__(self, key): 19 | idx = self.__evaluate_key(key) 20 | return self.dataset[idx] 21 | 22 | @docval({'name': 'dataset', 'type': 'array_data', 'doc': 'the HDF5 file lazily evaluate'}) 23 | def __init__(self, **kwargs): 24 | super().__init__() 25 | self.__dataset = getargs('dataset', kwargs) 26 | 27 | @property 28 | def dataset(self): 29 | return self.__dataset 30 | 31 | @property 32 | def dtype(self): 33 | return self.__dataset.dtype 34 | 35 | def __len__(self): 36 | return len(self.__dataset) 37 | 38 | def __iter__(self): 39 | return iter(self.dataset) 40 | 41 | def __next__(self): 42 | return next(self.dataset) 43 | 44 | def next(self): 45 | return self.dataset.next() 46 | 47 | def append(self, arg): 48 | """ 49 | Override this method to support appending to backend-specific datasets 50 | """ 51 | pass # pragma: no cover 52 | 53 | 54 | class ReferenceResolver(metaclass=ABCMeta): 55 | """ 56 | A base class for classes that resolve references 57 | """ 58 | 59 | @classmethod 60 | @abstractmethod 61 | def get_inverse_class(cls): 62 | """ 63 | Return the class the represents the ReferenceResolver 64 | that resolves references to the opposite type. 65 | 66 | BuilderResolver.get_inverse_class should return a class 67 | that subclasses ContainerResolver. 68 | 69 | ContainerResolver.get_inverse_class should return a class 70 | that subclasses BuilderResolver. 71 | """ 72 | pass 73 | 74 | @abstractmethod 75 | def invert(self): 76 | """ 77 | Return an object that defers reference resolution 78 | but in the opposite direction. 79 | """ 80 | pass 81 | 82 | 83 | class BuilderResolver(ReferenceResolver): 84 | """ 85 | A reference resolver that resolves references to Builders 86 | 87 | Subclasses should implement the invert method and the get_inverse_class 88 | classmethod 89 | 90 | BuilderResolver.get_inverse_class should return a class that subclasses 91 | ContainerResolver. 
92 | """ 93 | 94 | pass 95 | 96 | 97 | class ContainerResolver(ReferenceResolver): 98 | """ 99 | A reference resolver that resolves references to Containers 100 | 101 | Subclasses should implement the invert method and the get_inverse_class 102 | classmethod 103 | 104 | ContainerResolver.get_inverse_class should return a class that subclasses 105 | BuilderResolver. 106 | """ 107 | 108 | pass 109 | -------------------------------------------------------------------------------- /src/hdmf/spec/__init__.py: -------------------------------------------------------------------------------- 1 | from .catalog import SpecCatalog 2 | from .namespace import NamespaceCatalog, SpecNamespace, SpecReader 3 | from .spec import (AttributeSpec, DatasetSpec, DtypeHelper, DtypeSpec, GroupSpec, LinkSpec, 4 | NAME_WILDCARD, RefSpec, Spec) 5 | from .write import NamespaceBuilder, SpecWriter, export_spec 6 | -------------------------------------------------------------------------------- /src/hdmf/testing/__init__.py: -------------------------------------------------------------------------------- 1 | from .testcase import TestCase, H5RoundTripMixin 2 | from .utils import remove_test_file 3 | -------------------------------------------------------------------------------- /src/hdmf/testing/utils.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | 4 | def remove_test_file(path): 5 | """A helper function for removing intermediate test files 6 | 7 | This checks if the environment variable CLEAN_HDMF has been set to False 8 | before removing the file. If CLEAN_HDMF is set to False, it does not remove the file. 9 | """ 10 | clean_flag_set = os.getenv('CLEAN_HDMF', True) not in ('False', 'false', 'FALSE', '0', 0, False) 11 | if os.path.exists(path) and clean_flag_set: 12 | os.remove(path) 13 | -------------------------------------------------------------------------------- /src/hdmf/testing/validate_spec.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | from argparse import ArgumentParser 4 | from glob import glob 5 | 6 | import jsonschema 7 | import ruamel.yaml as yaml 8 | 9 | 10 | def validate_spec(fpath_spec, fpath_schema): 11 | """ 12 | Validate a yaml specification file against the json schema file that 13 | defines the specification language. Can be used to validate changes 14 | to the NWB and HDMF core schemas, as well as any extensions to either. 
15 | 16 | :param fpath_spec: path-like 17 | :param fpath_schema: path-like 18 | """ 19 | 20 | schemaAbs = 'file://' + os.path.abspath(fpath_schema) 21 | 22 | f_schema = open(fpath_schema, 'r') 23 | schema = json.load(f_schema) 24 | 25 | class FixResolver(jsonschema.RefResolver): 26 | def __init__(self): 27 | jsonschema.RefResolver.__init__(self, 28 | base_uri=schemaAbs, 29 | referrer=None) 30 | self.store[schemaAbs] = schema 31 | 32 | new_resolver = FixResolver() 33 | 34 | f_nwb = open(fpath_spec, 'r') 35 | yaml_obj = yaml.YAML(typ='safe', pure=True) 36 | instance = yaml_obj.load(f_nwb) 37 | 38 | jsonschema.validate(instance, schema, resolver=new_resolver) 39 | 40 | 41 | def main(): 42 | parser = ArgumentParser(description="Validate an HDMF/NWB specification") 43 | parser.add_argument("paths", type=str, nargs='+', help="yaml file paths") 44 | parser.add_argument("-m", "--metaschema", type=str, 45 | help=".json.schema file used to validate yaml files") 46 | args = parser.parse_args() 47 | 48 | for path in args.paths: 49 | if os.path.isfile(path): 50 | validate_spec(path, args.metaschema) 51 | elif os.path.isdir(path): 52 | for ipath in glob(os.path.join(path, '*.yaml')): 53 | validate_spec(ipath, args.metaschema) 54 | else: 55 | raise ValueError('path must be a valid file or directory') 56 | 57 | 58 | if __name__ == "__main__": 59 | main() 60 | -------------------------------------------------------------------------------- /src/hdmf/validate/__init__.py: -------------------------------------------------------------------------------- 1 | from . import errors 2 | from .errors import * # noqa: F403 3 | from .validator import ValidatorMap, Validator, AttributeValidator, DatasetValidator, GroupValidator 4 | -------------------------------------------------------------------------------- /test_gallery.py: -------------------------------------------------------------------------------- 1 | """Test that the Sphinx Gallery files run without warnings or errors. 2 | 3 | See tox.ini for usage. 4 | """ 5 | 6 | import importlib.util 7 | import logging 8 | import os 9 | import os.path 10 | import sys 11 | import traceback 12 | import warnings 13 | 14 | TOTAL = 0 15 | FAILURES = 0 16 | ERRORS = 0 17 | 18 | 19 | def _import_from_file(script): 20 | modname = os.path.basename(script) 21 | spec = importlib.util.spec_from_file_location(os.path.basename(script), script) 22 | module = importlib.util.module_from_spec(spec) 23 | sys.modules[modname] = module 24 | spec.loader.exec_module(module) 25 | 26 | 27 | _numpy_warning_re = "numpy.ufunc size changed, may indicate binary incompatibility. Expected 216, got 192" 28 | 29 | _experimental_warning_re = ( 30 | "[a-zA-Z0-9]+ is experimental -- it may be removed in the future " 31 | "and is not guaranteed to maintain backward compatibility" 32 | ) 33 | 34 | def run_gallery_tests(): 35 | global TOTAL, FAILURES, ERRORS 36 | logging.info("Testing execution of Sphinx Gallery files") 37 | 38 | # get all python file names in docs/gallery 39 | gallery_file_names = list() 40 | for root, _, files in os.walk(os.path.join(os.path.dirname(__file__), "docs", "gallery")): 41 | for f in files: 42 | if f.endswith(".py"): 43 | gallery_file_names.append(os.path.join(root, f)) 44 | 45 | warnings.simplefilter("error") 46 | warnings.filterwarnings( 47 | "ignore", 48 | category=DeprecationWarning, # these can be triggered by downstream packages. 
ignore for these tests 49 | ) 50 | 51 | TOTAL += len(gallery_file_names) 52 | for script in gallery_file_names: 53 | logging.info("Executing %s" % script) 54 | try: 55 | with warnings.catch_warnings(record=True): 56 | warnings.filterwarnings( 57 | "ignore", 58 | message=_experimental_warning_re, 59 | category=UserWarning, 60 | ) 61 | warnings.filterwarnings( 62 | # this warning is triggered when some numpy extension code in an upstream package was compiled 63 | # against a different version of numpy than the one installed 64 | "ignore", 65 | message=_numpy_warning_re, 66 | category=RuntimeWarning, 67 | ) 68 | _import_from_file(script) 69 | except (ImportError, ValueError) as e: 70 | if "Please install linkml-runtime to run this example" in str(e): 71 | # this is OK because linkml is not always installed 72 | print(f"Skipping {script} because linkml-runtime is not installed") 73 | else: 74 | raise e 75 | except Exception: 76 | print(traceback.format_exc()) 77 | FAILURES += 1 78 | ERRORS += 1 79 | 80 | 81 | def main(): 82 | logging_format = ( 83 | "======================================================================\n" 84 | "%(asctime)s - %(levelname)s - %(message)s" 85 | ) 86 | logging.basicConfig(format=logging_format, level=logging.INFO) 87 | 88 | run_gallery_tests() 89 | 90 | final_message = "Ran %s tests" % TOTAL 91 | exitcode = 0 92 | if ERRORS > 0 or FAILURES > 0: 93 | exitcode = 1 94 | _list = list() 95 | if ERRORS > 0: 96 | _list.append("errors=%d" % ERRORS) 97 | if FAILURES > 0: 98 | _list.append("failures=%d" % FAILURES) 99 | final_message = "%s - FAILED (%s)" % (final_message, ",".join(_list)) 100 | else: 101 | final_message = "%s - OK" % final_message 102 | 103 | logging.info(final_message) 104 | 105 | return exitcode 106 | 107 | 108 | if __name__ == "__main__": 109 | sys.exit(main()) 110 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/tests/__init__.py -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/tests/unit/__init__.py -------------------------------------------------------------------------------- /tests/unit/back_compat_tests/1.0.5.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/tests/unit/back_compat_tests/1.0.5.h5 -------------------------------------------------------------------------------- /tests/unit/back_compat_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/tests/unit/back_compat_tests/__init__.py -------------------------------------------------------------------------------- /tests/unit/back_compat_tests/test_1_1_0.py: -------------------------------------------------------------------------------- 1 | import os 2 | from shutil import copyfile 3 | 4 | from hdmf.backends.hdf5.h5tools import HDF5IO 5 | from tests.unit.helpers.utils import Foo, FooBucket, get_foo_buildmanager 6 | from hdmf.testing import TestCase 7 | 8 | 9 | class Test1_1_0(TestCase): 10 | 
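    # Backwards-compatibility checks: files written with HDMF 1.0.5 should remain readable
    # (and appendable) with the current version of the library.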
11 | def setUp(self): 12 | # created using manager in test_io_hdf5_h5tools 13 | self.orig_1_0_5 = 'tests/unit/back_compat_tests/1.0.5.h5' 14 | self.path_1_0_5 = 'test_1.0.5.h5' 15 | copyfile(self.orig_1_0_5, self.path_1_0_5) 16 | 17 | # note: this may break if the current manager is different from the old manager 18 | # better to save a spec file 19 | self.manager = get_foo_buildmanager() 20 | 21 | def tearDown(self): 22 | if os.path.exists(self.path_1_0_5): 23 | os.remove(self.path_1_0_5) 24 | 25 | def test_read_1_0_5(self): 26 | '''Test whether we can read files made by hdmf version 1.0.5''' 27 | with HDF5IO(self.path_1_0_5, manager=self.manager, mode='r') as io: 28 | read_foofile = io.read() 29 | self.assertTrue(len(read_foofile.buckets) == 1) 30 | self.assertListEqual(read_foofile.buckets['test_bucket'].foos['foo1'].my_data[:].tolist(), [0, 1, 2, 3, 4]) 31 | self.assertListEqual(read_foofile.buckets['test_bucket'].foos['foo2'].my_data[:].tolist(), [5, 6, 7, 8, 9]) 32 | 33 | def test_append_1_0_5(self): 34 | '''Test whether we can append to files made by hdmf version 1.0.5''' 35 | foo = Foo('foo3', [10, 20, 30, 40, 50], "I am foo3", 17, 3.14) 36 | foobucket = FooBucket('foobucket2', [foo]) 37 | 38 | with HDF5IO(self.path_1_0_5, manager=self.manager, mode='a') as io: 39 | read_foofile = io.read() 40 | read_foofile.add_bucket(foobucket) 41 | io.write(read_foofile) 42 | 43 | with HDF5IO(self.path_1_0_5, manager=self.manager, mode='r') as io: 44 | read_foofile = io.read() 45 | self.assertListEqual(read_foofile.buckets['foobucket2'].foos['foo3'].my_data[:].tolist(), foo.my_data) 46 | -------------------------------------------------------------------------------- /tests/unit/build_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/tests/unit/build_tests/__init__.py -------------------------------------------------------------------------------- /tests/unit/build_tests/mapper_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/tests/unit/build_tests/mapper_tests/__init__.py -------------------------------------------------------------------------------- /tests/unit/build_tests/mapper_tests/test_build_datetime.py: -------------------------------------------------------------------------------- 1 | from hdmf.utils import docval, getargs 2 | from hdmf import Container 3 | from hdmf.spec import GroupSpec, DatasetSpec 4 | from hdmf.testing import TestCase 5 | from datetime import datetime, date 6 | 7 | from tests.unit.helpers.utils import create_test_type_map 8 | 9 | 10 | class Bar(Container): 11 | 12 | @docval({'name': 'name', 'type': str, 'doc': 'the name of this Bar'}, 13 | {'name': 'data', 'type': ('data', 'array_data', datetime, date), 'doc': 'some data'}) 14 | def __init__(self, **kwargs): 15 | name, data = getargs('name', 'data', kwargs) 16 | super().__init__(name=name) 17 | self.__data = data 18 | 19 | @property 20 | def data_type(self): 21 | return 'Bar' 22 | 23 | @property 24 | def data(self): 25 | return self.__data 26 | 27 | 28 | class TestBuildDatasetDateTime(TestCase): 29 | """Test that building a dataset with dtype isodatetime works with datetime and date objects.""" 30 | 31 | def test_datetime_scalar(self): 32 | bar_spec = GroupSpec( 33 | doc='A test group specification with a data type', 34 | 
data_type_def='Bar', 35 | datasets=[DatasetSpec(doc='an example dataset', name='data', dtype='isodatetime')], 36 | ) 37 | type_map = create_test_type_map([bar_spec], {'Bar': Bar}) 38 | 39 | bar_inst = Bar(name='my_bar', data=datetime(2023, 7, 9)) 40 | builder = type_map.build(bar_inst) 41 | ret = builder.get('data') 42 | assert ret.data == b'2023-07-09T00:00:00' 43 | assert ret.dtype == 'ascii' 44 | 45 | def test_date_scalar(self): 46 | bar_spec = GroupSpec( 47 | doc='A test group specification with a data type', 48 | data_type_def='Bar', 49 | datasets=[DatasetSpec(doc='an example dataset', name='data', dtype='isodatetime')], 50 | ) 51 | type_map = create_test_type_map([bar_spec], {'Bar': Bar}) 52 | 53 | bar_inst = Bar(name='my_bar', data=date(2023, 7, 9)) 54 | builder = type_map.build(bar_inst) 55 | ret = builder.get('data') 56 | assert ret.data == b'2023-07-09' 57 | assert ret.dtype == 'ascii' 58 | 59 | def test_datetime_array(self): 60 | bar_spec = GroupSpec( 61 | doc='A test group specification with a data type', 62 | data_type_def='Bar', 63 | datasets=[DatasetSpec(doc='an example dataset', name='data', dtype='isodatetime', dims=(None,))], 64 | ) 65 | type_map = create_test_type_map([bar_spec], {'Bar': Bar}) 66 | 67 | bar_inst = Bar(name='my_bar', data=[datetime(2023, 7, 9), datetime(2023, 7, 10)]) 68 | builder = type_map.build(bar_inst) 69 | ret = builder.get('data') 70 | assert ret.data == [b'2023-07-09T00:00:00', b'2023-07-10T00:00:00'] 71 | assert ret.dtype == 'ascii' 72 | 73 | def test_date_array(self): 74 | bar_spec = GroupSpec( 75 | doc='A test group specification with a data type', 76 | data_type_def='Bar', 77 | datasets=[DatasetSpec(doc='an example dataset', name='data', dtype='isodatetime', dims=(None,))], 78 | ) 79 | type_map = create_test_type_map([bar_spec], {'Bar': Bar}) 80 | 81 | bar_inst = Bar(name='my_bar', data=[date(2023, 7, 9), date(2023, 7, 10)]) 82 | builder = type_map.build(bar_inst) 83 | ret = builder.get('data') 84 | assert ret.data == [b'2023-07-09', b'2023-07-10'] 85 | assert ret.dtype == 'ascii' 86 | -------------------------------------------------------------------------------- /tests/unit/common/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/tests/unit/common/__init__.py -------------------------------------------------------------------------------- /tests/unit/common/test_common.py: -------------------------------------------------------------------------------- 1 | from hdmf import Data, Container 2 | from hdmf.common import get_type_map, load_type_config, unload_type_config 3 | from hdmf.testing import TestCase 4 | 5 | 6 | class TestCommonTypeMap(TestCase): 7 | 8 | def test_base_types(self): 9 | tm = get_type_map() 10 | cls = tm.get_dt_container_cls('Container', 'hdmf-common') 11 | self.assertIs(cls, Container) 12 | cls = tm.get_dt_container_cls('Data', 'hdmf-common') 13 | self.assertIs(cls, Data) 14 | 15 | def test_copy_ts_config(self): 16 | path = 'tests/unit/hdmf_config.yaml' 17 | load_type_config(config_path=path) 18 | tm = get_type_map() 19 | config = {'namespaces': {'hdmf-common': {'version': '3.12.2', 20 | 'data_types': {'VectorData': 21 | {'description': {'termset': 'example_test_term_set.yaml'}}, 22 | 'VectorIndex': {'data': '...'}}}, 'foo_namespace': 23 | {'version': '...', 'data_types': 24 | {'ExtensionContainer': {'description': None}}}}} 25 | 26 | self.assertEqual(tm.type_config.config, config) 27 | 
self.assertEqual(tm.type_config.path, [path]) 28 | unload_type_config() 29 | -------------------------------------------------------------------------------- /tests/unit/common/test_common_io.py: -------------------------------------------------------------------------------- 1 | from h5py import File 2 | 3 | from hdmf.backends.hdf5 import HDF5IO 4 | from hdmf.common import Container, get_manager, get_hdf5io 5 | from hdmf.spec import NamespaceCatalog 6 | from hdmf.testing import TestCase, remove_test_file 7 | 8 | from tests.unit.helpers.utils import get_temp_filepath 9 | 10 | 11 | class TestCacheSpec(TestCase): 12 | """Test caching spec specifically with the namespaces provided by hdmf.common. 13 | 14 | See also TestCacheSpec in tests/unit/test_io_hdf5_h5tools.py. 15 | """ 16 | 17 | def setUp(self): 18 | self.manager = get_manager() 19 | self.path = get_temp_filepath() 20 | self.container = Container('dummy') 21 | 22 | def tearDown(self): 23 | remove_test_file(self.path) 24 | 25 | def test_write_no_cache_spec(self): 26 | """Roundtrip test for not writing spec.""" 27 | with HDF5IO(self.path, manager=self.manager, mode="a") as io: 28 | io.write(self.container, cache_spec=False) 29 | with File(self.path, 'r') as f: 30 | self.assertNotIn('specifications', f) 31 | 32 | def test_write_cache_spec(self): 33 | """Roundtrip test for writing spec and reading it back in.""" 34 | with HDF5IO(self.path, manager=self.manager, mode="a") as io: 35 | io.write(self.container) 36 | with File(self.path, 'r') as f: 37 | self.assertIn('specifications', f) 38 | self._check_spec() 39 | 40 | def test_write_cache_spec_injected(self): 41 | """Roundtrip test for writing spec and reading it back in when HDF5IO is passed an open h5py.File.""" 42 | with File(self.path, 'w') as fil: 43 | with HDF5IO(self.path, manager=self.manager, file=fil, mode='a') as io: 44 | io.write(self.container) 45 | with File(self.path, 'r') as f: 46 | self.assertIn('specifications', f) 47 | self._check_spec() 48 | 49 | def _check_spec(self): 50 | ns_catalog = NamespaceCatalog() 51 | HDF5IO.load_namespaces(ns_catalog, self.path) 52 | self.maxDiff = None 53 | for namespace in self.manager.namespace_catalog.namespaces: 54 | with self.subTest(namespace=namespace): 55 | original_ns = self.manager.namespace_catalog.get_namespace(namespace) 56 | cached_ns = ns_catalog.get_namespace(namespace) 57 | ns_fields_to_check = list(original_ns.keys()) 58 | ns_fields_to_check.remove('schema') # schema fields will not match, so reset 59 | for ns_field in ns_fields_to_check: 60 | with self.subTest(namespace_field=ns_field): 61 | self.assertEqual(original_ns[ns_field], cached_ns[ns_field]) 62 | for dt in original_ns.get_registered_types(): 63 | with self.subTest(data_type=dt): 64 | original_spec = original_ns.get_spec(dt) 65 | cached_spec = cached_ns.get_spec(dt) 66 | with self.subTest('Data type spec is read back in'): 67 | self.assertIsNotNone(cached_spec) 68 | with self.subTest('Cached spec matches original spec'): 69 | self.assertDictEqual(original_spec, cached_spec) 70 | 71 | 72 | class TestGetHdf5IO(TestCase): 73 | 74 | def setUp(self): 75 | self.path = get_temp_filepath() 76 | 77 | def tearDown(self): 78 | remove_test_file(self.path) 79 | 80 | def test_gethdf5io(self): 81 | """Test the get_hdf5io convenience method with manager=None.""" 82 | with get_hdf5io(self.path, "w") as io: 83 | self.assertIsNotNone(io.manager) 84 | 85 | def test_gethdf5io_manager(self): 86 | """Test the get_hdf5io convenience method with manager set.""" 87 | manager = get_manager() 
88 | with get_hdf5io(self.path, "w", manager=manager) as io: 89 | self.assertIs(io.manager, manager) 90 | -------------------------------------------------------------------------------- /tests/unit/common/test_multi.py: -------------------------------------------------------------------------------- 1 | from hdmf.common import SimpleMultiContainer 2 | from hdmf.container import Container, Data 3 | from hdmf.testing import TestCase, H5RoundTripMixin 4 | 5 | 6 | class SimpleMultiContainerRoundTrip(H5RoundTripMixin, TestCase): 7 | 8 | def setUpContainer(self): 9 | containers = [ 10 | Container('container1'), 11 | Container('container2'), 12 | Data('data1', [0, 1, 2, 3, 4]), 13 | Data('data2', [0.0, 1.0, 2.0, 3.0, 4.0]), 14 | ] 15 | multi_container = SimpleMultiContainer(name='multi', containers=containers) 16 | return multi_container 17 | -------------------------------------------------------------------------------- /tests/unit/example_dynamic_term_set.yaml: -------------------------------------------------------------------------------- 1 | id: https://w3id.org/linkml/examples/nwb_dynamic_enums 2 | title: dynamic enums example 3 | name: nwb_dynamic_enums 4 | description: this schema demonstrates the use of dynamic enums 5 | 6 | prefixes: 7 | linkml: https://w3id.org/linkml/ 8 | CL: http://purl.obolibrary.org/obo/CL_ 9 | 10 | imports: 11 | - linkml:types 12 | 13 | default_range: string 14 | 15 | # ======================== # 16 | # CLASSES # 17 | # ======================== # 18 | classes: 19 | BrainSample: 20 | slots: 21 | - cell_type 22 | 23 | # ======================== # 24 | # SLOTS # 25 | # ======================== # 26 | slots: 27 | cell_type: 28 | required: true 29 | range: NeuronTypeEnum 30 | 31 | # ======================== # 32 | # ENUMS # 33 | # ======================== # 34 | enums: 35 | NeuronTypeEnum: 36 | reachable_from: 37 | source_ontology: obo:cl 38 | source_nodes: 39 | - CL:0000540 ## neuron 40 | include_self: false 41 | relationship_types: 42 | - rdfs:subClassOf 43 | -------------------------------------------------------------------------------- /tests/unit/example_test_term_set.yaml: -------------------------------------------------------------------------------- 1 | id: termset/species_example 2 | name: Species 3 | version: 0.0.1 4 | prefixes: 5 | NCBI_TAXON: https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=Info&id= 6 | imports: 7 | - linkml:types 8 | default_range: string 9 | 10 | enums: 11 | Species: 12 | permissible_values: 13 | Homo sapiens: 14 | description: the species is human 15 | meaning: NCBI_TAXON:9606 16 | Mus musculus: 17 | description: the species is a house mouse 18 | meaning: NCBI_TAXON:10090 19 | Ursus arctos horribilis: 20 | description: the species is a grizzly bear 21 | meaning: NCBI_TAXON:116960 22 | Myrmecophaga tridactyla: 23 | description: the species is an anteater 24 | meaning: NCBI_TAXON:71006 25 | Ailuropoda melanoleuca: 26 | description: the species is a panda 27 | meaning: NCBI_TAXON:9646 28 | -------------------------------------------------------------------------------- /tests/unit/example_test_term_set2.yaml: -------------------------------------------------------------------------------- 1 | id: termset/species_example2 2 | name: Species 3 | version: 0.0.1 4 | prefixes: 5 | NCBI_TAXON: https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=Info&id= 6 | imports: 7 | - linkml:types 8 | default_range: string 9 | 10 | enums: 11 | Species: 12 | permissible_values: 13 | Homo sapiens: 14 | description: the species is human 
15 | meaning: NCBI_TAXON:9606 16 | Mus musculus: 17 | description: the species is a house mouse 18 | meaning: NCBI_TAXON:10090 19 | Ursus arctos horribilis: 20 | description: the species is a grizzly bear 21 | meaning: NCBI_TAXON:116960 22 | -------------------------------------------------------------------------------- /tests/unit/hdmf_config.yaml: -------------------------------------------------------------------------------- 1 | namespaces: 2 | hdmf-common: 3 | version: 3.12.2 4 | data_types: 5 | VectorData: 6 | description: 7 | termset: example_test_term_set.yaml 8 | VectorIndex: 9 | data: ... 10 | foo_namespace: 11 | version: ... 12 | data_types: 13 | ExtensionContainer: 14 | description: 15 | -------------------------------------------------------------------------------- /tests/unit/hdmf_config2.yaml: -------------------------------------------------------------------------------- 1 | namespaces: 2 | hdmf-common: 3 | version: 3.12.2 4 | data_types: 5 | Data: 6 | description: 7 | termset: example_test_term_set.yaml 8 | EnumData: 9 | description: 10 | termset: example_test_term_set.yaml 11 | VectorData: 12 | name: 13 | namespace2: 14 | version: 0 15 | data_types: 16 | MythicData: 17 | description: 18 | termset: example_test_term_set.yaml 19 | -------------------------------------------------------------------------------- /tests/unit/helpers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/tests/unit/helpers/__init__.py -------------------------------------------------------------------------------- /tests/unit/spec_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/tests/unit/spec_tests/__init__.py -------------------------------------------------------------------------------- /tests/unit/spec_tests/test-ext.base.yaml: -------------------------------------------------------------------------------- 1 | datasets: 2 | - my_data_type_def: TestExtData 3 | my_data_type_inc: TestData 4 | doc: An abstract data type for a dataset. 5 | 6 | groups: 7 | - my_data_type_def: TestExtContainer 8 | my_data_type_inc: Container 9 | doc: An abstract data type for a generic container storing collections of data and metadata. 10 | 11 | - my_data_type_def: TestExtTable 12 | my_data_type_inc: TestTable 13 | doc: An abstract data type for a table. 14 | -------------------------------------------------------------------------------- /tests/unit/spec_tests/test-ext.namespace.yaml: -------------------------------------------------------------------------------- 1 | namespaces: 2 | - name: test-ext 3 | doc: Test extension namespace 4 | author: 5 | - Test test 6 | contact: 7 | - test@test.com 8 | full_name: Test extension 9 | schema: 10 | - namespace: test 11 | - doc: This source module contains base data types. 12 | source: test-ext.base.yaml 13 | title: Base data types 14 | version: 0.1.0 15 | -------------------------------------------------------------------------------- /tests/unit/spec_tests/test.base.yaml: -------------------------------------------------------------------------------- 1 | datasets: 2 | - my_data_type_def: TestData 3 | my_data_type_inc: Data 4 | doc: An abstract data type for a dataset. 
5 | 6 | groups: 7 | - my_data_type_def: TestContainer 8 | my_data_type_inc: Container 9 | doc: An abstract data type for a generic container storing collections of data and metadata. 10 | 11 | - my_data_type_def: TestTable 12 | my_data_type_inc: DynamicTable 13 | doc: An abstract data type for a table. 14 | -------------------------------------------------------------------------------- /tests/unit/spec_tests/test.namespace.yaml: -------------------------------------------------------------------------------- 1 | namespaces: 2 | - name: test 3 | doc: Test namespace 4 | author: 5 | - Test test 6 | contact: 7 | - test@test.com 8 | full_name: Test 9 | schema: 10 | - namespace: hdmf-common 11 | my_data_types: 12 | - Data 13 | - DynamicTable 14 | - Container 15 | - doc: This source module contains base data types. 16 | source: test.base.yaml 17 | title: Base data types 18 | version: 0.1.0 19 | -------------------------------------------------------------------------------- /tests/unit/spec_tests/test_attribute_spec.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from hdmf.spec import AttributeSpec, RefSpec 4 | from hdmf.testing import TestCase 5 | 6 | 7 | class AttributeSpecTests(TestCase): 8 | 9 | def test_constructor(self): 10 | spec = AttributeSpec('attribute1', 11 | 'my first attribute', 12 | 'text') 13 | self.assertEqual(spec['name'], 'attribute1') 14 | self.assertEqual(spec['dtype'], 'text') 15 | self.assertEqual(spec['doc'], 'my first attribute') 16 | self.assertIsNone(spec.parent) 17 | json.dumps(spec) # to ensure there are no circular links 18 | 19 | def test_invalid_dtype(self): 20 | with self.assertRaises(ValueError): 21 | AttributeSpec(name='attribute1', 22 | doc='my first attribute', 23 | dtype='invalid' # <-- Invalid dtype must raise a ValueError 24 | ) 25 | 26 | def test_both_value_and_default_value_set(self): 27 | with self.assertRaises(ValueError): 28 | AttributeSpec(name='attribute1', 29 | doc='my first attribute', 30 | dtype='int', 31 | value=5, 32 | default_value=10 # <-- Default_value and value can't be set at the same time 33 | ) 34 | 35 | def test_colliding_shape_and_dims(self): 36 | with self.assertRaises(ValueError): 37 | AttributeSpec(name='attribute1', 38 | doc='my first attribute', 39 | dtype='int', 40 | dims=['test'], 41 | shape=[None, 2] # <-- Length of shape and dims do not match must raise a ValueError 42 | ) 43 | 44 | def test_default_value(self): 45 | spec = AttributeSpec('attribute1', 46 | 'my first attribute', 47 | 'text', 48 | default_value='some text') 49 | self.assertEqual(spec['default_value'], 'some text') 50 | self.assertEqual(spec.default_value, 'some text') 51 | 52 | def test_shape(self): 53 | shape = [None, 2] 54 | spec = AttributeSpec('attribute1', 55 | 'my first attribute', 56 | 'text', 57 | shape=shape) 58 | self.assertEqual(spec['shape'], shape) 59 | self.assertEqual(spec.shape, shape) 60 | 61 | def test_dims_without_shape(self): 62 | spec = AttributeSpec('attribute1', 63 | 'my first attribute', 64 | 'text', 65 | dims=['test']) 66 | self.assertEqual(spec.shape, (None, )) 67 | 68 | def test_build_spec(self): 69 | spec_dict = {'name': 'attribute1', 70 | 'doc': 'my first attribute', 71 | 'dtype': 'text', 72 | 'shape': [None], 73 | 'dims': ['dim1'], 74 | 'value': ['a', 'b']} 75 | ret = AttributeSpec.build_spec(spec_dict) 76 | self.assertTrue(isinstance(ret, AttributeSpec)) 77 | self.assertDictEqual(ret, spec_dict) 78 | 79 | def test_build_spec_reftype(self): 80 | spec_dict = {'name': 
'attribute1', 81 | 'doc': 'my first attribute', 82 | 'dtype': {'target_type': 'AnotherType', 'reftype': 'object'}} 83 | expected = spec_dict.copy() 84 | expected['dtype'] = RefSpec(target_type='AnotherType', reftype='object') 85 | ret = AttributeSpec.build_spec(spec_dict) 86 | self.assertTrue(isinstance(ret, AttributeSpec)) 87 | self.assertDictEqual(ret, expected) 88 | 89 | def test_build_spec_no_doc(self): 90 | spec_dict = {'name': 'attribute1', 'dtype': 'text'} 91 | msg = "AttributeSpec.__init__: missing argument 'doc'" 92 | with self.assertRaisesWith(TypeError, msg): 93 | AttributeSpec.build_spec(spec_dict) 94 | 95 | def test_build_warn_extra_args(self): 96 | spec_dict = { 97 | 'name': 'attribute1', 98 | 'doc': 'test attribute', 99 | 'dtype': 'int', 100 | 'quantity': '?', 101 | } 102 | msg = ("Unexpected keys ['quantity'] in spec {'name': 'attribute1', 'doc': 'test attribute', " 103 | "'dtype': 'int', 'quantity': '?'}") 104 | with self.assertWarnsWith(UserWarning, msg): 105 | AttributeSpec.build_spec(spec_dict) 106 | -------------------------------------------------------------------------------- /tests/unit/spec_tests/test_dtype_spec.py: -------------------------------------------------------------------------------- 1 | from hdmf.spec import DtypeSpec, DtypeHelper, RefSpec 2 | from hdmf.testing import TestCase 3 | 4 | 5 | class DtypeSpecHelper(TestCase): 6 | def setUp(self): 7 | pass 8 | 9 | def test_recommended_dtypes(self): 10 | self.assertListEqual(DtypeHelper.recommended_primary_dtypes, 11 | list(DtypeHelper.primary_dtype_synonyms.keys())) 12 | 13 | def test_valid_primary_dtypes(self): 14 | a = set(list(DtypeHelper.primary_dtype_synonyms.keys()) + 15 | [vi for v in DtypeHelper.primary_dtype_synonyms.values() for vi in v]) 16 | self.assertSetEqual(a, DtypeHelper.valid_primary_dtypes) 17 | 18 | def test_simplify_cpd_type(self): 19 | compound_type = [DtypeSpec('test', 'test field', 'float'), 20 | DtypeSpec('test2', 'test field2', 'int')] 21 | expected_result = ['float', 'int'] 22 | result = DtypeHelper.simplify_cpd_type(compound_type) 23 | self.assertListEqual(result, expected_result) 24 | 25 | def test_simplify_cpd_type_ref(self): 26 | compound_type = [DtypeSpec('test', 'test field', 'float'), 27 | DtypeSpec('test2', 'test field2', RefSpec(target_type='MyType', reftype='object'))] 28 | expected_result = ['float', 'object'] 29 | result = DtypeHelper.simplify_cpd_type(compound_type) 30 | self.assertListEqual(result, expected_result) 31 | 32 | def test_check_dtype_ok(self): 33 | self.assertEqual('int', DtypeHelper.check_dtype('int')) 34 | 35 | def test_check_dtype_bad(self): 36 | msg = "dtype 'bad dtype' is not a valid primary data type." 
37 | with self.assertRaisesRegex(ValueError, msg): 38 | DtypeHelper.check_dtype('bad dtype') 39 | 40 | def test_check_dtype_ref(self): 41 | refspec = RefSpec(target_type='target', reftype='object') 42 | self.assertIs(refspec, DtypeHelper.check_dtype(refspec)) 43 | 44 | 45 | class DtypeSpecTests(TestCase): 46 | def setUp(self): 47 | pass 48 | 49 | def test_constructor(self): 50 | spec = DtypeSpec('column1', 'an example column', 'int') 51 | self.assertEqual(spec.doc, 'an example column') 52 | self.assertEqual(spec.name, 'column1') 53 | self.assertEqual(spec.dtype, 'int') 54 | 55 | def test_build_spec(self): 56 | spec = DtypeSpec.build_spec({'doc': 'an example column', 'name': 'column1', 'dtype': 'int'}) 57 | self.assertEqual(spec.doc, 'an example column') 58 | self.assertEqual(spec.name, 'column1') 59 | self.assertEqual(spec.dtype, 'int') 60 | 61 | def test_invalid_refspec_dict(self): 62 | """Test missing or bad target key for RefSpec.""" 63 | msg = "'dtype' must have the key 'target_type'" 64 | with self.assertRaisesWith(ValueError, msg): 65 | DtypeSpec.assertValidDtype({'no target': 'test', 'reftype': 'object'}) 66 | 67 | def test_refspec_dtype(self): 68 | # just making sure this does not cause an error 69 | DtypeSpec('column1', 'an example column', RefSpec('TimeSeries', 'object')) 70 | 71 | def test_invalid_dtype(self): 72 | msg = "dtype 'bad dtype' is not a valid primary data type." 73 | with self.assertRaisesRegex(ValueError, msg): 74 | DtypeSpec('column1', 'an example column', dtype='bad dtype') 75 | 76 | def test_is_ref(self): 77 | spec = DtypeSpec('column1', 'an example column', RefSpec('TimeSeries', 'object')) 78 | self.assertTrue(DtypeSpec.is_ref(spec)) 79 | spec = DtypeSpec('column1', 'an example column', 'int') 80 | self.assertFalse(DtypeSpec.is_ref(spec)) 81 | -------------------------------------------------------------------------------- /tests/unit/spec_tests/test_link_spec.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from hdmf.spec import GroupSpec, LinkSpec 4 | from hdmf.testing import TestCase 5 | 6 | 7 | class LinkSpecTests(TestCase): 8 | 9 | def test_constructor(self): 10 | spec = LinkSpec( 11 | doc='A test link', 12 | target_type='Group1', 13 | quantity='+', 14 | name='Link1', 15 | ) 16 | self.assertEqual(spec.doc, 'A test link') 17 | self.assertEqual(spec.target_type, 'Group1') 18 | self.assertEqual(spec.data_type_inc, 'Group1') 19 | self.assertEqual(spec.quantity, '+') 20 | self.assertEqual(spec.name, 'Link1') 21 | json.dumps(spec) 22 | 23 | def test_constructor_target_spec_def(self): 24 | group_spec_def = GroupSpec( 25 | data_type_def='Group1', 26 | doc='A test group', 27 | ) 28 | spec = LinkSpec( 29 | doc='A test link', 30 | target_type=group_spec_def, 31 | ) 32 | self.assertEqual(spec.target_type, 'Group1') 33 | json.dumps(spec) 34 | 35 | def test_constructor_target_spec_inc(self): 36 | group_spec_inc = GroupSpec( 37 | data_type_inc='Group1', 38 | doc='A test group', 39 | ) 40 | msg = "'target_type' must be a string or a GroupSpec or DatasetSpec with a 'data_type_def' key." 
41 | with self.assertRaisesWith(ValueError, msg): 42 | LinkSpec( 43 | doc='A test link', 44 | target_type=group_spec_inc, 45 | ) 46 | 47 | def test_constructor_defaults(self): 48 | spec = LinkSpec( 49 | doc='A test link', 50 | target_type='Group1', 51 | ) 52 | self.assertEqual(spec.quantity, 1) 53 | self.assertIsNone(spec.name) 54 | json.dumps(spec) 55 | 56 | def test_required_is_many(self): 57 | quantity_opts = ['?', 1, '*', '+'] 58 | is_required = [False, True, False, True] 59 | is_many = [False, False, True, True] 60 | for (quantity, req, many) in zip(quantity_opts, is_required, is_many): 61 | with self.subTest(quantity=quantity): 62 | spec = LinkSpec( 63 | doc='A test link', 64 | target_type='Group1', 65 | quantity=quantity, 66 | name='Link1', 67 | ) 68 | self.assertEqual(spec.required, req) 69 | self.assertEqual(spec.is_many(), many) 70 | 71 | def test_build_warn_extra_args(self): 72 | spec_dict = { 73 | 'name': 'link1', 74 | 'doc': 'test link', 75 | 'target_type': 'TestType', 76 | 'required': True, 77 | } 78 | msg = ("Unexpected keys ['required'] in spec {'name': 'link1', 'doc': 'test link', " 79 | "'target_type': 'TestType', 'required': True}") 80 | with self.assertWarnsWith(UserWarning, msg): 81 | LinkSpec.build_spec(spec_dict) 82 | -------------------------------------------------------------------------------- /tests/unit/spec_tests/test_ref_spec.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from hdmf.spec import RefSpec 4 | from hdmf.testing import TestCase 5 | 6 | 7 | class RefSpecTests(TestCase): 8 | 9 | def test_constructor(self): 10 | spec = RefSpec('TimeSeries', 'object') 11 | self.assertEqual(spec.target_type, 'TimeSeries') 12 | self.assertEqual(spec.reftype, 'object') 13 | json.dumps(spec) # to ensure there are no circular links 14 | 15 | def test_wrong_reference_type(self): 16 | with self.assertRaises(ValueError): 17 | RefSpec('TimeSeries', 'unknownreftype') 18 | -------------------------------------------------------------------------------- /tests/unit/test_monitor.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | def test_deprecation_warning(): 5 | with pytest.warns(DeprecationWarning): 6 | import hdmf.monitor # noqa: F401 7 | -------------------------------------------------------------------------------- /tests/unit/test_table.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | from hdmf.container import Table, Row, RowGetter 4 | from hdmf.testing import TestCase 5 | 6 | 7 | class TestTable(TestCase): 8 | 9 | @classmethod 10 | def get_table_class(cls): 11 | class MyTable(Table): 12 | 13 | __defaultname__ = 'my_table' 14 | 15 | __columns__ = [ 16 | {'name': 'col1', 'type': str, 'help': 'a string column'}, 17 | {'name': 'col2', 'type': int, 'help': 'an integer column'}, 18 | ] 19 | return MyTable 20 | 21 | def test_init(self): 22 | MyTable = TestTable.get_table_class() 23 | table = MyTable('test_table') 24 | self.assertTrue(hasattr(table, '__colidx__')) 25 | self.assertEqual(table.__colidx__, {'col1': 0, 'col2': 1}) 26 | 27 | def test_add_row_getitem(self): 28 | MyTable = TestTable.get_table_class() 29 | table = MyTable('test_table') 30 | table.add_row(col1='foo', col2=100) 31 | table.add_row(col1='bar', col2=200) 32 | row1 = table[0] 33 | row2 = table[1] 34 | self.assertEqual(row1, ('foo', 100)) 35 | self.assertEqual(row2, ('bar', 200)) 36 | 37 | def test_to_dataframe(self): 38 
| MyTable = TestTable.get_table_class() 39 | table = MyTable('test_table') 40 | table.add_row(col1='foo', col2=100) 41 | table.add_row(col1='bar', col2=200) 42 | 43 | df = table.to_dataframe() 44 | exp = pd.DataFrame(data=[{'col1': 'foo', 'col2': 100}, {'col1': 'bar', 'col2': 200}]) 45 | pd.testing.assert_frame_equal(df, exp) 46 | 47 | def test_from_dataframe(self): 48 | MyTable = TestTable.get_table_class() 49 | exp = pd.DataFrame(data=[{'col1': 'foo', 'col2': 100}, {'col1': 'bar', 'col2': 200}]) 50 | table = MyTable.from_dataframe(exp) 51 | row1 = table[0] 52 | row2 = table[1] 53 | self.assertEqual(row1, ('foo', 100)) 54 | self.assertEqual(row2, ('bar', 200)) 55 | 56 | 57 | class TestRow(TestCase): 58 | 59 | def setUp(self): 60 | self.MyTable = TestTable.get_table_class() 61 | 62 | class MyRow(Row): 63 | __table__ = self.MyTable 64 | 65 | self.MyRow = MyRow 66 | 67 | self.table = self.MyTable('test_table') 68 | 69 | def test_row_no_table(self): 70 | with self.assertRaisesRegex(ValueError, '__table__ must be set if sub-classing Row'): 71 | class MyRow(Row): 72 | pass 73 | 74 | def test_table_init(self): 75 | MyTable = TestTable.get_table_class() 76 | table = MyTable('test_table') 77 | self.assertFalse(hasattr(table, 'row')) 78 | 79 | table_w_row = self.MyTable('test_table') 80 | self.assertTrue(hasattr(table_w_row, 'row')) 81 | self.assertIsInstance(table_w_row.row, RowGetter) 82 | self.assertIs(table_w_row.row.table, table_w_row) 83 | 84 | def test_init(self): 85 | row1 = self.MyRow(col1='foo', col2=100, table=self.table) 86 | 87 | # make sure Row object set up properly 88 | self.assertEqual(row1.idx, 0) 89 | self.assertEqual(row1.col1, 'foo') 90 | self.assertEqual(row1.col2, 100) 91 | 92 | # make sure Row object is stored in Table properly 93 | tmp_row1 = self.table.row[0] 94 | self.assertEqual(tmp_row1, row1) 95 | 96 | def test_add_row_getitem(self): 97 | self.table.add_row(col1='foo', col2=100) 98 | self.table.add_row(col1='bar', col2=200) 99 | 100 | row1 = self.table.row[0] 101 | self.assertIsInstance(row1, self.MyRow) 102 | self.assertEqual(row1.idx, 0) 103 | self.assertEqual(row1.col1, 'foo') 104 | self.assertEqual(row1.col2, 100) 105 | 106 | row2 = self.table.row[1] 107 | self.assertIsInstance(row2, self.MyRow) 108 | self.assertEqual(row2.idx, 1) 109 | self.assertEqual(row2.col1, 'bar') 110 | self.assertEqual(row2.col2, 200) 111 | 112 | # test memoization 113 | row3 = self.table.row[0] 114 | self.assertIs(row3, row1) 115 | 116 | def test_todict(self): 117 | row1 = self.MyRow(col1='foo', col2=100, table=self.table) 118 | self.assertEqual(row1.todict(), {'col1': 'foo', 'col2': 100}) 119 | 120 | def test___str__(self): 121 | row1 = self.MyRow(col1='foo', col2=100, table=self.table) 122 | row1_str = str(row1) 123 | expected_str = "Row(0, test_table) = {'col1': 'foo', 'col2': 100}" 124 | self.assertEqual(row1_str, expected_str) 125 | -------------------------------------------------------------------------------- /tests/unit/test_term_set_input/schemasheets/classes.tsv: -------------------------------------------------------------------------------- 1 | class slot 2 | > class slot 3 | BrainSample cell_type 4 | -------------------------------------------------------------------------------- /tests/unit/test_term_set_input/schemasheets/enums.tsv: -------------------------------------------------------------------------------- 1 | valueset value mapping description 2 | > enum permissible_value meaning description 3 | NeuronOrGlialCellTypeEnum Enumeration to capture various cell types 
found in the brain. 4 | NeuronOrGlialCellTypeEnum PYRAMIDAL_NEURON CL:0000598 Neurons with a pyramidal shaped cell body (soma) and two distinct dendritic trees. 5 | NeuronOrGlialCellTypeEnum INTERNEURON CL:0000099 Neurons whose axons (and dendrites) are limited to a single brain area. 6 | NeuronOrGlialCellTypeEnum MOTOR_NEURON CL:0000100 Neurons whose cell body is located in the motor cortex, brainstem or the spinal cord, and whose axon (fiber) projects to the spinal cord or outside of the spinal cord to directly or indirectly control effector organs, mainly muscles and glands. 7 | NeuronOrGlialCellTypeEnum ASTROCYTE CL:0000127 Characteristic star-shaped glial cells in the brain and spinal cord. 8 | NeuronOrGlialCellTypeEnum OLIGODENDROCYTE CL:0000128 Type of neuroglia whose main functions are to provide support and insulation to axons within the central nervous system (CNS) of jawed vertebrates. 9 | NeuronOrGlialCellTypeEnum MICROGLIAL_CELL CL:0000129 Microglia are the resident immune cells of the brain and constantly patrol the cerebral microenvironment to respond to pathogens and damage. 10 | -------------------------------------------------------------------------------- /tests/unit/test_term_set_input/schemasheets/nwb_static_enums.yaml: -------------------------------------------------------------------------------- 1 | classes: 2 | BrainSample: 3 | slot_usage: 4 | cell_type: {} 5 | slots: [cell_type] 6 | default_prefix: TEMP 7 | default_range: string 8 | description: this schema demonstrates the use of static enums 9 | enums: 10 | NeuronOrGlialCellTypeEnum: 11 | description: Enumeration to capture various cell types found in the brain. 12 | permissible_values: 13 | ASTROCYTE: {description: Characteristic star-shaped glial cells in the brain 14 | and spinal cord., meaning: 'CL:0000127'} 15 | INTERNEURON: {description: Neurons whose axons (and dendrites) are limited to 16 | a single brain area., meaning: 'CL:0000099'} 17 | MICROGLIAL_CELL: {description: Microglia are the resident immune cells of the 18 | brain and constantly patrol the cerebral microenvironment to respond to 19 | pathogens and damage., meaning: 'CL:0000129'} 20 | MOTOR_NEURON: {description: 'Neurons whose cell body is located in the motor 21 | cortex, brainstem or the spinal cord, and whose axon (fiber) projects to 22 | the spinal cord or outside of the spinal cord to directly or indirectly 23 | control effector organs, mainly muscles and glands.', meaning: 'CL:0000100'} 24 | OLIGODENDROCYTE: {description: Type of neuroglia whose main functions are to 25 | provide support and insulation to axons within the central nervous system 26 | (CNS) of jawed vertebrates., meaning: 'CL:0000128'} 27 | PYRAMIDAL_NEURON: {description: Neurons with a pyramidal shaped cell body (soma) 28 | and two distinct dendritic trees., meaning: 'CL:0000598'} 29 | id: https://w3id.org/linkml/examples/nwb_static_enums 30 | imports: ['linkml:types'] 31 | name: nwb_static_enums 32 | prefixes: {CL: 'http://purl.obolibrary.org/obo/CL_', TEMP: 'https://example.org/TEMP/', 33 | linkml: 'https://w3id.org/linkml/'} 34 | slots: 35 | cell_type: {required: true} 36 | title: static enums example 37 | -------------------------------------------------------------------------------- /tests/unit/test_term_set_input/schemasheets/prefixes.tsv: -------------------------------------------------------------------------------- 1 | prefix URI 2 | > prefix prefix_reference 3 | linkml https://w3id.org/linkml/ 4 | CL http://purl.obolibrary.org/obo/CL_ 5 | 
-------------------------------------------------------------------------------- /tests/unit/test_term_set_input/schemasheets/schema.tsv: -------------------------------------------------------------------------------- 1 | schema uri title description 2 | > schema id title description 3 | nwb_static_enums https://w3id.org/linkml/examples/nwb_static_enums static enums example this schema demonstrates the use of static enums 4 | -------------------------------------------------------------------------------- /tests/unit/test_term_set_input/schemasheets/slots.tsv: -------------------------------------------------------------------------------- 1 | term required 2 | > slot required 3 | cell_type TRUE 4 | -------------------------------------------------------------------------------- /tests/unit/utils_test/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/tests/unit/utils_test/__init__.py -------------------------------------------------------------------------------- /tests/unit/utils_test/test_core_DataChunk.py: -------------------------------------------------------------------------------- 1 | from copy import copy, deepcopy 2 | 3 | import numpy as np 4 | from hdmf.data_utils import DataChunk 5 | from hdmf.testing import TestCase 6 | 7 | 8 | class DataChunkTests(TestCase): 9 | 10 | def setUp(self): 11 | pass 12 | 13 | def tearDown(self): 14 | pass 15 | 16 | def test_datachunk_copy(self): 17 | obj = DataChunk(data=np.arange(3), selection=np.s_[0:3]) 18 | obj_copy = copy(obj) 19 | self.assertNotEqual(id(obj), id(obj_copy)) 20 | self.assertEqual(id(obj.data), id(obj_copy.data)) 21 | self.assertEqual(id(obj.selection), id(obj_copy.selection)) 22 | 23 | def test_datachunk_deepcopy(self): 24 | obj = DataChunk(data=np.arange(3), selection=np.s_[0:3]) 25 | obj_copy = deepcopy(obj) 26 | self.assertNotEqual(id(obj), id(obj_copy)) 27 | self.assertNotEqual(id(obj.data), id(obj_copy.data)) 28 | self.assertNotEqual(id(obj.selection), id(obj_copy.selection)) 29 | 30 | def test_datachunk_astype(self): 31 | obj = DataChunk(data=np.arange(3), selection=np.s_[0:3]) 32 | newtype = np.dtype('int16') 33 | obj_astype = obj.astype(newtype) 34 | self.assertNotEqual(id(obj), id(obj_astype)) 35 | self.assertEqual(obj_astype.dtype, np.dtype(newtype)) 36 | -------------------------------------------------------------------------------- /tests/unit/utils_test/test_core_DataIO.py: -------------------------------------------------------------------------------- 1 | from copy import copy, deepcopy 2 | 3 | import numpy as np 4 | from hdmf.data_utils import DataIO 5 | from hdmf.testing import TestCase 6 | 7 | 8 | class DataIOTests(TestCase): 9 | 10 | def test_copy(self): 11 | obj = DataIO(data=[1., 2., 3.]) 12 | obj_copy = copy(obj) 13 | self.assertNotEqual(id(obj), id(obj_copy)) 14 | self.assertEqual(id(obj.data), id(obj_copy.data)) 15 | 16 | def test_deepcopy(self): 17 | obj = DataIO(data=[1., 2., 3.]) 18 | obj_copy = deepcopy(obj) 19 | self.assertNotEqual(id(obj), id(obj_copy)) 20 | self.assertNotEqual(id(obj.data), id(obj_copy.data)) 21 | 22 | def test_dataio_slice_delegation(self): 23 | indata = np.arange(30) 24 | dset = DataIO(indata) 25 | self.assertTrue(np.all(dset[2:15] == indata[2:15])) 26 | 27 | indata = np.arange(50).reshape(5, 10) 28 | dset = DataIO(indata) 29 | self.assertTrue(np.all(dset[1:3, 5:8] == indata[1:3, 5:8])) 30 | 31 | def test_set_data_io_data_already_set(self): 32 | 
""" 33 | Test that Data.set_dataio works as intended 34 | """ 35 | dataio = DataIO(data=np.arange(30).reshape(5, 2, 3)) 36 | with self.assertRaisesWith(ValueError, "cannot overwrite 'data' on DataIO"): 37 | dataio.data=[1,2,3,4] 38 | 39 | def test_dataio_options(self): 40 | """ 41 | Test that either data or dtype+shape are specified exclusively 42 | """ 43 | with self.assertWarnsRegex(UserWarning, "Argument 'dtype' is ignored when 'data' is specified"): 44 | DataIO(data=np.arange(5), dtype=int) 45 | with self.assertWarnsRegex(UserWarning, "Argument 'shape' is ignored when 'data' is specified"): 46 | DataIO(data=np.arange(5), shape=(3,)) 47 | 48 | dataio = DataIO(shape=(3,), dtype=int) 49 | with self.assertRaisesRegex(ValueError, "Setting data when dtype and shape are not None is not supported"): 50 | dataio.data = np.arange(5) 51 | -------------------------------------------------------------------------------- /tests/unit/utils_test/test_data_utils.py: -------------------------------------------------------------------------------- 1 | from hdmf.data_utils import append_data 2 | from hdmf.testing import TestCase 3 | 4 | import numpy as np 5 | from numpy.testing import assert_array_equal 6 | 7 | try: 8 | import zarr 9 | ZARR_INSTALLED = True 10 | except ImportError: 11 | ZARR_INSTALLED = False 12 | 13 | 14 | class MyIterable: 15 | def __init__(self, data): 16 | self.data = data 17 | 18 | 19 | class TestAppendData(TestCase): 20 | def test_append_exception(self): 21 | data = MyIterable([1, 2, 3, 4, 5]) 22 | with self.assertRaises(ValueError): 23 | append_data(data, 4) 24 | 25 | def test_append_1D_to_2D(self): 26 | data = np.array([[1, 2], [3, 4]]) 27 | arg = np.array([5, 6]) 28 | new = append_data(data, arg) 29 | assert_array_equal(new, np.array([[1, 2], 30 | [3, 4], 31 | [5, 6]])) 32 | 33 | def test_append_same_dim_array_arg(self): 34 | data = np.array([[1, 2], [3, 4]]) 35 | arg = np.array([[5, 6]]) 36 | new = append_data(data, arg) 37 | assert_array_equal(new, np.array([[1, 2], 38 | [3, 4], 39 | [5, 6]])) 40 | 41 | class TestZarrAppendData(TestCase): 42 | def setUp(self): 43 | if not ZARR_INSTALLED: 44 | self.skipTest("optional Zarr package is not installed") 45 | 46 | def test_append_data_zarr(self): 47 | zarr_array = zarr.array([1,2,3]) 48 | new = append_data(zarr_array, 4) 49 | 50 | assert_array_equal(new[:], np.array([1,2,3,4])) 51 | -------------------------------------------------------------------------------- /tests/unit/validator_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hdmf-dev/hdmf/26acff54aad825edf377dd5df9424643df875e2d/tests/unit/validator_tests/__init__.py -------------------------------------------------------------------------------- /tests/unit/validator_tests/test_errors.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from hdmf.validate.errors import Error 4 | 5 | 6 | class TestErrorEquality(TestCase): 7 | def test_self_equality(self): 8 | """Verify that one error equals itself""" 9 | error = Error('foo', 'bad thing', 'a.b.c') 10 | self.assertEqual(error, error) 11 | 12 | def test_equality_with_same_field_values(self): 13 | """Verify that two errors with the same field values are equal""" 14 | err1 = Error('foo', 'bad thing', 'a.b.c') 15 | err2 = Error('foo', 'bad thing', 'a.b.c') 16 | self.assertEqual(err1, err2) 17 | 18 | def test_not_equal_with_different_reason(self): 19 | """Verify that two errors 
with a different reason are not equal""" 20 | err1 = Error('foo', 'bad thing', 'a.b.c') 21 | err2 = Error('foo', 'something else', 'a.b.c') 22 | self.assertNotEqual(err1, err2) 23 | 24 | def test_not_equal_with_different_name(self): 25 | """Verify that two errors with a different name are not equal""" 26 | err1 = Error('foo', 'bad thing', 'a.b.c') 27 | err2 = Error('bar', 'bad thing', 'a.b.c') 28 | self.assertNotEqual(err1, err2) 29 | 30 | def test_not_equal_with_different_location(self): 31 | """Verify that two errors with a different location are not equal""" 32 | err1 = Error('foo', 'bad thing', 'a.b.c') 33 | err2 = Error('foo', 'bad thing', 'd.e.f') 34 | self.assertNotEqual(err1, err2) 35 | 36 | def test_equal_with_no_location(self): 37 | """Verify that two errors with no location but the same name are equal""" 38 | err1 = Error('foo', 'bad thing') 39 | err2 = Error('foo', 'bad thing') 40 | self.assertEqual(err1, err2) 41 | 42 | def test_not_equal_with_overlapping_name_when_no_location(self): 43 | """Verify that two errors with an overlapping name but no location are 44 | not equal 45 | """ 46 | err1 = Error('foo', 'bad thing') 47 | err2 = Error('x/y/foo', 'bad thing') 48 | self.assertNotEqual(err1, err2) 49 | 50 | def test_equal_with_overlapping_name_when_location_present(self): 51 | """Verify that two errors with an overlapping name and a location are equal""" 52 | err1 = Error('foo', 'bad thing', 'a.b.c') 53 | err2 = Error('x/y/foo', 'bad thing', 'a.b.c') 54 | self.assertEqual(err1, err2) 55 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | # Tox (https://tox.readthedocs.io/) is a tool for running tests 2 | # in multiple virtualenvs. This configuration file will run the 3 | # test suite on all supported python versions. To use it, "pip install tox" 4 | # and then run "tox -e [envname]" from this directory. 
5 | 6 | [tox] 7 | requires = pip >= 24.3.1 8 | 9 | [testenv] 10 | download = True 11 | setenv = 12 | PYTHONDONTWRITEBYTECODE = 1 13 | recreate = 14 | minimum, upgraded, prerelease: False 15 | build, wheelinstall: True # good practice to recreate the environment 16 | skip_install = 17 | minimum, upgraded, prerelease, wheelinstall: False 18 | build: True # no need to install anything when building 19 | install_command = 20 | # when using [testenv:wheelinstall] and --installpkg, the wheel and its dependencies 21 | # are installed, instead of the package in the current directory 22 | minimum, wheelinstall: python -I -m pip install {opts} {packages} 23 | upgraded: python -I -m pip install -U {opts} {packages} 24 | prerelease: python -I -m pip install -U --pre {opts} {packages} 25 | extras = 26 | # which optional dependency set(s) to use (default: none) 27 | pytest: test 28 | gallery: doc 29 | optional: tqdm,sparse,zarr,termset 30 | minimum: min-reqs 31 | commands = 32 | # commands to run for every environment 33 | python --version # print python version for debugging 34 | python -m pip check # check for conflicting packages 35 | python -m pip list # list installed packages for debugging 36 | 37 | # commands to run for select environments 38 | pytest: pytest -v 39 | gallery: python test_gallery.py 40 | build: python -m pip install -U build 41 | build: python -m build 42 | wheelinstall: python -c "import hdmf; import hdmf.common" 43 | 44 | # list of pre-defined environments 45 | [testenv:pytest-py{39,310,311,312,313}-upgraded] 46 | [testenv:pytest-py313-upgraded-optional] 47 | [testenv:pytest-py313-prerelease-optional] 48 | [testenv:pytest-py39-minimum] 49 | 50 | # TODO: Update to 3.13 when linkml and its deps support 3.13 51 | [testenv:gallery-py312-upgraded-optional] 52 | [testenv:gallery-py312-prerelease-optional] 53 | [testenv:gallery-py39-minimum] 54 | 55 | [testenv:build] # using tox for this so that we can have a clean build environment 56 | [testenv:wheelinstall] # use with `--installpkg dist/*-none-any.whl` 57 | --------------------------------------------------------------------------------