├── .git-blame-ignore-revs ├── .git_archival.txt ├── .gitattributes ├── .github ├── CODEOWNERS └── workflows │ ├── linting.yml │ ├── publish.yml │ ├── test_with_pytest.yml │ └── verify_sphinx_doc.yml ├── .gitignore ├── .pylintrc ├── .readthedocs.yml ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE.txt ├── MANIFEST.in ├── README.md ├── asv.conf.json ├── benchmarks ├── __init__.py ├── core_profiles.py ├── edge_profiles.py ├── technical.py └── utils.py ├── ci ├── build_dd_zip.sh ├── build_docs_and_dist.sh ├── linting.sh ├── run_benchmark.sh └── run_pytest.sh ├── conftest.py ├── docs ├── .gitignore ├── Makefile └── source │ ├── .gitignore │ ├── _static │ ├── imas.css │ └── imas_200x200.png │ ├── _templates │ ├── autosummary │ │ ├── accessor.rst │ │ ├── accessor_attribute.rst │ │ ├── accessor_callable.rst │ │ ├── accessor_method.rst │ │ └── base.rst │ ├── custom-class-template.rst │ ├── custom-module-template.rst │ └── layout.html │ ├── api-hidden.rst │ ├── api.rst │ ├── benchmarking.rst │ ├── changelog.rst │ ├── ci_config.rst │ ├── cli.rst │ ├── code_style.rst │ ├── conf.py │ ├── configuring.rst │ ├── courses │ ├── advanced │ │ ├── dd_versions.rst │ │ ├── explore.rst │ │ ├── hashing.rst │ │ ├── imas_snippets │ │ │ ├── alternative_coordinates.py │ │ │ ├── autoconvert_get.py │ │ │ ├── autoconvert_put.py │ │ │ ├── calc_with_units.py │ │ │ ├── coordinates.py │ │ │ ├── dd_versions.py │ │ │ ├── explore_data.py │ │ │ ├── explore_structures.py │ │ │ ├── hashing.py │ │ │ ├── ids_convert.py │ │ │ ├── ids_to_xarray.py │ │ │ └── tensorized_ids_to_xarray.py │ │ ├── metadata.rst │ │ └── xarray.rst │ ├── advanced_user_training.rst │ ├── basic │ │ ├── analyze.rst │ │ ├── core_profiles_ne_timeslice.png │ │ ├── core_profiles_te.png │ │ ├── create.rst │ │ ├── explore.rst │ │ ├── imas_inspect.png │ │ ├── imas_snippets │ │ │ ├── create_core_profiles.py │ │ │ ├── explore_public_ec_launchers.py │ │ │ ├── explore_public_pf_active.py │ │ │ ├── explore_training_data.py │ │ │ ├── find_paths.py │ │ │ ├── iterate_core_profiles.py │ │ │ ├── plot_core_profiles_ne_timeslice.py │ │ │ ├── plot_core_profiles_te.py │ │ │ ├── print_idss.py │ │ │ ├── read_core_profiles_ne_timeslice.py │ │ │ ├── read_equilibrium_time_array.py │ │ │ ├── read_whole_equilibrium.py │ │ │ └── transform_grid.py │ │ ├── interactive_tab_core_profiles_toplevel.png │ │ ├── print_tree_ids_properties.png │ │ ├── setup.rst │ │ └── transform.rst │ └── basic_user_training.rst │ ├── identifiers.rst │ ├── imas_architecture.rst │ ├── imas_structure.png │ ├── index.rst │ ├── installing.rst │ ├── intro.rst │ ├── lazy_loading.rst │ ├── mdsplus.rst │ ├── metadata.rst │ ├── multi-dd.rst │ ├── netcdf.rst │ ├── netcdf │ └── conventions.rst │ ├── release_imas.rst │ ├── resampling.rst │ └── validation.rst ├── imas ├── __init__.py ├── __main__.py ├── _to_xarray.py ├── _util.py ├── assets │ ├── IDSDef2MDSpreTree.xsl │ ├── IDS_fake_toplevel.xml │ ├── IDS_minimal.xml │ ├── IDS_minimal_2.xml │ ├── IDS_minimal_struct_array.xml │ ├── IDS_minimal_types.xml │ ├── ITER_134173_106_core_profiles.ids │ ├── ITER_134173_106_equilibrium.ids │ ├── README.md │ ├── core_profiles.ids │ └── equilibrium.ids ├── backends │ ├── __init__.py │ ├── db_entry_impl.py │ ├── imas_core │ │ ├── __init__.py │ │ ├── al_context.py │ │ ├── db_entry_al.py │ │ ├── db_entry_helpers.py │ │ ├── imas_interface.py │ │ ├── mdsplus_model.py │ │ └── uda_support.py │ └── netcdf │ │ ├── __init__.py │ │ ├── db_entry_nc.py │ │ ├── ids2nc.py │ │ ├── ids_tensorizer.py │ │ ├── iterators.py │ │ ├── nc2ids.py │ │ ├── 
nc_metadata.py │ │ └── nc_validate.py ├── command │ ├── cli.py │ ├── db_analysis.py │ ├── helpers.py │ └── timer.py ├── db_entry.py ├── dd_helpers.py ├── dd_zip.py ├── exception.py ├── ids_base.py ├── ids_convert.py ├── ids_coordinates.py ├── ids_data_type.py ├── ids_defs.py ├── ids_factory.py ├── ids_identifiers.py ├── ids_metadata.py ├── ids_path.py ├── ids_primitive.py ├── ids_struct_array.py ├── ids_structure.py ├── ids_toplevel.py ├── setup_logging.py ├── test │ ├── test_all_dd_versions.py │ ├── test_cli.py │ ├── test_dbentry.py │ ├── test_dd_helpers.py │ ├── test_dd_zip.py │ ├── test_deepcopy.py │ ├── test_exception.py │ ├── test_get_sample.py │ ├── test_hash.py │ ├── test_helpers.py │ ├── test_identifiers.py │ ├── test_ids2nc.py │ ├── test_ids_ascii_data.py │ ├── test_ids_convert.py │ ├── test_ids_coordinates.py │ ├── test_ids_data_type.py │ ├── test_ids_factory.py │ ├── test_ids_metadata.py │ ├── test_ids_mixin.py │ ├── test_ids_path.py │ ├── test_ids_primitive.py │ ├── test_ids_struct_array.py │ ├── test_ids_structure.py │ ├── test_ids_toplevel.py │ ├── test_ids_validate.py │ ├── test_latest_dd_autofill.py │ ├── test_latest_dd_manual.py │ ├── test_latest_dd_resample.py │ ├── test_lazy_loading.py │ ├── test_list_occurrences.py │ ├── test_minimal.py │ ├── test_minimal_conversion.py │ ├── test_minimal_io.py │ ├── test_minimal_struct_array_io.py │ ├── test_minimal_types.py │ ├── test_minimal_types_io.py │ ├── test_minimal_types_io_automatic.py │ ├── test_nbc_change.py │ ├── test_nc_autofill.py │ ├── test_nc_entry.py │ ├── test_nc_metadata.py │ ├── test_nc_validation.py │ ├── test_snippets.py │ ├── test_static_ids.py │ ├── test_str_1d.py │ ├── test_time_slicing.py │ ├── test_to_xarray.py │ ├── test_uda_support.py │ └── test_util.py ├── training.py └── util.py ├── pyproject.toml ├── setup.cfg ├── setup.py └── tools ├── compare_lowlevel_access_patterns.py ├── extract_test_data.py └── get_tagnames.py /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # Migrate code style to Black 2 | 8ca47b64e91d698334919e8c8d837a7e45e222f5 3 | 4 | # split ids_classes.py 5 | 5befe36579c4dc51ddc410d49fd7bbec586cfc55 6 | -------------------------------------------------------------------------------- /.git_archival.txt: -------------------------------------------------------------------------------- 1 | node: 09104ac4360cc8cf220db15507b789d25ca977ba 2 | node-date: 2025-06-03T14:23:38+02:00 3 | describe-name: 2.0.0-15-g09104ac4 -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | .git_archival.txt export-subst 2 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # This file allows automatically setting reviewers for pull requests. 2 | # Each line is a file pattern followed by one or more owners. 3 | # The last match takes precedence over previous ones. 4 | # Do not edit unless specifically mandated to do so. 5 | 6 | # Global/fallback and technical modifications.
7 | * @maarten-ic @prasad-sawantdesai @olivhoenen 8 | 9 | # Modifications to CODEOWNERS and action workflows 10 | .github/ @SimonPinches @olivhoenen 11 | -------------------------------------------------------------------------------- /.github/workflows/linting.yml: -------------------------------------------------------------------------------- 1 | name: linting-and-code-formatting 2 | 3 | on: 4 | push: 5 | pull_request: 6 | types: [opened, synchronize, reopened] 7 | 8 | jobs: 9 | build: 10 | 11 | runs-on: ubuntu-22.04 12 | 13 | steps: 14 | - name: Checkout IMAS-Python sources 15 | uses: actions/checkout@v4 16 | 17 | - name: Set up Python 18 | uses: actions/setup-python@v5 19 | with: 20 | # until saxonche is available in 3.13 21 | # https://saxonica.plan.io/issues/6561 22 | python-version: "<3.13" 23 | 24 | - name: Display Python version 25 | run: python -c "import sys; print(sys.version)" 26 | 27 | - name: Install the code linting and formatting tools 28 | run: pip install --upgrade 'black >=24,<25' flake8 29 | 30 | - name: Check formatting of code with black 31 | run: black --check imas 32 | 33 | - name: Check linting with flake8 34 | run: flake8 imas 35 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: build-wheel-and-publish-test-pypi 2 | 3 | on: 4 | push: 5 | pull_request: 6 | types: [opened, synchronize, reopened] 7 | 8 | jobs: 9 | build: 10 | name: Build distribution 11 | runs-on: ubuntu-22.04 12 | steps: 13 | - uses: actions/checkout@v4 14 | with: 15 | fetch-depth: 0 16 | - name: Set up Python 17 | uses: actions/setup-python@v5 18 | with: 19 | # until saxonche is available in 3.13 20 | # https://saxonica.plan.io/issues/6561 21 | python-version: "<3.13" 22 | - name: Install pypa/build 23 | run: >- 24 | python3 -m pip install pip setuptools wheel build 25 | - name: Build a binary wheel and a source tarball 26 | run: python3 -m build . 
27 | - name: Store the distribution packages 28 | uses: actions/upload-artifact@v4 29 | with: 30 | name: python-package-distributions 31 | path: dist/ 32 | 33 | publish-to-pypi: 34 | name: Publish IMAS-Python distribution to PyPI 35 | if: startsWith(github.ref, 'refs/tags/') # only publish to PyPI on tag pushes 36 | needs: 37 | - build 38 | runs-on: ubuntu-22.04 39 | environment: 40 | name: pypi 41 | url: https://pypi.org/p/imas-python 42 | permissions: 43 | id-token: write # IMPORTANT: mandatory for trusted publishing 44 | steps: 45 | - name: Download all the dists 46 | uses: actions/download-artifact@v4 47 | with: 48 | name: python-package-distributions 49 | path: dist/ 50 | - name: Publish distribution to PyPI 51 | uses: pypa/gh-action-pypi-publish@release/v1 52 | 53 | publish-to-testpypi: 54 | name: Publish IMAS-Python distribution to TestPyPI 55 | if: github.ref=='refs/heads/develop' # only publish to TestPyPI on develop pushes 56 | needs: 57 | - build 58 | runs-on: ubuntu-22.04 59 | environment: 60 | name: testpypi 61 | url: https://test.pypi.org/p/imas-python 62 | permissions: 63 | id-token: write # IMPORTANT: mandatory for trusted publishing 64 | steps: 65 | - name: Download all the dists 66 | uses: actions/download-artifact@v4 67 | with: 68 | name: python-package-distributions 69 | path: dist/ 70 | - name: Publish distribution to TestPyPI 71 | uses: pypa/gh-action-pypi-publish@unstable/v1 72 | with: 73 | repository-url: https://test.pypi.org/legacy/ 74 | verbose: true 75 | -------------------------------------------------------------------------------- /.github/workflows/test_with_pytest.yml: -------------------------------------------------------------------------------- 1 | name: Test using pytest 2 | 3 | on: 4 | push: 5 | pull_request: 6 | types: [opened, synchronize, reopened] 7 | 8 | jobs: 9 | test: 10 | runs-on: ubuntu-22.04 11 | strategy: 12 | matrix: 13 | python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] # Test on multiple Python versions 14 | 15 | steps: 16 | - name: Checkout repository 17 | uses: actions/checkout@v4 18 | 19 | - name: Set up Python ${{ matrix.python-version }} 20 | 21 | uses: actions/setup-python@v4 22 | with: 23 | # until saxonche is available in 3.13 24 | # https://saxonica.plan.io/issues/6561 25 | python-version: ${{ matrix.python-version }} 26 | - name: Display Python version 27 | run: python -c "import sys; print(sys.version)" 28 | 29 | - name: Install dependencies 30 | run: | 31 | python -m venv venv 32 | source venv/bin/activate 33 | pip install --upgrade pip setuptools wheel 34 | pip install .[test] 35 | 36 | - name: Run tests 37 | run: | 38 | source venv/bin/activate 39 | python -m pytest -n=auto --cov=imas --cov-report=term-missing --cov-report=xml:coverage.xml --cov-report=html:htmlcov --junit-xml=junit.xml 40 | 41 | - name: Upload coverage report ${{ matrix.python-version }} 42 | uses: actions/upload-artifact@v4 43 | with: 44 | name: coverage-report-${{ matrix.python-version }} 45 | path: htmlcov 46 | 47 | - name: Upload test report ${{ matrix.python-version }} 48 | uses: actions/upload-artifact@v4 49 | with: 50 | name: test-report-${{ matrix.python-version }} 51 | path: junit.xml 52 | -------------------------------------------------------------------------------- /.github/workflows/verify_sphinx_doc.yml: -------------------------------------------------------------------------------- 1 | name: verify-sphinx-doc-generation 2 | 3 | on: 4 | push: 5 | pull_request: 6 | types: [opened, synchronize, reopened] 7 | 8 | jobs: 9 | build-and-test: 
10 | runs-on: ubuntu-22.04 11 | 12 | steps: 13 | - name: Checkout IMAS-Python sources 14 | uses: actions/checkout@v4 15 | 16 | - name: Set up Python 17 | uses: actions/setup-python@v5 18 | with: 19 | 20 | # until saxonche is available in 3.13 21 | # https://saxonica.plan.io/issues/6561 22 | python-version: "<3.13" 23 | 24 | - name: Display Python version 25 | run: python -c "import sys; print(sys.version)" 26 | 27 | 28 | - name: Set up Python virtual environment 29 | run: | 30 | python -m venv venv 31 | source venv/bin/activate 32 | 33 | - name: Install build dependencies 34 | run: | 35 | pip install --upgrade pip setuptools wheel build 36 | 37 | - name: Build package 38 | run: | 39 | rm -rf dist 40 | python -m build . 41 | 42 | - name: Install package and dependencies 43 | run: | 44 | pip install "$(readlink -f dist/*.whl)[docs,netcdf]" 45 | 46 | - name: Debug dependencies 47 | run: | 48 | pip freeze 49 | 50 | - name: Build Sphinx documentation 51 | run: | 52 | export SPHINXOPTS='-W -n --keep-going' 53 | make -C docs clean html 54 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | local_settings.py 55 | 56 | # Flask stuff: 57 | instance/ 58 | .webassets-cache 59 | 60 | # Scrapy stuff: 61 | .scrapy 62 | 63 | # PyBuilder 64 | target/ 65 | 66 | # IPython Notebook 67 | .ipynb_checkpoints 68 | 69 | # pyenv 70 | .python-version 71 | 72 | # celery beat schedule file 73 | celerybeat-schedule 74 | 75 | # dotenv 76 | .env 77 | 78 | # virtualenv 79 | .venv/ 80 | venv/ 81 | ENV/ 82 | 83 | # Spyder project settings 84 | .spyderproject 85 | 86 | # Rope project settings 87 | .ropeproject 88 | 89 | # VIM 90 | *.swp 91 | *.swo 92 | 93 | # SCM setuptools 94 | imas/_version.py 95 | 96 | # IMAS DD 97 | data-dictionary 98 | containers/arch/imas/ 99 | containers/arch/data-dictionary/ 100 | imas/assets/IDSDef.zip 101 | 102 | # IDS files 103 | # *.ids 104 | 105 | # ASV folder 106 | /.asv 107 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: "ubuntu-22.04" 5 | tools: 6 | python: "3.11" 7 | jobs: 8 | post_checkout: 9 | - git fetch --unshallow || true 10 | 11 | python: 12 | install: 13 | - method: pip 14 | path: .
15 | extra_requirements: 16 | - docs 17 | - netcdf 18 | - h5py 19 | 20 | sphinx: 21 | builder: html 22 | configuration: docs/source/conf.py 23 | fail_on_warning: false -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to make participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies within all project spaces, and it also applies when 49 | an individual is representing the project or its community in public spaces. 50 | Examples of representing a project or community include using an official 51 | project e-mail address, posting via an official social media account, or acting 52 | as an appointed representative at an online or offline event. Representation of 53 | a project may be further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at . All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 
63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing guidelines 2 | 3 | We welcome any kind of contribution to `IMAS-Python`, 4 | from a simple comment, a question or even a full-fledged pull 5 | request. 6 | Please first make sure you read and follow the 7 | [Code of Conduct](CODE_OF_CONDUCT.md). 8 | 9 | ## You think you found a bug in the code, or have a question about its use 10 | 1. use the [issue search](https://github.com/iterorganization/IMAS-Python/issues) 11 | to check if someone already created a similar issue; 12 | 2. if not, make a **new issue** to describe your problem or question. 13 | In the case of a suspected bug, please try to give all the relevant 14 | information to allow reproducing the error or identifying 15 | its root cause (version of IMAS-Python, OS and relevant 16 | dependencies, snippet of code); 17 | 3. apply relevant labels to the issue. 18 | 19 | ## You want to make or ask some change to the code 20 | 1. use the [issue search](https://github.com/iterorganization/IMAS-Python/issues) 21 | to check if someone already proposed a similar idea/change; 22 | 2. if not, create a **new issue** to describe what change you would like to see 23 | implemented and specify whether you intend to work on it yourself or whether some help 24 | will be needed; 25 | 3. wait until some kind of consensus is reached about your idea being relevant, 26 | at which time the issue will be assigned (to you or someone else who can work on 27 | this topic); 28 | 4. if you do the development yourself, fork the repository to your own GitHub 29 | profile and create your own feature branch off of the latest develop commit. 30 | Make sure to regularly sync your branch with the latest commits from `develop` 31 | (find instructions 32 | [here](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/syncing-a-fork), or see the sketch at the end of this page); 33 | 5. when your development is ready, create a pull request (find instructions 34 | [here](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork)). 35 | 36 | 37 | While we will try to answer questions quickly and to address issues in a timely 38 | manner, it may sometimes take longer than expected. A friendly ping in the 39 | discussion or the issue thread can help draw attention if you find that it has 40 | stalled.
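For step 4, a minimal sketch of the fork-sync workflow (the `upstream` remote name is a common git convention, not something this guide prescribes):

```bash
# One-time setup: point an "upstream" remote at the main repository
git remote add upstream https://github.com/iterorganization/IMAS-Python.git

# Regularly: fetch the latest upstream commits and merge develop into your branch
git fetch upstream
git merge upstream/develop
```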
41 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include imas/assets/IDSDef.zip 2 | include imas/assets/IDSDef2MDSpreTree.xsl 3 | include imas/assets/ITER_134173_106_equilibrium.ids 4 | include imas/assets/ITER_134173_106_core_profiles.ids 5 | include imas/assets/equilibrium.ids 6 | include imas/assets/core_profiles.ids 7 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # IMAS-Python 2 | 3 | IMAS-Python (known as IMASPy for versions < 2) is a pure-Python library to handle 4 | arbitrarily nested data structures. It is designed for, but not necessarily bound to, interacting 5 | with Interface Data Structures (IDSs) as defined by the Integrated Modelling & Analysis Suite (IMAS) 6 | Data Model. 7 | 8 | 9 | ## Install 10 | 11 | Simply install IMAS-Python with ``pip``: 12 | ```bash 13 | pip install imas-python 14 | ``` 15 | or with optional dependencies for netCDF and xarray support: 16 | ```bash 17 | pip install imas-python[netcdf,xarray] 18 | ``` 19 | 20 | More details are described in the documentation generated from `/docs/source/installing.rst`. 21 | The documentation is autogenerated from the source using [Sphinx](http://sphinx-doc.org/) 22 | and can be found on [Read the Docs](https://imas-python.readthedocs.io/en/latest/). 23 | 24 | To generate the documentation yourself, install the ``docs`` optional dependencies and do: 25 | ```bash 26 | make -C docs html 27 | ``` 28 | 29 | 30 | ## How to use 31 | 32 | ```python 33 | import imas 34 | factory = imas.IDSFactory() 35 | equilibrium = factory.equilibrium() 36 | print(equilibrium) 37 | 38 | equilibrium.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS 39 | equilibrium.ids_properties.comment = "testing" 40 | equilibrium.time = [0.01] 41 | 42 | with imas.DBEntry("imas:hdf5?path=./testdb", "w") as dbentry: 43 | dbentry.put(equilibrium) 44 | 45 | # or without imas_core dependency 46 | with imas.DBEntry("./test.nc", "w") as dbentry: 47 | dbentry.put(equilibrium) 48 | 49 | ``` 50 | 51 | A quick 5-minute introduction is available in the documentation generated from `/docs/source/intro.rst`. 52 | 53 | 54 | ## Legal 55 | 56 | IMAS-Python is Copyright 2020-2025 ITER Organization, Copyright 2020-2023 Karel Lucas van de 57 | Plassche , Copyright 2020-2022 Daan van Vugt , 58 | and Copyright 2020 Dutch Institute for Fundamental Energy Research . 59 | It is licensed under [LGPL 3.0](LICENSE.txt). 60 | 61 | 62 | ## Acknowledgments 63 | 64 | Inspired and bootstrapped by existing tools, notably the IMAS Python HLI, 65 | IMAS Python workflows, and OMAS.
66 | -------------------------------------------------------------------------------- /benchmarks/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iterorganization/IMAS-Python/09104ac4360cc8cf220db15507b789d25ca977ba/benchmarks/__init__.py -------------------------------------------------------------------------------- /benchmarks/technical.py: -------------------------------------------------------------------------------- 1 | import imas 2 | 3 | 4 | def timeraw_create_default_imas_factory(): 5 | # timeraw to ensure that nothing is cached 6 | return """ 7 | import imas 8 | imas.IDSFactory() 9 | """ 10 | 11 | 12 | def timeraw_import_imas(): 13 | return """ 14 | import imas 15 | """ 16 | 17 | 18 | # It would be nice if we could track these, but unfortunately it breaks things like 19 | # `asv compare` :( 20 | """ 21 | def track_imas_versions(): 22 | ids_factory = imas.IDSFactory() 23 | equilibrium = ids_factory.equilibrium() 24 | equilibrium.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_INDEPENDENT 25 | dbentry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "test", 1, 1) 26 | dbentry.create() 27 | dbentry.put(equilibrium) 28 | equilibrium = dbentry.get("equilibrium") 29 | return ( 30 | equilibrium.ids_properties.version_put.data_dictionary, 31 | equilibrium.ids_properties.version_put.access_layer, 32 | ) 33 | 34 | 35 | def track_imas_dd_version(): 36 | return imas.IDSFactory().version 37 | """ 38 | -------------------------------------------------------------------------------- /benchmarks/utils.py: -------------------------------------------------------------------------------- 1 | import importlib 2 | import logging 3 | import uuid 4 | from pathlib import Path 5 | 6 | import imas 7 | import imas.exception 8 | 9 | # Backend constants 10 | HDF5 = "HDF5" 11 | MDSPLUS = "MDSplus" 12 | MEMORY = "memory" 13 | ASCII = "ASCII" 14 | NETCDF = "netCDF" 15 | 16 | 17 | def create_uri(backend, path): 18 | if backend == NETCDF: 19 | return f"{path}.nc" 20 | return f"imas:{backend.lower()}?path={path}" 21 | 22 | 23 | def backend_exists(backend): 24 | """Tries to detect if the lowlevel has support for the given backend.""" 25 | uri = create_uri(backend, str(uuid.uuid4())) 26 | try: 27 | entry = imas.DBEntry(uri, "r") 28 | except Exception as exc: 29 | if "backend is not available" in str(exc): 30 | return False 31 | elif isinstance(exc, (imas.exception.ALException, FileNotFoundError)): 32 | return True 33 | return True 34 | # Highly unlikely, but it could succeed without error 35 | entry.close() 36 | return True 37 | 38 | 39 | # Note: UDA backend is not used for benchmarking 40 | all_backends = [ 41 | HDF5, 42 | MDSPLUS, 43 | MEMORY, 44 | ASCII, 45 | NETCDF, 46 | ] 47 | 48 | # Suppress error logs for testing backend availability: 49 | # ERROR:root:b'ual_open_pulse: [UALBackendException = HDF5 master file not found: ]' 50 | # ERROR:root:b'ual_open_pulse: [UALBackendException = %TREE-E-FOPENR, Error opening file read-only.]' 51 | # ERROR:root:b'ual_open_pulse: [UALBackendException = Missing pulse]' 52 | logging.getLogger().setLevel(logging.CRITICAL) 53 | available_backends = list(filter(backend_exists, all_backends)) 54 | logging.getLogger().setLevel(logging.INFO) 55 | available_slicing_backends = [ 56 | backend for backend in available_backends if backend not in [ASCII, NETCDF] 57 | ] 58 | 59 | hlis = ["imas"] 60 | DBEntry = { 61 | "imas": imas.DBEntry, 62 | } 63 | factory = { 64 | "imas": imas.IDSFactory(), 65 | }
66 | available_serializers = [imas.ids_defs.ASCII_SERIALIZER_PROTOCOL] 67 | 68 | 69 | def create_dbentry(hli, backend): 70 | if backend == NETCDF: 71 | if hli == "imas": # check if netcdf backend is available 72 | try: 73 | assert ( 74 | imas.DBEntry._select_implementation("x.nc").__name__ 75 | == "NCDBEntryImpl" 76 | ) 77 | except (AttributeError, AssertionError): 78 | raise NotImplementedError( 79 | "This version of IMAS-Python doesn't implement netCDF." 80 | ) from None 81 | 82 | path = Path.cwd() / f"DB-{hli}-{backend}" 83 | return DBEntry[hli](create_uri(backend, path), "w") 84 | -------------------------------------------------------------------------------- /ci/build_dd_zip.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Bamboo CI script to build IDSDef.zip 3 | # Note: this script should be run from the root of the git repository 4 | 5 | # Debugging: 6 | if [[ "$(uname -n)" == *"bamboo"* ]]; then 7 | set -e -o pipefail 8 | fi 9 | echo "Loading modules..." 10 | 11 | # Set up environment such that module files can be loaded 12 | source /etc/profile.d/modules.sh 13 | module purge 14 | # Modules are supplied as arguments in the CI job: 15 | if [ $# -eq 0 ]; then 16 | module load Python 17 | else 18 | module load $@ 19 | fi 20 | 21 | # Debugging: 22 | echo "Done loading modules" 23 | 24 | # Build the DD zip 25 | rm -rf venv # Environment should be clean, but remove directory to be sure 26 | python -m venv venv 27 | source venv/bin/activate 28 | pip install gitpython saxonche packaging 29 | python imas/dd_helpers.py 30 | deactivate 31 | -------------------------------------------------------------------------------- /ci/build_docs_and_dist.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Bamboo CI script to build the IMAS-Python documentation and distribution packages 3 | # Note: this script should be run from the root of the git repository 4 | 5 | # Debugging: 6 | if [[ "$(uname -n)" == *"bamboo"* ]]; then 7 | set -e -o pipefail 8 | fi 9 | echo "Loading modules:" $@ 10 | 11 | # Set up environment such that module files can be loaded 12 | source /etc/profile.d/modules.sh 13 | module purge 14 | # Modules are supplied as arguments in the CI job: 15 | module load $@ 16 | 17 | # Debugging: 18 | echo "Done loading modules" 19 | 20 | # Set up the testing venv 21 | rm -rf venv # Environment should be clean, but remove directory to be sure 22 | python -m venv venv 23 | source venv/bin/activate 24 | 25 | # Create sdist and wheel 26 | pip install --upgrade pip setuptools wheel build 27 | rm -rf dist 28 | python -m build .
29 | 30 | # Install imas Python module and documentation dependencies from the just-built wheel 31 | pip install "`readlink -f dist/*.whl`[docs,netcdf]" 32 | 33 | # Debugging: 34 | pip freeze 35 | 36 | # Enable sphinx options: 37 | # - `-W`: turn warnings into errors 38 | # - `-n`: nit-picky mode, warn about all missing references 39 | # - `--keep-going`: with -W, keep going when getting warnings 40 | export SPHINXOPTS='-W -n --keep-going' 41 | 42 | # Run sphinx to create the documentation 43 | make -C docs clean html 44 | 45 | deactivate 46 | -------------------------------------------------------------------------------- /ci/linting.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Bamboo CI script for linting 3 | # Note: this script should be run from the root of the git repository 4 | 5 | # Debugging: 6 | if [[ "$(uname -n)" == *"bamboo"* ]]; then 7 | set -e -o pipefail 8 | fi 9 | echo "Loading modules..." 10 | 11 | # Set up environment such that module files can be loaded 12 | source /etc/profile.d/modules.sh 13 | module purge 14 | # Modules are supplied as arguments in the CI job: 15 | if [ $# -eq 0 ]; then 16 | module load Python 17 | else 18 | module load $@ 19 | fi 20 | 21 | # Debugging: 22 | echo "Done loading modules" 23 | 24 | # Create a venv 25 | rm -rf venv 26 | python -m venv venv 27 | . venv/bin/activate 28 | 29 | # Install and run linters 30 | pip install --upgrade 'black >=24,<25' flake8 31 | 32 | black --check imas 33 | flake8 imas 34 | 35 | deactivate -------------------------------------------------------------------------------- /ci/run_benchmark.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Bamboo CI script to run the ASV benchmarks 3 | # Note: this script should be run from the root of the git repository 4 | 5 | # Debugging: 6 | 7 | echo "Loading modules:" $@ 8 | BENCHMARKS_DIR=$(realpath "$PWD/imas_benchmarks") 9 | if [[ "$(uname -n)" == *"bamboo"* ]]; then 10 | set -e -o pipefail 11 | # persistent benchmarks directory on the CI host 12 | BENCHMARKS_DIR=$(realpath "/mnt/bamboo_deploy/imas/benchmarks/") 13 | fi 14 | 15 | # Set up environment such that module files can be loaded 16 | source /etc/profile.d/modules.sh 17 | module purge 18 | # Modules are supplied as arguments in the CI job: 19 | # IMAS-AL-Python/5.2.1-intel-2023b-DD-3.41.0 Saxon-HE/12.4-Java-21 20 | if [ $# -eq 0 ]; then 21 | module load IMAS-AL-Core 22 | else 23 | module load $@ 24 | fi 25 | 26 | 27 | 28 | # Debugging: 29 | echo "Done loading modules" 30 | 31 | # Export current PYTHONPATH so ASV benchmarks can import imas 32 | export ASV_PYTHONPATH="$PYTHONPATH" 33 | 34 | # Set up the testing venv 35 | rm -rf venv # Environment should be clean, but remove directory to be sure 36 | python -m venv venv 37 | source venv/bin/activate 38 | 39 | # Install asv and imas 40 | pip install --upgrade pip setuptools wheel 41 | pip install virtualenv .[test] 42 | 43 | # Generate MDS+ models cache 44 | python -c 'import imas.backends.imas_core.mdsplus_model; print(imas.backends.imas_core.mdsplus_model.mdsplus_model_dir(imas.IDSFactory()))' 45 | 46 | # Copy previous results (if any) 47 | mkdir -p "$BENCHMARKS_DIR/results" 48 | mkdir -p .asv 49 | cp -rf "$BENCHMARKS_DIR/results" .asv/ 50 | 51 | # Ensure numpy won't do multi-threading 52 | export OPENBLAS_NUM_THREADS=1 53 | export MKL_NUM_THREADS=1 54 | export OMP_NUM_THREADS=1 55 | 56 | # Ensure there is a machine configuration 57 | asv machine --yes 58 | 59 | # Run
ASV for the current commit, develop and main 60 | asv run --skip-existing-successful HEAD^! 61 | asv run --skip-existing-successful develop^! 62 | asv run --skip-existing-successful main^! 63 | 64 | # Compare results 65 | if [ `git rev-parse --abbrev-ref HEAD` == develop ] 66 | then 67 | asv compare main develop --machine $(hostname) || echo "asv compare failed" 68 | else 69 | asv compare develop HEAD --machine $(hostname) || echo "asv compare failed" 70 | fi 71 | 72 | # Publish results 73 | asv publish 74 | 75 | # And persistently store them 76 | cp -rf .asv/{results,html} "$BENCHMARKS_DIR" 77 | 78 | 79 | 80 | -------------------------------------------------------------------------------- /ci/run_pytest.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Bamboo CI script to install imas Python module and run all tests 3 | # Note: this script should be run from the root of the git repository 4 | 5 | # Debugging: 6 | if [[ "$(uname -n)" == *"bamboo"* ]]; then 7 | set -e -o pipefail 8 | fi 9 | echo "Loading modules:" $@ 10 | 11 | # Set up environment such that module files can be loaded 12 | source /etc/profile.d/modules.sh 13 | module purge 14 | # Modules are supplied as arguments in the CI job: 15 | if [ $# -eq 0 ]; then 16 | module load IMAS-AL-Core Java MDSplus 17 | else 18 | module load $@ 19 | fi 20 | 21 | # Debugging: 22 | echo "Done loading modules" 23 | 24 | # Set up the testing venv 25 | rm -rf venv # Environment should be clean, but remove directory to be sure 26 | python -m venv venv 27 | source venv/bin/activate 28 | 29 | # Install imas and test dependencies 30 | pip install --upgrade pip setuptools wheel 31 | pip install .[h5py,netcdf,test] 32 | 33 | # Debugging: 34 | pip freeze 35 | 36 | # Run pytest 37 | # Clean artifacts created by pytest 38 | rm -f junit.xml 39 | rm -rf htmlcov 40 | 41 | # Set up a local directory to avoid filling /tmp with pytest temporary files 42 | # mkdir -p ~/tmp 43 | # export PYTEST_DEBUG_TEMPROOT=~/tmp 44 | python -m pytest -n=auto --cov=imas --cov-report=term-missing --cov-report=html --junit-xml=junit.xml 45 | 46 | 47 | -------------------------------------------------------------------------------- /docs/.gitignore: -------------------------------------------------------------------------------- 1 | _build 2 | source/generated 3 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | GENERATEDDIR = $(SOURCEDIR)/generated 10 | BUILDDIR = _build 11 | PROJECT_ROOT = .. 12 | 13 | # Put it first so that "make" without argument is like "make help". 14 | help: 15 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 16 | 17 | .PHONY: help Makefile 18 | 19 | # Catch-all target: route all unknown targets to Sphinx using the new 20 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
21 | %: Makefile 22 | @echo Running '$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)' 23 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 24 | 25 | clean: Makefile 26 | @echo Running '$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)' 27 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 28 | rm -rf $(GENERATEDDIR) 29 | 30 | # This recipe generates source files, so put result in source 31 | # Seems to overwrite autosummary documentation though! So it is not used right now. 32 | MODULE_EXCLUDE="../imas/examples/**" "../imas/**" 33 | apidocs: Makefile 34 | sphinx-apidoc --implicit-namespaces -o "$(GENERATEDDIR)" "$(PROJECT_ROOT)/imas/" $(MODULE_EXCLUDE) 35 | 36 | # Sphinx will run this automatically 37 | autogen: Makefile 38 | sphinx-autogen -o "$(GENERATEDDIR)" $(SOURCEDIR)/*.rst 39 | 40 | realclean: Makefile clean 41 | rm -rf $(SOURCEDIR)/apidoc $(GENERATEDDIR) $(SOURCEDIR)/_build/ 42 | -------------------------------------------------------------------------------- /docs/source/.gitignore: -------------------------------------------------------------------------------- 1 | apidoc/* 2 | netcdf4.objects.inv 3 | -------------------------------------------------------------------------------- /docs/source/_static/imas.css: -------------------------------------------------------------------------------- 1 | .tabbed-set { 2 | border: 1px solid #888; 3 | } 4 | .tabbed-content { 5 | padding: 0 0.5rem; 6 | } 7 | 8 | table caption { 9 | /* style adapted from code-block captions (selector: `.highlight span.filename`) */ 10 | background-color: var(--md-code-bg-color); 11 | border-bottom: .05rem solid var(--md-default-fg-color--lightest); 12 | font-weight: 700; 13 | padding: .6617647059em 1.1764705882em; 14 | text-align: left; 15 | } 16 | -------------------------------------------------------------------------------- /docs/source/_static/imas_200x200.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iterorganization/IMAS-Python/09104ac4360cc8cf220db15507b789d25ca977ba/docs/source/_static/imas_200x200.png -------------------------------------------------------------------------------- /docs/source/_templates/autosummary/accessor.rst: -------------------------------------------------------------------------------- 1 | .. 2 | Taken from xarray 3 | 4 | {{ fullname }} 5 | {{ underline }} 6 | 7 | .. currentmodule:: {{ module.split('.')[0] }} 8 | 9 | .. autoaccessor:: {{ (module.split('.')[1:] + [objname]) | join('.') }} 10 | -------------------------------------------------------------------------------- /docs/source/_templates/autosummary/accessor_attribute.rst: -------------------------------------------------------------------------------- 1 | .. 2 | Taken from xarray 3 | 4 | {{ fullname }} 5 | {{ underline }} 6 | 7 | .. currentmodule:: {{ module.split('.')[0] }} 8 | 9 | .. autoaccessorattribute:: {{ (module.split('.')[1:] + [objname]) | join('.') }} 10 | -------------------------------------------------------------------------------- /docs/source/_templates/autosummary/accessor_callable.rst: -------------------------------------------------------------------------------- 1 | .. 2 | Taken from xarray 3 | 4 | {{ fullname }} 5 | {{ underline }} 6 | 7 | .. currentmodule:: {{ module.split('.')[0] }} 8 | 9 | ..
autoaccessorcallable:: {{ (module.split('.')[1:] + [objname]) | join('.') }}.__call__ 10 | -------------------------------------------------------------------------------- /docs/source/_templates/autosummary/accessor_method.rst: -------------------------------------------------------------------------------- 1 | .. 2 | Taken from xarray 3 | 4 | {{ fullname }} 5 | {{ underline }} 6 | 7 | .. currentmodule:: {{ module.split('.')[0] }} 8 | 9 | .. autoaccessormethod:: {{ (module.split('.')[1:] + [objname]) | join('.') }} 10 | -------------------------------------------------------------------------------- /docs/source/_templates/autosummary/base.rst: -------------------------------------------------------------------------------- 1 | .. 2 | Taken from xarray 3 | 4 | {% extends "!autosummary/base.rst" %} 5 | -------------------------------------------------------------------------------- /docs/source/_templates/custom-class-template.rst: -------------------------------------------------------------------------------- 1 | {{ fullname | escape | underline}} 2 | 3 | .. currentmodule:: {{ module }} 4 | 5 | .. autoclass:: {{ objname }} 6 | :members: 7 | :show-inheritance: 8 | :inherited-members: 9 | 10 | {% block methods %} 11 | .. automethod:: __init__ 12 | 13 | {% if methods %} 14 | .. rubric:: {{ _('Methods') }} 15 | 16 | .. autosummary:: 17 | {% for item in methods %} 18 | ~{{ name }}.{{ item }} 19 | {%- endfor %} 20 | {% endif %} 21 | {% endblock %} 22 | 23 | {% block attributes %} 24 | {% if attributes %} 25 | .. rubric:: {{ _('Attributes') }} 26 | 27 | .. autosummary:: 28 | {% for item in attributes %} 29 | ~{{ name }}.{{ item }} 30 | {%- endfor %} 31 | {% endif %} 32 | {% endblock %} 33 | -------------------------------------------------------------------------------- /docs/source/_templates/custom-module-template.rst: -------------------------------------------------------------------------------- 1 | {{ fullname | escape | underline}} 2 | 3 | .. automodule:: {{ fullname }} 4 | 5 | {% block attributes %} 6 | {% if attributes %} 7 | .. rubric:: {{ _('Module Attributes') }} 8 | 9 | .. autosummary:: 10 | :toctree: 11 | {% for item in attributes %} 12 | {{ item }} 13 | {%- endfor %} 14 | {% endif %} 15 | {% endblock %} 16 | 17 | {% block functions %} 18 | {% if functions %} 19 | .. rubric:: {{ _('Functions') }} 20 | 21 | .. autosummary:: 22 | :toctree: 23 | {% for item in functions %} 24 | {{ item }} 25 | {%- endfor %} 26 | {% endif %} 27 | {% endblock %} 28 | 29 | {% block classes %} 30 | {% if classes %} 31 | .. rubric:: {{ _('Classes') }} 32 | 33 | .. autosummary:: 34 | :toctree: 35 | :template: custom-class-template.rst 36 | {% for item in classes %} 37 | {{ item }} 38 | {%- endfor %} 39 | {% endif %} 40 | {% endblock %} 41 | 42 | {% block exceptions %} 43 | {% if exceptions %} 44 | .. rubric:: {{ _('Exceptions') }} 45 | 46 | .. autosummary:: 47 | :toctree: 48 | {% for item in exceptions %} 49 | {{ item }} 50 | {%- endfor %} 51 | {% endif %} 52 | {% endblock %} 53 | 54 | {% block modules %} 55 | {% if modules %} 56 | .. rubric:: Modules 57 | 58 | .. 
autosummary:: 59 | :toctree: 60 | :template: custom-module-template.rst 61 | :recursive: 62 | {% for item in modules | reject("equalto", "test") %} 63 | {{ item }} 64 | {%- endfor %} 65 | {% endif %} 66 | {% endblock %} 67 | -------------------------------------------------------------------------------- /docs/source/_templates/layout.html: -------------------------------------------------------------------------------- 1 | {% extends "!layout.html" %} 2 | {% set css_files = css_files + ["_static/style.css"] %} 3 | -------------------------------------------------------------------------------- /docs/source/api-hidden.rst: -------------------------------------------------------------------------------- 1 | .. Generate API reference pages, but don't display these in tables. 2 | .. This extra page is a work around for sphinx not having any support for 3 | .. hiding an autosummary table. 4 | 5 | API autosummary 6 | =============== 7 | 8 | .. Explicitly list submodules here 9 | .. autosummary:: 10 | :toctree: generated/ 11 | :recursive: 12 | :template: custom-module-template.rst 13 | 14 | imas 15 | -------------------------------------------------------------------------------- /docs/source/api.rst: -------------------------------------------------------------------------------- 1 | .. currentmodule:: imas 2 | 3 | API reference 4 | ============= 5 | 6 | This page provides an auto-generated summary of IMAS-Python's API. For more details 7 | and examples, refer to the relevant chapters in the main part of the 8 | documentation. 9 | 10 | IMAS-Python IDS manipulation 11 | ---------------------------- 12 | 13 | .. currentmodule:: imas 14 | 15 | .. autosummary:: 16 | 17 | db_entry.DBEntry 18 | ids_factory.IDSFactory 19 | ids_toplevel.IDSToplevel 20 | ids_primitive.IDSPrimitive 21 | ids_structure.IDSStructure 22 | ids_struct_array.IDSStructArray 23 | -------------------------------------------------------------------------------- /docs/source/ci_config.rst: -------------------------------------------------------------------------------- 1 | .. _`ci configuration`: 2 | 3 | CI configuration 4 | ================ 5 | 6 | IMAS-Python uses `ITER Bamboo `_ for CI. This page provides an overview 7 | of the CI Plan and deployment projects. 8 | 9 | CI Plan 10 | ------- 11 | 12 | The `IMAS-Python CI plan `_ consists of 4 types of jobs: 13 | 14 | Linting and DD ZIP 15 | This job is responsible for three things: 16 | 17 | 1. Verify that the ``IDSDef2MDSpreTree.xsl`` file matches the one in the Access 18 | Layer repository. This file is required for building MDSplus models, and the 19 | models built by IMAS-Python should match those built by the Access Layer. 20 | 2. Linting: run ``black`` and ``flake8`` on the IMAS-Python code base. See :ref:`code 21 | style and linting`. 22 | 3. Build the Data Dictionary zip file. This Task builds the Data Dictionary for all 23 | tagged releases since DD version ``3.22.0``. These are combined into the 24 | ``IDSDef.zip`` file, which is distributed with IMAS-Python. 25 | 26 | The ZIP file is built in a separate job, such that the subsequent test jobs can 27 | reuse this. 28 | 29 | The CI scripts executed in this job are: 30 | 31 | - ``ci/linting.sh`` 32 | - ``ci/build_dd_zip.sh`` 33 | 34 | Test with AL 35 | This runs all unit tests with pytest. 36 | Access Layer version that we test against: 37 | IMAS-AL-Core/5.4.3-intel-2023b 38 | 39 | The CI script executed in this job is ``ci/run_pytest.sh``, which expects the 40 | modules it needs to load as arguments.
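   For example, a hypothetical manual invocation with the modules that
   ``ci/run_pytest.sh`` loads by default when no arguments are given (adjust the
   module names to the AL version under test):

   .. code-block:: console

      $ ./ci/run_pytest.sh IMAS-AL-Core Java MDSplus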
41 | 42 | Cloning this job to test against a new AL version is easy: 43 | 44 | 1. On the "Default plan configuration" page of Bamboo, click "+ Add job" and select 45 | the option to "Clone an existing job". 46 | 2. Select one of the existing Test jobs. 47 | 3. Indicate the new AL version in the job name. 48 | 4. Click Create job 49 | 5. In the "Script" Task, update the module(s) in the Argument field 50 | 51 | Benchmark 52 | This job runs the :ref:`ASV benchmarks ` on the CI server. It 53 | is configured such that it can only run on a single CI agent 54 | (`io-ls-bamboowk6.iter.org`). There are two reasons for this: 55 | 56 | 1. Simplify the data I/O of the script - we can avoid file locks because there will 57 | only be a single Benchmark Job running globally. This is the main reason. 58 | 2. Benchmarks should be more reproducible when always run on the same machine. 59 | The agents are virtualized, though, so the performance will always depend to 60 | some extent on the load on the CI cluster. 61 | 62 | The CI script executed in this job is: ``ci/run_benchmark.sh``. 63 | 64 | Build docs and dists 65 | This job builds the Sphinx documentation and Python packages for IMAS-Python (``sdist`` 66 | and ``wheel``). 67 | 68 | The CI script executed in this job is: ``ci/build_docs_and_dist.sh``. 69 | 70 | 71 | Deployment projects 72 | ------------------- 73 | 74 | There are GitHub workflows for IMAS-Python: 75 | 76 | `IMAS-Python-PyPi `_ 77 | Deploys the Python packages to the https://pypi.org/ and https://test.pypi.org/ servers. 78 | You can find the published package here: `IMAS-Python `_ 79 | 80 | 81 | `Deploy IMAS-Python-doc `_ 82 | Deploys the documentation using `readthedocs 83 | `_. 84 | 85 | -------------------------------------------------------------------------------- /docs/source/code_style.rst: -------------------------------------------------------------------------------- 1 | .. _`code style and linting`: 2 | 3 | Code style and linting 4 | ====================== 5 | 6 | 7 | Code style 8 | ---------- 9 | 10 | IMAS-Python follows `The Black Code Style 11 | `_. All Python 12 | files should be formatted with the ``black`` command line tool (this is checked in 13 | :ref:`CI `). 14 | 15 | 16 | Why Black? 17 | '''''''''' 18 | 19 | We use the black autoformatter, so the code style is uniform across all Python files, 20 | regardless of the developer that created the code 🙂. 21 | 22 | This improves the efficiency of developers working on the project: 23 | 24 | - Uniform code style makes it easier to read, review and understand others' code. 25 | - Autoformatting code means that developers can save time and mental energy for the 26 | important matters. 27 | 28 | More reasons for using black can be found on `their website 29 | `_. 30 | 31 | 32 | Using Black 33 | ''''''''''' 34 | 35 | The easiest way to work with Black is by using an integration with your editor. See 36 | https://black.readthedocs.io/en/stable/integrations/editors.html. 37 | 38 | You can also ``pip install black`` and run it every time before committing (manually or 39 | with pre-commit hooks): 40 | 41 | .. code-block:: console 42 | 43 | $ black imas 44 | All done! ✨ 🍰 ✨ 45 | 66 files left unchanged. 46 | 47 | 48 | Linting 49 | ------- 50 | 51 | IMAS-Python uses `flake8 `_ for linting (static code 52 | analysis). Flake8 should not report any violations when running it on the ``imas`` 53 | code base. Again, this is checked in CI. 54 | 55 | In some exceptional cases we can ignore a violation.
For example, if a violation cannot be 56 | avoided, or fixing it would result in less readable code. This should be avoided as much 57 | as possible, though. 58 | 59 | 60 | Why linting? 61 | '''''''''''' 62 | 63 | Because it results in more readable code and can prevent some types of bugs! 64 | 65 | 66 | Using flake8 67 | '''''''''''' 68 | 69 | Again, the easiest way to work with the ``flake8`` linter is by using an integration 70 | with your editor. 71 | 72 | You can also ``pip install flake8`` and run it every time before committing to check if 73 | your code introduces any violations: 74 | 75 | .. code-block:: console 76 | 77 | $ flake8 imas 78 | 79 | -------------------------------------------------------------------------------- /docs/source/configuring.rst: -------------------------------------------------------------------------------- 1 | Configuring IMAS-Python 2 | ======================= 3 | 4 | IMAS-Python has a couple of environment variables that can be used to control its behaviour. 5 | This page provides an overview of available variables. 6 | 7 | .. note:: 8 | 9 | In addition to the listed environment variables, the IMAS Core library also has 10 | environment variables available to control its behaviour. See the `IMAS Core 11 | documentation 12 | `_ 13 | 14 | 15 | ``IMAS_LOGLEVEL`` 16 | Sets the log level used by the IMAS-Python logger. 17 | 18 | By default (when this environment variable is not set), all log messages of ``INFO`` 19 | or more severe are logged. You may set this to, for example, 20 | ``IMAS_LOGLEVEL=WARNING``, to suppress some of the log messages. 21 | 22 | See the Python documentation for the :external:py:mod:`logging` module to see which 23 | log levels are available. 24 | 25 | .. note:: 26 | 27 | This environment variable is read when the ``imas`` library is initialized 28 | during the first ``import imas``. Changing it afterwards has no effect, but 29 | you can use :external:py:meth:`logging.getLogger("imas").setLevel(...) 30 | ` to change the log level programmatically. 31 | 32 | 33 | ``IMAS_DISABLE_NC_VALIDATE`` 34 | Disables validation of netCDF files when loading an IDS from an IMAS netCDF file. 35 | 36 | .. caution:: 37 | Disabling the validation may lead to errors when reading data from an IMAS netCDF file. 38 | 39 | ``IMAS_VERSION`` 40 | Sets :ref:`The default Data Dictionary version` to use. 41 | 42 | 43 | Environment variables shared with the IMAS Python HLI 44 | ----------------------------------------------------- 45 | 46 | ``IMAS_AL_DISABLE_VALIDATE`` 47 | By default, IMAS-Python :ref:`validates ` IDSs to check that all data is 48 | consistent with their coordinates during a :py:meth:`~imas.db_entry.DBEntry.put` 49 | or :py:meth:`~imas.db_entry.DBEntry.put_slice`. 50 | 51 | Setting ``IMAS_AL_DISABLE_VALIDATE=1`` disables this validation. 52 | 53 | ``IMAS_AL_SERIALIZER_TMP_DIR`` 54 | Specify the path for storing temporary data during 55 | :py:meth:`~imas.ids_toplevel.IDSToplevel.serialize` and 56 | :py:meth:`~imas.ids_toplevel.IDSToplevel.deserialize`. 57 | 58 | If it is not set, the default location ``/dev/shm/`` or the current working 59 | directory will be chosen. 60 | -------------------------------------------------------------------------------- /docs/source/courses/advanced/hashing.rst: -------------------------------------------------------------------------------- 1 | Calculating hashes of IMAS data 2 | =============================== 3 | 4 | IMAS-Python can calculate *hashes* of IMAS data.
As `Wikipedia explains better than I could 5 | do `__: 6 | 7 | A hash function is any function that can be used to map data of arbitrary size to 8 | fixed-size values, [...]. The values returned by a hash function are called *hash 9 | values*, *hash codes*, *hash digests*, *digests*, or simply *hashes*. 10 | 11 | IMAS-Python uses the XXH3 hash function from the `xxHash project 12 | `__. This is a *non-cryptographic* hash and returns 13 | 64-bit hashes. 14 | 15 | 16 | Use cases 17 | --------- 18 | 19 | Hashes of IMAS data are probably most useful as *checksums*: when the hashes of two IDSs 20 | match, there is `a very decent chance `__ 21 | that they contain identical data. [#collision]_ This can be useful to verify data 22 | integrity, and detect whether data has been accidentally corrupted or altered. 23 | 24 | .. [#collision] Note that it is possible to construct two IDSs that share the same 25 | *hash* but have completely different data. Constructing such a collision is tricky, 26 | however, and should rarely occur in practice. 27 | 28 | 29 | Exercise 1: Calculate some hashes 30 | --------------------------------- 31 | 32 | .. md-tab-set:: 33 | 34 | .. md-tab-item:: Exercise 35 | 36 | In this exercise we will use :py:func:`imas.util.calc_hash` to calculate 37 | hashes of some IDSs. Use :external:py:meth:`bytes.hex` to show a more readable 38 | hexadecimal format of the hash. 39 | 40 | 1. Create an empty ``equilibrium`` IDS and print its hash. 41 | 2. Now fill ``ids_properties.homogeneous_time`` and print the hash. Did it 42 | change? 43 | 3. Resize the ``time_slice`` Array of Structures to size 2. Calculate the hash 44 | of ``time_slice[0]`` and ``time_slice[1]``. What do you notice? 45 | 4. Resize ``time_slice[0].profiles_2d`` to size 1. For convenience, you can 46 | create a variable ``p2d = time_slice[0].profiles_2d[0]``. 47 | 5. Fill ``p2d.r = [[1., 2.]]`` and ``p2d.z = p2d.r``, then calculate their 48 | hashes. What do you notice? 49 | 6. ``del p2d.z`` and calculate the hash of ``p2d``. Then set ``p2d.z = p2d.r`` 50 | and ``del p2d.r``. What do you notice? 51 | 52 | .. md-tab-item:: Solution 53 | 54 | .. literalinclude:: imas_snippets/hashing.py 55 | 56 | 57 | Properties of IMAS-Python's hashes 58 | ---------------------------------- 59 | 60 | The implementation of the hash function has the following properties: 61 | 62 | - Only fields that are filled are included in the hash. 63 | 64 | If a newer version of the Data Dictionary introduces additional data fields, then 65 | this won't affect the hash of your data. 66 | 67 | As long as there are no Non Backwards Compatible changes in the Data Dictionary for 68 | the filled fields, the data hashes should not change. 69 | 70 | - The ``ids_properties/version_put`` structure is not included in the hash. 71 | 72 | This means that the precise Access Layer version, Data Dictionary version or high 73 | level interface that was used to store the data, does not affect the hash of the 74 | data. 75 | 76 | - Hashes are different for ND arrays with different shapes that share the same 77 | underlying data. 78 | 79 | For example, the following arrays are stored the same way in your RAM, but 80 | they result in different hashes: 81 | 82 | .. code-block:: python 83 | 84 | array1 = [1, 2] 85 | array2 = [[1, 2]] 86 | array3 = [[1], 87 | [2]] 88 | 89 | 90 | Technical details and specification 91 | ----------------------------------- 92 | 93 | You can find the technical details, and a specification for calculating the hashes, in 94 | the documentation of :py:meth:`imas.util.calc_hash`.
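For reference, a minimal sketch of computing and displaying such a hash (the IDS and the fields filled below are arbitrary examples):

.. code-block:: python

    import imas

    equilibrium = imas.IDSFactory().equilibrium()
    equilibrium.ids_properties.homogeneous_time = (
        imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS
    )

    # calc_hash returns 8 bytes (a 64-bit XXH3 digest);
    # bytes.hex() renders them as 16 hexadecimal characters
    print(imas.util.calc_hash(equilibrium).hex())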
95 | -------------------------------------------------------------------------------- /docs/source/courses/advanced/imas_snippets/alternative_coordinates.py: -------------------------------------------------------------------------------- 1 | import imas 2 | 3 | # 1. Create an empty distributions IDS 4 | distributions = imas.IDSFactory().distributions() 5 | 6 | # 2. Use the metadata attribute to find the coordinates of 7 | # distribution/profiles_2d/density 8 | print(distributions.metadata["distribution/profiles_2d/density"].coordinates) 9 | # Alternative, by resizing the Arrays of Structures: 10 | distributions.distribution.resize(1) 11 | distributions.distribution[0].profiles_2d.resize(1) 12 | p2d = distributions.distribution[0].profiles_2d[0] 13 | print(p2d.density.metadata.coordinates) 14 | # This outputs (newlines added for clarity): 15 | # (IDSCoordinate('distribution(i1)/profiles_2d(itime)/grid/r 16 | # OR distribution(i1)/profiles_2d(itime)/grid/rho_tor_norm'), 17 | # IDSCoordinate('distribution(i1)/profiles_2d(itime)/grid/z 18 | # OR distribution(i1)/profiles_2d(itime)/grid/theta_geometric 19 | # OR distribution(i1)/profiles_2d(itime)/grid/theta_straight')) 20 | # 21 | # What do you notice: in both dimensions there are multiple options for the coordinate. 22 | 23 | # 3. Retrieve the coordinate values through the ``coordinates`` attribute. 24 | # This will raise a coordinate lookup error because IMAS-Python cannot choose which of the 25 | # coordinates to use: 26 | try: 27 | print(p2d.density.coordinates[0]) 28 | except Exception as exc: 29 | print(exc) 30 | 31 | # 4a. Use the IDSCoordinate.references attribute: 32 | # Example for the first dimension: 33 | coordinate_options = p2d.density.metadata.coordinates[0].references 34 | # 4b. Use IDSPath.goto: 35 | for option in coordinate_options: 36 | coordinate_node = option.goto(p2d.density) 37 | print(coordinate_node) 38 | # This will print: 39 | # 40 | # 41 | -------------------------------------------------------------------------------- /docs/source/courses/advanced/imas_snippets/autoconvert_get.py: -------------------------------------------------------------------------------- 1 | import imas 2 | from imas.ids_defs import ASCII_BACKEND, IDS_TIME_MODE_HOMOGENEOUS 3 | from imas.util import get_data_dictionary_version 4 | 5 | # 1. 
Create test data 6 | # Create an IDSFactory for DD 3.25.0 7 | factory = imas.IDSFactory("3.25.0") 8 | 9 | # Create a pulse_schedule IDS 10 | pulse_schedule = factory.new("pulse_schedule") 11 | 12 | # Fill the IDS with some test data 13 | pulse_schedule.ids_properties.homogeneous_time = IDS_TIME_MODE_HOMOGENEOUS 14 | pulse_schedule.ids_properties.comment = "Testing renamed IDS nodes with IMAS-Python" 15 | pulse_schedule.time = [1.0, 1.1, 1.2] 16 | 17 | pulse_schedule.ec.antenna.resize(1) 18 | antenna = pulse_schedule.ec.antenna[0] 19 | antenna.name = "ec.antenna[0].name in DD 3.25.0" 20 | antenna.launching_angle_pol.reference_name = ( 21 | "ec.antenna[0].launching_angle_pol.reference_name in DD 3.25.0" 22 | ) 23 | antenna.launching_angle_pol.reference.data = [2.1, 2.2, 2.3] 24 | antenna.launching_angle_tor.reference_name = ( 25 | "ec.antenna[0].launching_angle_tor.reference_name in DD 3.25.0" 26 | ) 27 | antenna.launching_angle_tor.reference.data = [3.1, 3.2, 3.3] 28 | antenna.phase.reference_name = "Phase reference name" 29 | 30 | # And store the IDS in a DBEntry using DD 3.25.0 31 | entry = imas.DBEntry(ASCII_BACKEND, "autoconvert", 1, 1, dd_version="3.25.0") 32 | entry.create() 33 | entry.put(pulse_schedule) 34 | entry.close() 35 | 36 | # 2. Reopen the DBEntry with DD 3.42.0: 37 | entry = imas.DBEntry(ASCII_BACKEND, "autoconvert", 1, 1, dd_version="3.42.0") 38 | entry.open() 39 | 40 | # 3. Get the pulse schedule IDS 41 | ps_autoconvert = entry.get("pulse_schedule") 42 | 43 | print(f"{ps_autoconvert.ids_properties.version_put.data_dictionary=!s}") 44 | print(f"{get_data_dictionary_version(ps_autoconvert)=!s}") 45 | # What do you notice? 46 | # version_put: 3.25.0 47 | # get_data_dictionary_version: 3.42.0 -> the IDS was automatically converted 48 | 49 | # 4. Print the data in the loaded IDS 50 | imas.util.print_tree(ps_autoconvert) 51 | # What do you notice? 52 | # 1. The antenna AoS was renamed 53 | # 2. Several nodes no longer exist! 54 | 55 | print() 56 | print("Disable autoconvert:") 57 | print("====================") 58 | # 5. Repeat steps 3 and 4 with autoconvert disabled: 59 | ps_noconvert = entry.get("pulse_schedule", autoconvert=False) 60 | 61 | print(f"{ps_noconvert.ids_properties.version_put.data_dictionary=!s}") 62 | print(f"{get_data_dictionary_version(ps_noconvert)=!s}") 63 | # What do you notice? 64 | # version_put: 3.25.0 65 | # get_data_dictionary_version: 3.25.0 -> the IDS was not converted! 66 | 67 | # Print the data in the loaded IDS 68 | imas.util.print_tree(ps_noconvert) 69 | # What do you notice? 70 | # All data is here exactly as it was put at the beginning of this exercise. 71 | -------------------------------------------------------------------------------- /docs/source/courses/advanced/imas_snippets/autoconvert_put.py: -------------------------------------------------------------------------------- 1 | import imas 2 | import imas.training 3 | from imas.util import get_data_dictionary_version 4 | 5 | # 1. Load the training data for the ``core_profiles`` IDS 6 | entry = imas.training.get_training_db_entry() 7 | core_profiles = entry.get("core_profiles") 8 | 9 | # 2. Print the DD version: 10 | print(get_data_dictionary_version(core_profiles)) 11 | 12 | # 3. Create a new DBEntry with DD version 3.37.0 13 | new_entry = imas.DBEntry( 14 | imas.ids_defs.MEMORY_BACKEND, "test", 0, 0, dd_version="3.37.0" 15 | ) 16 | new_entry.create() 17 | 18 | # 4. Put the core_profiles IDS in the new DBEntry 19 | new_entry.put(core_profiles) 20 | 21 | # 5.
Print version_put.data_dictionary 22 | print(core_profiles.ids_properties.version_put.data_dictionary) 23 | # -> 3.37.0 24 | # What do you notice? 25 | # The IDS was converted to the DD version of the DBEntry (3.37.0) when writing the 26 | # data to the backend. 27 | -------------------------------------------------------------------------------- /docs/source/courses/advanced/imas_snippets/calc_with_units.py: -------------------------------------------------------------------------------- 1 | import itertools # python standard library iteration tools 2 | 3 | import imas 4 | import imas.training 5 | import pint 6 | 7 | # 1. Load core_profiles IDS from training DBEntry 8 | entry = imas.training.get_training_db_entry() 9 | cp = entry.get("core_profiles") 10 | 11 | # 2. Select the first time slice of profiles_1d 12 | p1d = cp.profiles_1d[0] 13 | 14 | # 3. 15 | # Create pint UnitRegistry 16 | ureg = pint.UnitRegistry() 17 | 18 | # Convert DD units to Pint Units 19 | _dd_to_pint = { 20 | "-": ureg("dimensionless"), 21 | "Atomic Mass Unit": ureg("unified_atomic_mass_unit"), 22 | "Elementary Charge Unit": ureg("elementary_charge"), 23 | } 24 | def dd_to_pint(dd_unit): 25 | if dd_unit in _dd_to_pint: 26 | return _dd_to_pint[dd_unit] 27 | return ureg(dd_unit) 28 | # End of translation 29 | 30 | # 4. Calculate mass density: 31 | # 4a. Create mass_density variable with units: 32 | mass_density = ureg("0 kg.m^-3") 33 | # 4b. Loop over all ion and neutral species 34 | for species in itertools.chain(p1d.ion, p1d.neutral): 35 | mass = sum( 36 | element.a * dd_to_pint(element.a.metadata.units) 37 | for element in species.element 38 | ) 39 | density = species.density * dd_to_pint(species.density.metadata.units) 40 | mass_density += mass * density 41 | 42 | # 4c. Print the total mass density 43 | print(mass_density) 44 | # Note that the species mass is given in Atomic Mass Units, but pint 45 | # automatically converted this to kilograms for us, because we defined 46 | # mass_density in kg/m^3! 47 | -------------------------------------------------------------------------------- /docs/source/courses/advanced/imas_snippets/coordinates.py: -------------------------------------------------------------------------------- 1 | import imas.training 2 | 3 | # 1. Load the training data for the core_profiles IDS: 4 | entry = imas.training.get_training_db_entry() 5 | core_profiles = entry.get("core_profiles") 6 | 7 | # 1a. Print the coordinate of profiles_1d[0].electrons.temperature 8 | print(core_profiles.profiles_1d[0].electrons.temperature.coordinates[0]) 9 | # Do you recognize the coordinate? Yes, as shown in the first line of the output, this 10 | # is "profiles_1d[0]/grid/rho_tor_norm". 11 | 12 | # 1b. Print the coordinate of profiles_1d: 13 | print(core_profiles.profiles_1d.coordinates[0]) 14 | # What do you notice? This prints the core_profiles.time array: 15 | # 16 | # numpy.ndarray([ 3.98722186, 432.93759781, 792. ]) 17 | 18 | # 1c. Change the time mode and print again 19 | core_profiles.ids_properties.homogeneous_time = \ 20 | imas.ids_defs.IDS_TIME_MODE_HETEROGENEOUS 21 | print(core_profiles.profiles_1d.coordinates[0]) 22 | # What has changed? Now we get a numpy array with values -9e+40: 23 | # [-9.e+40 -9.e+40 -9.e+40] 24 | # 25 | # In heterogeneous time, the coordinate of profiles_1d is profiles_1d/time, which is a 26 | # scalar. 
IMAS-Python will construct a numpy array for you where 27 | # array[i] := profiles_1d[i]/time 28 | # Since we didn't set these values, they are set to the default EMPTY_FLOAT, which is 29 | # -9e+40. 30 | 31 | # 2. Load the training data for the equilibrium IDS: 32 | equilibrium = entry.get("equilibrium") 33 | 34 | # 2a. What is the coordinate of time_slice/profiles_2d? 35 | slice0 = equilibrium.time_slice[0] 36 | print(slice0.profiles_2d.metadata.coordinates) 37 | # This will output: 38 | # (IDSCoordinate('1...N'),) 39 | # The coordinate of profiles_2d is an index. When requesting the coordinate values, 40 | # IMAS-Python will generate an index array for you: 41 | print(slice0.profiles_2d.coordinates[0]) 42 | # -> array([0]) 43 | 44 | # 2b. What are the coordinates of ``time_slice/profiles_2d/b_field_r``? 45 | print(slice0.profiles_2d[0].b_field_r.metadata.coordinates) 46 | # This is a 2D array and therefore there are two coordinates: 47 | # (IDSCoordinate('time_slice(itime)/profiles_2d(i1)/grid/dim1'), 48 | # IDSCoordinate('time_slice(itime)/profiles_2d(i1)/grid/dim2')) 49 | -------------------------------------------------------------------------------- /docs/source/courses/advanced/imas_snippets/dd_versions.py: -------------------------------------------------------------------------------- 1 | import imas 2 | from imas.util import get_data_dictionary_version 3 | 4 | # 1. Create an IDSFactory 5 | default_factory = imas.IDSFactory() 6 | 7 | # 2. Print the DD version used by the IDSFactory 8 | # 9 | # This factory will use the default DD version, because we didn't explicitly indicate 10 | # which version of the DD we want to use: 11 | print("Default DD version:", default_factory.version) 12 | 13 | # 3. Create an empty IDS 14 | pf_active = default_factory.new("pf_active") 15 | print("DD version used for pf_active:", get_data_dictionary_version(pf_active)) 16 | # What do you notice? This is the same version as the IDSFactory that was used to create 17 | # it. 18 | 19 | # 4. Create a new DBEntry 20 | default_entry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "test", 0, 0) 21 | default_entry.create() 22 | # Alternative URI syntax when using AL5.0.0: 23 | # default_entry = imas.DBEntry("imas:memory?path=.") 24 | print("DD version used for the DBEntry:", get_data_dictionary_version(default_entry)) 25 | # What do you notice? It is the same default version again. 26 | -------------------------------------------------------------------------------- /docs/source/courses/advanced/imas_snippets/explore_data.py: -------------------------------------------------------------------------------- 1 | import imas 2 | import imas.training 3 | from imas.util import get_full_path 4 | 5 | # 1. Load the training data equilibrium IDS 6 | entry = imas.training.get_training_db_entry() 7 | equilibrium = entry.get("equilibrium") 8 | 9 | 10 | # 2. Function that prints the path, shape and size of an IDS node 11 | def print_path_shape_size(node): 12 | print(f"{get_full_path(node):40}: shape {node.shape} with total {node.size} items.") 13 | 14 | 15 | # 3. Apply to equilibrium IDS 16 | imas.util.visit_children(print_path_shape_size, equilibrium) 17 | print() 18 | 19 | 20 | # 4. 
Update function to skip 0D nodes 21 | def print_path_shape_size_not0d(node): 22 | if node.metadata.ndim == 0: 23 | return 24 | print(f"{get_full_path(node):40}: shape {node.shape} with total {node.size} items.") 25 | 26 | 27 | # And apply to the equilibrium IDS 28 | imas.util.visit_children(print_path_shape_size_not0d, equilibrium) 29 | -------------------------------------------------------------------------------- /docs/source/courses/advanced/imas_snippets/explore_structures.py: -------------------------------------------------------------------------------- 1 | import imas 2 | import imas.training 3 | 4 | # 1. Load the equilibrium IDS from the training data 5 | entry = imas.training.get_training_db_entry() 6 | equilibrium = entry.get("equilibrium") 7 | 8 | # 2. Print non-empty child nodes 9 | print("The following child nodes of the equilibrium IDS are filled:") 10 | for child_node in equilibrium.iter_nonempty_(): 11 | print('-', child_node.metadata.name) 12 | print() 13 | 14 | # 3. Print child nodes of ids_properties 15 | print("equilibrium/ids_properties has the following child nodes:") 16 | for child_node in equilibrium.ids_properties: 17 | print(f"- {child_node.metadata.name}: {child_node!r}") 18 | -------------------------------------------------------------------------------- /docs/source/courses/advanced/imas_snippets/hashing.py: -------------------------------------------------------------------------------- 1 | import imas 2 | 3 | # 1. Create IDS 4 | eq = imas.IDSFactory().equilibrium() 5 | print(imas.util.calc_hash(eq).hex(' ', 2)) # 2d06 8005 38d3 94c2 6 | 7 | # 2. Update homogeneous_time 8 | eq.ids_properties.homogeneous_time = 0 9 | print(imas.util.calc_hash(eq).hex(' ', 2)) # 3b9b 9297 56a2 42fd 10 | # Yes: the hash changed (significantly!). This was expected, because the data is no 11 | # longer the same 12 | 13 | # 3. Resize time_slice 14 | eq.time_slice.resize(2) 15 | print(imas.util.calc_hash(eq.time_slice[0]).hex(' ', 2)) # 2d06 8005 38d3 94c2 16 | print(imas.util.calc_hash(eq.time_slice[1]).hex(' ', 2)) # 2d06 8005 38d3 94c2 17 | # What do you notice? 18 | # 19 | # The hashes of both time_slice[0] and time_slice[1] are identical, because both 20 | # contain no data. 21 | # 22 | # The hashes are also identical to the empty IDS hash from step 1. An IDS, or a 23 | # structure within an IDS, that has no fields filled will always have this hash value. 24 | 25 | # 4. Resize profiles_2d 26 | eq.time_slice[0].profiles_2d.resize(1) 27 | p2d = eq.time_slice[0].profiles_2d[0] 28 | 29 | # 5. Fill data 30 | p2d.r = [[1., 2.]] 31 | p2d.z = p2d.r 32 | print(imas.util.calc_hash(p2d.r).hex(' ', 2)) # 352b a6a6 b40c 708d 33 | print(imas.util.calc_hash(p2d.z).hex(' ', 2)) # 352b a6a6 b40c 708d 34 | # These hashes are identical, because they contain the same data 35 | 36 | # 6. Only r or z 37 | del p2d.z 38 | print(imas.util.calc_hash(p2d).hex(' ', 2)) # 0dcb ddaa 78ea 83a3 39 | p2d.z = p2d.r 40 | del p2d.r 41 | print(imas.util.calc_hash(p2d).hex(' ', 2)) # f86b 8ea8 9652 3768 42 | # Although the data inside `r` and `z` is identical, we get different hashes because the 43 | # data is in a different attribute. 44 | -------------------------------------------------------------------------------- /docs/source/courses/advanced/imas_snippets/ids_convert.py: -------------------------------------------------------------------------------- 1 | import imas 2 | from imas.util import get_data_dictionary_version 3 | 4 | # 1. 
Create an IDSFactory for DD 3.25.0 5 | factory = imas.IDSFactory("3.25.0") 6 | 7 | # 2. Create a pulse_schedule IDS 8 | pulse_schedule = factory.new("pulse_schedule") 9 | print(get_data_dictionary_version(pulse_schedule))  # This should print 3.25.0 10 | 11 | # 3. Fill the IDS with some test data 12 | pulse_schedule.ids_properties.homogeneous_time = \ 13 | imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS 14 | pulse_schedule.ids_properties.comment = \ 15 | "Testing renamed IDS nodes with IMAS-Python" 16 | pulse_schedule.time = [1., 1.1, 1.2] 17 | 18 | pulse_schedule.ec.antenna.resize(1) 19 | antenna = pulse_schedule.ec.antenna[0] 20 | antenna.name = "ec.antenna[0].name in DD 3.25.0" 21 | antenna.launching_angle_pol.reference_name = \ 22 | "ec.antenna[0].launching_angle_pol.reference_name in DD 3.25.0" 23 | antenna.launching_angle_pol.reference.data = [2.1, 2.2, 2.3] 24 | antenna.launching_angle_tor.reference_name = \ 25 | "ec.antenna[0].launching_angle_tor.reference_name in DD 3.25.0" 26 | antenna.launching_angle_tor.reference.data = [3.1, 3.2, 3.3] 27 | 28 | # 4. Convert the IDS from version 3.25.0 to 3.39.0 29 | pulse_schedule_3_39 = imas.convert_ids(pulse_schedule, "3.39.0") 30 | 31 | # Check that the data is converted 32 | imas.util.print_tree(pulse_schedule_3_39) 33 | 34 | # 5. Update time data 35 | pulse_schedule.time[1] = 3 36 | # Yes, the time array of the converted IDS is updated as well: 37 | print(pulse_schedule_3_39.time)  # [1., 3., 1.2] 38 | 39 | # 6. Update ids_properties/comment 40 | pulse_schedule.ids_properties.comment = "Updated comment" 41 | print(pulse_schedule_3_39.ids_properties.comment) 42 | # What do you notice? 43 | # This prints the original value of the comment ("Testing renamed IDS 44 | # nodes with IMAS-Python"). 45 | # This is actually the same behaviour that you get when creating a shallow copy 46 | # with ``copy.copy`` of a regular Python dictionary: 47 | import copy 48 | 49 | dict1 = {"a list": [1, 1.1, 1.2], "a string": "Some text"} 50 | dict2 = copy.copy(dict1) 51 | print(dict2)  # {"a list": [1, 1.1, 1.2], "a string": "Some text"} 52 | # dict2 is a shallow copy, so dict1["a list"] and dict2["a list"] are 53 | # the exact same object, and updating it is reflected in both dicts: 54 | dict1["a list"][1] = 3 55 | print(dict2)  # {"a list": [1, 3, 1.2], "a string": "Some text"} 56 | # Replacing a value in one dict doesn't update the other: 57 | dict1["a string"] = "Some different text" 58 | print(dict2)  # {"a list": [1, 3, 1.2], "a string": "Some text"} 59 | 60 | # 7. Set phase.reference_name: 61 | pulse_schedule.ec.antenna[0].phase.reference_name = "Test refname" 62 | # And convert again 63 | pulse_schedule_3_39 = imas.convert_ids(pulse_schedule, "3.39.0") 64 | imas.util.print_tree(pulse_schedule_3_39) 65 | # What do you notice? 66 | # Element 'ec/antenna/phase' does not exist in the target IDS. Data is not copied. 67 | -------------------------------------------------------------------------------- /docs/source/courses/advanced/imas_snippets/ids_to_xarray.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import matplotlib 4 | # To avoid possible display issues when Matplotlib uses a non-GUI backend 5 | if "DISPLAY" not in os.environ: 6 | matplotlib.use("agg") 7 | else: 8 | matplotlib.use("TKagg") 9 | 10 | import matplotlib.pyplot as plt 11 | import numpy 12 | import imas 13 | import imas.training 14 | import xarray 15 | 16 | # 1.
Load core_profiles IDS from training DBEntry 17 | entry = imas.training.get_training_db_entry() 18 | cp = entry.get("core_profiles") 19 | 20 | # 2. Store the temperature of the first time slice 21 | temperature = cp.profiles_1d[0].t_i_average 22 | 23 | # 3. Get the required labels and data: 24 | data = temperature 25 | coordinates = { 26 | coordinate.metadata.name: coordinate 27 | for coordinate in data.coordinates 28 | } 29 | attributes = {"units": data.metadata.units} 30 | name = data.metadata.name 31 | 32 | # 4. Create the DataArray 33 | temperature = xarray.DataArray(data, coords=coordinates, attrs=attributes, name=name) 34 | print(temperature) 35 | 36 | # 5a. Select subset of temperature where 0.4 <= rho_tor_norm < 0.6: 37 | print(temperature.sel(rho_tor_norm=slice(0.4, 0.6))) 38 | 39 | # 5b. Interpolate temperature on a new grid: [0, 0.1, 0.2, ..., 0.9, 1.0] 40 | print(temperature.interp(rho_tor_norm=numpy.linspace(0, 1, 11))) 41 | 42 | # 5c. Plot 43 | temperature.plot() 44 | plt.show() 45 | -------------------------------------------------------------------------------- /docs/source/courses/advanced/imas_snippets/tensorized_ids_to_xarray.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import matplotlib 4 | 5 | # To avoid possible display issues when Matplotlib uses a non-GUI backend 6 | if "DISPLAY" not in os.environ: 7 | matplotlib.use("agg") 8 | else: 9 | matplotlib.use("TKagg") 10 | 11 | import matplotlib.pyplot as plt 12 | import numpy 13 | import imas 14 | import imas.training 15 | import xarray 16 | 17 | # 1. Load core_profiles IDS from training DBEntry 18 | entry = imas.training.get_training_db_entry() 19 | cp = entry.get("core_profiles") 20 | 21 | ####################################################################################### 22 | # Steps 2, 3 and 4, using imas.util.to_xarray 23 | # Create an xarray Dataset containing t_i_average and its coordinates 24 | xrds = imas.util.to_xarray(cp, "profiles_1d/t_i_average") 25 | # Note that profiles_1d.grid.rho_tor_norm is a 2D coordinate: its values may be 26 | # different at different times. 27 | # 28 | # Since the values at different time slices differ only minutely in this example, we'll 29 | # rename the `profiles_1d.grid.rho_tor_norm:i` dimension to `rho_tor_norm` and set the 30 | # values to the values of rho_tor_norm of the first time slice: 31 | xrds = xrds.rename({"profiles_1d.grid.rho_tor_norm:i": "rho_tor_norm"}).assign_coords( 32 | {"rho_tor_norm": xrds["profiles_1d.grid.rho_tor_norm"].isel(time=0).data} 33 | ) 34 | 35 | # Extract temperatures as an xarray DataArray 36 | temperature = xrds["profiles_1d.t_i_average"] 37 | 38 | # 5a. Select subset of temperature where 0.4 <= rho_tor_norm < 0.6: 39 | print(temperature.sel(rho_tor_norm=slice(0.4, 0.6))) 40 | 41 | # 5b. Interpolate temperature on a new grid: [0, 0.1, 0.2, ..., 0.9, 1.0] 42 | print(temperature.interp(rho_tor_norm=numpy.linspace(0, 1, 11))) 43 | 44 | # 5c. Interpolate temperature on a new time base: [10, 20] 45 | print(temperature.interp(time=[10, 20])) 46 | 47 | # 5d. Plot 48 | temperature.plot(x="time", norm=matplotlib.colors.LogNorm()) 49 | plt.show() 50 | 51 | ####################################################################################### 52 | # We can also manually build an xarray DataArray, this is shown below: 53 | 54 | # 2. 
Store the temperature of the first time slice 55 | temperature = cp.profiles_1d[0].t_i_average 56 | 57 | # Verify that the coordinates don't change 58 | for p1d in cp.profiles_1d: 59 | assert numpy.allclose(p1d.t_i_average.coordinates[0], temperature.coordinates[0]) 60 | 61 | # 3. Get the required labels and data: 62 | # Concatenate all temperature arrays: 63 | data = numpy.array([p1d.t_i_average for p1d in cp.profiles_1d]) 64 | coordinates = { 65 | "time": cp.profiles_1d.coordinates[0], 66 | **{ 67 | coordinate.metadata.name: coordinate 68 | for coordinate in temperature.coordinates 69 | } 70 | } 71 | attributes = {"units": temperature.metadata.units} 72 | name = "t_i_average" 73 | 74 | # 4. Create the DataArray 75 | temperature = xarray.DataArray(data, coords=coordinates, attrs=attributes, name=name) 76 | print(temperature) 77 | 78 | # 5a. Select subset of temperature where 0.4 <= rho_tor_norm < 0.6: 79 | print(temperature.sel(rho_tor_norm=slice(0.4, 0.6))) 80 | 81 | # 5b. Interpolate temperature on a new grid: [0, 0.1, 0.2, ..., 0.9, 1.0] 82 | print(temperature.interp(rho_tor_norm=numpy.linspace(0, 1, 11))) 83 | 84 | # 5c. Interpolate temperature on a new time base: [10, 20] 85 | print(temperature.interp(time=[10, 20])) 86 | 87 | # 5d. Plot 88 | temperature.plot(x="time", norm=matplotlib.colors.LogNorm()) 89 | plt.show() 90 | -------------------------------------------------------------------------------- /docs/source/courses/advanced_user_training.rst: -------------------------------------------------------------------------------- 1 | Advanced IMAS-Python 2 | ==================== 3 | 4 | In this IMAS-Python training, we dive into more advanced features of IMAS-Python. It is assumed 5 | you are familiar with the basic features of IMAS-Python, which are introduced in the 6 | :ref:`IMAS-Python 101` training. 7 | 8 | .. note:: 9 | 10 | Some of the sections use the additional Python packages ``pint`` or ``xarray``. 11 | These can be installed by following the instructions on their websites: 12 | 13 | - `Installing pint `_ 14 | - `Installing xarray 15 | `_ 16 | 17 | If you use ``pip`` you can do: ``pip install pint xarray``. 18 | 19 | .. 
toctree:: 20 | :caption: Training contents 21 | :maxdepth: 1 22 | 23 | advanced/metadata 24 | advanced/explore 25 | advanced/dd_versions 26 | advanced/xarray 27 | advanced/hashing 28 | -------------------------------------------------------------------------------- /docs/source/courses/basic/core_profiles_ne_timeslice.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iterorganization/IMAS-Python/09104ac4360cc8cf220db15507b789d25ca977ba/docs/source/courses/basic/core_profiles_ne_timeslice.png -------------------------------------------------------------------------------- /docs/source/courses/basic/core_profiles_te.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iterorganization/IMAS-Python/09104ac4360cc8cf220db15507b789d25ca977ba/docs/source/courses/basic/core_profiles_te.png -------------------------------------------------------------------------------- /docs/source/courses/basic/imas_inspect.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iterorganization/IMAS-Python/09104ac4360cc8cf220db15507b789d25ca977ba/docs/source/courses/basic/imas_inspect.png -------------------------------------------------------------------------------- /docs/source/courses/basic/imas_snippets/create_core_profiles.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | import imas 4 | import numpy as np 5 | 6 | 7 | factory = imas.IDSFactory() 8 | cp = factory.new("core_profiles") 9 | # Alternative 10 | cp = factory.core_profiles() 11 | 12 | # Set properties 13 | cp.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS 14 | cp.ids_properties.comment = "Synthetic IDS created for the IMAS-Python course" 15 | cp.ids_properties.creation_date = datetime.date.today().isoformat() 16 | 17 | # Set a time array 18 | cp.time = [1.0, 2.5, 4.0] 19 | 20 | # Main coordinate 21 | rho_tor_norm = np.linspace(0, 1, num=64) 22 | 23 | # Generate some 1D profiles 24 | cp.profiles_1d.resize(len(cp.time)) 25 | for index, t in enumerate(cp.time): 26 | t_e = np.exp(-16 * rho_tor_norm**2) + (1 - np.tanh(4 * rho_tor_norm - 3)) * t / 8 27 | t_e *= t * 500 28 | # Store the generated t_e as electron temperature 29 | cp.profiles_1d[index].electrons.temperature = t_e 30 | 31 | # Validate the IDS for consistency 32 | try: 33 | cp.validate() 34 | print("IDS is valid!") 35 | except imas.exception.ValidationError as exc: 36 | print("Oops, the IDS is not valid: ", exc) 37 | 38 | # Fill in the missing rho_tor_norm coordinate 39 | for index in range(3): 40 | cp.profiles_1d[index].grid.rho_tor_norm = rho_tor_norm 41 | # And validate again 42 | cp.validate() 43 | 44 | # Create a new data entry for storing the IDS 45 | pulse, run, database = 1, 1, "imas-course" 46 | entry = imas.DBEntry(imas.ids_defs.ASCII_BACKEND, database, pulse, run) 47 | entry.create() 48 | 49 | entry.put(cp) 50 | -------------------------------------------------------------------------------- /docs/source/courses/basic/imas_snippets/explore_public_ec_launchers.py: -------------------------------------------------------------------------------- 1 | import imas.util 2 | 3 | # Open input data entry 4 | entry = imas.DBEntry( 5 | imas.ids_defs.HDF5_BACKEND, "ITER_MD", 120000, 204, "public", data_version="3" 6 | ) 7 | entry.open() 8 | 9 | # Get the ec_launchers IDS 10 | pf = entry.get("ec_launchers") 11 
| 12 | # Inspect the IDS 13 | imas.util.inspect(pf, hide_empty_nodes=True) 14 | 15 | entry.close() 16 | -------------------------------------------------------------------------------- /docs/source/courses/basic/imas_snippets/explore_public_pf_active.py: -------------------------------------------------------------------------------- 1 | import imas.util 2 | 3 | # Open input data entry 4 | entry = imas.DBEntry( 5 | imas.ids_defs.HDF5_BACKEND, "ITER_MD", 111001, 103, "public", data_version="3" 6 | ) 7 | entry.open() 8 | 9 | # Get the pf_active IDS 10 | pf = entry.get("pf_active") 11 | 12 | # Inspect the IDS 13 | imas.util.inspect(pf, hide_empty_nodes=True) 14 | 15 | entry.close() 16 | -------------------------------------------------------------------------------- /docs/source/courses/basic/imas_snippets/explore_training_data.py: -------------------------------------------------------------------------------- 1 | import imas.util 2 | import imas.training 3 | 4 | # Open input data entry 5 | entry = imas.training.get_training_db_entry() 6 | 7 | # Get the core_profiles IDS 8 | cp = entry.get("core_profiles") 9 | 10 | # Inspect the IDS 11 | imas.util.inspect(cp, hide_empty_nodes=True) 12 | 13 | entry.close() -------------------------------------------------------------------------------- /docs/source/courses/basic/imas_snippets/find_paths.py: -------------------------------------------------------------------------------- 1 | import imas.util 2 | 3 | factory = imas.IDSFactory() 4 | core_profiles = factory.core_profiles() 5 | 6 | print("Paths containing `rho`:") 7 | print(imas.util.find_paths(core_profiles, "rho")) 8 | print() 9 | 10 | print("Paths containing `rho`, not followed by `error`:") 11 | print(imas.util.find_paths(core_profiles, "rho(?!.*error)")) 12 | print() 13 | 14 | print("All paths ending with `time`:") 15 | print(imas.util.find_paths(core_profiles, "time$")) 16 | print() 17 | -------------------------------------------------------------------------------- /docs/source/courses/basic/imas_snippets/iterate_core_profiles.py: -------------------------------------------------------------------------------- 1 | import imas.training 2 | 3 | # Open input data entry 4 | entry = imas.training.get_training_db_entry() 5 | 6 | cp = entry.get("core_profiles") 7 | for el in ["profiles_1d", "global_quantities", "code"]: 8 | print(cp[el]) 9 | 10 | # You can also get sub-elements by separating them with a '/': 11 | print(cp["profiles_1d[0]/electrons/temperature"]) 12 | -------------------------------------------------------------------------------- /docs/source/courses/basic/imas_snippets/plot_core_profiles_ne_timeslice.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import matplotlib 4 | import imas.training 5 | 6 | # To avoid possible display issues when Matplotlib uses a non-GUI backend 7 | if "DISPLAY" not in os.environ: 8 | matplotlib.use("agg") 9 | else: 10 | matplotlib.use("TKagg") 11 | 12 | import matplotlib.pyplot as plt 13 | 14 | # Open input data entry 15 | entry = imas.training.get_training_db_entry() 16 | 17 | # Read n_e profile and the associated normalised toroidal flux coordinate at 18 | t = 443 # seconds 19 | 20 | cp = entry.get_slice("core_profiles", t, imas.ids_defs.CLOSEST_INTERP) 21 | 22 | # profiles_1d should only contain the requested slice 23 | assert len(cp.profiles_1d) == 1 24 | 25 | ne = cp.profiles_1d[0].electrons.density 26 | rho = cp.profiles_1d[0].grid.rho_tor_norm 27 | 28 | # Plot the figure 29 | fig, ax = 
plt.subplots() 30 | ax.plot(rho, ne) 31 | ax.set_ylabel(r"$n_e$") 32 | ax.set_xlabel(r"$\rho_{tor, norm}$") 33 | ax.ticklabel_format(axis="y", scilimits=(-1, 1)) 34 | plt.show() 35 | -------------------------------------------------------------------------------- /docs/source/courses/basic/imas_snippets/plot_core_profiles_te.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import matplotlib 4 | import numpy 5 | 6 | # To avoid possible display issues when Matplotlib uses a non-GUI backend 7 | if "DISPLAY" not in os.environ: 8 | matplotlib.use("agg") 9 | else: 10 | matplotlib.use("TKagg") 11 | 12 | from matplotlib import pyplot as plt 13 | 14 | import imas 15 | from imas.ids_defs import MDSPLUS_BACKEND 16 | 17 | database, pulse, run, user = "ITER", 134173, 106, "public" 18 | data_entry = imas.DBEntry( 19 | MDSPLUS_BACKEND, database, pulse, run, user, data_version="3" 20 | ) 21 | data_entry.open() 22 | # Enable lazy loading with `lazy=True`: 23 | core_profiles = data_entry.get("core_profiles", lazy=True) 24 | 25 | # No data has been read from the lowlevel backend yet 26 | # The time array is loaded only when we access it on the following lines: 27 | time = core_profiles.time 28 | print(f"Time has {len(time)} elements, between {time[0]} and {time[-1]}") 29 | 30 | # Find the electron temperature at rho=0 for all time slices 31 | electron_temperature_0 = numpy.array( 32 | [p1d.electrons.temperature[0] for p1d in core_profiles.profiles_1d] 33 | ) 34 | 35 | # Plot the figure 36 | fig, ax = plt.subplots() 37 | ax.plot(time, electron_temperature_0) 38 | ax.set_ylabel("$T_e$") 39 | ax.set_xlabel("$t$") 40 | plt.show() 41 | -------------------------------------------------------------------------------- /docs/source/courses/basic/imas_snippets/print_idss.py: -------------------------------------------------------------------------------- 1 | import imas 2 | 3 | # IMAS-Python has multiple DD versions inside, which makes this exercise harder.
4 | # We provide possible solutions here 5 | 6 | # Option 1: Print the IDSs in the default-selected DD version 7 | factory = imas.IDSFactory() 8 | print("IDSs available in DD version", factory.version) 9 | print(factory.ids_names()) 10 | 11 | # Alternative: 12 | for ids_name in factory: 13 | print(ids_name, end=", ") 14 | print() 15 | 16 | # Option 2: Print the IDSs in a specific DD version 17 | factory = imas.IDSFactory("3.39.0") 18 | print("IDSs available in DD version", factory.version) 19 | print(list(factory)) 20 | -------------------------------------------------------------------------------- /docs/source/courses/basic/imas_snippets/read_core_profiles_ne_timeslice.py: -------------------------------------------------------------------------------- 1 | import imas.training 2 | 3 | # Open input data entry 4 | entry = imas.training.get_training_db_entry() 5 | 6 | # Read n_e profile and the associated normalised toroidal flux coordinate at 7 | t = 443 # seconds 8 | 9 | cp = entry.get_slice("core_profiles", t, imas.ids_defs.CLOSEST_INTERP) 10 | 11 | # profiles_1d should only contain the requested slice 12 | assert len(cp.profiles_1d) == 1 13 | 14 | ne = cp.profiles_1d[0].electrons.density 15 | rho = cp.profiles_1d[0].grid.rho_tor_norm 16 | print("ne =", ne) 17 | print("rho =", rho) 18 | 19 | # Close the datafile 20 | entry.close() 21 | -------------------------------------------------------------------------------- /docs/source/courses/basic/imas_snippets/read_equilibrium_time_array.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import imas.training 3 | 4 | 5 | # Find nearest value and index in an array 6 | def find_nearest(a, a0): 7 | "Element in nd array `a` closest to the scalar value `a0`" 8 | idx = np.abs(a - a0).argmin() 9 | return a[idx], idx 10 | 11 | 12 | # Open input data entry 13 | entry = imas.training.get_training_db_entry() 14 | 15 | # Read the time array from the equilibrium IDS 16 | eq = entry.get("equilibrium") 17 | time_array = eq.time 18 | 19 | # Find the index of the desired time slice in the time array 20 | t_closest, t_index = find_nearest(time_array, 433) 21 | print("Time index = ", t_index) 22 | print("Time value = ", t_closest) 23 | 24 | # Close input data entry 25 | entry.close() 26 | -------------------------------------------------------------------------------- /docs/source/courses/basic/imas_snippets/read_whole_equilibrium.py: -------------------------------------------------------------------------------- 1 | import imas.training 2 | 3 | # Open input data entry 4 | entry = imas.training.get_training_db_entry() 5 | 6 | # 1. Read and print the time of the equilibrium IDS for the whole scenario 7 | # This explicitly converts the data from the old DD version on disk, to the 8 | # new DD version of the environment that you have loaded! 9 | equilibrium = entry.get("equilibrium") # All time slices 10 | # 2. Print the time array: 11 | print(equilibrium.time) 12 | 13 | # 3. Load the core_profiles IDS 14 | core_profiles = entry.get("core_profiles") 15 | # 4. When you inspect the core_profiles.time array, you'll find that item [1] 16 | # corresponds to t ~ 433s. 17 | # 5. 
Print the electron temperature 18 | print(core_profiles.profiles_1d[1].electrons.temperature) 19 | 20 | # Close input data entry 21 | entry.close() 22 | -------------------------------------------------------------------------------- /docs/source/courses/basic/imas_snippets/transform_grid.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import matplotlib 4 | import numpy as np 5 | from scipy.interpolate import RegularGridInterpolator 6 | 7 | import imas.training 8 | 9 | if "DISPLAY" not in os.environ: 10 | matplotlib.use("agg") 11 | else: 12 | matplotlib.use("TKagg") 13 | 14 | import matplotlib.pyplot as plt 15 | 16 | # Open input data entry 17 | entry = imas.training.get_training_db_entry() 18 | 19 | # Lazy-loaded input equilibrium 20 | eq_in = entry.get("equilibrium", lazy=True) 21 | input_times = eq_in.time 22 | 23 | # Create output data entry 24 | output_entry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "imas-course", 2, 1) 25 | output_entry.create() 26 | 27 | # Loop over each time slice 28 | for time in input_times: 29 | eq = entry.get_slice("equilibrium", time, imas.ids_defs.CLOSEST_INTERP) 30 | 31 | # Update comment 32 | eq.ids_properties.comment = "IMAS-Python training: transform coordinate system" 33 | 34 | p2d = eq.time_slice[0].profiles_2d[0] 35 | # Get `.value` so we can plot the original values after the IDS node is overwritten 36 | r, z = p2d.grid.dim1.value, p2d.grid.dim2.value 37 | r_axis = eq.time_slice[0].global_quantities.magnetic_axis.r 38 | z_axis = eq.time_slice[0].global_quantities.magnetic_axis.z 39 | 40 | # Create new rho/theta coordinates 41 | theta = np.linspace(-np.pi, np.pi, num=64, endpoint=False) 42 | max_rho = min( 43 | r_axis - r[0], 44 | r[-1] - r_axis, 45 | z_axis - z[0], 46 | z[-1] - z_axis, 47 | ) 48 | rho = np.linspace(0, max_rho, num=64) 49 | 50 | # Calculate corresponding R/Z for interpolating the original values 51 | rho_grid, theta_grid = np.meshgrid(rho, theta, indexing="ij", sparse=True) 52 | grid_r = r_axis + rho_grid * np.cos(theta_grid) 53 | grid_z = z_axis + rho_grid * np.sin(theta_grid) 54 | interpolation_points = np.dstack((grid_r.flatten(), grid_z.flatten())) 55 | 56 | # Interpolate data nodes on the new grid 57 | for data_node in ["b_field_r", "b_field_z", "psi"]: 58 | # `.value` so we can plot the original values after the IDS node is overwritten 59 | data = p2d[data_node].value 60 | interp = RegularGridInterpolator((r, z), data) 61 | new_data = interp(interpolation_points).reshape(grid_r.shape) 62 | p2d[data_node] = new_data 63 | 64 | # Update coordinate identifier 65 | p2d.grid_type = "inverse" 66 | 67 | # Update coordinates 68 | p2d.grid.dim1 = rho 69 | p2d.grid.dim2 = theta 70 | p2d.r = grid_r 71 | p2d.z = grid_z 72 | 73 | # Finally, put the slice to disk 74 | output_entry.put_slice(eq) 75 | 76 | # Create a plot to verify the transformation is correct 77 | fig, (ax1, ax2, ax3) = plt.subplots(1, 3) 78 | 79 | vmin, vmax = np.min(data), np.max(data) 80 | contour_levels = np.linspace(vmin, vmax, 32) 81 | 82 | rzmesh = np.meshgrid(r, z, indexing="ij") 83 | mesh = ax1.pcolormesh(*rzmesh, data, vmin=vmin, vmax=vmax) 84 | ax1.contour(*rzmesh, data, contour_levels, colors="black") 85 | 86 | ax2.pcolormesh(grid_r, grid_z, new_data, vmin=vmin, vmax=vmax) 87 | ax2.contour(grid_r, grid_z, new_data, contour_levels, colors="black") 88 | 89 | rho_theta_mesh = np.meshgrid(rho, theta, indexing="ij") 90 | ax3.pcolormesh(*rho_theta_mesh, new_data, vmin=vmin, vmax=vmax) 91 | 
ax3.contour(*rho_theta_mesh, new_data, contour_levels, colors="black") 92 | 93 | ax1.set_xlabel("r [m]") 94 | ax2.set_xlabel("r [m]") 95 | ax1.set_ylabel("z [m]") 96 | ax2.set_xlim(ax1.get_xlim()) 97 | ax2.set_ylim(ax1.get_ylim()) 98 | ax3.set_xlabel(r"$\rho$ [m]") 99 | ax3.set_ylabel(r"$\theta$ [rad]") 100 | 101 | fig.suptitle(r"$\psi$ in ($r,z$) and ($\rho,\theta$) coordinates.") 102 | fig.colorbar(mesh, ax=ax3) 103 | fig.tight_layout() 104 | 105 | plt.show() 106 | -------------------------------------------------------------------------------- /docs/source/courses/basic/interactive_tab_core_profiles_toplevel.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iterorganization/IMAS-Python/09104ac4360cc8cf220db15507b789d25ca977ba/docs/source/courses/basic/interactive_tab_core_profiles_toplevel.png -------------------------------------------------------------------------------- /docs/source/courses/basic/print_tree_ids_properties.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iterorganization/IMAS-Python/09104ac4360cc8cf220db15507b789d25ca977ba/docs/source/courses/basic/print_tree_ids_properties.png -------------------------------------------------------------------------------- /docs/source/courses/basic/setup.rst: -------------------------------------------------------------------------------- 1 | IMAS-Python 101: setup IMAS-Python 2 | ================================== 3 | 4 | This course was written for IMAS-Python version 0.8.0 and requires an IMAS installation to 5 | load IMAS data. IMAS-Python may be installed on your cluster, in which case you can do 6 | 7 | .. code-block:: console 8 | 9 | $ module load IMAS-Python IMAS 10 | $ python -c 'import imas; print(imas.__version__)' 11 | 12 | Have a look at the :ref:`Installing IMAS-Python` page for more details on installing IMAS-Python. 13 | -------------------------------------------------------------------------------- /docs/source/courses/basic/transform.rst: -------------------------------------------------------------------------------- 1 | Transform with IMAS-Python 2 | ========================== 3 | 4 | In this part of the course we'll perform a coordinate transformation. Our input data is 5 | in rectilinear :math:`R, Z` coordinates, which we will transform into poloidal polar 6 | coordinates (:math:`\rho, \theta`) then store in a separate data entry. 7 | 8 | Our strategy for doing this will be: 9 | 10 | #. Check which time slices exist 11 | #. The actual processing is done per time slice to limit memory consumption: 12 | 13 | #. Load the time slice 14 | #. Apply the coordinate transformation 15 | #. Store the time slice 16 | 17 | 18 | Exercise 1: Check which time slices exist 19 | ----------------------------------------- 20 | 21 | .. md-tab-set:: 22 | 23 | .. md-tab-item:: Exercise 24 | 25 | Load the time array from the ``equilibrium`` IDS in the training data entry. 26 | 27 | .. hint:: 28 | You can use :ref:`lazy loading` to avoid loading all data in memory. 29 | 30 | .. md-tab-item:: IMAS-Python 31 | 32 | .. literalinclude:: imas_snippets/transform_grid.py 33 | :start-at: # Open input data entry 34 | :end-before: # Create output data entry 35 | 36 | 37 | Exercise 2: Load a time slice 38 | ----------------------------- 39 | 40 | .. md-tab-set:: 41 | 42 | .. md-tab-item:: Exercise 43 | 44 | Loop over each available time in the IDS and load the time slice inside the 45 | loop. 46 | 47 | .. 
md-tab-item:: IMAS-Python 48 | 49 | .. literalinclude:: imas_snippets/transform_grid.py 50 | :start-at: # Loop over each time slice 51 | :end-before: # Update comment 52 | 53 | 54 | Exercise 3: Apply the transformation 55 | ------------------------------------ 56 | 57 | We will apply the transformation of the data as follows: 58 | 59 | #. Load the :math:`R,Z` grid from the time slice 60 | #. Generate a new :math:`\rho,\theta` grid 61 | #. Calculate the rectilinear coordinates belonging to the :math:`\rho,\theta` grid: 62 | 63 | .. math:: 64 | 65 | R = R_\mathrm{axis} + \rho \cos(\theta) 66 | 67 | Z = Z_\mathrm{axis} + \rho \sin(\theta) 68 | 69 | #. For each data element, interpolate the data on the new grid. We can use 70 | :external:class:`scipy.interpolate.RegularGridInterpolator` for this. 71 | #. Finally, we store the new grid (including its rectilinear coordinates) and the 72 | transformed data in the IDS 73 | 74 | 75 | .. md-tab-set:: 76 | 77 | .. md-tab-item:: IMAS-Python 78 | 79 | .. literalinclude:: imas_snippets/transform_grid.py 80 | :start-at: # Loop over each time slice 81 | :end-before: # Finally, put the slice to disk 82 | 83 | 84 | Exercise 4: Store a time slice 85 | ------------------------------ 86 | 87 | .. md-tab-set:: 88 | 89 | .. md-tab-item:: Exercise 90 | 91 | Store the time slice after the transformation. 92 | 93 | .. md-tab-item:: IMAS-Python 94 | 95 | .. literalinclude:: imas_snippets/transform_grid.py 96 | :start-at: # Create output data entry 97 | :end-at: output_entry.create() 98 | :caption: The data entry is created once, outside the time slice loop 99 | 100 | .. literalinclude:: imas_snippets/transform_grid.py 101 | :start-at: # Finally, put the slice to disk 102 | :end-at: output_entry.put_slice 103 | :caption: Store the time slice inside the loop 104 | 105 | 106 | Exercise 5: Plotting data before and after the transformation 107 | ------------------------------------------------------------- 108 | 109 | .. md-tab-set:: 110 | 111 | .. md-tab-item:: Exercise 112 | 113 | Plot one of the data fields in the :math:`R, Z` plane (original data) and in the 114 | :math:`\rho,\theta` plane (transformed data) to verify that the transformation 115 | is correct. 116 | 117 | .. md-tab-item:: IMAS-Python 118 | 119 | .. literalinclude:: imas_snippets/transform_grid.py 120 | :start-at: # Create a plot 121 | 122 | 123 | Bringing it all together 124 | ------------------------ 125 | 126 | .. md-tab-set:: 127 | 128 | .. md-tab-item:: IMAS-Python 129 | 130 | .. literalinclude:: imas_snippets/transform_grid.py 131 | :caption: Source code for the complete exercise 132 | -------------------------------------------------------------------------------- /docs/source/courses/basic_user_training.rst: -------------------------------------------------------------------------------- 1 | .. _`IMAS-Python 101`: 2 | 3 | IMAS-Python 101 4 | =============== 5 | 6 | In this IMAS-Python training, we introduce you to the basic concepts and features of 7 | IMAS-Python. You will need some basic familiarity with Python. For a refresher, see 8 | the `Python tutorial <https://docs.python.org/3/tutorial/>`_. We also assume 9 | some basic knowledge of the ITER IMAS infrastructure. 10 | 11 | ..
toctree:: 12 | :caption: Training contents 13 | :maxdepth: 1 14 | 15 | basic/setup 16 | basic/explore 17 | basic/analyze 18 | basic/transform 19 | basic/create 20 | -------------------------------------------------------------------------------- /docs/source/imas_structure.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iterorganization/IMAS-Python/09104ac4360cc8cf220db15507b789d25ca977ba/docs/source/imas_structure.png -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. 2 | Master "index". This will be converted to a landing index.html by sphinx. We 3 | define TOC here, but it'll be put in the sidebar by the theme 4 | 5 | ================== 6 | IMAS-Python manual 7 | ================== 8 | 9 | IMAS-Python is a pure-python library to handle arbitrarily nested 10 | data structures. IMAS-Python is designed for, but not necessarily bound to, 11 | interacting with Interface Data Structures (IDSs) as defined by the 12 | Integrated Modelling & Analysis Suite (IMAS) Data Model. 13 | 14 | .. note:: 15 | 16 | IMAS-Python was formerly known as IMASPy, and was renamed with the release 17 | of the major version 2. 18 | 19 | It provides: 20 | 21 | - An easy-to-install and easy-to-get-started package by 22 | * Not requiring an IMAS installation 23 | * Not strictly requiring matching a Data Dictionary (DD) version 24 | - A pythonic alternative to the IMAS Python High Level Interface (HLI) 25 | - Checking of correctness at assign time, instead of at database write time 26 | - Dynamically created in-memory pre-filled data trees from DD XML specifications 27 | 28 | The README is best read on :src:`#imas`. 29 | 30 | Read what's new in the current version of IMAS-Python in our :ref:`changelog`! 31 | 32 | 33 | Manual 34 | ------ 35 | 36 | .. toctree:: 37 | :caption: Getting Started 38 | :maxdepth: 1 39 | 40 | self 41 | installing 42 | intro 43 | multi-dd 44 | validation 45 | resampling 46 | metadata 47 | lazy_loading 48 | mdsplus 49 | identifiers 50 | configuring 51 | cli 52 | netcdf 53 | changelog 54 | 55 | .. toctree:: 56 | :caption: IMAS-Python training courses 57 | :maxdepth: 1 58 | 59 | courses/basic_user_training 60 | courses/advanced_user_training 61 | 62 | 63 | .. toctree:: 64 | :caption: API docs 65 | :maxdepth: 1 66 | 67 | api 68 | api-hidden 69 | 70 | 71 | .. toctree:: 72 | :caption: IMAS-Python development 73 | :maxdepth: 1 74 | 75 | imas_architecture 76 | code_style 77 | ci_config 78 | benchmarking 79 | release_imas 80 | 81 | 82 | LICENSE 83 | ------- 84 | 85 | .. literalinclude:: ../../LICENSE.txt 86 | :language: text 87 | -------------------------------------------------------------------------------- /docs/source/installing.rst: -------------------------------------------------------------------------------- 1 | .. _`Installing IMAS-Python`: 2 | 3 | Installing IMAS-Python 4 | ====================== 5 | 6 | IMAS-Python is a pure Python package. It can be used on its own, but for full functionality 7 | of the package you need an installation of `the IMAS Core library `_. 8 | See the :ref:`IMAS-Python 5 minute introduction` for a quick overview of its most basic functionalities. 9 | 10 | To get started, you can install it from `pypi.org `_: 11 | 12 | .. code-block:: bash 13 | 14 | pip install imas-python 15 | 16 | You can also install optional dependencies (e.g.
netCDF and xarray): 17 | 18 | .. code-block:: bash 19 | 20 | pip install imas-python[netcdf,xarray] 21 | 22 | 23 | List of optional dependencies 24 | ----------------------------- 25 | 26 | - ``netcdf``: enables storing/loading of IDS to/from netCDF files 27 | - ``xarray``: enables loading IDS (entire IDS or part of it) into an xarray ``dataset`` 28 | - ``h5py``: enables ``analyze-db`` CLI option 29 | - ``docs``: installs required packages to build the Sphinx documentation 30 | - ``test``: installs required packages to run the tests with ``pytest`` and ``asv`` 31 | 32 | .. note:: 33 | 34 | Some tests will be skipped unless you also have ``imas_core`` installed 35 | (it is not yet available on PyPI, so you will need to install it from sources 36 | if you have access to them at https://git.iter.org/projects/IMAS/repos/al-core) 37 | 38 | 39 | Local installation from sources 40 | ------------------------------- 41 | 42 | We recommend using a :external:py:mod:`venv`. Then, clone the IMAS-Python repository 43 | and run ``pip install``: 44 | 45 | .. code-block:: bash 46 | 47 | python3 -m venv ./venv 48 | . venv/bin/activate 49 | 50 | git clone git@github.com:iterorganization/IMAS-Python.git 51 | cd IMAS-Python 52 | pip install --upgrade pip 53 | pip install --upgrade wheel setuptools 54 | pip install . 55 | 56 | 57 | Development installation 58 | ------------------------ 59 | 60 | For development, an installation in editable mode may be more convenient, and you 61 | will need some extra dependencies to run the test suite and build documentation. 62 | 63 | .. code-block:: bash 64 | 65 | pip install -e .[test,docs] 66 | 67 | Test your installation by running: 68 | 69 | .. code-block:: bash 70 | 71 | cd ~ 72 | python -c "import imas; print(imas.__version__)" 73 | 74 | This is how to run the IMAS-Python test suite: 75 | 76 | .. code-block:: bash 77 | 78 | # inside the IMAS-Python git repository 79 | pytest imas --mini 80 | 81 | # run with a specific backend, requires IMAS-Core installed 82 | pytest imas --ascii --mini 83 | 84 | And to build the IMAS-Python documentation, execute: 85 | 86 | .. code-block:: bash 87 | 88 | make -C docs html 89 | 90 | 91 | -------------------------------------------------------------------------------- /docs/source/mdsplus.rst: -------------------------------------------------------------------------------- 1 | .. _`MDSplus in IMAS-Python`: 2 | 3 | MDSplus in IMAS-Python 4 | ====================== 5 | 6 | `MDSplus `_ is a set of software tools for data 7 | acquisition and storage and a methodology for management of complex 8 | scientific data. IMAS-Python uses the IMAS LowLevel interface to interact 9 | with MDSplus data. The model files required to read IMAS IDS-structured 10 | data are generated on demand, whenever a specific DD version is used 11 | by the user. As this generation might take a while, MDSplus models are 12 | cached to disk, generally in ``$HOME/.cache/imas``. As multiple 13 | processes can write to this location, especially during testing, 14 | special care is taken to avoid write collisions. 15 | ``$MDSPLUS_MODEL_TIMEOUT`` can be used to specify the number of seconds 16 | to wait in case the default is not sufficient.
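For example, to allow waiting for up to five minutes, set the variable before starting Python (the value 300 is only an illustration, pick one that suits your system):

.. code-block:: console

    $ export MDSPLUS_MODEL_TIMEOUT=300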
17 | -------------------------------------------------------------------------------- /docs/source/release_imas.rst: -------------------------------------------------------------------------------- 1 | IMAS-Python development and release process 2 | =========================================== 3 | 4 | IMAS-Python development follows a fork-based model described in 5 | `the contributing guidelines 6 | `_. 7 | 8 | 9 | Creating an IMAS-Python release 10 | ------------------------------- 11 | 12 | 1. Create a Pull Request from ``develop`` to ``main``. 13 | 2. Add a change log to the Pull Request, briefly describing new features and bug fixes, 14 | and update the :ref:`changelog` accordingly. 15 | 3. The PR is reviewed and merged by the maintainers, who also create the release tags. 16 | 4. After the release PR is merged, update the Easybuild configurations for SDCC modules 17 | in the `easybuild-easyconfigs repository 18 | `_. 19 | See the next section for more details on how to do this. 20 | 21 | 22 | Updating and testing the IMAS-Python Easybuild configuration 23 | ------------------------------------------------------------ 24 | 25 | The following steps must be performed for each of the supported tool chains 26 | (currently ``intel-2023b``, ``foss-2023b``): 27 | 28 | 1. Create the ``.eb`` file for the new release. 29 | 30 | a. Copy the ``.eb`` file from the previous release. 31 | b. Update the ``version`` to reflect the just-released version tag. 32 | c. If any of the IMAS-Python dependencies in ``pyproject.toml`` were updated or changed 33 | since the previous release, update the easybuild dependencies: 34 | 35 | - ``builddependencies`` contains build-time dependencies which are available 36 | as a module on SDCC. 37 | - ``dependencies`` contains run-time dependencies which are available as a 38 | module on SDCC. 39 | - ``exts_list`` contains python package dependencies (and potentially 40 | dependencies of dependencies) which are not available in any of the Python 41 | modules on SDCC. 42 | 43 | d. Update the checksum of imas: download an archive of the IMAS-Python repository from 44 | GitHub. This is easiest to do by copying the following URL, replacing 45 | ``<version>`` with the version tag, and pasting it in a web browser: 46 | 47 | .. code-block:: text 48 | 49 | https://github.com/iterorganization/IMAS-Python/archive/refs/tags/<version>.tar.gz 50 | 51 | Then, calculate the hash of the downloaded archive with ``sha256sum`` and update 52 | it in the ``.eb`` file. 53 | 54 | 2. Test the easybuild configuration: 55 | 56 | a. Create an easybuild module, replacing ``<eb_file>`` with the filename of the 57 | ``.eb`` file created in step 1. 58 | 59 | .. code-block:: bash 60 | 61 | module purge 62 | module load EasyBuild 63 | eb --rebuild <eb_file> 64 | 65 | If this is unsuccessful, investigate the error and update the ``.eb`` 66 | configuration. A useful environment variable for debugging is ``export 67 | PIP_LOG=pip.log``, which instructs pip to write logs to the specified file 68 | (``pip.log`` in this example). 69 | b. If the module was successfully installed by easybuild, load it: 70 | 71 | .. code-block:: bash 72 | 73 | module purge 74 | module use ~/.local/easybuild/modules/all/ 75 | module load IMAS-Python/<version>-<toolchain> 76 | module load IMAS-AL-Core 77 | 78 | c. Sanity check the module, for example by running the ``pytest`` unit tests.
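A minimal sanity check could look like the sketch below, reusing the test invocation from :ref:`Installing IMAS-Python` (run ``pytest`` from inside a checkout of the IMAS-Python repository):

.. code-block:: console

    $ python -c "import imas; print(imas.__version__)"
    $ pytest imas --mini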
-------------------------------------------------------------------------------- /docs/source/resampling.rst: --------------------------------------------------------------------------------
1 | Resampling
2 | ==========
3 | 
4 | For resampling of data we stick close to the numpy and scipy APIs. The relevant
5 | method signature is reproduced here:
6 | 
7 | .. code-block:: python
8 | 
9 |     class scipy.interpolate.interp1d(x, y, kind='linear', axis=-1, copy=True,
10 |         bounds_error=None, fill_value=nan, assume_sorted=False)
11 | 
12 | This produces an interpolating function, whose call method uses interpolation to
13 | find the values at new points. It can be used like so:
14 | 
15 | .. code-block:: python
16 | 
17 |     import imas
18 |     import scipy.interpolate
19 | 
20 |     # some_1d_var is a placeholder for an actual 1D variable in the IDS
21 |     pulse_schedule = imas.IDSFactory().new("pulse_schedule")
22 |     f = scipy.interpolate.interp1d(pulse_schedule.time, pulse_schedule.some_1d_var)
23 |     # Evaluate the interpolant on a new time base new_time:
24 |     pulse_schedule.some_1d_var = f(new_time)
25 | 
26 | 
27 | A more general approach would work on the basis of scanning the tree for
28 | shared coordinates, and resampling those in the same manner (by creating a
29 | local interpolator and applying it). The :py:meth:`imas.util.visit_children`
30 | method can be used for this. For a proof of concept it is recommended to only
31 | resample in the time direction.
32 | 
33 | For example, the proposed implementation included in 0.4.0 can be used as follows
34 | (in-place interpolation on an IDS leaf node):
35 | 
36 | .. code-block:: python
37 | 
38 |     import imas
39 |     nbi = imas.IDSFactory().new("nbi")
40 |     nbi.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS
41 |     nbi.time = [1, 2, 3]
42 |     nbi.unit.resize(1)
43 |     nbi.unit[0].energy.data = 2 * nbi.time
44 |     old_id = id(nbi.unit[0].energy.data)
45 | 
46 |     imas.util.resample(
47 |         nbi.unit[0].energy.data,
48 |         nbi.time,
49 |         [0.5, 1.5],
50 |         nbi.ids_properties.homogeneous_time,
51 |         inplace=True,
52 |         fill_value="extrapolate",
53 |     )
54 | 
55 |     assert old_id == id(nbi.unit[0].energy.data)
56 |     assert list(nbi.unit[0].energy.data) == [1, 3]
57 | 
58 | 
59 | Or as follows (explicit in-memory copy + interpolation, producing a new data leaf/container):
60 | 
61 | .. code-block:: python
62 | 
63 |     nbi = imas.IDSFactory().new("nbi")
64 |     nbi.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS
65 |     nbi.time = [1, 2, 3]
66 |     nbi.unit.resize(1)
67 |     nbi.unit[0].energy.data = 2 * nbi.time
68 |     old_id = id(nbi.unit[0].energy.data)
69 | 
70 |     new_data = imas.util.resample(
71 |         nbi.unit[0].energy.data,
72 |         nbi.time,
73 |         [0.5, 1.5],
74 |         nbi.ids_properties.homogeneous_time,
75 |         inplace=False,
76 |         fill_value="extrapolate",
77 |     )
78 | 
79 |     assert old_id != id(new_data)
80 |     assert list(new_data) == [1, 3]
81 | 
82 | 
83 | Implementation unit tests can be found in `test_latest_dd_resample.py`.
84 | 
85 | 
86 | Alternative resampling methods
87 | ------------------------------
88 | 
89 | .. code-block:: python
90 | 
91 |     scipy.signal.resample(x, num, t=None, axis=0, window=None, domain='time')
92 | 
93 | `scipy.signal.resample` uses a Fourier method to resample, which assumes the
94 | signal is periodic. It can be very slow if the number of input or output
95 | samples is large and prime. See
96 | https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.resample.html
97 | for more information.
98 | 
99 | .. code-block:: python
100 | 
101 |     scipy.signal.resample_poly(x, up, down, axis=0, window=('kaiser', 5.0),
102 |         padtype='constant', cval=None)
103 | 
104 | `scipy.signal.resample_poly` could also be considered; it uses a low-pass FIR
105 | filter and assumes zero values outside the signal boundary. See
106 | https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.resample_poly.html#scipy.signal.resample_poly
107 | for more information.
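108 | 
109 | As an illustration, a minimal sketch of rational-factor resampling with
110 | ``resample_poly`` (the example signal and up/down factors are arbitrary):
111 | 
112 | .. code-block:: python
113 | 
114 |     import numpy as np
115 |     import scipy.signal
116 | 
117 |     t = np.linspace(0, 1, 100, endpoint=False)
118 |     x = np.sin(2 * np.pi * 5 * t)
119 | 
120 |     # Resample from 100 to 250 samples (a factor of 5/2)
121 |     y = scipy.signal.resample_poly(x, up=5, down=2)
122 |     assert y.shape == (250,)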
123 | 
124 | We do not recommend using simpler resampling methods such as nearest-neighbour,
125 | as these reduce the data quality and do not result in a much simpler or faster
126 | implementation when care is taken.
-------------------------------------------------------------------------------- /imas/__init__.py: --------------------------------------------------------------------------------
1 | # This file is part of IMAS-Python.
2 | # You should have received the IMAS-Python LICENSE file with this project.
3 | 
4 | # isort: skip_file
5 | 
6 | from packaging.version import Version as _V
7 | 
8 | from ._version import version as __version__  # noqa: F401
9 | from ._version import version_tuple  # noqa: F401
10 | 
11 | # Import logging _first_
12 | from . import setup_logging
13 | 
14 | # Import main user API objects in the imas module
15 | from .db_entry import DBEntry
16 | from .ids_factory import IDSFactory
17 | from .ids_convert import convert_ids
18 | from .ids_identifiers import identifiers
19 | 
20 | # Load the IMAS-Python IMAS AL/DD core
21 | from . import (
22 |     db_entry,
23 |     dd_helpers,
24 |     dd_zip,
25 |     util,
26 | )
27 | 
28 | PUBLISHED_DOCUMENTATION_ROOT = "https://imas-python.readthedocs.io/en/latest/"
29 | """URL to the published documentation."""
30 | OLDEST_SUPPORTED_VERSION = _V("3.22.0")
31 | """Oldest Data Dictionary version that is supported by IMAS-Python."""
-------------------------------------------------------------------------------- /imas/__main__.py: --------------------------------------------------------------------------------
1 | # This file is part of IMAS-Python.
2 | # You should have received the IMAS-Python LICENSE file with this project.
3 | """Support module to run imas as a module:
4 | 
5 | .. code-block:: bash
6 |     :caption: Options to run imas CLI interface
7 | 
8 |     # Run as a module (implemented in imas/__main__.py)
9 |     python -m imas
10 | 
11 |     # Run as "program" (see project.scripts in pyproject.toml)
12 |     imas
13 | """
14 | 
15 | from imas.command.cli import cli
16 | 
17 | cli()
-------------------------------------------------------------------------------- /imas/_to_xarray.py: --------------------------------------------------------------------------------
1 | # xarray is an optional dependency, but this module won't be imported when xarray is
2 | # not available
3 | import numpy
4 | import xarray
5 | 
6 | from imas.ids_toplevel import IDSToplevel
7 | from imas.backends.netcdf.ids_tensorizer import IDSTensorizer
8 | from imas.ids_data_type import IDSDataType
9 | 
10 | fillvals = {
11 |     IDSDataType.INT: -(2**31) + 1,
12 |     IDSDataType.STR: "",
13 |     IDSDataType.FLT: numpy.nan,
14 |     IDSDataType.CPX: numpy.nan * (1 + 1j),
15 | }
16 | 
17 | 
18 | def to_xarray(ids: IDSToplevel, *paths: str) -> xarray.Dataset:
19 |     """See :func:`imas.util.to_xarray`"""
20 |     # We really need an IDS toplevel element
21 |     if not isinstance(ids, IDSToplevel):
22 |         raise TypeError(
23 |             f"to_xarray needs a toplevel IDS element as first argument, but got {ids!r}"
24 |         )
25 | 
26 |     # Valid paths can use / or . as separator, but IDSTensorizer expects /. Using
27 |     # metadata.path_string below validates each path and normalises it to /:
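28 |     # (e.g. "profiles_1d.electrons.temperature" -> "profiles_1d/electrons/temperature")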
29 | try: 30 | paths = [ids.metadata[path].path_string for path in paths] 31 | except KeyError as exc: 32 | raise ValueError(str(exc)) from None 33 | 34 | # Converting lazy-loaded IDSs requires users to specify at least one path 35 | if ids._lazy and not paths: 36 | raise RuntimeError( 37 | "This IDS is lazy loaded. Please provide at least one path to convert to" 38 | " xarray." 39 | ) 40 | 41 | # Use netcdf IDS Tensorizer to tensorize the data and determine metadata 42 | tensorizer = IDSTensorizer(ids, paths) 43 | tensorizer.include_coordinate_paths() 44 | tensorizer.collect_filled_data() 45 | tensorizer.determine_data_shapes() 46 | 47 | data_vars = {} 48 | coordinate_names = set() 49 | for path in tensorizer.filled_data: 50 | var_name = path.replace("/", ".") 51 | metadata = ids.metadata[path] 52 | if metadata.data_type in (IDSDataType.STRUCTURE, IDSDataType.STRUCT_ARRAY): 53 | continue # We don't store these in xarray 54 | 55 | dimensions = tensorizer.ncmeta.get_dimensions(path, tensorizer.homogeneous_time) 56 | data = tensorizer.tensorize(path, fillvals[metadata.data_type]) 57 | 58 | attrs = dict(documentation=metadata.documentation) 59 | if metadata.units: 60 | attrs["units"] = metadata.units 61 | coordinates = tensorizer.filter_coordinates(path) 62 | if coordinates: 63 | coordinate_names.update(coordinates.split(" ")) 64 | attrs["coordinates"] = coordinates 65 | 66 | data_vars[var_name] = (dimensions, data, attrs) 67 | 68 | # Remove coordinates from data_vars and put in coordinates mapping: 69 | coordinates = {} 70 | for coordinate_name in coordinate_names: 71 | coordinates[coordinate_name] = data_vars.pop(coordinate_name) 72 | 73 | return xarray.Dataset(data_vars, coordinates) 74 | -------------------------------------------------------------------------------- /imas/assets/IDS_minimal.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 0.0.1 4 | 10 | 16 | 22 | 28 | 29 | 36 | 37 | 38 | -------------------------------------------------------------------------------- /imas/assets/IDS_minimal_2.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 0.0.2 4 | 10 | 16 | 22 | 28 | 29 | 36 | 37 | 38 | -------------------------------------------------------------------------------- /imas/assets/IDS_minimal_struct_array.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 0.0.1 4 | 10 | 16 | 22 | 28 | 29 | 34 | 37 | 40 | 41 | 42 | 43 | 44 | -------------------------------------------------------------------------------- /imas/assets/README.md: -------------------------------------------------------------------------------- 1 | # IDS_minimal.xml 2 | This is a minimal data dictionary which is still valid. 3 | 4 | # IDS_minimal_types.xml 5 | This is a minimal data dictionary with one entry of each primitive data type. 
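6 | 
7 | As an illustration, a minimal sketch of loading one of these assets with
8 | `IDSFactory` (the relative path assumes you run from the repository root):
9 | 
10 | ```python
11 | from imas.ids_factory import IDSFactory
12 | 
13 | # Build a factory from the bundled minimal data dictionary instead of a
14 | # released DD version:
15 | factory = IDSFactory(xml_path="imas/assets/IDS_minimal.xml")
16 | print(factory.ids_names())
17 | ```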
-------------------------------------------------------------------------------- /imas/assets/core_profiles.ids: --------------------------------------------------------------------------------
1 | ITER_134173_106_core_profiles.ids
-------------------------------------------------------------------------------- /imas/assets/equilibrium.ids: --------------------------------------------------------------------------------
1 | ITER_134173_106_equilibrium.ids
-------------------------------------------------------------------------------- /imas/backends/__init__.py: --------------------------------------------------------------------------------
1 | # This file is part of IMAS-Python.
2 | # You should have received the IMAS-Python LICENSE file with this project.
3 | """Logic for interacting with all data backends.
4 | 
5 | Currently supported backends are:
6 | 
7 | - ``imas_core``: IMAS Access Layer Core (lowlevel).
8 | 
9 |   Interfaces with the AL core provided by Python package ``imas_core`` (available
10 |   since AL5.2). For older versions it falls back to the ``imas`` HLI module, which
11 |   contains the interface to ``imas_core``.
12 | """
-------------------------------------------------------------------------------- /imas/backends/imas_core/__init__.py: --------------------------------------------------------------------------------
1 | # This file is part of IMAS-Python.
2 | # You should have received the IMAS-Python LICENSE file with this project.
3 | """Subpackage implementing data access through the IMAS Access Layer Core.
4 | """
-------------------------------------------------------------------------------- /imas/backends/imas_core/uda_support.py: --------------------------------------------------------------------------------
1 | import logging
2 | from pathlib import Path
3 | from typing import Optional, Union
4 | from xml.etree import ElementTree as ET
5 | 
6 | from imas import dd_zip
7 | 
8 | from .mdsplus_model import _get_xdg_cache_dir
9 | 
10 | logger = logging.getLogger(__name__)
11 | 
12 | 
13 | def get_dd_version_from_idsdef_xml(path: Union[str, Path]) -> Optional[str]:
14 |     """Parse the IDSDef.xml up to the point where the Data Dictionary version is set.
15 | 
16 |     Returns:
17 |         The Data Dictionary version for the provided file, or None if the file cannot
18 |         be parsed / contains no Data Dictionary version.
19 |     """
20 |     try:
21 |         for _, elem in ET.iterparse(path):
22 |             if elem.tag == "version":
23 |                 return elem.text
24 |     except OSError:
25 |         pass  # File not found, etc.
26 |     except Exception:
27 |         logger.warning("Could not read DD version from file '%s'.", path, exc_info=True)
28 |     return None
29 | 
30 | 
31 | def extract_idsdef(dd_version: str) -> str:
32 |     """Extract the IDSDef.xml for the given version and return its path.
33 | 
34 |     The IDSDef.xml is extracted to the imas cache folder:
35 | 
36 |     - If the file imas/uda/<version>.xml already exists, we assume it is correct
37 |     """
38 |     cache_dir_path = Path(_get_xdg_cache_dir()) / "imas" / "uda"
39 |     cache_dir_path.mkdir(parents=True, exist_ok=True)  # ensure cache folder exists
40 |     idsdef_path = cache_dir_path / (dd_version + ".xml")
41 | 
42 |     if idsdef_path.exists():
43 |         extract = False
44 |         # Check if the file is fine
45 |         if get_dd_version_from_idsdef_xml(idsdef_path) != dd_version:
46 |             # File appears to be corrupt; overwrite it:
47 |             extract = True
48 |     else:
49 |         extract = True
50 | 
51 |     if extract:
52 |         # Extract XML from the dd_zip and store
53 |         data = dd_zip.get_dd_xml(dd_version)
54 |         idsdef_path.write_bytes(data)
55 | 
56 |     return str(idsdef_path)
-------------------------------------------------------------------------------- /imas/backends/netcdf/__init__.py: --------------------------------------------------------------------------------
1 | # This file is part of IMAS-Python.
2 | # You should have received the IMAS-Python LICENSE file with this project.
3 | """NetCDF IO support for IMAS-Python. Requires [netcdf] extra dependencies.
4 | """
-------------------------------------------------------------------------------- /imas/backends/netcdf/iterators.py: --------------------------------------------------------------------------------
1 | from typing import Iterator, List, Optional, Tuple
2 | 
3 | from imas.ids_base import IDSBase
4 | from imas.ids_data_type import IDSDataType
5 | from imas.ids_metadata import IDSMetadata
6 | from imas.ids_struct_array import IDSStructArray
7 | from imas.ids_structure import IDSStructure
8 | from imas.ids_toplevel import IDSToplevel
9 | 
10 | 
11 | def _split_on_aos(metadata: IDSMetadata):
12 |     """Split the full path into segments at each ancestor array of structures."""
13 |     paths = []
14 |     curpath = metadata.name
15 | 
16 |     item = metadata
17 |     while item._parent.data_type is not None:
18 |         item = item._parent
19 |         if item.data_type is IDSDataType.STRUCT_ARRAY:
20 |             paths.append(curpath)
21 |             curpath = item.name
22 |         else:
23 |             curpath = f"{item.name}/{curpath}"
24 |     paths.append(curpath)
25 |     return paths[::-1]
26 | 
27 | 
28 | IndexedNode = Tuple[Tuple[int, ...], IDSBase]
29 | 
30 | 
31 | def indexed_tree_iter(
32 |     ids: IDSToplevel, metadata: Optional[IDSMetadata] = None
33 | ) -> Iterator[IndexedNode]:
34 |     """Tree iterator that tracks indices of all ancestor arrays of structures.
35 | 
36 |     Args:
37 |         ids: IDS top level element to iterate over
38 |         metadata: Iterate over all nodes inside the IDS at the metadata object.
39 |             If ``None``, all filled items in the IDS are iterated over.
40 | 
41 |     Yields:
42 |         (aos_indices, node) for all filled nodes.
43 | 
44 |     Example:
45 |         >>> ids = imas.IDSFactory().new("core_profiles")
46 |         >>> ids.profiles_1d.resize(2)
47 |         >>> ids.profiles_1d[0].time = 1.0
48 |         >>> ids.profiles_1d[1].t_i_average = [1.0]
49 |         >>> list(indexed_tree_iter(ids))
50 |         [
51 |             ((), ),
52 |             ((0,), ),
53 |             ((1,), )
54 |         ]
55 |         >>> list(indexed_tree_iter(ids, ids.metadata["profiles_1d/time"]))
56 |         [
57 |             ((0,), ),
58 |             ((1,), )
59 |         ]
60 |     """  # noqa: E501
61 |     if metadata is None:
62 |         # Iterate over all filled nodes in the IDS
63 |         yield from _full_tree_iter(ids, ())
64 | 
65 |     else:
66 |         paths = _split_on_aos(metadata)
67 |         if len(paths) == 1:
68 |             yield (), ids[paths[0]]
69 |         else:
70 |             yield from _tree_iter(ids, paths, ())
71 | 
72 | 
73 | def _tree_iter(
74 |     structure: IDSStructure, paths: List[str], curindex: Tuple[int, ...]
75 | ) -> Iterator[IndexedNode]:
76 |     aos_path, *paths = paths
77 |     aos = structure[aos_path]
78 | 
79 |     if len(paths) == 1:
80 |         path = paths[0]
81 |         for i, node in enumerate(aos):
82 |             yield curindex + (i,), node[path]
83 | 
84 |     else:
85 |         for i, node in enumerate(aos):
86 |             yield from _tree_iter(node, paths, curindex + (i,))
87 | 
88 | 
89 | def _full_tree_iter(
90 |     node: IDSStructure, cur_index: Tuple[int, ...]
91 | ) -> Iterator[IndexedNode]: 92 | for child in node.iter_nonempty_(): 93 | yield (cur_index, child) 94 | if isinstance(child, IDSStructArray): 95 | for i in range(len(child)): 96 | yield from _full_tree_iter(child[i], cur_index + (i,)) 97 | elif isinstance(child, IDSStructure): 98 | yield from _full_tree_iter(child, cur_index) 99 | -------------------------------------------------------------------------------- /imas/backends/netcdf/nc_validate.py: -------------------------------------------------------------------------------- 1 | from imas.backends.netcdf.db_entry_nc import NCDBEntryImpl 2 | from imas.backends.netcdf.nc2ids import NC2IDS 3 | from imas.db_entry import DBEntry 4 | from imas.exception import InvalidNetCDFEntry 5 | 6 | 7 | def validate_netcdf_file(filename: str) -> None: 8 | """Validate if the provided netCDF file adheres to the IMAS conventions.""" 9 | if not filename.endswith(".nc"): 10 | raise InvalidNetCDFEntry( 11 | f"Invalid filename `{filename}` provided: " 12 | "an IMAS netCDF file should end with `.nc`" 13 | ) 14 | 15 | with DBEntry(filename, "r") as entry: 16 | entry_impl: NCDBEntryImpl = entry._dbe_impl 17 | dataset = entry_impl._dataset 18 | factory = entry_impl._ds_factory 19 | 20 | ids_names = factory.ids_names() 21 | 22 | # Check that groups in the dataset correspond to an IDS/occurrence and no 23 | # additional variables are smuggled inside: 24 | groups = [dataset] + [dataset[group] for group in dataset.groups] 25 | for group in groups: 26 | group_name = group.path.split("/")[-1] 27 | if group.variables or group.dimensions: 28 | raise InvalidNetCDFEntry( 29 | "NetCDF file should not have variables or dimensions in the " 30 | f"{group_name} group." 31 | ) 32 | if group is dataset: 33 | continue 34 | if group_name not in ids_names: 35 | raise InvalidNetCDFEntry( 36 | f"Invalid group name {group_name}: there is no IDS with this name." 37 | ) 38 | for subgroup in group.groups: 39 | try: 40 | int(subgroup) 41 | except ValueError: 42 | raise InvalidNetCDFEntry( 43 | f"Invalid group name {group_name}/{subgroup}: " 44 | f"{subgroup} is not a valid occurrence number." 45 | ) 46 | 47 | for ids_name in ids_names: 48 | for occurrence in entry.list_all_occurrences(ids_name): 49 | group = dataset[f"{ids_name}/{occurrence}"] 50 | ids = factory.new(ids_name) 51 | try: 52 | NC2IDS(group, ids, ids.metadata, None).validate_variables() 53 | except InvalidNetCDFEntry as exc: 54 | occ = f":{occurrence}" if occurrence else "" 55 | raise InvalidNetCDFEntry(f"Invalid IDS {ids_name}{occ}: {exc}") 56 | -------------------------------------------------------------------------------- /imas/command/helpers.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import sys 3 | 4 | import click 5 | from packaging.version import Version 6 | from rich.logging import RichHandler 7 | 8 | from imas.backends.imas_core.imas_interface import ll_interface 9 | 10 | 11 | def setup_rich_log_handler(quiet: bool): 12 | """Setup rich.logging.RichHandler on the root logger. 13 | 14 | Args: 15 | quiet: When True: set log level of the `imas` logger to WARNING or higher. 
16 | """ 17 | # Disable default imas log handler 18 | imas_logger = logging.getLogger("imas") 19 | for handler in imas_logger.handlers: 20 | imas_logger.removeHandler(handler) 21 | # Disable any root log handlers 22 | root_logger = logging.getLogger() 23 | for handler in root_logger.handlers: 24 | root_logger.removeHandler(handler) 25 | # Install rich handler on the root logger: 26 | root_logger.addHandler(RichHandler()) 27 | if quiet: # Silence IMAS-Python INFO messages 28 | # If loglevel is less than WARNING, set it to WARNING: 29 | imas_logger.setLevel(max(logging.WARNING, imas_logger.getEffectiveLevel())) 30 | 31 | 32 | def min_version_guard(al_version: Version): 33 | """Print an error message if the loaded AL version is too old. 34 | 35 | Args: 36 | al_version: Minimum imas_core version required for this command. 37 | """ 38 | used_version = ll_interface._al_version 39 | if used_version >= al_version: 40 | return 41 | click.echo( 42 | f"This command requires at least version {al_version} of the Access Layer." 43 | ) 44 | click.echo(f"The current loaded version is {used_version}, which is too old.") 45 | sys.exit(1) 46 | -------------------------------------------------------------------------------- /imas/command/timer.py: -------------------------------------------------------------------------------- 1 | # This file is part of IMAS-Python. 2 | # You should have received the IMAS-Python LICENSE file with this project. 3 | """Utility class to time different sections of a CLI app.""" 4 | 5 | import time 6 | from contextlib import contextmanager 7 | 8 | from rich.align import Align 9 | from rich.table import Table 10 | 11 | 12 | class Timer: 13 | """Convenience class to time sections in a CLI app. 14 | 15 | Usage: 16 | 17 | .. code-block:: python 18 | 19 | # Construct a timer with column/row labels "X" and "Y" 20 | timer = Timer("X", "Y") 21 | 22 | # Time code 23 | with timer("x-value1", "y-value1"): 24 | ... # Code to be timed 25 | 26 | # Output table with timing information 27 | rich.print(timer.get_table()) 28 | """ 29 | 30 | def __init__(self, *axes): 31 | self.axes = axes 32 | self.axes_values = tuple({} for _ in axes) 33 | self.data = {} 34 | 35 | @contextmanager 36 | def __call__(self, *items): 37 | assert len(items) == len(self.axes) 38 | tic = time.time() 39 | yield 40 | self.data[items] = time.time() - tic 41 | for i, item in enumerate(items): 42 | # Use dict to keep insertion order 43 | self.axes_values[i][item] = None 44 | 45 | def get_table(self, title="Timings") -> Table: 46 | """Construct a table with timing details. 47 | 48 | Currently only implemented when timing on two axes. 
49 | """ 50 | if len(self.axes) == 2: 51 | table = Table(title=title, show_footer=True) 52 | 53 | # Calculate totals per column 54 | totals = {value: 0 for value in self.axes_values[0]} 55 | for (col, _), value in self.data.items(): 56 | totals[col] += value 57 | 58 | # Create columns 59 | table.add_column(footer="TOTAL:", justify="right") 60 | for value in self.axes_values[0]: 61 | table.add_column( 62 | header=Align(value, "center"), 63 | footer=f"{totals[value]:.3f} s", 64 | justify="right", 65 | ) 66 | 67 | # Fill table 68 | for row in self.axes_values[1]: 69 | row_values = ( 70 | f"{self.data[col, row]:.3f} s" if (col, row) in self.data else "-" 71 | for col in self.axes_values[0] 72 | ) 73 | table.add_row(row, *row_values) 74 | 75 | return table 76 | # non-2D is not implemented 77 | raise NotImplementedError() 78 | -------------------------------------------------------------------------------- /imas/exception.py: -------------------------------------------------------------------------------- 1 | # This file is part of IMAS-Python. 2 | # You should have received the IMAS-Python LICENSE file with this project. 3 | """Exception classes used in IMAS-Python. 4 | """ 5 | 6 | import difflib 7 | import logging 8 | from typing import TYPE_CHECKING, List 9 | 10 | from imas.backends.imas_core import imas_interface as _imas_interface 11 | 12 | if TYPE_CHECKING: 13 | from imas.ids_factory import IDSFactory 14 | 15 | 16 | logger = logging.getLogger(__name__) 17 | 18 | 19 | # Expose ALException, which may be thrown by the lowlevel 20 | if _imas_interface.has_imas: 21 | ALException = _imas_interface.lowlevel.ALException 22 | else: 23 | ALException = None 24 | 25 | 26 | class UnknownDDVersion(ValueError): 27 | """Error raised when an unknown DD version is specified.""" 28 | 29 | def __init__(self, version: str, available: List[str], note: str = "") -> None: 30 | close_matches = difflib.get_close_matches(version, available, n=1) 31 | if close_matches: 32 | suggestions = f"Did you mean {close_matches[0]!r}?" 33 | else: 34 | suggestions = f"Available versions are {', '.join(reversed(available))}" 35 | super().__init__( 36 | f"Data dictionary version {version!r} cannot be found. {suggestions}{note}" 37 | ) 38 | 39 | 40 | class IDSNameError(ValueError): 41 | """Error raised by DBEntry.get(_slice) when providing an invalid IDS name.""" 42 | 43 | def __init__(self, ids_name: str, factory: "IDSFactory") -> None: 44 | suggestions = "" 45 | close_matches = difflib.get_close_matches(ids_name, factory.ids_names(), n=1) 46 | if close_matches: 47 | suggestions = f" Did you mean {close_matches[0]!r}?" 48 | super().__init__(f"IDS {ids_name!r} does not exist.{suggestions}") 49 | 50 | 51 | class DataEntryException(RuntimeError): 52 | """Error raised by DBEntry for unexpected data in the backend.""" 53 | 54 | 55 | class MDSPlusModelError(RuntimeError): 56 | """Error raised when building MDS+ models.""" 57 | 58 | def __init__(self, msg: str) -> None: 59 | super().__init__(f"Error building MDSplus data model: {msg}") 60 | 61 | 62 | class LowlevelError(RuntimeError): 63 | """Error raised when lowlevel returns nonzero status""" 64 | 65 | def __init__(self, function: str, status: int): 66 | super().__init__( 67 | f"An Access Layer lowlevel operation ({function}) was unsuccessful " 68 | f"({status=}). " 69 | "More debug information should be available earlier in the program output." 
70 |         )
71 | 
72 | 
73 | class CoordinateLookupError(Exception):
74 |     """Error raised by IDSCoordinate.__getitem__ when a coordinate cannot be found."""
75 | 
76 | 
77 | class ValidationError(Exception):
78 |     """Error raised by IDSToplevel.validate() to indicate the IDS is not valid."""
79 | 
80 | 
81 | class CoordinateError(ValidationError):
82 |     """Error raised by ids.validate() to indicate a coordinate check has failed."""
83 | 
84 |     def __init__(self, node, dimension, expected_size, coor_path):
85 |         """Create a new CoordinateError
86 | 
87 |         Args:
88 |             node: IDSBase object that this coordinate error is raised for
89 |             dimension: (0-based) dimension with incorrect size
90 |             expected_size: size of the coordinate for the specified dimension
91 |             coor_path: path of the coordinate, may be None when a coordinate is of fixed
92 |                 size (e.g. ``1...3``)
93 |         """
94 |         if coor_path is not None:  # Error message when coordinate size doesn't match
95 |             details = (
96 |                 f"its coordinate in dimension {dimension + 1} (`{coor_path}`) has "
97 |                 f"size {expected_size}."
98 |             )
99 |         else:
100 |             details = f"dimension {dimension + 1} must have size {expected_size}."
101 |         super().__init__(
102 |             f"Element `{node._path}` has incorrect shape {node.shape}: {details}"
103 |         )
104 | 
105 | 
106 | class InvalidNetCDFEntry(Exception):
107 |     """Error raised when loading an IDS from a NetCDF file that fails validation."""
-------------------------------------------------------------------------------- /imas/ids_data_type.py: --------------------------------------------------------------------------------
1 | # This file is part of IMAS-Python.
2 | # You should have received the IMAS-Python LICENSE file with this project.
3 | """Data Dictionary type handling functionality.
4 | """
5 | 
6 | from enum import Enum
7 | from functools import lru_cache
8 | from typing import Optional, Tuple
9 | 
10 | import numpy as np
11 | 
12 | from imas.ids_defs import (
13 |     CHAR_DATA,
14 |     COMPLEX_DATA,
15 |     DOUBLE_DATA,
16 |     EMPTY_COMPLEX,
17 |     EMPTY_FLOAT,
18 |     EMPTY_INT,
19 |     INTEGER_DATA,
20 | )
21 | 
22 | 
23 | class IDSDataType(Enum):
24 |     """Enum representing the possible data types in an IDS"""
25 | 
26 |     STRUCTURE = "structure"
27 |     """IDS structure. Maps to an IDSStructure object."""
28 | 
29 |     STRUCT_ARRAY = "struct_array"
30 |     """IDS array of structures.
Maps to an IDSStructArray object with IDSStructure 31 | children.""" 32 | 33 | STR = "STR" 34 | """String data.""" 35 | 36 | INT = "INT" 37 | """Integer data.""" 38 | 39 | FLT = "FLT" 40 | """Floating point data.""" 41 | 42 | CPX = "CPX" 43 | """Complex data.""" 44 | 45 | def __init__(self, value) -> None: 46 | self.default = { 47 | "STR": "", 48 | "INT": EMPTY_INT, 49 | "FLT": EMPTY_FLOAT, 50 | "CPX": EMPTY_COMPLEX, 51 | }.get(value, None) 52 | """Default value for a field with this type.""" 53 | 54 | self.al_type = { 55 | "STR": CHAR_DATA, 56 | "INT": INTEGER_DATA, 57 | "FLT": DOUBLE_DATA, 58 | "CPX": COMPLEX_DATA, 59 | }.get(value, None) 60 | """Lowlevel identifier for this type.""" 61 | 62 | self.python_type = { 63 | "STR": str, 64 | "INT": int, 65 | "FLT": float, 66 | "CPX": complex, 67 | }.get(value, None) 68 | """Python type for 0D instances of this type.""" 69 | 70 | self.numpy_dtype = { 71 | "INT": np.int32, 72 | "FLT": np.float64, 73 | "CPX": np.complex128, 74 | }.get(value, None) 75 | """Numpy dtype for array instances of this type.""" 76 | 77 | @staticmethod 78 | @lru_cache(maxsize=None) 79 | def parse(data_type: Optional[str]) -> Tuple[Optional["IDSDataType"], int]: 80 | """Parse data type string from the Data Dictionary. 81 | 82 | Args: 83 | data_type: Data type string from the DD. 84 | 85 | Returns: 86 | IDSDataType instance representing the parsed data type and number of 87 | dimensions. 88 | 89 | Examples: 90 | >>> IDSDataType.parse("STR_1D") 91 | (, 1) 92 | >>> IDSDataType.parse("struct_array") 93 | (, 1) 94 | >>> IDSDataType.parse("structure") 95 | (, 0) 96 | >>> IDSDataType.parse("CPX_5D") 97 | (, 5) 98 | """ 99 | if data_type is None: 100 | return None, 0 101 | if data_type == "structure": 102 | ndim = 0 103 | elif data_type == "struct_array": 104 | ndim = 1 105 | else: 106 | dtype, *rest = data_type.upper().split("_") 107 | if rest == ["TYPE"]: # legacy str_type, int_type, flt_type, cpx_type: 108 | ndim = 0 109 | elif rest and "0" <= rest[0][0] <= "9": 110 | # works for both legacy flt_1d_type and regular TYP_ND 111 | ndim = int(rest[0][0]) 112 | else: 113 | raise ValueError(f"Unknown IDS data type: {data_type}") 114 | data_type = dtype 115 | return IDSDataType(data_type), ndim 116 | -------------------------------------------------------------------------------- /imas/ids_factory.py: -------------------------------------------------------------------------------- 1 | # This file is part of IMAS-Python. 2 | # You should have received the IMAS-Python LICENSE file with this project. 3 | """Tools for generating IDSs from a Data Dictionary version. 4 | """ 5 | 6 | import logging 7 | from functools import partial 8 | from typing import Any, Iterable, Iterator, List, Optional 9 | 10 | from imas import dd_zip 11 | from imas.exception import IDSNameError 12 | from imas.ids_toplevel import IDSToplevel 13 | 14 | logger = logging.getLogger(__name__) 15 | 16 | 17 | class IDSFactory: 18 | """Factory class generating IDSToplevel elements for specific DD versions. 19 | 20 | Example: 21 | 22 | >>> factory = IDSFactory() 23 | >>> factory.core_profiles() 24 | 25 | >>> factory.new("core_profiles") 26 | 27 | """ 28 | 29 | def __init__( 30 | self, version: Optional[str] = None, xml_path: Optional[str] = None 31 | ) -> None: 32 | """Create a new IDS Factory 33 | 34 | See :meth:`imas.dd_zip.dd_etree` for further details on the ``version`` and 35 | ``xml_path`` arguments. 36 | 37 | Args: 38 | version: DD version string, e.g. "3.38.1". 
39 | xml_path: XML file containing data dictionary definition. 40 | """ 41 | self._xml_path = xml_path 42 | self._etree = dd_zip.dd_etree(version, xml_path) 43 | self._ids_elements = { 44 | ele.get("name"): ele for ele in self._etree.findall("IDS") 45 | } 46 | 47 | version_element = self._etree.find("version") 48 | if version_element is not None: 49 | self._version = version_element.text 50 | elif version: 51 | self._version = version 52 | else: 53 | logger.warning("Ignoring missing Data Dictionary version in loaded DD.") 54 | self._version = "-1" 55 | if version and version != self._version: 56 | raise RuntimeError( 57 | f"There is a mismatch between the requested DD version {version} and " 58 | f"the actual loaded DD version {self._version}." 59 | ) 60 | 61 | def __copy__(self) -> "IDSFactory": 62 | return self 63 | 64 | def __deepcopy__(self, memo) -> "IDSFactory": 65 | return self 66 | 67 | def __dir__(self) -> Iterable[str]: 68 | return sorted(set(object.__dir__(self)).union(self._ids_elements)) 69 | 70 | def __getattr__(self, name: str) -> Any: 71 | if name in self._ids_elements: 72 | # Note: returning a partial to mimic AL HLI, e.g. factory.core_profiles() 73 | return partial(IDSToplevel, self, self._ids_elements[name]) 74 | raise AttributeError(f"{type(self)!r} object has no attribute {name!r}") 75 | 76 | def __iter__(self) -> Iterator[str]: 77 | """Iterate over the IDS names defined by the loaded Data Dictionary""" 78 | return iter(self._ids_elements) 79 | 80 | def ids_names(self) -> List[str]: 81 | """Get a list of all known IDS names in the loaded Data Dictionary""" 82 | return list(self._ids_elements) 83 | 84 | def new(self, ids_name: str, *, _lazy: bool = False) -> IDSToplevel: 85 | """Create a new IDSToplevel element for the provided IDS name 86 | 87 | Args: 88 | ids_name: Name of the IDS toplevel to create, e.g. "core_profiles". 89 | 90 | Keyword args: 91 | _lazy: Internal usage only! Create an IDS Toplevel suitable for lazy loading 92 | when set to True. 93 | """ 94 | if ids_name not in self._ids_elements: 95 | raise IDSNameError(ids_name, self) 96 | return IDSToplevel(self, self._ids_elements[ids_name], _lazy) 97 | 98 | def exists(self, ids_name: str) -> bool: 99 | """Check if an IDS type with the given name exists.""" 100 | return ids_name in self._ids_elements 101 | 102 | @property 103 | def version(self) -> str: 104 | """Get the DD version used by this IDS factory""" 105 | return self._version 106 | 107 | # dd_version is an alias for version 108 | dd_version = version 109 | -------------------------------------------------------------------------------- /imas/setup_logging.py: -------------------------------------------------------------------------------- 1 | # This file is part of IMAS-Python. 2 | # You should have received the IMAS-Python LICENSE file with this project. 3 | """Create a default log handler when IMAS-Python is imported. 
4 | """ 5 | 6 | import logging 7 | import os 8 | 9 | 10 | class _PrettyFormatter(logging.Formatter): 11 | """Logging Formatter to add colors and count warning / errors""" 12 | 13 | light_grey = "\x1b[90m" 14 | yellow = "\x1b[33m" 15 | red = "\x1b[31m" 16 | bold_red = "\x1b[31;1m" 17 | reset = "\x1b[0m" 18 | 19 | formatstr = ( 20 | "%(asctime)s %(levelname)-8s %(message)s " 21 | f"{light_grey}@%(filename)s:%(lineno)d{reset}" 22 | ) 23 | time_format = "%H:%M:%S" 24 | 25 | FORMATS = { 26 | logging.DEBUG: logging.Formatter(light_grey + formatstr, time_format), 27 | logging.INFO: logging.Formatter(formatstr, time_format), 28 | logging.WARNING: logging.Formatter(yellow + formatstr, time_format), 29 | logging.ERROR: logging.Formatter(red + formatstr, time_format), 30 | logging.CRITICAL: logging.Formatter(bold_red + formatstr, time_format), 31 | } 32 | 33 | def format(self, record): 34 | formatter = self.FORMATS.get(record.levelno) 35 | return formatter.format(record) 36 | 37 | 38 | def test_messages(): 39 | """Print out a message on each logging level""" 40 | logger = logging.getLogger("imas.testlogger") 41 | logger.debug("Debug message") 42 | logger.info("Info message") 43 | logger.warning("Warning message") 44 | logger.error("Error message") 45 | logger.critical("Critical message") 46 | 47 | 48 | def connect_formatter(logger): 49 | """Connect general formatter to given logger""" 50 | ch = logging.StreamHandler() 51 | ch.setLevel(logging.DEBUG) 52 | ch.setFormatter(_PrettyFormatter()) 53 | logger.addHandler(ch) 54 | 55 | 56 | # Log to console by default, and output it all 57 | logger = logging.getLogger("imas") 58 | connect_formatter(logger) 59 | 60 | loglevel = os.getenv("IMAS_LOGLEVEL") or "INFO" 61 | logger.setLevel(loglevel) 62 | 63 | if __name__ == "__main__": 64 | test_messages() 65 | -------------------------------------------------------------------------------- /imas/test/test_all_dd_versions.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from imas import dd_zip, ids_metadata 4 | from imas.ids_factory import IDSFactory 5 | 6 | 7 | @pytest.fixture 8 | def skip_object_caches(monkeypatch: pytest.MonkeyPatch): 9 | monkeypatch.setattr(dd_zip, "_load_etree", dd_zip._load_etree.__wrapped__) 10 | monkeypatch.setattr( 11 | ids_metadata, 12 | "get_toplevel_metadata", 13 | ids_metadata.get_toplevel_metadata.__wrapped__, 14 | ) 15 | 16 | 17 | @pytest.fixture(params=dd_zip.dd_xml_versions()) 18 | def dd_version(request): 19 | return request.param 20 | 21 | 22 | @pytest.mark.slow 23 | def test_create_ids_dd_version(dd_version, skip_object_caches): 24 | # Test creation of all IDSs, test that IDSMetadata is correctly instantiated for 25 | # all known DD verions 26 | factory = IDSFactory(version=dd_version) 27 | for ids_name in factory: 28 | factory.new(ids_name) 29 | -------------------------------------------------------------------------------- /imas/test/test_cli.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest 4 | from click.testing import CliRunner 5 | from packaging.version import Version 6 | 7 | from imas.backends.imas_core.imas_interface import has_imas 8 | from imas.backends.imas_core.imas_interface import ll_interface 9 | from imas.command.cli import print_version 10 | from imas.command.db_analysis import analyze_db, process_db_analysis 11 | from imas.db_entry import DBEntry 12 | from imas.test.test_helpers import fill_with_random_data 13 | 14 
| 15 | @pytest.mark.cli 16 | def test_imas_version(): 17 | runner = CliRunner() 18 | result = runner.invoke(print_version) 19 | assert result.exit_code == 0 20 | 21 | 22 | @pytest.mark.cli 23 | @pytest.mark.skipif( 24 | not has_imas or ll_interface._al_version < Version("5.0"), 25 | reason="Needs AL >= 5 AND Requires IMAS Core.", 26 | ) 27 | def test_db_analysis( 28 | tmp_path, 29 | ): 30 | # This only tests the happy flow, error handling is not tested 31 | db_path = tmp_path / "test_db_analysis" 32 | with DBEntry(f"imas:hdf5?path={db_path}", "w") as entry: 33 | ids = entry.factory.core_profiles() 34 | fill_with_random_data(ids) 35 | entry.put(ids) 36 | 37 | runner = CliRunner() 38 | with runner.isolated_filesystem(temp_dir=tmp_path) as td: 39 | analyze_result = runner.invoke(analyze_db, [str(db_path)]) 40 | assert analyze_result.exit_code == 0, analyze_result.output 41 | 42 | outfile = Path(td) / "imas-db-analysis.json.gz" 43 | assert outfile.exists() 44 | 45 | # Show detailed output for core_profiles, and then an empty input to exit cleanly: 46 | process_result = runner.invoke( 47 | process_db_analysis, [str(outfile)], input="core_profiles\n\n" 48 | ) 49 | assert process_result.exit_code == 0, process_result.output 50 | assert "core_profiles" in process_result.output 51 | -------------------------------------------------------------------------------- /imas/test/test_dbentry.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import imas 4 | import imas.ids_defs 5 | from imas.backends.imas_core.imas_interface import has_imas, ll_interface 6 | from imas.exception import UnknownDDVersion 7 | from imas.test.test_helpers import compare_children, open_dbentry 8 | 9 | 10 | def test_dbentry_contextmanager(requires_imas): 11 | entry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "test", 1, 1) 12 | entry.create() 13 | ids = entry.factory.core_profiles() 14 | ids.ids_properties.homogeneous_time = 0 15 | ids.ids_properties.comment = "test context manager" 16 | entry.put(ids) 17 | 18 | with imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "test", 1, 1) as entry2: 19 | ids2 = entry2.get("core_profiles") 20 | assert ids2.ids_properties.comment == ids.ids_properties.comment 21 | 22 | # Check that entry2 was closed 23 | assert entry2._dbe_impl is None 24 | 25 | 26 | @pytest.mark.skipif( 27 | not has_imas or ll_interface._al_version.major < 5, 28 | reason="URI API not available", 29 | ) 30 | def test_dbentry_contextmanager_uri(tmp_path): 31 | entry = imas.DBEntry(f"imas:ascii?path={tmp_path}/testdb", "w") 32 | ids = entry.factory.core_profiles() 33 | ids.ids_properties.homogeneous_time = 0 34 | ids.ids_properties.comment = "test context manager" 35 | entry.put(ids) 36 | 37 | with imas.DBEntry(f"imas:ascii?path={tmp_path}/testdb", "r") as entry2: 38 | ids2 = entry2.get("core_profiles") 39 | assert ids2.ids_properties.comment == ids.ids_properties.comment 40 | 41 | # Check that entry2 was closed 42 | assert entry2._dbe_impl is None 43 | 44 | 45 | def get_entry_attrs(entry: imas.DBEntry): 46 | return ( 47 | entry.backend_id, 48 | entry.db_name, 49 | entry.pulse, 50 | entry.run, 51 | entry.user_name, 52 | entry.data_version, 53 | ) 54 | 55 | 56 | def test_dbentry_constructor(): 57 | with pytest.raises(TypeError): 58 | imas.DBEntry() # no arguments 59 | with pytest.raises(TypeError): 60 | imas.DBEntry(1) # not enough arguments 61 | with pytest.raises(TypeError): 62 | imas.DBEntry(1, 2, 3) # not enough arguments 63 | with pytest.raises(TypeError): 64 | 
imas.DBEntry(1, 2, 3, 4, 5, 6, 7) # too many arguments 65 | with pytest.raises(TypeError): 66 | imas.DBEntry("test", uri="test") # Double URI argument 67 | with pytest.raises(TypeError): 68 | imas.DBEntry(1, 2, 3, 4, shot=5) # Multiple values for argument pulse 69 | with pytest.raises(ValueError): 70 | imas.DBEntry(1, 2, pulse=3, run=4, shot=5) # Both shot and pulse 71 | 72 | entry = imas.DBEntry(1, 2, 3, 4) 73 | assert get_entry_attrs(entry) == (1, 2, 3, 4, None, None) 74 | entry = imas.DBEntry(backend_id=1, db_name=2, pulse=3, run=4) 75 | assert get_entry_attrs(entry) == (1, 2, 3, 4, None, None) 76 | # Shot behaves as alias of pulse 77 | entry = imas.DBEntry(backend_id=1, db_name=2, shot=3, run=4) 78 | assert get_entry_attrs(entry) == (1, 2, 3, 4, None, None) 79 | entry = imas.DBEntry(1, 2, 3, 4, 5, 6) 80 | assert get_entry_attrs(entry) == (1, 2, 3, 4, 5, 6) 81 | entry = imas.DBEntry(1, 2, 3, 4, data_version=6) 82 | assert get_entry_attrs(entry) == (1, 2, 3, 4, None, 6) 83 | 84 | 85 | def test_ignore_unknown_dd_version(monkeypatch, worker_id, tmp_path, requires_imas): 86 | entry = open_dbentry(imas.ids_defs.MEMORY_BACKEND, "w", worker_id, tmp_path) 87 | ids = entry.factory.core_profiles() 88 | ids.ids_properties.homogeneous_time = 0 89 | ids.ids_properties.comment = "Test unknown DD version" 90 | # Put this IDS with an invalid DD version 91 | with monkeypatch.context() as m: 92 | m.setattr(entry.factory, "_version", "invalid DD version") 93 | assert entry.dd_version == "invalid DD version" 94 | entry.put(ids) 95 | 96 | with pytest.raises(UnknownDDVersion) as exc_info: 97 | entry.get("core_profiles") 98 | assert "ignore_unknown_dd_version" in str(exc_info.value) 99 | ids2 = entry.get("core_profiles", ignore_unknown_dd_version=True) 100 | assert ids2.ids_properties.version_put.data_dictionary == "invalid DD version" 101 | compare_children(ids, ids2) 102 | # Test that autoconvert plays nicely with this option as well 103 | ids3 = entry.get("core_profiles", ignore_unknown_dd_version=True, autoconvert=False) 104 | compare_children(ids, ids3) 105 | -------------------------------------------------------------------------------- /imas/test/test_dd_helpers.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | import shutil 3 | import pytest 4 | import os 5 | import zipfile 6 | 7 | from imas.dd_helpers import prepare_data_dictionaries, _idsdef_zip_relpath, _build_dir 8 | 9 | _idsdef_unzipped_relpath = Path("idsdef_unzipped") 10 | 11 | 12 | @pytest.mark.skip(reason="skipping IDSDef.zip generation") 13 | def test_prepare_data_dictionaries(): 14 | prepare_data_dictionaries() 15 | assert os.path.exists( 16 | _idsdef_zip_relpath 17 | ), f"IDSDef.zip file does not exist at path: {_idsdef_zip_relpath}" 18 | 19 | expected_xml_files = [ 20 | _build_dir / "3.40.0.xml", 21 | _build_dir / "3.41.0.xml", 22 | _build_dir / "3.42.0.xml", 23 | _build_dir / "4.0.0.xml", 24 | ] 25 | 26 | for xml_file in expected_xml_files: 27 | assert os.path.exists(xml_file), f"{xml_file} does not exist" 28 | 29 | with zipfile.ZipFile(_idsdef_zip_relpath, "r") as zip_ref: 30 | zip_ref.extractall(_idsdef_unzipped_relpath) 31 | 32 | expected_ids_directories = [ 33 | _idsdef_unzipped_relpath / "data-dictionary" / "3.40.0.xml", 34 | _idsdef_unzipped_relpath / "data-dictionary" / "3.41.0.xml", 35 | _idsdef_unzipped_relpath / "data-dictionary" / "3.42.0.xml", 36 | _idsdef_unzipped_relpath / "data-dictionary" / "4.0.0.xml", 37 | _idsdef_unzipped_relpath 38 | / "identifiers" 
39 | / "core_sources" 40 | / "core_source_identifier.xml", 41 | _idsdef_unzipped_relpath 42 | / "identifiers" 43 | / "equilibrium" 44 | / "equilibrium_profiles_2d_identifier.xml", 45 | ] 46 | 47 | for file_path in expected_ids_directories: 48 | assert os.path.exists( 49 | file_path 50 | ), f"Expected_ids_directories {file_path} does not exist" 51 | 52 | if _build_dir.exists(): 53 | shutil.rmtree(_idsdef_unzipped_relpath) 54 | -------------------------------------------------------------------------------- /imas/test/test_dd_zip.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from packaging.version import InvalidVersion 3 | 4 | from imas.dd_zip import get_dd_xml, parse_dd_version 5 | from imas.exception import UnknownDDVersion 6 | 7 | 8 | def test_known_version(): 9 | """Test if 3.30.0 is part of the IDSDef.zip 10 | Mostly this tests if IDSDef.zip has been made.""" 11 | 12 | get_dd_xml("3.30.0") 13 | 14 | 15 | def test_known_failing_version(): 16 | """Test if 0.0 is not part of the IDSDef.zip""" 17 | 18 | with pytest.raises(UnknownDDVersion): 19 | get_dd_xml("0.0") 20 | 21 | 22 | def test_parse_dd_version(): 23 | release_version = parse_dd_version("3.39.0") 24 | dev_version = parse_dd_version("3.39.0-30-g7735675") 25 | assert dev_version > release_version 26 | dev_version2 = parse_dd_version("3.39.0-31-g7735675") 27 | assert dev_version2 > dev_version 28 | with pytest.raises(InvalidVersion): 29 | parse_dd_version("garbage") 30 | -------------------------------------------------------------------------------- /imas/test/test_deepcopy.py: -------------------------------------------------------------------------------- 1 | import copy 2 | 3 | import imas 4 | from imas.ids_struct_array import IDSStructArray 5 | from imas.ids_structure import IDSStructure 6 | from imas.test.test_helpers import compare_children, fill_with_random_data 7 | 8 | 9 | def validate_parent(node): 10 | for child in node: 11 | assert child._parent is node 12 | if isinstance(child, (IDSStructure, IDSStructArray)): 13 | validate_parent(child) 14 | 15 | 16 | def test_deepcopy(): 17 | factory = imas.IDSFactory() 18 | cp = factory.core_profiles() 19 | fill_with_random_data(cp) 20 | 21 | cp2 = copy.deepcopy(cp) 22 | compare_children(cp, cp2) 23 | 24 | validate_parent(cp) 25 | validate_parent(cp2) 26 | -------------------------------------------------------------------------------- /imas/test/test_exception.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import imas 4 | from imas.backends.imas_core.imas_interface import ll_interface 5 | 6 | 7 | def test_catch_al_exception(requires_imas): 8 | # Do something which lets the lowlevel Cython interface throw an ALException 9 | # Ensure we can catch it: 10 | with pytest.raises(imas.exception.ALException): 11 | # Try to write an unknown data type (object) 12 | ll_interface.write_data(-1, "X", "", object()) 13 | -------------------------------------------------------------------------------- /imas/test/test_identifiers.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from imas.dd_zip import dd_identifiers 4 | from imas.ids_factory import IDSFactory 5 | from imas.ids_identifiers import IDSIdentifier, identifiers 6 | 7 | 8 | def test_list_identifiers(): 9 | assert identifiers.identifiers == dd_identifiers() 10 | # Check a known identifier, which we'll also use in more tests 11 | assert "core_source_identifier" 
in identifiers.identifiers 12 | 13 | 14 | def test_identifier_enum(): 15 | csid = identifiers.core_source_identifier 16 | # Test item access 17 | assert csid is identifiers["core_source_identifier"] 18 | 19 | # Class and inheritance tests 20 | assert csid.__name__ == "core_source_identifier" 21 | assert csid.__qualname__ == "imas.ids_identifiers.core_source_identifier" 22 | assert issubclass(csid, IDSIdentifier) 23 | assert isinstance(csid.total, csid) 24 | assert isinstance(csid.total, IDSIdentifier) 25 | 26 | # Check access methods 27 | assert csid.total is csid(1) 28 | assert csid.total is csid["total"] 29 | 30 | # Check attributes 31 | assert csid.total.name == "total" 32 | assert csid.total.index == csid.total.value == 1 33 | assert isinstance(csid.total.description, str) 34 | assert csid.total.description != "" 35 | 36 | 37 | def test_identifier_struct_assignment(caplog): 38 | csid = identifiers.core_source_identifier 39 | cs = IDSFactory("3.39.0").core_sources() 40 | cs.source.resize(3) 41 | assert cs.source[0].identifier.metadata.identifier_enum is csid 42 | # Test assignment options: identifier instance, index and name 43 | cs.source[0].identifier = csid.total 44 | cs.source[1].identifier = "total" 45 | cs.source[2].identifier = 1 46 | for source in cs.source: 47 | assert source.identifier.name == "total" 48 | assert source.identifier.index == 1 49 | assert source.identifier.description == csid.total.description 50 | # Test equality of identifier structure and enum: 51 | assert source.identifier == csid.total 52 | assert source.identifier != csid(0) 53 | # Test fuzzy equality 54 | caplog.clear() 55 | # Empty description is okay 56 | source.identifier.description = "" 57 | assert source.identifier == csid.total 58 | assert not caplog.records 59 | # Incorrect description logs a warning 60 | source.identifier.description = "XYZ" 61 | assert source.identifier == csid.total 62 | assert len(caplog.records) == 1 63 | assert caplog.records[0].levelname == "WARNING" 64 | source.identifier.description = "" 65 | # Empty name is okay 66 | source.identifier.name = "" 67 | assert source.identifier == csid.total 68 | # But non-matching name is not okay 69 | source.identifier.name = "XYZ" 70 | assert source.identifier != csid.total 71 | 72 | 73 | def test_identifier_aos_assignment(): 74 | cfid = identifiers.pf_active_coil_function_identifier 75 | pfa = IDSFactory("3.39.0").pf_active() 76 | pfa.coil.resize(1) 77 | pfa.coil[0].function.resize(3) 78 | assert pfa.coil[0].function.metadata.identifier_enum is cfid 79 | # Test assignment options: identifier instance, index and name 80 | pfa.coil[0].function[0] = cfid.flux 81 | pfa.coil[0].function[1] = "flux" 82 | pfa.coil[0].function[2] = 0 83 | for function in pfa.coil[0].function: 84 | assert function.name == "flux" 85 | assert function.index == 0 86 | assert function.description == cfid.flux.description 87 | # Test equality of identifier structure and enum: 88 | assert function == cfid.flux 89 | assert function != cfid.b_field_shaping 90 | assert pfa.coil[0].function[0] == cfid.flux 91 | 92 | 93 | def test_invalid_identifier_assignment(): 94 | cfid = identifiers.pf_active_coil_function_identifier 95 | cs = IDSFactory("3.39.0").core_sources() 96 | cs.source.resize(1) 97 | 98 | with pytest.raises(TypeError): 99 | # Incorrect identifier type 100 | cs.source[0].identifier = cfid.flux 101 | with pytest.raises(ValueError): 102 | cs.source[0].identifier = "identifier names never contain spaces" 103 | with pytest.raises(ValueError): 104 | # negative 
identifiers are reserved for user-defined identifiers
105 |         cs.source[0].identifier = -1
106 | 
-------------------------------------------------------------------------------- /imas/test/test_ids_ascii_data.py: --------------------------------------------------------------------------------
1 | # This file is part of IMAS-Python.
2 | # You should have received the IMAS-Python LICENSE file with this project.
3 | from pathlib import PosixPath
4 | 
5 | import pytest
6 | 
7 | try:
8 |     from importlib.resources import files
9 | except ImportError:  # Python 3.8 support
10 |     from importlib_resources import files
11 | 
12 | import imas.training
13 | 
14 | 
15 | def test_data_exists():
16 |     data_file: PosixPath = files(imas) / "assets/ITER_134173_106_equilibrium.ids"
17 |     assert data_file.exists()
18 | 
19 | 
20 | @pytest.fixture
21 | def test_data(requires_imas):
22 |     db_entry = imas.training.get_training_db_entry()
23 |     yield db_entry
24 |     db_entry.close()
25 | 
26 | 
27 | def test_data_is_sane(test_data):
28 |     assert isinstance(test_data, imas.DBEntry)
29 |     eq = test_data.get("equilibrium")
30 |     assert len(eq.time_slice) == 3
31 |     ts = eq.time_slice[0]
32 |     r = ts.boundary.outline.r
33 |     z = ts.boundary.outline.z
34 | 
35 |     # Test a few relevant numbers to check if data loading went okay
36 |     assert r.value[0] == pytest.approx(7.2896011)
37 |     assert r.value[-1] == pytest.approx(7.29120937)
38 |     assert z.value[0] == pytest.approx(-1.00816660e-01)
39 |     assert z.value[-1] == pytest.approx(-9.60027185e-14)
-------------------------------------------------------------------------------- /imas/test/test_ids_data_type.py: --------------------------------------------------------------------------------
1 | import pytest
2 | 
3 | from imas.ids_data_type import IDSDataType
4 | 
5 | 
6 | def test_legacy_type():
7 |     assert IDSDataType.parse("str_type") == (IDSDataType.STR, 0)
8 |     assert IDSDataType.parse("str_1d_type") == (IDSDataType.STR, 1)
9 |     assert IDSDataType.parse("flt_type") == (IDSDataType.FLT, 0)
10 |     assert IDSDataType.parse("flt_1d_type") == (IDSDataType.FLT, 1)
11 |     assert IDSDataType.parse("int_type") == (IDSDataType.INT, 0)
12 | 
13 | 
14 | @pytest.mark.parametrize("ndim", range(2))  # max string dimension is STR_1D
15 | def test_str_types(ndim):
16 |     assert IDSDataType.parse(f"STR_{ndim}D") == (IDSDataType.STR, ndim)
17 | 
18 | 
19 | @pytest.mark.parametrize("ndim", range(4))  # max integer dimension is INT_3D
20 | def test_int_types(ndim):
21 |     assert IDSDataType.parse(f"INT_{ndim}D") == (IDSDataType.INT, ndim)
22 | 
23 | 
24 | @pytest.mark.parametrize("ndim", range(7))  # max float dimension is FLT_6D
25 | def test_flt_types(ndim):
26 |     assert IDSDataType.parse(f"FLT_{ndim}D") == (IDSDataType.FLT, ndim)
27 | 
28 | 
29 | @pytest.mark.parametrize("ndim", range(7))  # max complex dimension is CPX_6D
30 | def test_cpx_types(ndim):
31 |     assert IDSDataType.parse(f"CPX_{ndim}D") == (IDSDataType.CPX, ndim)
32 | 
33 | 
34 | def test_default_values():
35 |     assert IDSDataType.STR.default == ""
36 |     assert IDSDataType.INT.default == -999_999_999
37 |     assert IDSDataType.FLT.default == -9e40
38 |     assert IDSDataType.CPX.default == -9e40 - 9e40j
39 |     assert IDSDataType.STRUCT_ARRAY.default is None
40 |     assert IDSDataType.STRUCTURE.default is None
-------------------------------------------------------------------------------- /imas/test/test_ids_factory.py: --------------------------------------------------------------------------------
1 | import pytest
2 | 
3 | from imas.dd_zip import latest_dd_version
4 | from 
imas.ids_factory import IDSFactory 5 | 6 | 7 | def test_ids_factory_with_version(): 8 | IDSFactory(version="3.39.0") 9 | 10 | 11 | def test_ids_factory_with_invalid_version(): 12 | # This raises a packaging.version.InvalidVersion exception, but any exception is ok 13 | with pytest.raises(Exception): 14 | IDSFactory(version="invalid") 15 | # This is a valid version string, but we don't have it available 16 | with pytest.raises(ValueError): 17 | IDSFactory(version="0.1.2.3.4") 18 | 19 | 20 | def test_ids_factory_with_xml_path(ids_minimal): 21 | IDSFactory(xml_path=ids_minimal) 22 | 23 | 24 | def test_ids_factory_latest(monkeypatch: pytest.MonkeyPatch): 25 | monkeypatch.delenv("IMAS_VERSION", raising=False) 26 | monkeypatch.delenv("IMAS_PREFIX", raising=False) 27 | factory = IDSFactory() 28 | assert factory._version == latest_dd_version() 29 | 30 | 31 | def test_ids_factory_from_env(monkeypatch: pytest.MonkeyPatch): 32 | version = "3.35.0" 33 | monkeypatch.setenv("IMAS_VERSION", version) 34 | factory = IDSFactory() 35 | assert factory._version == version 36 | -------------------------------------------------------------------------------- /imas/test/test_ids_metadata.py: -------------------------------------------------------------------------------- 1 | from copy import deepcopy 2 | 3 | import pytest 4 | 5 | from imas.ids_factory import IDSFactory 6 | from imas.ids_metadata import IDSType, get_toplevel_metadata 7 | 8 | 9 | def test_metadata_cache(fake_structure_xml): 10 | meta = get_toplevel_metadata(fake_structure_xml) 11 | meta2 = get_toplevel_metadata(fake_structure_xml) 12 | assert meta is meta2 13 | 14 | 15 | def test_metadata_init_structure_xml(fake_structure_xml): 16 | meta = get_toplevel_metadata(fake_structure_xml) 17 | assert fake_structure_xml.attrib["name"] == "gyrokinetics" 18 | assert meta.name == "gyrokinetics" 19 | 20 | 21 | def test_metadata_deepcopy(fake_structure_xml): 22 | meta = get_toplevel_metadata(fake_structure_xml) 23 | meta2 = deepcopy(meta) 24 | 25 | # Test that deepcopy returns the same reference 26 | assert meta is meta2 27 | assert meta == meta2 28 | 29 | 30 | def test_metadata_immutable(fake_structure_xml): 31 | meta = get_toplevel_metadata(fake_structure_xml) 32 | with pytest.raises(RuntimeError): 33 | meta.immutable = True 34 | with pytest.raises(RuntimeError): 35 | del meta.name 36 | 37 | 38 | def test_ids_type(): 39 | assert not IDSType.NONE.is_dynamic 40 | assert not IDSType.CONSTANT.is_dynamic 41 | assert not IDSType.STATIC.is_dynamic 42 | assert IDSType.DYNAMIC.is_dynamic 43 | 44 | 45 | def test_metadata_indexing(): 46 | core_profiles = IDSFactory("3.39.0").core_profiles() 47 | metadata = core_profiles.metadata 48 | assert metadata["ids_properties"] is core_profiles.ids_properties.metadata 49 | assert ( 50 | metadata["ids_properties/version_put"] 51 | is core_profiles.ids_properties.version_put.metadata 52 | ) 53 | assert metadata["time"] is core_profiles.time.metadata 54 | p1d_time_meta = metadata["profiles_1d/time"] 55 | core_profiles.profiles_1d.resize(1) 56 | assert p1d_time_meta is core_profiles.profiles_1d[0].time.metadata 57 | 58 | # Test period (.) 
as separator: 59 | assert ( 60 | metadata["profiles_1d/electrons/temperature"] 61 | is metadata["profiles_1d.electrons.temperature"] 62 | ) 63 | 64 | # Test invalid path 65 | with pytest.raises(KeyError): 66 | metadata["DoesNotExist"] 67 | -------------------------------------------------------------------------------- /imas/test/test_ids_mixin.py: -------------------------------------------------------------------------------- 1 | # This file is part of IMAS-Python. 2 | # You should have received the IMAS-Python LICENSE file with this project. 3 | 4 | 5 | def test_toplevel(fake_filled_toplevel): 6 | top = fake_filled_toplevel 7 | assert top.wavevector._toplevel == top 8 | assert top.wavevector[0].radial_component_norm._toplevel == top 9 | 10 | 11 | def test_path(fake_filled_toplevel): 12 | top = fake_filled_toplevel 13 | assert top.wavevector._path == "wavevector" 14 | assert top.ids_properties.creation_date._path == "ids_properties/creation_date" 15 | assert top.wavevector._path == "wavevector" 16 | assert top.wavevector[0]._path == "wavevector[0]" 17 | assert ( 18 | top.wavevector[0].radial_component_norm._path 19 | == "wavevector[0]/radial_component_norm" 20 | ) 21 | -------------------------------------------------------------------------------- /imas/test/test_ids_struct_array.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import pprint 3 | from copy import deepcopy 4 | 5 | import pytest 6 | 7 | from imas.ids_factory import IDSFactory 8 | from imas.ids_struct_array import IDSStructArray 9 | 10 | 11 | @pytest.fixture 12 | def struct_array(fake_filled_toplevel) -> IDSStructArray: 13 | struct_array = fake_filled_toplevel.wavevector 14 | struct_array.resize(3) 15 | assert len(struct_array.value) == 3 16 | return struct_array 17 | 18 | 19 | @pytest.mark.parametrize("keep", (True, False)) 20 | @pytest.mark.parametrize("target_len", (1, 3, 7)) 21 | def test_resize(keep, target_len, struct_array): 22 | pre_struct_array_len = len(struct_array) 23 | pre_struct_array = deepcopy(struct_array) 24 | n_comp_values = min(target_len, pre_struct_array_len) 25 | pre_values = [struct_array[ii] for ii in range(n_comp_values)] 26 | 27 | # Test if resize works for 3->1, 3->3, and 3->7 28 | struct_array.resize(target_len, keep=keep) 29 | 30 | # Test if internal data is the right length 31 | assert len(struct_array) == target_len 32 | 33 | # Test if internal data is explicitly new (keep = False) or 34 | # explicitly kept (keep = True) 35 | for ii in range(n_comp_values): 36 | if keep: 37 | assert ( 38 | struct_array[ii] is pre_values[ii] 39 | ), f"On element {ii} of {struct_array.value} vs {pre_struct_array.value}" 40 | else: 41 | assert ( 42 | struct_array[ii] is not pre_values[ii] 43 | ), f"On element {ii} of {struct_array.value} vs {pre_struct_array.value}" 44 | 45 | 46 | def test_pretty_print(struct_array): 47 | assert ( 48 | pprint.pformat(struct_array) 49 | == "<IDSStructArray (IDS:gyrokinetics, wavevector with 3 items)>" 50 | ) 51 | 52 | 53 | def test_path_non_indexable_parent(caplog, fake_filled_toplevel): 54 | top = fake_filled_toplevel 55 | top.wavevector.resize(1) 56 | wv = top.wavevector[0] 57 | with caplog.at_level(logging.WARNING): 58 | assert wv._path == "wavevector[0]" 59 | for record in caplog.records: 60 | assert record.levelname != "WARNING" 61 | 62 | # Remove the referenced wavevector element from its parent 63 | top.wavevector.resize(0) 64 | 65 | # Check that a single warning is raised 66 | with caplog.at_level(logging.WARNING): 67 | assert wv._path == "wavevector[?]" 68 | assert
len(caplog.records) == 1 69 | assert caplog.records[0].levelname == "WARNING" 70 | 71 | 72 | def test_struct_array_eq(): 73 | cp1 = IDSFactory("3.39.0").core_profiles() 74 | cp2 = IDSFactory("3.39.0").core_profiles() 75 | 76 | assert cp1.profiles_1d != 1 77 | assert cp1.profiles_1d != "profiles_1d" 78 | 79 | assert cp1.profiles_1d == cp2.profiles_1d 80 | cp1.profiles_1d.resize(1) 81 | assert cp1.profiles_1d != cp2.profiles_1d 82 | cp2.profiles_1d.resize(2) 83 | assert cp1.profiles_1d != cp2.profiles_1d 84 | cp1.profiles_1d.resize(2) 85 | assert cp1.profiles_1d == cp2.profiles_1d 86 | cp1.profiles_1d[0].time = 1 87 | assert cp1.profiles_1d != cp2.profiles_1d 88 | cp2.profiles_1d[0].time = 1 89 | assert cp1.profiles_1d == cp2.profiles_1d 90 | -------------------------------------------------------------------------------- /imas/test/test_ids_structure.py: -------------------------------------------------------------------------------- 1 | # This file is part of IMAS-Python. 2 | # You should have received the IMAS-Python LICENSE file with this project. 3 | import copy 4 | import pprint 5 | 6 | import pytest 7 | 8 | from imas.ids_factory import IDSFactory 9 | from imas.ids_structure import IDSStructure 10 | 11 | 12 | @pytest.fixture 13 | def structure(fake_filled_toplevel) -> IDSStructure: 14 | yield fake_filled_toplevel.ids_properties 15 | 16 | 17 | def test_pretty_print(structure): 18 | assert ( 19 | pprint.pformat(structure) == "<IDSStructure (IDS:gyrokinetics, ids_properties)>" 20 | ) 21 | 22 | 23 | def test_dict_like_access(fake_filled_toplevel): 24 | assert fake_filled_toplevel["ids_properties"] is fake_filled_toplevel.ids_properties 25 | node = fake_filled_toplevel.ids_properties.provenance.node 26 | assert fake_filled_toplevel["ids_properties/provenance/node"] is node 27 | node.resize(1) 28 | assert fake_filled_toplevel["ids_properties/provenance/node[0]"] is node[0] 29 | assert fake_filled_toplevel["ids_properties/provenance/node[0]"] is node[0] 30 | path = node[0].path 31 | assert fake_filled_toplevel["ids_properties/provenance/node[0]/path"] is path 32 | 33 | 34 | def test_dict_like_setitem(): 35 | cp = IDSFactory("3.39.0").core_profiles() 36 | 37 | cp["time"] = [1, 2, 3] 38 | assert cp.time[0] == 1 and cp.time[2] == 3 39 | 40 | cp["ids_properties/homogeneous_time"] = 1 41 | assert cp.ids_properties.homogeneous_time == 1 42 | 43 | provenance_copy = copy.deepcopy(cp.ids_properties.provenance) 44 | provenance_copy.node.resize(1) 45 | provenance_copy.node[0].path = "test" 46 | cp["ids_properties/provenance/node"] = provenance_copy.node 47 | 48 | 49 | def test_structure_eq(): 50 | cp1 = IDSFactory("3.39.0").core_profiles() 51 | cp2 = IDSFactory("3.39.0").core_profiles() 52 | 53 | assert cp1 != 1 54 | assert cp1 != "1" 55 | assert cp1 != cp1.time 56 | 57 | assert cp1 == cp2 58 | assert cp1.ids_properties == cp2.ids_properties 59 | cp1.ids_properties.comment = "x" 60 | assert cp1 != cp2 61 | assert cp1.ids_properties != cp2.ids_properties 62 | cp2.ids_properties.comment = "x" 63 | assert cp1 == cp2 64 | assert cp1.ids_properties == cp2.ids_properties 65 | cp2.ids_properties.homogeneous_time = 1 66 | assert cp1 != cp2 67 | assert cp1.ids_properties != cp2.ids_properties 68 | cp1.ids_properties.homogeneous_time = 1 69 | assert cp1 == cp2 70 | assert cp1.ids_properties == cp2.ids_properties 71 | -------------------------------------------------------------------------------- /imas/test/test_ids_toplevel.py: -------------------------------------------------------------------------------- 1 | """A testcase checking higher-level IDSToplevel
features with a fake 2 | constant-in-time DD 3 | """ 4 | 5 | import pprint 6 | from pathlib import Path 7 | 8 | import pytest 9 | 10 | from imas.ids_factory import IDSFactory 11 | from imas.ids_toplevel import IDSToplevel 12 | from imas.test.test_helpers import compare_children, fill_with_random_data 13 | 14 | 15 | @pytest.fixture 16 | def ids(fake_toplevel_xml: Path): 17 | return IDSFactory(xml_path=fake_toplevel_xml).new("gyrokinetics") 18 | 19 | 20 | def test_toplevel_init(ids): 21 | assert isinstance(ids, IDSToplevel) 22 | 23 | 24 | def test_structure_xml_noncopy(ids): 25 | assert id(list(ids.metadata._structure_xml)[0].attrib) == id( 26 | ids.ids_properties.metadata._structure_xml.attrib 27 | ) 28 | 29 | 30 | def test_metadata_lifecycle_status(ids): 31 | assert ids.metadata.lifecycle_status == "alpha" 32 | assert ids.wavevector.metadata.structure_reference == "gyrokinetics_wavevector" 33 | 34 | 35 | def test_metadata_non_exist(ids): 36 | with pytest.raises(AttributeError): 37 | ids.wavevector.metadata.lifecycle_status 38 | 39 | 40 | def test_metadata_attribute_not_exists(ids): 41 | with pytest.raises(AttributeError): 42 | ids.metadata.blergh 43 | 44 | 45 | def test_pretty_print(ids): 46 | assert pprint.pformat(ids) == "<IDSToplevel (IDS:gyrokinetics)>" 47 | 48 | 49 | def test_serialize_nondefault_dd_version(requires_imas): 50 | ids = IDSFactory("3.31.0").core_profiles() 51 | fill_with_random_data(ids) 52 | data = ids.serialize() 53 | ids2 = IDSFactory("3.31.0").core_profiles() 54 | ids2.deserialize(data) 55 | compare_children(ids, ids2) 56 | -------------------------------------------------------------------------------- /imas/test/test_latest_dd_autofill.py: -------------------------------------------------------------------------------- 1 | """A testcase checking if writing and then reading works for the latest full 2 | data dictionary version. 3 | """ 4 | 5 | import copy 6 | 7 | import pytest 8 | 9 | from imas.ids_defs import ( 10 | ASCII_SERIALIZER_PROTOCOL, 11 | FLEXBUFFERS_SERIALIZER_PROTOCOL, 12 | MEMORY_BACKEND, 13 | ) 14 | from imas.ids_factory import IDSFactory 15 | from imas.test.test_helpers import ( 16 | compare_children, 17 | fill_with_random_data, 18 | open_dbentry, 19 | ) 20 | from imas.util import visit_children 21 | 22 | 23 | def test_latest_dd_autofill_consistency(ids_name): 24 | ids = IDSFactory().new(ids_name) 25 | fill_with_random_data(ids) 26 | 27 | # check that each element in ids has _parent set.
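# (imas.util.visit_children walks the full IDS tree and applies the given
# callback to every node; passing leaf_only=False also visits structures and
# arrays of structures instead of only the filled leaf nodes.)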
28 | visit_children(has_parent, ids, leaf_only=False) 29 | 30 | 31 | def has_parent(child): 32 | """Check that the child has _parent set""" 33 | assert child._parent is not None 34 | 35 | 36 | def test_latest_dd_autofill(ids_name, backend, worker_id, tmp_path): 37 | """Write and then read again all IDSToplevels.""" 38 | dbentry = open_dbentry(backend, "w", worker_id, tmp_path) 39 | ids = IDSFactory().new(ids_name) 40 | fill_with_random_data(ids) 41 | 42 | dbentry.put(ids) 43 | ids_ref = copy.deepcopy(ids) 44 | # the deepcopy comes after the put() since that updates dd version and AL lang 45 | 46 | dbentry2 = open_dbentry(backend, "a", worker_id, tmp_path) 47 | ids = dbentry2.get(ids_name) 48 | compare_children(ids, ids_ref) 49 | 50 | dbentry.close() 51 | if backend != MEMORY_BACKEND: # MEM backend already cleaned up, prevent SEGFAULT 52 | dbentry2.close() 53 | 54 | 55 | @pytest.mark.parametrize( 56 | "serializer", [ASCII_SERIALIZER_PROTOCOL, FLEXBUFFERS_SERIALIZER_PROTOCOL] 57 | ) 58 | def test_latest_dd_autofill_serialize(serializer, ids_name, has_imas): 59 | """Serialize and then deserialize again all IDSToplevels""" 60 | if serializer is None: 61 | pytest.skip("Unsupported serializer") 62 | 63 | factory = IDSFactory() 64 | ids = factory.new(ids_name) 65 | fill_with_random_data(ids) 66 | 67 | if not has_imas: 68 | return # rest of the test requires an IMAS install 69 | data = ids.serialize(serializer) 70 | 71 | ids2 = factory.new(ids_name) 72 | ids2.deserialize(data) 73 | 74 | compare_children(ids, ids2) 75 | -------------------------------------------------------------------------------- /imas/test/test_latest_dd_manual.py: -------------------------------------------------------------------------------- 1 | """A testcase checking if writing and then reading works for the latest full 2 | data dictionary version. 3 | """ 4 | 5 | from imas.ids_defs import IDS_TIME_MODE_HOMOGENEOUS, MEMORY_BACKEND 6 | from imas.ids_factory import IDSFactory 7 | from imas.test.test_helpers import open_dbentry 8 | 9 | 10 | def test_latest_dd_manual(backend, worker_id, tmp_path): 11 | """Write and then read again a single IDSToplevel.""" 12 | dbentry = open_dbentry(backend, "w", worker_id, tmp_path) 13 | ids_name = "pulse_schedule" 14 | ids = IDSFactory().new(ids_name) 15 | ids.ids_properties.homogeneous_time = IDS_TIME_MODE_HOMOGENEOUS 16 | ids.ids_properties.comment = "test" 17 | 18 | assert ids.ids_properties.comment.value == "test" 19 | 20 | dbentry.put(ids) 21 | 22 | dbentry2 = open_dbentry(backend, "a", worker_id, tmp_path) 23 | ids2 = dbentry2.get(ids_name) 24 | assert ids2.ids_properties.comment.value == "test" 25 | 26 | dbentry.close() 27 | if backend != MEMORY_BACKEND: # MEM backend already cleaned up, prevent SEGFAULT 28 | dbentry2.close() 29 | 30 | 31 | def test_dir(): 32 | """Test calling `dir()` on `IDSFactory` to test if we can see IDSes""" 33 | factory = IDSFactory() 34 | f_dir = dir(factory) 35 | # Check if we can see the first and last stable IDS 36 | assert "amns_data" in f_dir, "Could not find amns_data in dir(IDSFactory())" 37 | assert "workflow" in f_dir, "Could not find workflow in dir(IDSFactory())" 38 | assert "__init__" in f_dir, "Could not find base attributes in dir(IDSFactory())" 39 | -------------------------------------------------------------------------------- /imas/test/test_latest_dd_resample.py: -------------------------------------------------------------------------------- 1 | """A testcase checking if resampling works for the latest data dictionary version. 
2 | """ 3 | 4 | import numpy as np 5 | 6 | import imas 7 | from imas.ids_defs import IDS_TIME_MODE_HOMOGENEOUS 8 | from imas.ids_factory import IDSFactory 9 | 10 | 11 | def test_single_resample_inplace(): 12 | nbi = IDSFactory().new("nbi") 13 | nbi.ids_properties.homogeneous_time = IDS_TIME_MODE_HOMOGENEOUS 14 | nbi.time = [1, 2, 3] 15 | nbi.unit.resize(1) 16 | nbi.unit[0].energy.data = 2 * nbi.time 17 | old_id = id(nbi.unit[0].energy.data) 18 | 19 | assert nbi.unit[0].energy.data.coordinates.time_index == 0 20 | 21 | imas.util.resample( 22 | nbi.unit[0].energy.data, 23 | nbi.time, 24 | [0.5, 1.5], 25 | nbi.ids_properties.homogeneous_time, 26 | inplace=True, 27 | fill_value="extrapolate", 28 | ) 29 | 30 | assert old_id == id(nbi.unit[0].energy.data) 31 | assert np.array_equal(nbi.unit[0].energy.data, [1, 3]) 32 | 33 | 34 | def test_single_resample_copy(): 35 | nbi = IDSFactory().new("nbi") 36 | nbi.ids_properties.homogeneous_time = IDS_TIME_MODE_HOMOGENEOUS 37 | nbi.time = [1, 2, 3] 38 | nbi.unit.resize(1) 39 | nbi.unit[0].energy.data = 2 * nbi.time 40 | old_id = id(nbi.unit[0].energy.data) 41 | 42 | assert nbi.unit[0].energy.data.coordinates.time_index == 0 43 | 44 | new_data = imas.util.resample( 45 | nbi.unit[0].energy.data, 46 | nbi.time, 47 | [0.5, 1.5], 48 | nbi.ids_properties.homogeneous_time, 49 | inplace=False, 50 | fill_value="extrapolate", 51 | ) 52 | 53 | assert old_id != id(new_data) 54 | assert np.array_equal(new_data, [1, 3]) 55 | 56 | 57 | def test_full_resample_inplace(): 58 | nbi = IDSFactory().new("nbi") 59 | nbi.ids_properties.homogeneous_time = IDS_TIME_MODE_HOMOGENEOUS 60 | nbi.time = [1, 2, 3] 61 | nbi.unit.resize(1) 62 | nbi.unit[0].energy.data = 2 * nbi.time 63 | old_id = id(nbi.unit[0].energy.data) 64 | 65 | assert nbi.unit[0].energy.data.coordinates.time_index == 0 66 | 67 | _ = imas.util.resample( 68 | nbi, 69 | nbi.time, 70 | [0.5, 1.5], 71 | nbi.ids_properties.homogeneous_time, 72 | inplace=True, 73 | fill_value="extrapolate", 74 | ) 75 | 76 | assert old_id == id(nbi.unit[0].energy.data) 77 | assert np.array_equal(nbi.unit[0].energy.data, [1, 3]) 78 | assert np.array_equal(nbi.time, [0.5, 1.5]) 79 | 80 | 81 | def test_full_resample_copy(): 82 | nbi = IDSFactory().new("nbi") 83 | nbi.ids_properties.homogeneous_time = IDS_TIME_MODE_HOMOGENEOUS 84 | nbi.time = [1, 2, 3] 85 | nbi.unit.resize(1) 86 | nbi.unit[0].energy.data = 2 * nbi.time 87 | old_id = id(nbi.unit[0].energy.data) 88 | 89 | assert nbi.unit[0].energy.data.coordinates.time_index == 0 90 | 91 | new_nbi = imas.util.resample( 92 | nbi, 93 | nbi.time, 94 | [0.5, 1.5], 95 | nbi.ids_properties.homogeneous_time, 96 | inplace=False, 97 | fill_value="extrapolate", 98 | ) 99 | 100 | assert old_id != id(new_nbi.unit[0].energy.data) 101 | assert np.array_equal(new_nbi.unit[0].energy.data, [1, 3]) 102 | assert np.array_equal(new_nbi.time, [0.5, 1.5]) 103 | -------------------------------------------------------------------------------- /imas/test/test_list_occurrences.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from packaging.version import Version 3 | 4 | import imas 5 | from imas.backends.imas_core.imas_interface import ll_interface 6 | from imas.test.test_helpers import open_dbentry 7 | 8 | 9 | @pytest.fixture 10 | def filled_dbentry(backend, worker_id, tmp_path): 11 | if backend == imas.ids_defs.MEMORY_BACKEND: 12 | pytest.skip("list_occurrences is not implemented for the MEMORY backend") 13 | entry = open_dbentry(backend, "w", worker_id, tmp_path) 
14 | 15 | for i in range(3): 16 | cp = entry.factory.core_profiles() 17 | cp.ids_properties.homogeneous_time = 0 18 | cp.ids_properties.comment = f"core_profiles occurrence {i}" 19 | entry.put(cp, i) 20 | 21 | for i in [0, 1, 3, 6]: 22 | mag = entry.factory.core_sources() 23 | mag.ids_properties.homogeneous_time = 0 24 | mag.ids_properties.comment = f"core_sources occurrence {i}" 25 | entry.put(mag, i) 26 | 27 | yield entry 28 | entry.close() 29 | 30 | 31 | def test_list_occurrences_no_path(filled_dbentry): 32 | if ll_interface._al_version >= Version("5.1"): 33 | occurrences = filled_dbentry.list_all_occurrences("core_profiles") 34 | assert occurrences == [0, 1, 2] 35 | 36 | occurrences = filled_dbentry.list_all_occurrences("core_sources") 37 | assert occurrences == [0, 1, 3, 6] 38 | 39 | assert filled_dbentry.list_all_occurrences("magnetics") == [] 40 | 41 | else: # AL 5.0 or lower 42 | with pytest.raises(RuntimeError): 43 | filled_dbentry.list_all_occurrences("core_profiles") 44 | with pytest.raises(RuntimeError): 45 | filled_dbentry.list_all_occurrences("core_sources") 46 | with pytest.raises(RuntimeError): 47 | filled_dbentry.list_all_occurrences("magnetics") 48 | 49 | 50 | def test_list_occurrences_with_path(backend, filled_dbentry): 51 | if backend == imas.ids_defs.ASCII_BACKEND: 52 | pytest.skip("Lazy loading is not supported by the ASCII backend") 53 | 54 | comment = "ids_properties/comment" 55 | if ll_interface._al_version >= Version("5.1"): 56 | res = filled_dbentry.list_all_occurrences("core_profiles", comment) 57 | assert res[0] == [0, 1, 2] 58 | assert res[1] == [ 59 | "core_profiles occurrence 0", 60 | "core_profiles occurrence 1", 61 | "core_profiles occurrence 2", 62 | ] 63 | 64 | res = filled_dbentry.list_all_occurrences("core_sources", comment) 65 | assert res[0] == [0, 1, 3, 6] 66 | assert res[1] == [ 67 | "core_sources occurrence 0", 68 | "core_sources occurrence 1", 69 | "core_sources occurrence 3", 70 | "core_sources occurrence 6", 71 | ] 72 | 73 | res = filled_dbentry.list_all_occurrences("magnetics", comment) 74 | assert res == ([], []) 75 | 76 | else: # AL 5.0 or lower 77 | with pytest.raises(RuntimeError): 78 | filled_dbentry.list_all_occurrences("core_profiles", comment) 79 | with pytest.raises(RuntimeError): 80 | filled_dbentry.list_all_occurrences("core_sources", comment) 81 | with pytest.raises(RuntimeError): 82 | filled_dbentry.list_all_occurrences("magnetics", comment) 83 | -------------------------------------------------------------------------------- /imas/test/test_minimal.py: -------------------------------------------------------------------------------- 1 | # A minimal testcase loading an IDS file and checking that the structure built is ok 2 | 3 | from imas.ids_factory import IDSFactory 4 | 5 | 6 | def test_load_minimal(ids_minimal): 7 | minimal = IDSFactory(xml_path=ids_minimal).new("minimal") 8 | 9 | # Check if the datatypes are loaded correctly 10 | assert minimal.a.data_type == "FLT_0D" 11 | assert minimal.ids_properties.comment.data_type == "STR_0D" 12 | 13 | # Check the documentation 14 | assert minimal.a.metadata.documentation == "A float" 15 | assert minimal.ids_properties.metadata.documentation == "Properties of this IDS" 16 | assert minimal.ids_properties.comment.metadata.documentation == "A string comment" 17 | 18 | # Check the units 19 | assert minimal.a.metadata.units == "unitless" 20 | 21 | # Check the static/dynamic/constant annotation 22 | assert minimal.a.metadata.type.value == "static" 23 | assert 
minimal.ids_properties.comment.metadata.type.value == "constant" 24 | 25 | 26 | def test_load_multiple_minimal(ids_minimal, ids_minimal_types): 27 | minimal = IDSFactory(xml_path=ids_minimal).new("minimal") 28 | 29 | # Check if the datatypes are loaded correctly 30 | assert minimal.a.data_type == "FLT_0D" 31 | assert minimal.ids_properties.comment.data_type == "STR_0D" 32 | 33 | minimal2 = IDSFactory(xml_path=ids_minimal_types).new("minimal") 34 | 35 | # Check if the datatypes are loaded correctly 36 | assert minimal2.flt_0d.data_type == "FLT_0D" 37 | assert minimal2.ids_properties.comment.data_type == "STR_0D" 38 | -------------------------------------------------------------------------------- /imas/test/test_minimal_conversion.py: -------------------------------------------------------------------------------- 1 | from imas.ids_defs import IDS_TIME_MODE_INDEPENDENT, MEMORY_BACKEND 2 | from imas.test.test_helpers import open_dbentry 3 | 4 | 5 | def test_minimal_io_read_flt_int( 6 | backend, ids_minimal, ids_minimal2, worker_id, tmp_path 7 | ): 8 | """Write and then read again a number on our minimal IDS.""" 9 | dbentry = open_dbentry(backend, "w", worker_id, tmp_path, xml_path=ids_minimal) 10 | minimal = dbentry.factory.new("minimal") 11 | minimal.a = 2.4 12 | minimal.ids_properties.homogeneous_time = IDS_TIME_MODE_INDEPENDENT 13 | dbentry.put(minimal) 14 | assert minimal.a.value == 2.4 15 | 16 | # ids_minimal2 changed a float to an int 17 | dbentry2 = open_dbentry(backend, "r", worker_id, tmp_path, xml_path=ids_minimal2) 18 | minimal2 = dbentry2.get("minimal") 19 | assert minimal2.a.value == 2 20 | 21 | dbentry.close() 22 | if backend != MEMORY_BACKEND: # MEM backend already cleaned up, prevent SEGFAULT 23 | dbentry2.close() 24 | 25 | 26 | def test_minimal_io_read_int_flt( 27 | backend, ids_minimal, ids_minimal2, worker_id, tmp_path 28 | ): 29 | """Write and then read again a number on our minimal IDS.""" 30 | dbentry2 = open_dbentry(backend, "w", worker_id, tmp_path, xml_path=ids_minimal2) 31 | minimal2 = dbentry2.factory.new("minimal") 32 | minimal2.a = 2 33 | minimal2.ids_properties.homogeneous_time = IDS_TIME_MODE_INDEPENDENT 34 | dbentry2.put(minimal2) 35 | assert minimal2.a.value == 2 36 | 37 | # ids_minimal2 changed a float to an int 38 | dbentry = open_dbentry(backend, "r", worker_id, tmp_path, xml_path=ids_minimal) 39 | minimal = dbentry.get("minimal") 40 | assert minimal.a.value == 2.0 41 | 42 | dbentry.close() 43 | if backend != MEMORY_BACKEND: # MEM backend already cleaned up, prevent SEGFAULT 44 | dbentry2.close() 45 | -------------------------------------------------------------------------------- /imas/test/test_minimal_io.py: -------------------------------------------------------------------------------- 1 | # A minimal testcase loading an IDS file and checking that the structure built is ok 2 | from imas.ids_defs import IDS_TIME_MODE_INDEPENDENT, MEMORY_BACKEND 3 | from imas.test.test_helpers import open_dbentry 4 | 5 | 6 | def test_minimal_io(backend, ids_minimal, worker_id, tmp_path): 7 | """Write and then read again a number on our minimal IDS.""" 8 | dbentry = open_dbentry(backend, "w", worker_id, tmp_path, xml_path=ids_minimal) 9 | minimal = dbentry.factory.new("minimal") 10 | minimal.a = 2.0 11 | minimal.ids_properties.homogeneous_time = IDS_TIME_MODE_INDEPENDENT 12 | dbentry.put(minimal) 13 | assert minimal.a.value == 2.0 14 | 15 | dbentry2 = open_dbentry(backend, "a", worker_id, tmp_path, xml_path=ids_minimal) 16 | minimal2 = dbentry2.get("minimal") 17 | assert 
minimal2.a.value == 2.0 18 | 19 | dbentry.close() 20 | if backend != MEMORY_BACKEND: # MEM backend already cleaned up, prevent SEGFAULT 21 | dbentry2.close() 22 | -------------------------------------------------------------------------------- /imas/test/test_minimal_struct_array_io.py: -------------------------------------------------------------------------------- 1 | # A minimal testcase loading an IDS file and checking that the structure built is ok 2 | import pytest 3 | 4 | from imas.ids_defs import IDS_TIME_MODE_INDEPENDENT, MDSPLUS_BACKEND, MEMORY_BACKEND 5 | from imas.test.test_helpers import open_dbentry 6 | 7 | 8 | def test_minimal_struct_array_maxoccur( 9 | backend, ids_minimal_struct_array, worker_id, tmp_path 10 | ): 11 | dbentry = open_dbentry( 12 | backend, "w", worker_id, tmp_path, xml_path=ids_minimal_struct_array 13 | ) 14 | minimal_struct_array = dbentry.factory.new("minimal_struct_array") 15 | minimal_struct_array.ids_properties.homogeneous_time = IDS_TIME_MODE_INDEPENDENT 16 | # maxoccur is 2, so this should raise an exception with the MDS+ backend 17 | minimal_struct_array.struct_array.resize(3) 18 | 19 | if backend == MDSPLUS_BACKEND: 20 | with pytest.raises(RuntimeError): 21 | dbentry.put(minimal_struct_array) 22 | else: 23 | dbentry.put(minimal_struct_array) 24 | 25 | dbentry.close() 26 | 27 | 28 | def test_minimal_struct_array_io( 29 | backend, ids_minimal_struct_array, worker_id, tmp_path 30 | ): 31 | """Write and then read again a number on our minimal IDS.""" 32 | dbentry = open_dbentry( 33 | backend, "w", worker_id, tmp_path, xml_path=ids_minimal_struct_array 34 | ) 35 | minimal_struct_array = dbentry.factory.new("minimal_struct_array") 36 | a = minimal_struct_array.struct_array 37 | minimal_struct_array.ids_properties.homogeneous_time = IDS_TIME_MODE_INDEPENDENT 38 | a.append(a._element_structure) 39 | 40 | # TODO: these are nested one too deeply in my opinion. 41 | # (a struct array contains an array of structures directly, 42 | # without the intermediate one?) 
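# (resize() is the usual way to grow a struct array; appending the internal
# _element_structure template, as done here, additionally exercises the
# append() code path.)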
43 | a[0].a.flt_0d = 2.0 44 | a.append(a._element_structure) 45 | a[1].a.flt_0d = 4.0 46 | 47 | dbentry.put(minimal_struct_array) 48 | assert a[0].a.flt_0d.value == 2.0 49 | assert a[1].a.flt_0d.value == 4.0 50 | 51 | dbentry2 = open_dbentry( 52 | backend, "a", worker_id, tmp_path, xml_path=ids_minimal_struct_array 53 | ) 54 | minimal_struct_array2 = dbentry2.get("minimal_struct_array") 55 | assert minimal_struct_array2.struct_array[0].a.flt_0d.value == 2.0 56 | assert minimal_struct_array2.struct_array[1].a.flt_0d.value == 4.0 57 | 58 | dbentry.close() 59 | if backend != MEMORY_BACKEND: # MEM backend already cleaned up, prevent SEGFAULT 60 | dbentry2.close() 61 | -------------------------------------------------------------------------------- /imas/test/test_minimal_types_io.py: -------------------------------------------------------------------------------- 1 | """A minimal testcase loading an IDS file and checking that the structure built is ok""" 2 | 3 | import numpy as np 4 | 5 | from imas.ids_defs import IDS_TIME_MODE_INDEPENDENT, MEMORY_BACKEND 6 | from imas.test.test_helpers import open_dbentry, randdims 7 | 8 | TEST_DATA = { 9 | "str_0d": "test", 10 | "str_1d": ["test0", "test1"], 11 | "str_type": "test_legacy", 12 | "str_1d_type": ["test0_legacy", "test1_legacy"], 13 | "flt_type": 2.0, 14 | "flt_1d_type": np.asarray([3.0, 4.0]), 15 | "int_type": 5, 16 | } 17 | for i in range(0, 7): 18 | # dimensions are random 19 | TEST_DATA["flt_%dd" % i] = np.random.random_sample(size=randdims(i)) 20 | if i < 4: 21 | TEST_DATA["int_%dd" % i] = np.random.randint(0, 1000, size=randdims(i)) 22 | 23 | 24 | def test_minimal_types_io(backend, ids_minimal_types, worker_id, tmp_path): 25 | """Write and then read again a number on our minimal IDS.""" 26 | dbentry = open_dbentry( 27 | backend, "w", worker_id, tmp_path, xml_path=ids_minimal_types 28 | ) 29 | minimal = dbentry.factory.new("minimal") 30 | for k, v in TEST_DATA.items(): 31 | minimal[k] = v 32 | 33 | minimal.ids_properties.homogeneous_time = IDS_TIME_MODE_INDEPENDENT 34 | dbentry.put(minimal) 35 | 36 | dbentry2 = open_dbentry( 37 | backend, "a", worker_id, tmp_path, xml_path=ids_minimal_types 38 | ) 39 | minimal2 = dbentry2.get("minimal") 40 | for k, v in TEST_DATA.items(): 41 | if isinstance(v, np.ndarray): 42 | assert np.array_equal(minimal2[k].value, v) 43 | else: 44 | assert minimal2[k].value == v 45 | 46 | dbentry.close() 47 | if backend != MEMORY_BACKEND: # MEM backend already cleaned up, prevent SEGFAULT 48 | dbentry2.close() 49 | 50 | 51 | def test_large_numbers(backend, ids_minimal_types, worker_id, tmp_path): 52 | """Write and then read again a large number""" 53 | dbentry = open_dbentry( 54 | backend, "w", worker_id, tmp_path, xml_path=ids_minimal_types 55 | ) 56 | minimal = dbentry.factory.new("minimal") 57 | minimal["int_0d"] = 955683416 58 | 59 | minimal.ids_properties.homogeneous_time = IDS_TIME_MODE_INDEPENDENT 60 | dbentry.put(minimal) 61 | 62 | dbentry2 = open_dbentry( 63 | backend, "a", worker_id, tmp_path, xml_path=ids_minimal_types 64 | ) 65 | minimal2 = dbentry2.get("minimal") 66 | assert minimal2["int_0d"] == 955683416 67 | 68 | dbentry.close() 69 | if backend != MEMORY_BACKEND: # MEM backend already cleaned up, prevent SEGFAULT 70 | dbentry2.close() 71 | -------------------------------------------------------------------------------- /imas/test/test_minimal_types_io_automatic.py: -------------------------------------------------------------------------------- 1 | """A minimal testcase loading an IDS file and checking 
that the structure built is ok 2 | """ 3 | 4 | import numpy as np 5 | 6 | from imas.ids_defs import IDS_TIME_MODE_INDEPENDENT, MEMORY_BACKEND 7 | from imas.ids_factory import IDSFactory 8 | from imas.test.test_helpers import fill_with_random_data, open_dbentry 9 | from imas.test.test_minimal_types_io import TEST_DATA 10 | 11 | 12 | def test_minimal_types_str_1d_decode(ids_minimal_types): 13 | minimal = IDSFactory(xml_path=ids_minimal_types).new("minimal") 14 | minimal.str_1d = [b"test", b"test2"] 15 | assert minimal.str_1d.value == ["test", "test2"] 16 | 17 | 18 | def test_minimal_types_str_1d_decode_and_put( 19 | backend, ids_minimal_types, worker_id, tmp_path 20 | ): 21 | """The access layer changed 1d string types to bytes. 22 | This is unexpected, especially since on read it is converted from bytes to string 23 | again (which implies that the proper form in Python is strings)""" 24 | dbentry = open_dbentry( 25 | backend, "w", worker_id, tmp_path, xml_path=ids_minimal_types 26 | ) 27 | minimal = dbentry.factory.new("minimal") 28 | minimal.str_1d = [b"test", b"test2"] 29 | minimal.ids_properties.homogeneous_time = IDS_TIME_MODE_INDEPENDENT 30 | 31 | assert minimal.str_1d.value == ["test", "test2"] 32 | dbentry.put(minimal) 33 | assert minimal.str_1d.value == ["test", "test2"] 34 | dbentry.close() 35 | 36 | 37 | def test_minimal_types_io_automatic(backend, ids_minimal_types, worker_id, tmp_path): 38 | """Write and then read again our minimal IDS.""" 39 | dbentry = open_dbentry( 40 | backend, "w", worker_id, tmp_path, xml_path=ids_minimal_types 41 | ) 42 | minimal = dbentry.factory.new("minimal") 43 | fill_with_random_data(minimal) 44 | 45 | minimal.ids_properties.homogeneous_time = IDS_TIME_MODE_INDEPENDENT 46 | dbentry.put(minimal) 47 | 48 | dbentry2 = open_dbentry( 49 | backend, "a", worker_id, tmp_path, xml_path=ids_minimal_types 50 | ) 51 | minimal2 = dbentry2.get("minimal") 52 | for k, v in TEST_DATA.items(): 53 | if isinstance(v, np.ndarray): 54 | assert np.array_equal(minimal2[k].value, minimal[k].value) 55 | else: 56 | if isinstance(minimal2[k].value, np.ndarray): 57 | assert np.array_equal( 58 | minimal2[k].value, 59 | np.asarray(minimal[k].value, dtype=minimal2[k].value.dtype), 60 | ) 61 | else: 62 | assert minimal2[k].value == minimal[k].value 63 | 64 | dbentry.close() 65 | if backend != MEMORY_BACKEND: # MEM backend already cleaned up, prevent SEGFAULT 66 | dbentry2.close() 67 | -------------------------------------------------------------------------------- /imas/test/test_nc_autofill.py: -------------------------------------------------------------------------------- 1 | from imas.db_entry import DBEntry 2 | from imas.exception import InvalidNetCDFEntry 3 | from imas.test.test_helpers import compare_children, fill_consistent 4 | import re 5 | import pytest 6 | import netCDF4 7 | from packaging import version 8 | 9 | 10 | def test_nc_latest_dd_autofill_put_get_skip_complex(ids_name, tmp_path): 11 | with DBEntry(f"{tmp_path}/test-{ids_name}.nc", "x") as entry: 12 | ids = entry.factory.new(ids_name) 13 | fill_consistent(ids, leave_empty=0.5, skip_complex=True) 14 | 15 | entry.put(ids) 16 | ids2 = entry.get(ids_name) 17 | 18 | compare_children(ids, ids2) 19 | 20 | 21 | @pytest.mark.skipif( 22 | version.parse(netCDF4.__version__) >= version.parse("1.7.0"), 23 | reason="test targets NetCDF4 < 1.7.0, which lacks complex number support", 24 | ) 25 | def test_nc_latest_dd_autofill_put_get_with_complex_older_netCDF4(ids_name, tmp_path): 26 | with
DBEntry(f"{tmp_path}/test-{ids_name}.nc", "x") as entry: 27 | ids = entry.factory.new(ids_name) 28 | fill_consistent(ids, leave_empty=0.5, skip_complex=False) 29 | try: 30 | entry.put(ids) 31 | ids2 = entry.get(ids_name) 32 | compare_children(ids, ids2) 33 | except InvalidNetCDFEntry as e: 34 | # This is expected, as these versions of NetCDF4 do not support 35 | # complex numbers. 36 | if not re.search( 37 | r".*NetCDF 1.7.0 or later is required for complex data types", str(e) 38 | ): 39 | raise InvalidNetCDFEntry(e) from e 40 | 41 | 42 | @pytest.mark.skipif( 43 | version.parse(netCDF4.__version__) < version.parse("1.7.0"), 44 | reason="NetCDF4 versions >= 1.7.0 support complex numbers", 45 | ) 46 | def test_nc_latest_dd_autofill_put_get_with_complex_newer_netCDF4(ids_name, tmp_path): 47 | with DBEntry(f"{tmp_path}/test-{ids_name}.nc", "x") as entry: 48 | ids = entry.factory.new(ids_name) 49 | fill_consistent(ids, leave_empty=0.5, skip_complex=False) 50 | 51 | entry.put(ids) 52 | ids2 = entry.get(ids_name) 53 | 54 | compare_children(ids, ids2) 55 | -------------------------------------------------------------------------------- /imas/test/test_nc_entry.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from imas.db_entry import DBEntry 4 | from imas.exception import DataEntryException 5 | from imas.ids_defs import IDS_TIME_MODE_INDEPENDENT 6 | from imas.ids_factory import IDSFactory 7 | 8 | 9 | def test_readwrite(tmp_path): 10 | fname = tmp_path / "test-rw.nc" 11 | ids = IDSFactory().core_profiles() 12 | ids.ids_properties.homogeneous_time = IDS_TIME_MODE_INDEPENDENT 13 | 14 | with pytest.raises(FileNotFoundError): 15 | DBEntry(fname, "r") # File does not exist 16 | with DBEntry(fname, "x") as entry: 17 | entry.put(ids) 18 | with DBEntry(fname, "w") as entry: 19 | with pytest.raises(DataEntryException): 20 | entry.get("core_profiles") # File overwritten, IDS does not exist 21 | entry.put(ids) 22 | with pytest.raises(OSError): 23 | DBEntry(fname, "x") # file already exists 24 | with DBEntry(fname, "a") as entry: 25 | with pytest.raises(RuntimeError): # FIXME: change error class 26 | entry.put(ids) # Cannot overwrite existing IDS 27 | # But we can write a new occurrence 28 | entry.put(ids, 1) 29 | -------------------------------------------------------------------------------- /imas/test/test_snippets.py: -------------------------------------------------------------------------------- 1 | import os 2 | import runpy 3 | from pathlib import Path 4 | 5 | import pytest 6 | 7 | courses = Path(__file__, "../../../", "docs/source/courses").resolve() 8 | course_snippets = [] 9 | for course in ["basic", "advanced"]: 10 | course_snippets.extend((courses / course).glob("*snippets/*.py")) 11 | 12 | 13 | @pytest.mark.skip(reason="skipping hli test") 14 | @pytest.mark.filterwarnings("ignore:The input coordinates to pcolormesh:UserWarning") 15 | @pytest.mark.parametrize("snippet", course_snippets) 16 | def test_script_execution(snippet, monkeypatch, tmp_path, requires_imas): 17 | monkeypatch.chdir(tmp_path) 18 | # Prevent showing plots in a GUI 19 | monkeypatch.delenv("DISPLAY", raising=False) 20 | if "IMAS_HOME" not in os.environ: 21 | # Only execute those snippets that don't need access to the public IMAS DB 22 | script_text = snippet.read_text() 23 | if '"public"' in script_text: # ugly hack :( 24 | pytest.skip("Snippet requires the public IMAS DB, which is not available") 25 | runpy.run_path(str(snippet)) 26 | 
-------------------------------------------------------------------------------- /imas/test/test_static_ids.py: -------------------------------------------------------------------------------- 1 | # Testing static IDS behavior as defined in IMAS-3330 2 | 3 | import logging 4 | 5 | import pytest 6 | 7 | import imas 8 | from imas.ids_defs import IDS_TIME_MODE_HETEROGENEOUS, IDS_TIME_MODE_INDEPENDENT 9 | from imas.ids_metadata import IDSType 10 | 11 | 12 | def test_ids_valid_type(): 13 | factory = imas.IDSFactory() 14 | ids_types = set() 15 | for ids_name in factory: 16 | ids = factory.new(ids_name) 17 | ids_types.add(ids.metadata.type) 18 | 19 | # For DD versions <4, `type` is never set at IDS top levels. 20 | # For DD versions >=4, `type` may be CONSTANT (i.e. no root time node) or DYNAMIC 21 | assert ids_types in ({IDSType.NONE}, {IDSType.CONSTANT, IDSType.DYNAMIC}) 22 | 23 | 24 | def test_constant_ids(caplog, requires_imas): 25 | ids = imas.IDSFactory().new("amns_data") 26 | if ids.metadata.type is IDSType.NONE: 27 | pytest.skip("IDS definition has no constant IDSs") 28 | 29 | ids.ids_properties.homogeneous_time = IDS_TIME_MODE_HETEROGENEOUS 30 | dbe = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "test", 0, 0) 31 | dbe.create() 32 | 33 | caplog.clear() 34 | with caplog.at_level(logging.WARNING, logger="imas"): 35 | dbe.put(ids) 36 | assert ids.ids_properties.homogeneous_time == IDS_TIME_MODE_INDEPENDENT 37 | assert len(caplog.records) == 1 38 | msg = caplog.records[0].message 39 | assert "ids_properties/homogeneous_time has been set to 2" in msg 40 | -------------------------------------------------------------------------------- /imas/test/test_str_1d.py: -------------------------------------------------------------------------------- 1 | # A minimal testcase loading an IDS file and checking that the structure built is ok 2 | import string 3 | 4 | from imas.ids_defs import IDS_TIME_MODE_INDEPENDENT, MEMORY_BACKEND 5 | from imas.test.test_helpers import open_dbentry 6 | 7 | 8 | def test_str_1d_empty(backend, ids_minimal_types, worker_id, tmp_path): 9 | """Write and then read again a string on our minimal IDS.""" 10 | dbentry = open_dbentry( 11 | backend, "w", worker_id, tmp_path, xml_path=ids_minimal_types 12 | ) 13 | minimal = dbentry.factory.new("minimal") 14 | minimal.str_1d = [] 15 | 16 | minimal.ids_properties.homogeneous_time = IDS_TIME_MODE_INDEPENDENT 17 | dbentry.put(minimal) 18 | 19 | dbentry2 = open_dbentry( 20 | backend, "a", worker_id, tmp_path, xml_path=ids_minimal_types 21 | ) 22 | minimal2 = dbentry2.get("minimal") 23 | assert list(minimal2.str_1d.value) == [] 24 | 25 | dbentry.close() 26 | if backend != MEMORY_BACKEND: # MEM backend already cleaned up, prevent SEGFAULT 27 | dbentry2.close() 28 | 29 | 30 | def test_str_1d_long_single(backend, ids_minimal_types, worker_id, tmp_path): 31 | """Write and then read again a string on our minimal IDS.""" 32 | dbentry = open_dbentry( 33 | backend, "w", worker_id, tmp_path, xml_path=ids_minimal_types 34 | ) 35 | minimal = dbentry.factory.new("minimal") 36 | minimal.str_1d = [string.ascii_uppercase * 100] 37 | 38 | minimal.ids_properties.homogeneous_time = IDS_TIME_MODE_INDEPENDENT 39 | dbentry.put(minimal) 40 | 41 | dbentry2 = open_dbentry( 42 | backend, "a", worker_id, tmp_path, xml_path=ids_minimal_types 43 | ) 44 | minimal2 = dbentry2.get("minimal") 45 | assert minimal2.str_1d.value == [string.ascii_uppercase * 100] 46 | 47 | dbentry.close() 48 | if backend != MEMORY_BACKEND: # MEM backend already cleaned up, prevent SEGFAULT 49 | 
dbentry2.close() 50 | 51 | 52 | def test_str_1d_multiple(backend, ids_minimal_types, worker_id, tmp_path): 53 | """Write and then read again a string on our minimal IDS.""" 54 | dbentry = open_dbentry( 55 | backend, "w", worker_id, tmp_path, xml_path=ids_minimal_types 56 | ) 57 | minimal = dbentry.factory.new("minimal") 58 | minimal.str_1d = [string.ascii_uppercase, string.ascii_lowercase] 59 | 60 | minimal.ids_properties.homogeneous_time = IDS_TIME_MODE_INDEPENDENT 61 | dbentry.put(minimal) 62 | 63 | dbentry2 = open_dbentry( 64 | backend, "a", worker_id, tmp_path, xml_path=ids_minimal_types 65 | ) 66 | minimal2 = dbentry2.get("minimal") 67 | assert minimal2.str_1d.value == [ 68 | string.ascii_uppercase, 69 | string.ascii_lowercase, 70 | ] 71 | 72 | dbentry.close() 73 | if backend != MEMORY_BACKEND: # MEM backend already cleaned up, prevent SEGFAULT 74 | dbentry2.close() 75 | 76 | 77 | def test_str_1d_long_multiple(backend, ids_minimal_types, worker_id, tmp_path): 78 | """Write and then read again a string on our minimal IDS.""" 79 | dbentry = open_dbentry( 80 | backend, "w", worker_id, tmp_path, xml_path=ids_minimal_types 81 | ) 82 | minimal = dbentry.factory.new("minimal") 83 | minimal.str_1d = [string.ascii_uppercase * 100, string.ascii_lowercase * 100] 84 | 85 | minimal.ids_properties.homogeneous_time = IDS_TIME_MODE_INDEPENDENT 86 | dbentry.put(minimal) 87 | 88 | dbentry2 = open_dbentry( 89 | backend, "a", worker_id, tmp_path, xml_path=ids_minimal_types 90 | ) 91 | minimal2 = dbentry2.get("minimal") 92 | assert minimal2.str_1d.value == [ 93 | string.ascii_uppercase * 100, 94 | string.ascii_lowercase * 100, 95 | ] 96 | 97 | dbentry.close() 98 | if backend != MEMORY_BACKEND: # MEM backend already cleaned up, prevent SEGFAULT 99 | dbentry2.close() 100 | -------------------------------------------------------------------------------- /imas/test/test_to_xarray.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | 4 | import imas 5 | import imas.training 6 | from imas.util import to_xarray 7 | 8 | pytest.importorskip("xarray") 9 | 10 | 11 | @pytest.fixture 12 | def entry(requires_imas, monkeypatch): 13 | monkeypatch.setenv("IMAS_VERSION", "3.39.0") # Use fixed DD version 14 | return imas.training.get_training_db_entry() 15 | 16 | 17 | def test_to_xarray_invalid_argtype(): 18 | ids = imas.IDSFactory("3.39.0").core_profiles() 19 | 20 | with pytest.raises(TypeError): 21 | to_xarray("test") 22 | with pytest.raises(TypeError): 23 | to_xarray(ids.time) 24 | with pytest.raises(TypeError): 25 | to_xarray(ids.ids_properties) 26 | 27 | 28 | def test_to_xarray_invalid_paths(): 29 | ids = imas.IDSFactory("3.39.0").core_profiles() 30 | 31 | with pytest.raises(ValueError, match="xyz"): 32 | to_xarray(ids, "xyz") 33 | with pytest.raises(ValueError, match="ids_properties/xyz"): 34 | to_xarray(ids, "ids_properties/xyz") 35 | with pytest.raises(ValueError, match="Xtime"): 36 | to_xarray(ids, "time", "Xtime") 37 | 38 | 39 | def validate_trainingdb_electron_temperature_dataset(ds): 40 | assert ds.sizes == {"time": 3, "profiles_1d.grid.rho_tor_norm:i": 101} 41 | assert ds.data_vars.keys() == {"profiles_1d.electrons.temperature"} 42 | assert ds.coords.keys() == {"time", "profiles_1d.grid.rho_tor_norm"} 43 | 44 | # Check that values are loaded as expected 45 | assert np.allclose(ds["time"], [3.987222, 432.937598, 792.0]) 46 | assert np.allclose( 47 | ds.isel(time=1)["profiles_1d.electrons.temperature"][10:13], 48 | [17728.81703089, 
17440.78020568, 17139.35431082], 49 | ) 50 | 51 | 52 | def test_to_xarray_lazy_loaded(entry): 53 | ids = entry.get("core_profiles", lazy=True) 54 | 55 | with pytest.raises(RuntimeError): 56 | to_xarray(ids) 57 | 58 | ds = to_xarray(ids, "profiles_1d.electrons.temperature") 59 | validate_trainingdb_electron_temperature_dataset(ds) 60 | 61 | 62 | def test_to_xarray_from_trainingdb(entry): 63 | ids = entry.get("core_profiles") 64 | 65 | ds = to_xarray(ids) 66 | validate_trainingdb_electron_temperature_dataset( 67 | ds["profiles_1d.electrons.temperature"].to_dataset() 68 | ) 69 | ds = to_xarray(ids, "profiles_1d.electrons.temperature") 70 | validate_trainingdb_electron_temperature_dataset(ds) 71 | 72 | ds = to_xarray( 73 | ids, "profiles_1d.electrons.temperature", "profiles_1d/electrons/density" 74 | ) 75 | assert ds.data_vars.keys() == { 76 | "profiles_1d.electrons.temperature", 77 | "profiles_1d.electrons.density", 78 | } 79 | 80 | 81 | def test_to_xarray(): 82 | ids = imas.IDSFactory("3.39.0").core_profiles() 83 | 84 | ids.profiles_1d.resize(2) 85 | ids.profiles_1d[0].electrons.temperature = [1.0, 2.0] 86 | ids.profiles_1d[0].grid.rho_tor_norm = [0.0, 1.0] 87 | ids.profiles_1d[0].time = 0.0 88 | 89 | # These should all be identical: 90 | ds1 = to_xarray(ids) 91 | ds2 = to_xarray(ids, "profiles_1d.electrons.temperature") 92 | ds3 = to_xarray(ids, "profiles_1d/electrons/temperature") 93 | assert ds1.equals(ds2) 94 | assert ds2.equals(ds3) 95 | -------------------------------------------------------------------------------- /imas/test/test_uda_support.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from zlib import crc32 3 | 4 | from imas import dd_zip 5 | from imas.backends.imas_core.uda_support import extract_idsdef 6 | 7 | 8 | def test_extract_idsdef(): 9 | fname = extract_idsdef("4.0.0") 10 | expected_crc = dd_zip.get_dd_xml_crc("4.0.0") 11 | actual_crc = crc32(Path(fname).read_bytes()) 12 | assert expected_crc == actual_crc 13 | -------------------------------------------------------------------------------- /imas/training.py: -------------------------------------------------------------------------------- 1 | # This file is part of IMAS-Python. 2 | # You should have received the IMAS-Python LICENSE file with this project. 3 | """Functions that are useful for the IMAS-Python training courses. 
4 | """ 5 | 6 | import importlib 7 | from unittest.mock import patch 8 | 9 | try: 10 | from importlib.resources import files 11 | except ImportError: # Python 3.8 support 12 | from importlib_resources import files 13 | 14 | import imas 15 | from imas.backends.imas_core.imas_interface import ll_interface 16 | 17 | 18 | def _initialize_training_db(DBEntry_cls): 19 | assets_path = files(imas) / "assets/" 20 | pulse, run, user, database = 134173, 106, "public", "ITER" 21 | if ll_interface._al_version.major == 4: 22 | entry = DBEntry_cls(imas.ids_defs.ASCII_BACKEND, database, pulse, run, user) 23 | entry.open(options=f"-prefix {assets_path}/") 24 | else: 25 | entry = DBEntry_cls(f"imas:ascii?path={assets_path}", "r") 26 | 27 | output_entry = DBEntry_cls(imas.ids_defs.MEMORY_BACKEND, database, pulse, run) 28 | output_entry.create() 29 | for ids_name in ["core_profiles", "equilibrium"]: 30 | ids = entry.get(ids_name) 31 | with patch.dict("os.environ", {"IMAS_AL_DISABLE_VALIDATE": "1"}): 32 | output_entry.put(ids) 33 | entry.close() 34 | return output_entry 35 | 36 | 37 | def get_training_db_entry() -> imas.DBEntry: 38 | """Open and return an ``imas.DBEntry`` pointing to the training data.""" 39 | return _initialize_training_db(imas.DBEntry) 40 | 41 | 42 | def get_training_imas_db_entry(): 43 | """Open and return an ``imas.DBEntry`` pointing to the training data.""" 44 | imas = importlib.import_module("imas") 45 | return _initialize_training_db(imas.DBEntry) 46 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = 3 | # line break before binary operator 4 | W503 5 | # space before : 6 | E203 7 | E701 8 | E704 9 | exclude= 10 | .eggs 11 | docs 12 | max-line-length = 88 13 | per-file-ignores= 14 | # Ignore import errors in __init__.py (import not at top of file; imported but 15 | # unused) 16 | imas/__init__.py:E402,F401 17 | # Lots of CLASSPATHS in this test file: adhering to line length would be less 18 | # readable 19 | imas/test/test_dd_helpers.py:E501 20 | -------------------------------------------------------------------------------- /tools/extract_test_data.py: -------------------------------------------------------------------------------- 1 | # This file is part of IMAS-Python. 2 | # You should have received the IMAS-Python LICENSE file with this project. 
3 | import os 4 | 5 | import imas 6 | 7 | # Open input datafile 8 | pulse, run, user, database = 134173, 106, "public", "ITER" 9 | input = imas.DBEntry(imas.imasdef.MDSPLUS_BACKEND, database, pulse, run, user) 10 | input.open() 11 | 12 | # Read Te profile and the associated normalised toroidal flux coordinate 13 | get_these_idss = ["equilibrium", "core_profiles"] 14 | idss = {} 15 | # The reference has 871 timepoints 16 | for time_index in [0, 433, 871]: 17 | for ids_name in get_these_idss: 18 | if ids_name not in idss: 19 | idss[ids_name] = [] 20 | idss[ids_name].append( 21 | input.get_slice( 22 | ids_name, 23 | time_index, 24 | imas.imasdef.PREVIOUS_INTERP, 25 | occurrence=0, 26 | ) 27 | ) 28 | 29 | # Close the datafile 30 | input.close() 31 | 32 | # Dump the data to ASCII 33 | # Create output datafile 34 | temp = imas.DBEntry(imas.imasdef.MEMORY_BACKEND, database, pulse, run, user) 35 | temp.create() 36 | for ids_name, ids_list in idss.items(): 37 | for ids_slice in ids_list: 38 | temp.put_slice(ids_slice) 39 | 40 | uber_idss = {} 41 | for ids_name in idss: 42 | uber_idss[ids_name] = temp.get(ids_name) 43 | temp.close() 44 | 45 | 46 | user = os.getenv("USER") 47 | # Because we use the ASCII backend, this results in a .ids file in the cwd 48 | output = imas.DBEntry(imas.imasdef.ASCII_BACKEND, database, pulse, run, user) 49 | output.create() 50 | 51 | # Save the IDS 52 | for ids_name, ids in uber_idss.items(): 53 | print(f"Putting {ids_name}") 54 | output.put(ids) 55 | 56 | # Close the output datafile 57 | output.close() 58 | -------------------------------------------------------------------------------- /tools/get_tagnames.py: -------------------------------------------------------------------------------- 1 | """ Print out tag information contained in a given DD XML 2 | """ 3 | import sys 4 | import xml.etree.ElementTree as ET 5 | from pathlib import Path 6 | from pprint import pprint 7 | from functools import reduce 8 | 9 | 10 | def parse_element(element: ET.Element): 11 | if element.tag == "field": 12 | dtype = element.attrib.get("data_type", None) 13 | summary.setdefault(dtype, set()).add(frozenset(element.attrib)) 14 | else: 15 | ignored_tags.add(element.tag) 16 | for child in element: 17 | parse_element(child) 18 | 19 | 20 | if __name__ == "__main__": 21 | # Parse user arguments 22 | if len(sys.argv) > 1: 23 | xml_path = Path(sys.argv[1]) 24 | else: 25 | xml_path = Path("IDSDef.xml") 26 | 27 | tree = ET.parse(xml_path) 28 | root = tree.getroot() 29 | 30 | summary = {} 31 | ignored_tags = set() 32 | 33 | # Parse tree 34 | parse_element(root) 35 | 36 | # Always print pretty, overwrites built-ins, I know 37 | print = pprint 38 | 39 | print("Ignored tags:") 40 | print(ignored_tags) 41 | 42 | print("Summary:") 43 | 44 | for dtype in summary: 45 | print(f"Data type: {dtype}") 46 | print(reduce(set.union, summary[dtype], set())) 47 | 48 | print("All:") 49 | print( 50 | reduce( 51 | set.union, (reduce(set.union, value, set()) for value in summary.values()) 52 | ) 53 | ) 54 | --------------------------------------------------------------------------------
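# Example invocation of tools/get_tagnames.py (the path defaults to
# ./IDSDef.xml when no argument is given):
#
#   python tools/get_tagnames.py path/to/IDSDef.xml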