├── disruption_py
├── __init__.py
├── core
│ ├── __init__.py
│ ├── physics_method
│ │ ├── __init__.py
│ │ ├── errors.py
│ │ ├── params.py
│ │ ├── caching.py
│ │ ├── decorator.py
│ │ └── metadata.py
│ ├── utils
│ │ ├── enums.py
│ │ ├── shared_instance.py
│ │ └── misc.py
│ └── retrieval_manager.py
├── inout
│ └── __init__.py
├── machine
│ ├── __init__.py
│ ├── hbtep
│ │ ├── __init__.py
│ │ ├── config.toml
│ │ └── util.py
│ ├── generic
│ │ ├── __init__.py
│ │ └── physics.py
│ ├── d3d
│ │ ├── __init__.py
│ │ ├── util.py
│ │ ├── config.toml
│ │ └── efit.py
│ ├── cmod
│ │ ├── __init__.py
│ │ ├── config.toml
│ │ └── efit.py
│ ├── east
│ │ ├── __init__.py
│ │ ├── config.toml
│ │ └── efit.py
│ ├── method_holders.py
│ └── tokamak.py
├── config.toml
├── settings
│ ├── __init__.py
│ ├── retrieval_settings.py
│ ├── log_settings.py
│ └── shotlist_setting.py
├── config.py
└── data
│ ├── cmod_vde.csv
│ ├── README.md
│ └── cmod_ufo.csv
├── docs
├── index.md
├── INSTALL.md
├── REFERENCES.md
├── usage
│ ├── workflow_reference.md
│ ├── settings
│ │ ├── log_settings.md
│ │ ├── retrieval_settings.md
│ │ ├── nickname_setting.md
│ │ ├── time_setting.md
│ │ ├── shotlist_setting.md
│ │ └── output_setting.md
│ ├── mds_connection_reference.md
│ ├── physics_methods
│ │ ├── cmod_built_in_method_reference.md
│ │ ├── d3d_built_in_method_reference.md
│ │ ├── east_built_in_method_reference.md
│ │ ├── hbtep_built_in_method_reference.md
│ │ ├── decorator_reference.md
│ │ ├── disruption_parameters_reference.md
│ │ └── physics_method_reference.md
│ └── sql_database.md
├── workflow.png
├── stylesheets
│ └── disruptions.css
├── javascripts
│ └── katex.js
└── examples
│ ├── custom_time_setting.py
│ └── custom_physics_method.py
├── SECURITY.md
├── .github
├── dependabot.yml
└── workflows
│ ├── stale.yml
│ ├── lint.yml
│ ├── docs.yml
│ ├── build.yml
│ ├── install.yml
│ └── tests.yml
├── .gitignore
├── tests
├── test_quick.py
├── utils
│ ├── pytest_helper.py
│ ├── factory.py
│ └── data_difference.py
├── test_config.py
├── test_shotlist_setting.py
├── test_decorator.py
├── test_time_setting.py
├── test_output_setting.py
├── test_retrieval_settings.py
├── conftest.py
└── test_against_cache.py
├── LICENSE
├── examples
├── defaults.py
├── mdsplus.py
├── efit.py
└── sql.py
├── pyproject.toml
├── makefile
├── mkdocs.yml
├── REFERENCES.md
└── INSTALL.md
/disruption_py/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/disruption_py/core/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/disruption_py/inout/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/disruption_py/machine/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | --8<-- "README.md"
--------------------------------------------------------------------------------
/docs/INSTALL.md:
--------------------------------------------------------------------------------
1 | --8<-- "INSTALL.md"
--------------------------------------------------------------------------------
/docs/REFERENCES.md:
--------------------------------------------------------------------------------
1 | --8<-- "REFERENCES.md"
--------------------------------------------------------------------------------
/disruption_py/core/physics_method/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/usage/workflow_reference.md:
--------------------------------------------------------------------------------
1 | ::: disruption_py.workflow
2 | handler: python
--------------------------------------------------------------------------------
/docs/workflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MIT-PSFC/disruption-py/HEAD/docs/workflow.png
--------------------------------------------------------------------------------
/docs/usage/settings/log_settings.md:
--------------------------------------------------------------------------------
1 | ::: disruption_py.settings.log_settings.LogSettings
2 | handler: python
--------------------------------------------------------------------------------
/docs/stylesheets/disruptions.css:
--------------------------------------------------------------------------------
1 | :root {
2 | color-scheme: dark;
3 | --md-primary-fg-color: #ff3636;
4 | --md-accent-fg-color: #ff3636;
5 |
6 | }
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 |
2 | # Security Policy
3 |
4 | To report a security vulnerability, please [send us an email](mailto:gtrevisan@psfc.mit.edu?subject=disruption-py%20security%20vulnerability).
--------------------------------------------------------------------------------
/disruption_py/machine/hbtep/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """Package initialization for the HBTEP machine module."""
4 |
5 | from disruption_py.machine.hbtep.physics import HbtepPhysicsMethods
6 |
7 | METHOD_HOLDERS = [HbtepPhysicsMethods]
8 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | version: 2
4 | updates:
5 |
6 | - package-ecosystem: "github-actions"
7 | directory: "/"
8 | target-branch: "dev"
9 | schedule:
10 | interval: "monthly"
11 | reviewers:
12 | - gtrevisan
13 |
--------------------------------------------------------------------------------
/disruption_py/config.toml:
--------------------------------------------------------------------------------
1 | [default.tests]
2 | match_fraction = 0.95 # Fraction of signals that must match between MDSplus and SQL
3 | val_tolerance = 0.01 # Tolerance for comparing values between MDSplus and SQL
4 | verbose_output = true
5 |
6 | [default.time]
7 | time_const = 1e-6
8 |
--------------------------------------------------------------------------------
/disruption_py/machine/generic/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Package initialization for the generic machine module.
5 | """
6 |
7 | from disruption_py.machine.generic.physics import GenericPhysicsMethods
8 |
9 | METHOD_HOLDERS = [GenericPhysicsMethods]
10 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | data/*
2 | *.pyc
3 | *.csv
4 | *.hdf5
5 | *.h5
6 | *~
7 | *.egg-info/*
8 | .vscode/*
9 | *.pdf
10 | *.log
11 | *.pkl
12 | *.joblib
13 | shot_*profileX*
14 | scripts/output/*
15 | scripts/*.txt
16 | .venv
17 | site
18 | _site
19 | .idea
20 | .nfs*
21 | .coverage
22 | htmlcov/*
23 | local
--------------------------------------------------------------------------------
/disruption_py/machine/d3d/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """Package initialization for the DIII-D machine module."""
4 |
5 | from disruption_py.machine.d3d.efit import D3DEfitMethods
6 | from disruption_py.machine.d3d.physics import D3DPhysicsMethods
7 |
8 | METHOD_HOLDERS = [D3DPhysicsMethods, D3DEfitMethods]
9 |
--------------------------------------------------------------------------------
/disruption_py/machine/cmod/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """Package initialization for the CMOD machine module."""
4 |
5 | from disruption_py.machine.cmod.efit import CmodEfitMethods
6 | from disruption_py.machine.cmod.physics import CmodPhysicsMethods
7 |
8 | METHOD_HOLDERS = [CmodPhysicsMethods, CmodEfitMethods]
9 |
--------------------------------------------------------------------------------
/disruption_py/machine/east/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """Package initialization for the EAST machine module."""
4 |
5 | from disruption_py.machine.east.efit import EastEfitMethods
6 | from disruption_py.machine.east.physics import EastPhysicsMethods
7 |
8 | METHOD_HOLDERS = [EastEfitMethods, EastPhysicsMethods]
9 |
--------------------------------------------------------------------------------
/tests/test_quick.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | execute a few simple workflows as tests.
5 | """
6 |
7 | from examples.efit import main as test_efit
8 | from examples.mdsplus import main as test_mdsplus
9 | from examples.sql import main as test_sql
10 |
11 | __all__ = ["test_efit", "test_mdsplus", "test_sql"]
12 |
--------------------------------------------------------------------------------
/docs/usage/settings/retrieval_settings.md:
--------------------------------------------------------------------------------
1 | A module for handling the options for retrieving data for a shot, passed in the `retrieval_settings` parameter of [`get_shots_data`][disruption_py.workflow.get_shots_data].
2 |
3 | ::: disruption_py.settings.retrieval_settings
4 | handler: python
5 | options:
6 | filters:
7 | - "!^resolve$"
--------------------------------------------------------------------------------
/docs/usage/mds_connection_reference.md:
--------------------------------------------------------------------------------
1 |
2 | The [`MDSConnection`][disruption_py.inout.mds.MDSConnection] class should be used for all data retrieval tasks from MDSplus. It is a simple wrapper for the MDSplus thin client.
3 |
4 | ::: disruption_py.inout.mds
5 | handler: python
6 | options:
7 | filters: ["!^_[^_]"]
8 | members_order: "source"
9 |
--------------------------------------------------------------------------------
/disruption_py/machine/hbtep/config.toml:
--------------------------------------------------------------------------------
1 | [hbtep.inout.mds]
2 | mdsplus_connection_string = "maxwell.ap.columbia.edu:8003"
3 |
4 | [hbtep.inout.sql]
5 | host = ""
6 |
7 | [hbtep.tests]
8 | expected_failure_columns = []
9 | test_columns = []
10 |
11 | [hbtep.tests.shots]
12 | flattop1_full = 77324
13 | flattop2_full = 102709
14 | flattop3_fast = 103590
15 | flattop4_full = 117676
16 |
--------------------------------------------------------------------------------
/docs/javascripts/katex.js:
--------------------------------------------------------------------------------
1 | document$.subscribe(({ body }) => {
2 | renderMathInElement(body, {
3 | delimiters: [
4 | { left: "$$", right: "$$", display: true },
5 | { left: "$", right: "$", display: false },
6 | { left: "\\(", right: "\\)", display: false },
7 | { left: "\\[", right: "\\]", display: true }
8 | ],
9 | })
10 | })
11 |
--------------------------------------------------------------------------------
/docs/usage/physics_methods/cmod_built_in_method_reference.md:
--------------------------------------------------------------------------------
1 | ## Built-in Methods For CMod { .doc .doc-heading }
2 | ::: disruption_py.machine.cmod.physics
3 | handler: python
4 | options:
5 | heading_level: 2
6 | show_source: true
7 | show_if_no_docstring: true
8 | show_root_heading: false
9 | show_root_toc_entry: false
10 | show_root_members_full_path: false
11 | filters:
12 | - "!^_"
--------------------------------------------------------------------------------
/docs/usage/physics_methods/d3d_built_in_method_reference.md:
--------------------------------------------------------------------------------
1 |
2 | ## Built-in Methods For D3D { .doc .doc-heading }
3 | ::: disruption_py.machine.d3d.physics
4 | handler: python
5 | options:
6 | heading_level: 2
7 | show_source: true
8 | show_if_no_docstring: true
9 | show_root_heading: false
10 | show_root_toc_entry: false
11 | show_root_members_full_path: false
12 | filters:
13 | - "!^_"
--------------------------------------------------------------------------------
/docs/usage/physics_methods/east_built_in_method_reference.md:
--------------------------------------------------------------------------------
1 | ## Built-in Methods For EAST { .doc .doc-heading }
2 | ::: disruption_py.machine.east.physics
3 | handler: python
4 | options:
5 | heading_level: 2
6 | show_source: true
7 | show_if_no_docstring: true
8 | show_root_heading: false
9 | show_root_toc_entry: false
10 | show_root_members_full_path: false
11 | filters:
12 | - "!^_"
--------------------------------------------------------------------------------
/docs/usage/physics_methods/hbtep_built_in_method_reference.md:
--------------------------------------------------------------------------------
1 | ## Built-in Methods For HBT-EP { .doc .doc-heading }
2 | ::: disruption_py.machine.hbtep.physics
3 | handler: python
4 | options:
5 | heading_level: 2
6 | show_source: true
7 | show_if_no_docstring: true
8 | show_root_heading: false
9 | show_root_toc_entry: false
10 | show_root_members_full_path: false
11 | filters:
12 | - "!^_"
13 |
--------------------------------------------------------------------------------
/disruption_py/core/physics_method/errors.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Custom exceptions for the physics methods.
5 | """
6 |
7 |
class CalculationError(Exception):
    """
    Custom error specific to physics methods that should be raised when we know
    the result of a calculation is invalid.

    Carries only a human-readable message describing why the result is invalid.
    """

    # NOTE: the previous explicit __init__ only delegated to Exception.__init__
    # with the same signature (pylint W0235), so it has been removed; behavior
    # for callers (construction with a message, str(), args) is unchanged.
16 |
--------------------------------------------------------------------------------
/disruption_py/settings/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """Package initialization for the settings module."""
4 |
5 | from .log_settings import LogSettings
6 | from .output_setting import (
7 | OutputSetting,
8 | OutputSettingParams,
9 | )
10 | from .retrieval_settings import RetrievalSettings
11 | from .shotlist_setting import DatabaseShotlistSetting, FileShotlistSetting
12 | from .time_setting import TimeSetting, TimeSettingParams
13 |
14 | __all__ = [
15 | "LogSettings",
16 | "OutputSetting",
17 | "OutputSettingParams",
18 | "RetrievalSettings",
19 | "DatabaseShotlistSetting",
20 | "FileShotlistSetting",
21 | "TimeSetting",
22 | "TimeSettingParams",
23 | ]
24 |
--------------------------------------------------------------------------------
/.github/workflows/stale.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | name: Stale
4 |
5 | # yamllint disable-line rule:truthy
6 | on:
7 | schedule:
8 | - cron: '30 1 * * MON-FRI'
9 |
10 | permissions:
11 | issues: write
12 | pull-requests: write
13 |
14 | jobs:
15 | stale:
16 | runs-on: ubuntu-22.04
17 | steps:
18 | - uses: actions/stale@v10
19 | with:
20 | days-before-stale: 90
21 | days-before-close: 30
22 | stale-issue-message: >
23 | This issue has been open for 3 months with no activity.
24 | stale-pr-message: >
25 | This pull request has been open for 3 months with no activity.
26 | stale-issue-label: stale
27 | exempt-issue-labels: eternal
28 | exempt-pr-labels: eternal
29 |
--------------------------------------------------------------------------------
/docs/usage/physics_methods/decorator_reference.md:
--------------------------------------------------------------------------------
1 |
2 | ## Decorators { .doc .doc-heading }
3 | Methods that calculate physics parameters can be decorated with the following functions that are called at runtime before the decorated method is run:
4 |
5 | ::: disruption_py.core.physics_method.decorator
6 | handler: python
7 | options:
8 | heading_level: 3
9 | show_source: false
10 | show_root_heading: false
11 | show_root_toc_entry: false
12 | show_root_members_full_path: true
13 |
14 | ::: disruption_py.core.physics_method.caching
15 | handler: python
16 | options:
17 | heading_level: 3
18 | show_source: false
19 | show_root_heading: false
20 | show_root_toc_entry: false
21 | show_root_members_full_path: true
22 | members:
23 | - cache_method
--------------------------------------------------------------------------------
/tests/utils/pytest_helper.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | This module provides utility functions for pytest like extracting parameters
5 | from pytest command-line arguments and saving data to temporary CSV files.
6 | """
7 |
8 | import re
9 |
10 |
def extract_param(config):
    """
    Extract the data column from the pytest command.

    E.g. will return ip given
    `pytest -s tests/test_against_sql.py -k test_data_columns[ip]`

    Params:
        config: pytestconfig fixture

    Returns:
        List[str], the data column if it exists, otherwise None.
    """
    args = config.invocation_params.args
    # nothing to inspect when pytest was invoked without arguments
    if not args:
        return None
    # the parametrized id is the bracketed suffix of the last argument
    match = re.search(r"\[(.+)\]$", args[-1])
    if match is None:
        return None
    return [match[1]]
29 |
--------------------------------------------------------------------------------
/docs/examples/custom_time_setting.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """Example usage of `get_shots_data` demonstrating using a custom time setting."""
4 |
5 |
6 | from disruption_py.settings import RetrievalSettings, TimeSetting, TimeSettingParams
7 | from disruption_py.workflow import get_shots_data
8 |
9 |
class PRadTime(TimeSetting):
    """Time setting built from the radiated-power (prad) diode timebase."""

    def _get_times(self, params: TimeSettingParams):
        """Return the strictly positive times of the twopi_diode signal."""
        (times,) = params.mds_conn.get_dims(
            r"\twopi_diode", tree_name="spectroscopy"
        )
        # drop the pre-shot (non-positive) portion of the timebase
        return times[times > 0]
20 |
21 |
22 | retrieval_settings = RetrievalSettings(time_setting=PRadTime())
23 |
24 | shot_data = get_shots_data(
25 | tokamak="cmod",
26 | shotlist_setting=[1150805012],
27 | retrieval_settings=retrieval_settings,
28 | )
29 |
--------------------------------------------------------------------------------
/tests/test_config.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """This module contains tests to ensure all of the config settings load properly."""
4 |
5 | import os
6 |
7 | from disruption_py.config import config
8 |
9 |
def change_directory(test, tmpdir="/tmp"):
    """
    Decorator: change the current working directory before a test and revert
    back to the original directory after the test completes.

    Params:
        test: the test function to wrap.
        tmpdir: the directory to switch into for the duration of the test.

    Returns:
        The wrapped test function.
    """

    def wrapper():
        original_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            test()
        finally:
            # always restore the cwd, even when the test raises, so a failing
            # test cannot leave later tests running in the wrong directory
            os.chdir(original_dir)

    return wrapper
23 |
24 |
@change_directory
def test_settings_file():
    """
    Temporarily change the current working directory to test if the config settings
    file is reachable.
    """
    time_const = config().time.time_const
    assert time_const is not None
32 |
33 |
def test_access_tokamak_settings(tokamak):
    """
    Test each tokamak's unique settings are accessible.
    """
    settings = config(tokamak)
    assert settings
39 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2016, Cristina Rea & MIT PSFC Disruption Studies Group
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/tests/utils/factory.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | This module provides utility functions for retrieving testing settings like the
5 | shots and columns based on the provided tokamak.
6 | """
7 |
8 | import os
9 |
10 | from disruption_py.config import config
11 | from disruption_py.machine.tokamak import Tokamak
12 |
13 |
def get_tokamak_test_expected_failure_columns(tokamak: Tokamak):
    """Return the columns expected to fail from the config"""
    tests_config = config(tokamak).tests
    return tests_config.expected_failure_columns
17 |
18 |
def get_tokamak_test_shotlist(tokamak: Tokamak) -> list[int]:
    """
    Return the shot ids used for testing from the config. Return a smaller
    shotlist when running as a Github action.
    """
    shots = config(tokamak).tests.shots
    # in CI, keep only the shots explicitly flagged as fast to limit runtime
    in_ci = "GITHUB_ACTIONS" in os.environ
    return [shot for name, shot in shots.items() if not in_ci or "_fast" in name]
32 |
33 |
def get_tokamak_test_columns(tokamak: Tokamak):
    """Return the columns used for testing from the config."""
    tests_config = config(tokamak).tests
    return tests_config.test_columns
37 |
--------------------------------------------------------------------------------
/disruption_py/machine/method_holders.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """Module for retrieving physics method classes based on the specified tokamak."""
4 |
5 | from disruption_py.machine.tokamak import Tokamak
6 |
7 |
def get_method_holders(tokamak: Tokamak):
    """
    Return a list of classes containing the built-in physics methods.

    Parameters
    ----------
    tokamak : Tokamak
        The tokamak whose physics-method holder classes should be returned.

    Returns
    -------
    list
        Classes holding the built-in physics methods for the given tokamak,
        generally the generic holders plus the machine-specific ones.

    Raises
    ------
    ValueError
        If the tokamak is not one of the supported machines.
    """
    # such an import pattern lets us avoid dynamic imports or code introspection
    # pylint: disable=import-outside-toplevel
    from disruption_py.machine.generic import METHOD_HOLDERS as GENERIC

    if tokamak is Tokamak.D3D:
        from disruption_py.machine.d3d import METHOD_HOLDERS

        return GENERIC + METHOD_HOLDERS
    if tokamak is Tokamak.CMOD:
        from disruption_py.machine.cmod import METHOD_HOLDERS

        return GENERIC + METHOD_HOLDERS
    if tokamak is Tokamak.EAST:
        from disruption_py.machine.east import METHOD_HOLDERS

        return GENERIC + METHOD_HOLDERS
    if tokamak is Tokamak.HBTEP:
        from disruption_py.machine.hbtep import METHOD_HOLDERS

        # NOTE(review): unlike the other machines, HBTEP does not include the
        # GENERIC holders — confirm this omission is intentional.
        return METHOD_HOLDERS
    raise ValueError(f"Invalid tokamak for physics methods {tokamak}")
33 |
--------------------------------------------------------------------------------
/disruption_py/core/utils/enums.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | This module provides functions to convert string attributes of an object
5 | to corresponding enum values and to convert string values to enum values.
6 | """
7 |
8 |
def map_string_to_enum(value, enum_class, should_raise=True):
    """
    Convert a string value to the corresponding enum value.

    Parameters
    ----------
    value : str
        The string value to convert to an enum.
    enum_class : type
        The enum class to which the value should be converted.
    should_raise : bool, optional
        Whether to raise an exception if the conversion fails (default is True).

    Returns
    -------
    enum_class
        The corresponding enum value if conversion is successful, otherwise None
        if should_raise is False.
    """
    # already a member of the target enum: pass it through untouched
    if isinstance(value, enum_class):
        return value
    try:
        return enum_class(value)
    except ValueError as exc:
        if not should_raise:
            return None
        raise ValueError(
            f"Cannot convert value '{value}' to enum for '{enum_class}'"
        ) from exc
40 |
--------------------------------------------------------------------------------
/examples/defaults.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Example usage of `get_shots_data` with all the default arguments explicitly assigned.
5 | """
6 |
7 | from disruption_py.settings import LogSettings, RetrievalSettings
8 | from disruption_py.workflow import get_shots_data
9 |
10 | retrieval_settings = RetrievalSettings(
11 | efit_nickname_setting="disruption",
12 | # method/column selection
13 | # default None: all methods/columns
14 | run_methods=None,
15 | run_columns=None,
16 | only_requested_columns=False,
17 | custom_physics_methods=[],
18 | # timebase settings
19 | time_setting="disruption_warning",
20 | )
21 |
22 | shot_data = get_shots_data(
23 | # required argument
24 | shotlist_setting=[],
25 | # default None: detect from environment
26 | tokamak=None,
27 | # default None: standard SQL/MDSplus connection
28 | database_initializer=None,
29 | mds_connection_initializer=None,
30 | retrieval_settings=retrieval_settings,
31 | output_setting="dataset",
32 | num_processes=1,
33 | log_settings=LogSettings(
34 | # default None: "output.log" in temporary session folder
35 | file_path=None,
36 | file_level="DEBUG",
37 | # default None: VERBOSE, or higher based on number of shots
38 | console_level=None,
39 | ),
40 | )
41 |
--------------------------------------------------------------------------------
/.github/workflows/lint.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | name: Lint
4 |
5 | # yamllint disable-line rule:truthy
6 | on:
7 | push:
8 | branches:
9 | - main
10 | - dev
11 | pull_request:
12 | branches:
13 | - main
14 | - dev
15 |
16 | jobs:
17 |
18 |
19 | lint:
20 | runs-on: ubuntu-22.04
21 | if: |
22 | github.event_name == 'push' ||
23 | github.event.pull_request.draft == false
24 | strategy:
25 | matrix:
26 | tool: [black, isort, pylint, ruff, toml-sort, yamllint]
27 | name: ${{ matrix.tool }}
28 | steps:
29 | - uses: actions/checkout@v5
30 |
31 | - name: Install poetry
32 | run: pipx install poetry
33 |
34 | - name: Setup Python
35 | uses: actions/setup-python@v6
36 | with:
37 | python-version: '3.12'
38 | cache: 'poetry'
39 |
40 | - name: Install requirements
41 | run: make install-all
42 |
43 | - name: Run ${{ matrix.tool }}
44 | run: make ${{ matrix.tool }}
45 |
46 |
47 | shellcheck:
48 | runs-on: ubuntu-22.04
49 | if: |
50 | github.event_name == 'push' ||
51 | github.event.pull_request.draft == false
52 | steps:
53 |
54 | - uses: actions/checkout@v5
55 |
56 | - name: Install shellcheck
57 | run: sudo apt-get install shellcheck
58 |
59 | - name: Run shellcheck
60 | run: make shellcheck
61 |
--------------------------------------------------------------------------------
/disruption_py/machine/hbtep/util.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Module for helper, not physics, methods.
5 | """
6 |
7 | import numpy as np
8 |
9 |
class HbtepUtilMethods:
    """
    A class of helper methods that might fetch and compute data from MDSplus
    but are not physics methods.
    """

    @staticmethod
    def unwrap_phase(phase: np.ndarray):
        """
        Unwrap phase data (in radians) so that it is continuous.

        A jump from above +pi/4 to below -pi/4 between consecutive samples is
        treated as a downward wrap and compensated with +2*pi; the opposite
        jump is compensated with -2*pi. This is important for phase data when
        you want to take its derivative to get frequency.

        NOTE(review): the original author believed the input needs to be
        roughly centered about 0 — confirm before relying on other ranges.

        Parameters
        ----------
        phase : numpy.ndarray
            data being unwrapped

        Returns
        ------
        phase_unwrapped : numpy.ndarray
            unwrapped data array

        """
        phase = np.asarray(phase, dtype=float)
        if phase.size == 0:
            # previously crashed with IndexError on empty input; return an
            # empty array instead
            return phase.copy()
        # per-sample offset increment: +2*pi where the phase wraps downward,
        # -2*pi where it wraps upward (the two conditions are exclusive)
        prev, curr = phase[:-1], phase[1:]
        increments = np.zeros_like(phase)
        increments[1:][(prev > np.pi / 4) & (curr < -np.pi / 4)] = 2 * np.pi
        increments[1:][(prev < -np.pi / 4) & (curr > np.pi / 4)] = -2 * np.pi
        # the running sum of increments reproduces, in vectorized form, the
        # sequential offset accumulation of the original Python loop
        return phase + np.cumsum(increments)
45 |
--------------------------------------------------------------------------------
/disruption_py/machine/d3d/util.py:
--------------------------------------------------------------------------------
1 | """
2 | Module for helper, not physics, methods.
3 | """
4 |
5 | import numpy as np
6 |
7 | from disruption_py.core.physics_method.params import PhysicsMethodParams
8 |
9 |
class D3DUtilMethods:
    """
    A class of helper methods that might fetch and compute data from MDSplus
    but are not physics methods.
    """

    @staticmethod
    def get_polarity(params: PhysicsMethodParams):
        """
        Get the plasma current polarity. Accepts PhysicsMethodParams to access
        the MDS connection, but it is not a physics method.

        Returns the first value of polarity array if the polarity is not constant.

        Parameters
        ----------
        params : PhysicsMethodParams
            Parameters containing MDS connection and shot information.

        Returns
        -------
        polarity value, -1 or 1.
        """
        expression = f"ptdata('iptdirect', {params.shot_id})"
        unique_values = np.unique(params.mds_conn.get_data(expression))
        if len(unique_values) <= 1:
            # constant polarity: return it as-is (a length-1 unique array)
            return unique_values
        params.logger.info(
            "Polarity of Ip target is not constant. Using value at first timestep.",
        )
        params.logger.debug("Polarity array: {polarity}", polarity=unique_values)
        return unique_values[0]
43 |
--------------------------------------------------------------------------------
/examples/mdsplus.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | example module for MDSplus.
5 | """
6 |
7 | from disruption_py.machine.tokamak import Tokamak, resolve_tokamak_from_environment
8 | from disruption_py.workflow import get_mdsplus_class
9 |
10 |
def main():
    """
    execute a simple fetch to test MDSplus connection.
    """

    tokamak = resolve_tokamak_from_environment()

    # per-machine test case: shot number, expected data shape, tree, and node
    default_node = r"dim_of(\efit_aeqdsk:li)"
    cases = {
        Tokamak.D3D: (161228, (196,), "efit01", default_node),
        Tokamak.CMOD: (1150805012, (62,), "analysis", default_node),
        Tokamak.EAST: (55555, (102,), "efit_east", default_node),
        Tokamak.HBTEP: (103590, (15358,), "hbtep2", r"\top.sensors.rogowskis:ip"),
    }
    if tokamak not in cases:
        raise ValueError(f"Unspecified or unsupported tokamak: {tokamak}.")
    shot, shape, tree, node = cases[tokamak]

    mds = get_mdsplus_class(tokamak).conn
    print(f"Initialized MDSplus: {mds.hostspec}")

    mds.openTree(tree, shot)
    print("#", shot)

    print(">", node)

    out = mds.get(node).data()
    print("=", out.shape)
    print(out)

    assert out.shape == shape
52 |
53 |
54 | if __name__ == "__main__":
55 | main()
56 |
--------------------------------------------------------------------------------
/disruption_py/config.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Loads configuration settings using Dynaconf for a given tokamak.
5 | """
6 |
7 | import os
8 | from enum import Enum
9 | from typing import Union
10 |
11 | from dynaconf import Dynaconf
12 |
13 | configs = {}
14 |
15 |
def config(tokamak: Union[Enum, str] = None):
    """
    Load and cache the configuration.

    Parameters
    ----------
    tokamak : Union[Enum, str], optional
        Tokamak name or Enum. Defaults to "default".

    Returns
    -------
    Dynaconf
        Configuration settings.
    """
    # Normalize the cache key: enums collapse to their string value,
    # a missing tokamak falls back to the generic profile.
    if isinstance(tokamak, Enum):
        tokamak = tokamak.value
    elif tokamak is None:
        tokamak = "default"

    if tokamak in configs:
        return configs[tokamak]

    # enforce permissions for user config
    user_config = os.path.expanduser("~/.config/disruption-py/user.toml")
    if os.path.exists(user_config):
        os.chmod(user_config, 0o600)

    # Settings are layered: package defaults, machine-specific overrides,
    # then the user's own config file.
    configs[tokamak] = Dynaconf(
        envvar_prefix="DISPY",
        root_path=os.path.dirname(__file__),
        settings_files=[
            "config.toml",
            f"machine/{tokamak}/config.toml",
            user_config,
        ],
        environments=True,
        default_env="default",
        env=tokamak,
        merge_enabled=True,
    )
    return configs[tokamak]
56 |
--------------------------------------------------------------------------------
/.github/workflows/docs.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | name: Docs
4 |
5 | # yamllint disable-line rule:truthy
6 | on:
7 | push:
8 | branches:
9 | - main
10 | - dev
11 | pull_request:
12 | branches:
13 | - main
14 | - dev
15 |
16 | permissions:
17 | contents: read
18 | pages: write
19 | id-token: write
20 |
21 | concurrency:
22 | group: "pages"
23 | cancel-in-progress: false
24 |
25 | jobs:
26 |
27 | docs:
28 | runs-on: ubuntu-22.04
29 | if: |
30 | github.event_name == 'push' ||
31 | github.event.pull_request.draft == false
32 | steps:
33 |
34 | - name: Checkout
35 | uses: actions/checkout@v5
36 |
37 | - name: Install poetry
38 | run: pipx install poetry
39 |
40 | - name: Setup Python
41 | uses: actions/setup-python@v6
42 | with:
43 | python-version: '3.12'
44 | cache: 'poetry'
45 |
46 | - name: Install dependencies
47 | run: make install-all
48 |
49 | - name: Build documentation
50 | run: poetry run mkdocs build --site-dir _site
51 |
52 | - name: Upload artifact
53 | uses: actions/upload-pages-artifact@v4
54 |
55 | deploy:
56 | if: |
57 | github.event_name == 'push' &&
58 | github.ref == 'refs/heads/main'
59 | environment:
60 | name: github-pages
61 | url: ${{ steps.deployment.outputs.page_url }}
62 | runs-on: ubuntu-22.04
63 | needs: docs
64 | steps:
65 | - name: Deploy to GitHub Pages
66 | id: deployment
67 | uses: actions/deploy-pages@v4
68 |
--------------------------------------------------------------------------------
/examples/efit.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | example module for EFIT.
5 | """
6 |
7 | import pytest
8 |
9 | from disruption_py.machine.tokamak import Tokamak, resolve_tokamak_from_environment
10 | from disruption_py.settings import RetrievalSettings
11 | from disruption_py.workflow import get_shots_data
12 |
13 |
def main():
    """
    Execute a simple workflow to fetch EFIT parameters.

    Resolves the tokamak from the environment, retrieves EFIT parameters for
    a known shot, and checks the shape of the returned dataset.
    """

    tokamak = resolve_tokamak_from_environment()

    run_methods = ["get_efit_parameters"]
    # shape is (number of time slices, number of EFIT columns)
    if tokamak is Tokamak.D3D:
        shotlist = [161228]
        shape = (247, 16)
    elif tokamak is Tokamak.CMOD:
        shotlist = [1150805012]
        shape = (62, 22)
    elif tokamak is Tokamak.EAST:
        shotlist = [55555]
        shape = (69, 16)
    elif tokamak is Tokamak.HBTEP:
        pytest.skip("No EFIT for HBT-EP")
        assert False  # unreachable: pytest.skip raises
    else:
        raise ValueError(f"Unspecified or unsupported tokamak: {tokamak}.")

    print(f"Initialized for tokamak: {tokamak.value}")

    retrieval_settings = RetrievalSettings(
        run_methods=run_methods,
        efit_nickname_setting="default",
    )

    result = get_shots_data(
        tokamak=tokamak,
        shotlist_setting=shotlist,
        retrieval_settings=retrieval_settings,
        output_setting="dataset",
    )

    print(result)

    # BUG FIX: the original `assert len(result), len(result.time) == shape`
    # parsed as `assert <expr>, <message>`, so the shape comparison was the
    # assertion *message* and was never actually checked. Compare the
    # (time slices, data variables) tuple against the expected shape.
    assert (len(result.time), len(result)) == shape
54 |
55 |
56 | if __name__ == "__main__":
57 | main()
58 |
--------------------------------------------------------------------------------
/.github/workflows/build.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | name: Build
4 |
5 | # yamllint disable-line rule:truthy
6 | on:
7 | push:
8 | branches:
9 | - main
10 | - dev
11 | pull_request:
12 | branches:
13 | - main
14 | - dev
15 | release:
16 | types:
17 | - published
18 |
19 |
20 | jobs:
21 |
22 |
23 | build:
24 | if: |
25 | github.event_name == 'push' ||
26 | github.event_name == 'release' ||
27 | github.event.pull_request.draft == false
28 | runs-on: ubuntu-22.04
29 | steps:
30 |
31 | - name: Checkout
32 | uses: actions/checkout@v5
33 |
34 | - name: Install poetry
35 | run: pipx install poetry
36 |
37 | - name: Setup Python
38 | uses: actions/setup-python@v6
39 | with:
40 | python-version: '3.12'
41 | cache: 'poetry'
42 |
43 | - name: Print versions
44 | run: |
45 | python --version
46 | poetry --version
47 |
48 | - name: Poetry check
49 | run: poetry check --lock
50 |
51 | - name: Poetry install
52 | run: make install-all
53 |
54 | - name: Test import
55 | run: poetry run python -c "import disruption_py"
56 |
57 | - name: Discard dev package
58 | if: github.event_name == 'release'
59 | run: "! poetry version | grep dev"
60 |
61 | - name: Build package
62 | run: poetry build
63 |
64 | - name: Publish package
65 | if: github.event_name == 'release'
66 | env:
67 | POETRY_PYPI_TOKEN_PYPI: "${{ secrets.PYPI_TOKEN }}"
68 | run: poetry publish
69 |
--------------------------------------------------------------------------------
/.github/workflows/install.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | name: Install
4 |
5 | # yamllint disable-line rule:truthy
6 | on:
7 | push:
8 | branches:
9 | - main
10 | - dev
11 | pull_request:
12 | branches:
13 | - main
14 | - dev
15 |
16 | jobs:
17 |
18 | install:
19 | if: |
20 | github.event_name == 'push' ||
21 | github.event.pull_request.draft == false
22 | runs-on: ubuntu-22.04
23 | strategy:
24 | fail-fast: false
25 | matrix:
26 | version:
27 | - '3.11'
28 | - '3.12'
29 | - '3.13'
30 | tool:
31 | - poetry
32 | - uv
33 | route:
34 | - pyproject
35 | - lockfile
36 |
37 | steps:
38 | - name: Checkout
39 | uses: actions/checkout@v5
40 |
41 | - name: Setup Python
42 | uses: actions/setup-python@v6
43 | with:
44 | python-version: '${{ matrix.version }}'
45 |
46 | - name: Install tool via pipx
47 | run: pipx install ${{ matrix.tool }}
48 |
49 | - name: Log versions
50 | run: |
51 | python --version
52 | pip --version
53 | pipx --version
54 | ${{ matrix.tool }} --version
55 |
56 | - name: Remove lockfiles
57 | if: ${{ matrix.route == 'pyproject' }}
58 | run: rm -fv *.lock
59 |
60 | - name: Install via poetry
61 | if: ${{ matrix.tool == 'poetry' }}
62 | run: poetry install -v
63 |
64 | - name: Install via uv
65 | if: ${{ matrix.tool == 'uv' }}
66 | run: uv sync -v
67 |
68 | - name: Test import
69 | run: ${{ matrix.tool }} run python -c "import disruption_py"
70 |
--------------------------------------------------------------------------------
/tests/test_shotlist_setting.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """Unit tests for the shotlist_setting"""
4 |
5 | import numpy as np
6 | import pytest
7 |
8 | from disruption_py.settings.shotlist_setting import (
9 | ShotlistSetting,
10 | ShotlistSettingParams,
11 | shotlist_setting_runner,
12 | )
13 |
14 | # Use a few integers instead of reasonable shot ids because there's nothing
15 | # grabbing actual shot data here
16 | REFERENCE_SHOTLIST = list(range(5))
17 |
18 |
def get_shotlists():
    """
    Generate custom shot requests, along with 1, 2, and 3 dimensional Python lists
    and Numpy arrays.
    """

    class CustomShotlistSetting(ShotlistSetting):
        """A custom shotlist setting for testing"""

        def _get_shotlist(self, params: ShotlistSettingParams):
            """Return Numpy array of shots"""
            return np.array([REFERENCE_SHOTLIST])

    # One variant per supported input style: a setting subclass, plus
    # 1-D, 2-D, and 3-D forms of both Numpy arrays and Python lists.
    variants = [
        CustomShotlistSetting(),
        np.array(REFERENCE_SHOTLIST),
        np.array([REFERENCE_SHOTLIST]),
        np.array([[[i] for i in REFERENCE_SHOTLIST]]),
        REFERENCE_SHOTLIST,
        [REFERENCE_SHOTLIST],
        [[[i] for i in REFERENCE_SHOTLIST]],
    ]
    return variants
41 |
42 |
@pytest.mark.parametrize("shotlist", get_shotlists())
def test_shotlist_setting_runner(shotlist):
    """
    Ensure all variations of shotlist_settings get flattened to a single dimensional
    list.
    """
    # No database or tokamak is needed: nothing here fetches real shot data.
    params = ShotlistSettingParams(database=None, tokamak=None)
    flattened = shotlist_setting_runner(shotlist, params)
    assert flattened == REFERENCE_SHOTLIST
52 |
--------------------------------------------------------------------------------
/disruption_py/machine/cmod/config.toml:
--------------------------------------------------------------------------------
1 | [cmod.efit]
2 | tree = "efit18"
3 |
4 | [cmod.inout.mds]
5 | mdsplus_connection_string = "alcdata-archives"
6 |
7 | [cmod.inout.sql]
8 | db_name = "logbook"
9 | driver = "ODBC"
10 | host = "alcdb2"
11 | port = 1433
12 |
13 | [cmod.physics.time_domain_thresholds]
14 | dipprog_dt = 50e3
15 | ip_prog = 100e3
16 |
17 | [cmod.tests]
18 | expected_failure_columns = [
19 | "greenwald_fraction",
20 | "ne_peaking",
21 | "p_oh",
22 | "pressure_peaking",
23 | "radiated_fraction",
24 | "sxr",
25 | "te_peaking",
26 | "te_width",
27 | "v_loop",
28 | "v_z",
29 | "z_times_v_z",
30 | "zcur",
31 | ]
32 | test_columns = [
33 | "beta_n",
34 | "beta_p",
35 | "dbetap_dt",
36 | "dip_dt",
37 | "dipprog_dt",
38 | "dli_dt",
39 | "dn_dt",
40 | "dprad_dt",
41 | "dwmhd_dt",
42 | "greenwald_fraction",
43 | "i_efc",
44 | "ip",
45 | "ip_error",
46 | "kappa",
47 | "kappa_area",
48 | "li",
49 | "lower_gap",
50 | "n_e",
51 | "n_equal_1_mode",
52 | "n_equal_1_normalized",
53 | "n_over_ncrit",
54 | "ne_peaking",
55 | "p_icrf",
56 | "p_lh",
57 | "p_oh",
58 | "p_rad",
59 | "pressure_peaking",
60 | "q0",
61 | "q95",
62 | "qstar",
63 | "radiated_fraction",
64 | "shot",
65 | "ssep",
66 | "sxr",
67 | "te_peaking",
68 | "te_width",
69 | "time",
70 | "time_until_disrupt",
71 | "upper_gap",
72 | "v_loop",
73 | "v_loop_efit",
74 | "v_z",
75 | "wmhd",
76 | "z_error",
77 | "z_times_v_z",
78 | "zcur",
79 | ]
80 |
81 | [cmod.tests.shots]
82 | flattop1_fast = 1150805012
83 | flattop2_full = 1150805022
84 | no_disrup1_full = 1150805013
85 | no_disrup2_full = 1150805014
86 | rampdown1_full = 1150805015
87 | rampdown2_full = 1150805016
88 | rampdown3_full = 1150805017
89 | rampdown4_full = 1150805019
90 | rampdown5_full = 1150805020
91 | rampdown6_full = 1150805021
92 |
--------------------------------------------------------------------------------
/examples/sql.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | example module for SQL.
5 | """
6 |
7 | import pytest
8 |
9 | from disruption_py.machine.tokamak import Tokamak, resolve_tokamak_from_environment
10 | from disruption_py.workflow import get_database
11 |
12 |
def main():
    """
    Execute a few meaningful queries to test the DB connection.

    Counts shots in the disruption_warning and disruptions tables and
    compares each count against a known per-machine reference value.
    """

    queries = [
        "select count(distinct shot) from disruption_warning",
        "select count(distinct shot) from disruption_warning"
        + " where shot not in (select shot from disruptions)",
        "select count(distinct shot) from disruption_warning"
        + " where shot in (select shot from disruptions)",
        "select count(distinct shot) from disruptions",
    ]

    tokamak = resolve_tokamak_from_environment()
    if tokamak is Tokamak.HBTEP:
        pytest.skip("No SQL for HBT-EP")
        assert False

    db = get_database(tokamak=tokamak)

    # Expected counts per machine, in the same order as `queries`.
    expected_by_machine = {
        Tokamak.D3D: [13245, 8055, 5190, 24219],
        Tokamak.CMOD: [10435, 6640, 3795, 13785],
        Tokamak.EAST: [18568, 9875, 8693, 30482],
    }
    vals = expected_by_machine.get(tokamak)
    if vals is None:
        raise ValueError(f"Unspecified or unsupported tokamak: {tokamak}.")

    print(f"Initialized DB: {db.user}@{db.host}/{db.db_name}")
    print("Version:", db.get_version())

    for query in queries:

        print(">", query.strip(" "))

        out = db.query(query)
        print("=", out.shape)

        print(out.iloc[0] if out.shape[0] == 1 else out, "\n")
        if vals:
            assert out.iloc[0, 0] == vals.pop(0)
57 |
58 |
59 | if __name__ == "__main__":
60 | main()
61 |
--------------------------------------------------------------------------------
/disruption_py/machine/tokamak.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """Module for handling tokamak types and resolving tokamak configurations."""
4 |
5 | import os
6 | from enum import Enum
7 | from typing import Union
8 |
9 | from disruption_py.config import config
10 | from disruption_py.core.utils.enums import map_string_to_enum
11 |
12 |
class Tokamak(Enum):
    """
    For documentation of supported tokamaks:
    # --8<-- [start:allowed_tokamak_types_snippet]
    Currently supported tokamak type strings are: `"cmod", "d3d", "east", "hbtep"`
    # --8<-- [end:allowed_tokamak_types_snippet]
    """

    # machine identifier strings, matched by config files and settings
    D3D = "d3d"
    CMOD = "cmod"
    EAST = "east"
    HBTEP = "hbtep"
25 |
26 |
def resolve_tokamak_from_environment(tokamak: Union[Tokamak, str] = None):
    """
    Method to resolve the tokamak:
    1. return if it's already a tokamak;
    2. read the argument, and overwrite the config;
    3. read the config;
    4. look for specific folders that will indicate presence on a given machine,
       and thus infer the tokamak from the cluster;
    5. raise exception.
    """
    # case 1: nothing to resolve
    if isinstance(tokamak, Tokamak):
        return tokamak
    if tokamak:
        # case 2: explicit argument wins and is stored in the config
        config().tokamak = tokamak
    else:
        # case 3: fall back to whatever the config holds
        tokamak = config().get("tokamak")
    if tokamak:
        return map_string_to_enum(tokamak, Tokamak)
    # case 4: infer the machine from cluster-specific directories
    machine_markers = (
        ("/usr/local/mfe/disruptions", Tokamak.CMOD),
        ("/fusion/projects/disruption_warning", Tokamak.D3D),
        ("/project/disruption", Tokamak.EAST),
        ("/opt/hbt/disruptions", Tokamak.HBTEP),
    )
    for marker, machine in machine_markers:
        if os.path.exists(marker):
            return machine
    # case 5
    raise ValueError(
        "Tokamak is unspecified and could not be determined from the environment."
    )
61 |
--------------------------------------------------------------------------------
/tests/test_decorator.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | This module contains tests to ensure that physics methods can be executed
5 | correctly when the tokamak parameter is set to either `None` or a specific
6 | tokamak instance.
7 | """
8 |
9 | import os
10 |
11 | import pytest
12 |
13 | from disruption_py.core.physics_method.decorator import physics_method
14 | from disruption_py.core.physics_method.params import PhysicsMethodParams
15 | from disruption_py.machine.tokamak import resolve_tokamak_from_environment
16 | from disruption_py.settings import LogSettings, RetrievalSettings
17 | from disruption_py.workflow import get_shots_data
18 |
19 |
@pytest.mark.parametrize("tok", [None, resolve_tokamak_from_environment()])
def test_tokamak_parameter(shotlist, tok, test_folder_f):
    """
    Ensure physics methods run when the tokamak parameter is set as either
    `None` or a specific tokamak.

    Parameters
    ----------
    shotlist : list
        Fixture providing shot ids; only the first shot is used.
    tok : Tokamak or None
        Parametrized value passed to the `physics_method` decorator.
    test_folder_f : str
        Fixture providing a per-test output folder.
    """
    col_name = "x"

    # The physics method needs to be defined in the global scope because
    # multiprocessing and pickling don't work with locally-defined functions.
    # pylint: disable-next=global-variable-undefined
    global my_physics_method

    @physics_method(columns=[col_name], tokamak=tok)
    def my_physics_method(params: PhysicsMethodParams):
        # times**0 is an array of ones on the shot timebase, so every sample
        # of column "x" is expected to be exactly 1 in the assertions below.
        return {col_name: params.times**0}

    retrieval_settings = RetrievalSettings(
        run_columns=[col_name],
        only_requested_columns=True,
        custom_physics_methods=[my_physics_method],
    )
    shot_data = get_shots_data(
        shotlist_setting=shotlist[:1],
        retrieval_settings=retrieval_settings,
        output_setting=os.path.join(test_folder_f, "output.nc"),
        log_settings=LogSettings(
            console_level="WARNING",
            file_path=os.path.join(test_folder_f, "output.log"),
        ),
    )
    # Check that: the requested column exists, the requested shot was
    # processed, the timebase is non-empty, and the method's constant
    # output survived retrieval unchanged.
    assert col_name in shot_data.data_vars
    assert shotlist[0] in shot_data.shot
    assert len(shot_data.time)
    assert all(shot_data[col_name].values == 1)
55 |
--------------------------------------------------------------------------------
/docs/usage/settings/nickname_setting.md:
--------------------------------------------------------------------------------
1 | ## Overview { .doc .doc-heading }
2 | A module for handling the EFIT tree nickname passed in the [`RetrievalSettings`][disruption_py.settings.retrieval_settings] class.
3 | DisruptionPy uses the nickname setting to determine which MDSplus EFIT tree to get data from.
4 |
5 | This module defines the abstract class `NicknameSetting` that can have subclasses passed as the
6 | `efit_nickname_setting` parameter to the [`RetrievalSettings`][disruption_py.settings.retrieval_settings] class.
7 | It also provides built-in classes and mappings to easily set the nickname for data retrieval for common use cases.
8 |
9 | ### Usage { .doc .doc-heading }
10 | Currently, these are the options that can be passed to the `efit_nickname_setting` parameter in [`RetrievalSettings`][disruption_py.settings.retrieval_settings]:
11 |
12 | - An instance of a subclass of `NicknameSetting`
13 | - A string identifier in the `_nickname_setting_mappings` dictionary:
14 | ```python
15 | --8<--
16 | disruption_py/settings/nickname_setting.py:nickname_setting_keys
17 | --8<--
18 | ```
19 | - A dictionary mapping tokamak type strings to the desired `NicknameSetting` for that tokamak. E.g. `{'cmod': 'efit'}`.
20 | --8<-- "disruption_py/machine/tokamak.py:allowed_tokamak_types_snippet"
21 |
22 | ## Built-in Implementations { .doc .doc-heading }
23 |
24 | ::: disruption_py.settings.nickname_setting
25 | handler: python
26 | options:
27 | show_root_heading: false
28 | show_root_toc_entry: false
29 | show_root_members_full_path: true
30 | members:
31 | - StaticNicknameSetting
32 | - DefaultNicknameSetting
33 | - DisruptionNicknameSetting
34 |
35 | ## Custom Implementations { .doc .doc-heading }
36 | Custom implementations of a nickname setting must inherit from the `NicknameSetting` abstract class, implementing the abstract methods.
37 |
38 | ::: disruption_py.settings.nickname_setting
39 | handler: python
40 | options:
41 | show_root_heading: false
42 | show_root_toc_entry: false
43 | show_root_members_full_path: true
44 | members:
45 | - NicknameSetting
46 | - NicknameSettingParams
47 |
--------------------------------------------------------------------------------
/docs/usage/sql_database.md:
--------------------------------------------------------------------------------
1 | DisruptionPy uses logbook SQL databases for convenience when retrieving data from MDSPlus. Users may also use DisruptionPy to directly retrieve data from the logbook database's `disruption_warning` tables.
2 |
3 | ## The `disruption_warning` table { .doc .doc-heading }
4 | The `disruption_warning` sql tables for CMod and DIII-D contain important disruption parameters for a large number of shots.
5 |
6 | ### CMod Dataset
7 | The dataset contains unique plasma discharges from MIT's Alcator C-Mod tokamak, from the 2012 to 2016 experimental campaigns, plus additional discharges from 2005.
8 | ??? info "Available columns on CMod"
9 |
10 | ```
11 | 'dbkey', 'shot', 'time', 'time_until_disrupt', 'ip_error', 'dip_dt',
12 | 'beta_p', 'beta_n', 'li', 'n_equal_1_normalized', 'z_error', 'v_z',
13 | 'z_times_v_z', 'kappa', 'pressure_peaking', 'H98', 'q0', 'qstar', 'q95',
14 | 'v_0', 'v_mid', 'v_edge', 'dn_dt', 'p_rad_slow', 'p_oh_slow', 'p_icrf',
15 | 'p_lh', 'radiated_fraction', 'power_supply_railed', 'v_loop_efit',
16 | 'r_dd', 'lower_gap', 'upper_gap', 'dbetap_dt', 'dli_dt', 'ip', 'zcur',
17 | 'n_e', 'dipprog_dt', 'v_loop', 'p_rad', 'p_oh', 'ssep', 'dWmhd_dt',
18 | 'dprad_dt', 'v_0_uncalibrated', 'Te_width', 'Greenwald_fraction',
19 | 'intentional_disruption', 'Te_width_ECE', 'Wmhd', 'n_over_ncrit',
20 | 'n_equal_1_mode', 'Mirnov', 'Mirnov_norm_btor', 'Mirnov_norm_bpol',
21 | 'Te_peaking', 'ne_peaking', 'Te_peaking_ECE', 'SXR_peaking',
22 | 'kappa_area', 'I_efc', 'SXR', 'H_alpha', 'Prad_peaking_CVA'
23 | ```
24 |
25 | For more details on computed values please see [parameter reference][disruption-parameter-descriptions].
26 |
27 | ## Retrieving data from the SQL database { .doc .doc-heading }
28 | Here is an example retrieving data from `disruption_warning` or `disruptions` table
29 | ```python
30 | --8<--
31 | examples/sql.py
32 | --8<--
33 | ```
34 |
35 | ## Database Class Reference { .doc .doc-heading }
36 |
37 | ::: disruption_py.inout.sql
38 | handler: python
39 | options:
40 | heading_level: 3
41 | show_root_heading: false
42 | show_root_toc_entry: false
43 | filters: ["!^_[^_]"]
44 |
45 |
--------------------------------------------------------------------------------
/disruption_py/core/physics_method/params.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Module for defining parameters used in physics methods for DisruptionPy.
5 | """
6 | from dataclasses import dataclass, field
7 | from typing import Any, Dict, Tuple
8 |
9 | import numpy as np
10 | from loguru import logger
11 |
12 | from disruption_py.core.utils.misc import shot_msg_patch, to_tuple
13 | from disruption_py.inout.mds import MDSConnection
14 | from disruption_py.machine.tokamak import Tokamak
15 |
16 |
@dataclass
class PhysicsMethodParams:
    """
    Holder for useful variables for the physics methods like an MDSplus connection
    and the timebase for the data.
    """

    # shot identifier for the discharge being processed
    shot_id: int
    # machine the shot belongs to
    tokamak: Tokamak
    # disruption time in seconds, or None if the shot did not disrupt
    disruption_time: float
    # open MDSplus connection used to fetch data for this shot
    mds_conn: MDSConnection
    # timebase for the retrieved data
    times: np.ndarray
    # per-shot cache of intermediate results shared between physics methods.
    # Declared with the other fields: the original placed this annotation
    # after __post_init__, which obscured that it is an __init__ field.
    cached_results: Dict[str, Any] = field(default_factory=dict)

    def __post_init__(self):
        # logger is not a dataclass field; it is derived from shot_id so that
        # every log message carries the shot id.
        self.logger = shot_msg_patch(logger, self.shot_id)

    @property
    def disrupted(self) -> bool:
        """
        Check if the disruption time is set.

        Returns
        -------
        bool
            True if disruption time is not None, False otherwise.
        """
        return self.disruption_time is not None

    def cleanup(self) -> None:
        """
        Clean up resources used by the physics method parameters.
        """
        self.mds_conn.cleanup()
        self.times = None
        self.cached_results.clear()

    def to_coords(self) -> Dict[str, Tuple[str, np.ndarray]]:
        """
        Create a dictionary of coordinates based on the parameters.

        Returns
        -------
        Dict[str, Tuple[str, np.ndarray]]
            A dictionary with `shot` and `time` as coordinates for dimension `idx`.
        """
        return to_tuple(
            data={
                "shot": len(self.times) * [self.shot_id],
                "time": self.times.astype("float32"),
            },
            dim="idx",
        )
71 |
--------------------------------------------------------------------------------
/docs/usage/settings/time_setting.md:
--------------------------------------------------------------------------------
1 | ## Overview { .doc .doc-heading }
2 | A module for handling time settings passed in the [`RetrievalSettings`][disruption_py.settings.retrieval_settings] class.
3 | Set time settings used by DisruptionPy to set the timebase for data retrieval from MDSPlus and any SQL tables.
4 |
5 | This module defines the abstract class [`TimeSetting`][disruption_py.settings.time_setting.TimeSetting] that can have subclasses passed as the
6 | `time_setting` parameter to the [`RetrievalSettings`][disruption_py.settings.retrieval_settings] class.
7 | It also provides built-in classes and mappings to easily set the timebase for data retrieval for common use cases.
8 |
9 | ### Usage { .doc .doc-heading }
10 | Currently, these are the options that can be passed to the `time_setting` parameter in [`RetrievalSettings`][disruption_py.settings.retrieval_settings]:
11 |
12 | - An instance of a subclass of `TimeSetting`
13 | - A string identifier in the `_time_setting_mappings` dictionary:
14 |
15 | ```python
16 | --8<--
17 | disruption_py/settings/time_setting.py:time_setting_dict
18 | --8<--
19 | ```
20 | - A Python list, NumPy array, or Pandas Series (with the timebase as the values) that should be used as the times for the timebase. See [`ListTimeSetting`][disruption_py.settings.time_setting.ListTimeSetting] for more details.
21 | - A dictionary mapping tokamak type strings to the desired `TimeSetting` for that tokamak. E.g. `{'cmod': 'efit'}`.
22 |
23 | ## Built-in Implementations { .doc .doc-heading }
24 |
25 | ::: disruption_py.settings.time_setting
26 | options:
27 | show_root_heading: false
28 | show_root_toc_entry: false
29 | show_root_members_full_path: true
30 | filters:
31 | - "!^TimeSetting"
32 | - "!^TimeSettingParams$"
33 |
34 | ## Custom Implementations { .doc .doc-heading }
35 | Custom implementations of time settings must inherit from the `TimeSetting` abstract class, implementing the abstract methods.
36 |
37 | ::: disruption_py.settings.time_setting
38 | handler: python
39 | options:
40 | heading_level: 3
41 | members:
42 | - TimeSetting
43 | - TimeSettingParams
44 |
45 | ### Custom time settings example
46 |
47 | ::: docs.examples.custom_time_setting
48 | handler: python
49 | options:
50 | heading_level: 4
--------------------------------------------------------------------------------
/disruption_py/machine/east/config.toml:
--------------------------------------------------------------------------------
1 | [east.inout.mds]
2 | mdsplus_connection_string = "mds.ipp.ac.cn"
3 |
4 | [east.inout.sql]
5 | db_name = "east_disruption"
6 | driver = "MySQL"
7 | host = "202.127.205.10"
8 | port = 3306
9 |
10 | [east.physics.time_domain_thresholds]
11 | dipprog_dt = 1e3
12 |
13 | [east.tests]
14 | expected_failure_columns = [
15 | "beta_n",
16 | "beta_p",
17 | "beta_p_rt",
18 | "greenwald_fraction",
19 | "ip_error_rt",
20 | "kappa",
21 | "kappa_area",
22 | "li",
23 | "li_rt",
24 | "mirnov_std",
25 | "mirnov_std_normalized",
26 | "p_ecrh",
27 | "p_input",
28 | "p_oh",
29 | "prad_peaking",
30 | "q0",
31 | "q95",
32 | "qstar",
33 | "rad_input_frac",
34 | "rad_loss_frac",
35 | "rmp_n_equal_1_phase",
36 | "wmhd",
37 | "wmhd_rt",
38 | ]
39 | test_columns = [
40 | "beta_n",
41 | "beta_p",
42 | "beta_p_rt",
43 | "btor",
44 | "dipprog_dt",
45 | "greenwald_fraction",
46 | "ip",
47 | "ip_error",
48 | "ip_error_normalized",
49 | "ip_error_rt",
50 | "kappa",
51 | "kappa_area",
52 | "li",
53 | "li_rt",
54 | "lower_gap",
55 | "mirnov_std",
56 | "mirnov_std_normalized",
57 | "n_e",
58 | "n_equal_1_mode",
59 | "n_equal_1_normalized",
60 | "n_equal_1_phase",
61 | "p_ecrh",
62 | "p_icrf",
63 | "p_input",
64 | "p_lh",
65 | "p_lh_rt",
66 | "p_nbi",
67 | "p_nbi_rt",
68 | "p_oh",
69 | "p_rad",
70 | "p_rad_rt",
71 | "paminor",
72 | "pbeta_n",
73 | "pbeta_p",
74 | "pkappa",
75 | "pli",
76 | "pq95",
77 | "prad_peaking",
78 | "pwmhd",
79 | "q0",
80 | "q95",
81 | "q95_rt",
82 | "qstar",
83 | "rad_input_frac",
84 | "rad_loss_frac",
85 | "rmp_n_equal_1",
86 | "rmp_n_equal_1_phase",
87 | "time_until_disrupt",
88 | "upper_gap",
89 | "v_loop",
90 | "wmhd",
91 | "wmhd_rt",
92 | "z_error",
93 | "z_error_lmsz",
94 | "z_error_lmsz_normalized",
95 | "zcur",
96 | "zcur_lmsz",
97 | "zcur_lmsz_normalized",
98 | ]
99 |
100 | [east.tests.shots]
101 | disrup1_fast = 55012
102 | disrup2_full = 56020
103 | disrup3_full = 56549
104 | disrup4_full = 57508
105 |
106 | [east.time]
107 | disruption_time_start = 0.2
108 | disruption_time_step = 0.1
109 | dt_before_disruption = 0.010
110 | duration_before_disruption = 0.25
111 | end_of_current_threshold = 200e3
112 | minimum_duration = 0.6
113 | minimum_ip = 200e3
114 |
--------------------------------------------------------------------------------
/disruption_py/core/utils/shared_instance.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | This module provides a singleton class that ensures only one instance of a
5 | given class is created for a specific set of arguments, allowing for
6 | shared instances across the same process.
7 | """
8 |
9 | import os
10 |
11 |
class SharedInstance:
    """
    Singleton class for creating shared instances of a specified class.

    Instances are cached per (process id, class, constructor arguments), so
    repeated calls with equal arguments within the same process return the
    same object.

    Attributes
    ----------
    cls_arg : type
        The class for which shared instances will be created.
    _instances : dict
        A dictionary to store shared instances keyed by a unique identifier.
    """

    _instances = {}

    def __init__(self, cls_arg):
        """
        Initialize with a class.

        Parameters
        ----------
        cls_arg : type
            The class for which instances will be shared.
        """
        self.cls_arg = cls_arg

    @staticmethod
    def _make_hashable(obj):
        """
        Recursively convert dicts, lists, and sets into sorted tuples so the
        constructor arguments can be used as a dictionary key.

        Hoisted out of get_instance so the helper is not re-created on every
        call. NOTE(review): lists are sorted, so two list arguments with the
        same elements in a different order map to the same cached instance --
        confirm this conflation is intended.
        """
        if isinstance(obj, dict):
            return tuple(
                sorted((k, SharedInstance._make_hashable(v)) for k, v in obj.items())
            )
        if isinstance(obj, (list, set)):
            return tuple(sorted(SharedInstance._make_hashable(e) for e in obj))
        return obj

    def get_instance(self, *args, **kwargs):
        """
        Get a shared instance of the specified class, creating it if necessary.

        Parameters
        ----------
        *args : tuple
            Positional arguments to pass to the class constructor.
        **kwargs : dict
            Keyword arguments to pass to the class constructor.

        Returns
        -------
        object
            The shared instance of the specified class.
        """
        make_hashable = SharedInstance._make_hashable
        hashable_args = tuple(make_hashable(arg) for arg in args)
        hashable_kwargs = tuple(
            sorted((k, make_hashable(v)) for k, v in kwargs.items())
        )

        # Key on the pid so forked worker processes do not share instances
        # created in the parent process.
        key = (os.getpid(), self.cls_arg, hashable_args, hashable_kwargs)
        if key not in SharedInstance._instances:
            SharedInstance._instances[key] = self.cls_arg(*args, **kwargs)
        return SharedInstance._instances[key]
73 |
--------------------------------------------------------------------------------
/docs/usage/settings/shotlist_setting.md:
--------------------------------------------------------------------------------
1 | ## Overview { .doc .doc-heading }
2 | A module for handling shot ids passed in the [`get_shots_data`][disruption_py.workflow.get_shots_data]
3 | method. DisruptionPy will retrieve MDSplus data for those shot ids.
4 |
5 | This module defines the abstract class [`ShotlistSetting`][disruption_py.settings.shotlist_setting.ShotlistSetting] that can have subclasses passed as the `shotlist_setting` argument to the `get_shots_data` method.
6 | It also provides built-in classes and mappings to easily define shot ids for common use cases.
7 |
8 | ### Usage { .doc .doc-heading }
9 | Currently, these are the options that can be passed as the `shotlist_setting` argument to `get_shots_data`:
10 |
11 | - An instance of a subclass of `ShotlistSetting`
12 | - A single shot id as an `int` or `str`
13 | - A Python list of shot ids as any combination of `int` or `str`
14 | - A dictionary key as a string from the built-in mappings to data files in the `_get_shotlist_setting_mappings` dictionary:
15 | ```python
16 | --8<--
17 | disruption_py/settings/shotlist_setting.py:get_shotlist_setting_dict
18 | --8<--
19 | ```
20 | - A file path as a string with its suffix mapped to a `ShotlistSetting` type in the `_file_suffix_to_shotlist_setting` dictionary:
21 | ```python
22 | --8<--
23 | disruption_py/settings/shotlist_setting.py:file_suffix_to_shotlist_setting_dict
24 | --8<--
25 | ```
26 | - A dictionary mapping tokamak type strings to the desired shot ids option for that tokamak. E.g. `{'cmod': 'cmod_test'}`.
27 | - A Python list of any other shot id request option that can be passed as the `shotlist_setting` argument to `get_shots_data` (all options listed previously). All designated shot numbers will be concatenated and any duplicates will be removed.
28 |
## Built-in Implementations { .doc .doc-heading }
30 |
31 | ::: disruption_py.settings.shotlist_setting
32 | options:
33 | show_root_heading: false
34 | show_root_toc_entry: false
35 | show_root_members_full_path: true
36 | filters:
      - "!^ShotlistSettingParams$"
      - "!^ShotlistSetting$"
39 |
40 | ## Custom Implementations { .doc .doc-heading }
41 | Custom implementations of shot id settings must inherit from the `ShotlistSetting` abstract class, implementing the abstract methods.
42 |
43 | ::: disruption_py.settings.shotlist_setting
44 | handler: python
45 | options:
46 | heading_level: 2
47 | members:
      - ShotlistSetting
      - ShotlistSettingParams
50 |
--------------------------------------------------------------------------------
/disruption_py/machine/d3d/config.toml:
--------------------------------------------------------------------------------
1 | [d3d.efit]
2 | runtag = "DIS"
3 |
4 | [d3d.inout.mds]
5 | mdsplus_connection_string = "atlas"
6 |
7 | [d3d.inout.sql]
8 | db_name = "d3drdb"
9 | driver = "FreeTDS"
10 | host = "d3drdb"
11 | port = 8001
12 |
13 | [d3d.physics.time_domain_thresholds]
14 | dipprog_dt = 2e3
15 | ip_prog = 100e3
16 | power_supply_railed = 1
17 |
18 | [d3d.tests]
19 | expected_failure_columns = [
20 | "aminor",
21 | "beta_n",
22 | "beta_p",
23 | "beta_p_rt",
24 | "dbetap_dt",
25 | "delta",
26 | "dipprog_dt_rt",
27 | "dli_dt",
28 | "dn_dt",
29 | "dwmhd_dt",
30 | "greenwald_fraction",
31 | "kappa",
32 | "kappa_area",
33 | "li",
34 | "li_rt",
35 | "lower_gap",
36 | "n_e",
37 | "n_equal_1_mode",
38 | "n_equal_1_normalized",
39 | "ne_peaking_cva_rt",
40 | "p_ohm",
41 | "p_rad",
42 | "prad_peaking_cva_rt",
43 | "prad_peaking_xdiv_rt",
44 | "q0",
45 | "q95",
46 | "q95_rt",
47 | "qstar",
48 | "radiated_fraction",
49 | "squareness",
50 | "te_peaking_cva_rt",
51 | "upper_gap",
52 | "wmhd",
53 | "zcur",
54 | "zcur_normalized",
55 | ]
56 | test_columns = [
57 | "aminor",
58 | "beta_n",
59 | "beta_p",
60 | "beta_p_rt",
61 | "dbetap_dt",
62 | "delta",
63 | "dip_dt",
64 | "dipprog_dt",
65 | "dipprog_dt_rt",
66 | "dli_dt",
67 | "dn_dt",
68 | "dwmhd_dt",
69 | "greenwald_fraction",
70 | "greenwald_fraction_rt",
71 | "h98",
72 | "h_alpha",
73 | "ip",
74 | "ip_error",
75 | "ip_error_rt",
76 | "ip_rt",
77 | "kappa",
78 | "kappa_area",
79 | "li",
80 | "li_rt",
81 | "lower_gap",
82 | "n1rms_normalized",
83 | "n_e",
84 | "n_e_rt",
85 | "n_equal_1_mode",
86 | "n_equal_1_normalized",
87 | "ne_peaking_cva_rt",
88 | "p_ech",
89 | "p_nbi",
90 | "p_ohm",
91 | "p_rad",
92 | "power_supply_railed",
93 | "prad_peaking_cva_rt",
94 | "prad_peaking_xdiv_rt",
95 | "q0",
96 | "q95",
97 | "q95_rt",
98 | "qstar",
99 | "radiated_fraction",
100 | "shot",
101 | "squareness",
102 | "te_peaking_cva_rt",
103 | "time",
104 | "time_until_disrupt",
105 | "upper_gap",
106 | "v_loop",
107 | "wmhd",
108 | "wmhd_rt",
109 | "zcur",
110 | "zcur_normalized",
111 | ]
112 |
113 | [d3d.tests.shots]
114 | disrup1_fast = 161228
115 | disrup2_full = 161237
116 | no_disrup1_full = 166177
117 | no_disrup2_full = 166253
118 |
119 | [d3d.time]
120 | disruption_time_start = 0.1
121 | disruption_time_step = 0.025
122 | dt_before_disruption = 0.002
123 | duration_before_disruption = 0.10
124 | end_of_current_threshold = 100e3
125 | minimum_duration = 0.1
126 | minimum_ip = 400e3
127 |
--------------------------------------------------------------------------------
/docs/examples/custom_physics_method.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """Used in the documentation for the physics methods."""
4 |
5 | import numpy as np
6 |
7 | from disruption_py.core.physics_method.decorator import physics_method
8 | from disruption_py.core.physics_method.params import PhysicsMethodParams
9 | from disruption_py.core.utils.math import interp1
10 | from disruption_py.machine.tokamak import Tokamak
11 | from disruption_py.settings.retrieval_settings import RetrievalSettings
12 | from disruption_py.workflow import get_shots_data
13 |
14 |
@physics_method(columns=["upper_gap", "lower_gap"], tokamak=Tokamak.D3D)
def decorated_physics_method(params: PhysicsMethodParams) -> dict:
    """
    Example of a parametrized physics method.

    Every parametrized method passed to `get_shots_data` is invoked once per
    retrieved shot. Decorated methods may call other decorated methods, but
    execution order is not guaranteed: calls are reordered to minimize
    resource usage based on the `physics_method` decorator.

    Parameters
    ----------
    params : PhysicsMethodParams
        Parameters supplied by disruption_py that give the method access to
        the shot data in MDSplus.

    Returns
    -------
    dict
        Mapping with one key-value pair per returned parameter. Each value
        should have the same length as the timebase (`params.times`).
    """
    # Placeholder body: a real method would retrieve data and return a dict.
    _ = params
37 |
38 |
# # Parameter cached method example
40 | # # --8<-- [start:kappa_area_request_example]
41 |
42 |
# pylint: disable=duplicate-code
@physics_method(columns=["custom_kappa_area"], tokamak=Tokamak.CMOD)
def get_custom_kappa_area(params: PhysicsMethodParams):
    """
    Compute a custom kappa_area signal, area / (pi * aminor^2), interpolated
    onto the requested timebase.

    Parameters
    ----------
    params : PhysicsMethodParams
        Per-shot parameters, including the MDSplus connection (`mds_conn`)
        and the requested timebase (`times`).

    Returns
    -------
    dict
        {"custom_kappa_area": values interpolated onto `params.times`}.
    """
    # EFIT minor radius, cross-sectional area, and their native timebase.
    aminor = params.mds_conn.get_data(r"\efit_aeqdsk:aminor", tree_name="_efit_tree")
    area = params.mds_conn.get_data(r"\efit_aeqdsk:area", tree_name="_efit_tree")
    times = params.mds_conn.get_data(r"\efit_aeqdsk:time", tree_name="_efit_tree")

    # Ensure aminor and area are not 0 or less than 0
    aminor[aminor <= 0] = 0.001
    area[area <= 0] = 3.14 * 0.001**2
    return {
        "custom_kappa_area": interp1(times, area / (np.pi * aminor**2), params.times)
    }
56 |
57 |
58 | # --8<-- [end:kappa_area_request_example]
59 |
# Run only the custom method defined above for a single C-Mod shot.
retrieval_settings = RetrievalSettings(
    custom_physics_methods=[get_custom_kappa_area],
)

# Retrieve the data; performs MDSplus I/O for the listed shot.
shot_data = get_shots_data(
    tokamak="cmod",
    shotlist_setting=[1150805012],
    retrieval_settings=retrieval_settings,
)
69 |
--------------------------------------------------------------------------------
/docs/usage/settings/output_setting.md:
--------------------------------------------------------------------------------
1 | ## Overview { .doc .doc-heading }
2 | A module for handling the output setting passed in the [`get_shots_data`][disruption_py.workflow.get_shots_data]
3 | method. The output setting is used to handle the output of data from DisruptionPy as it is retrieved. This may include collecting all the data from a request and returning it as a list or streaming outputted data to a file as it is retrieved.
4 |
5 | This module defines the abstract class [`OutputSetting`][disruption_py.settings.output_setting.OutputSetting] that can have subclasses passed as the
6 | `output_setting` argument to the [`get_shots_data`][disruption_py.workflow.get_shots_data] method.
7 | It also provides built-in classes and mappings to easily set the output type for common use cases.
8 |
9 | ### Usage { .doc .doc-heading }
10 | Currently, these are the options that can be passed as the `output_setting` argument to [`get_shots_data`][disruption_py.workflow.get_shots_data]:
11 |
12 | - An instance of a subclass of [`OutputSetting`][disruption_py.settings.output_setting.OutputSetting]
13 | - A string identifier in the `_output_setting_mappings` dictionary:
14 | ```python
15 | --8<--
16 | disruption_py/settings/output_setting.py:output_setting_dict
17 | --8<--
18 | ```
19 | - A file path as a string with its suffix mapped to an [`OutputSetting`][disruption_py.settings.output_setting.OutputSetting] type in the `_file_suffix_to_output_setting` dictionary:
20 | ```python
21 | --8<--
22 | disruption_py/settings/output_setting.py:file_suffix_to_output_setting_dict
23 | --8<--
24 | ```
25 | - A Python list of any other output type request option that can be passed as the [`OutputSetting`][disruption_py.settings.output_setting.OutputSetting] argument to [`get_shots_data`][disruption_py.workflow.get_shots_data] (all options listed previously). See [`OutputSettingList`][disruption_py.settings.output_setting.OutputSettingList] for more details.
26 |
27 | ## Built-in Implementations { .doc .doc-heading }
28 | ::: disruption_py.settings.output_setting
29 | options:
30 | show_root_heading: false
31 | show_root_toc_entry: false
32 | show_root_members_full_path: true
33 | filters:
34 | - "!^OutputSetting$"
35 | - "!^CompleteOutputSettingParams$"
36 |
37 | ## Custom Implementations { .doc .doc-heading }
Custom implementations of output settings must inherit from the `OutputSetting` abstract class, implementing the abstract methods.
39 |
40 | ::: disruption_py.settings.output_setting
41 | handler: python
42 | options:
43 | show_root_heading: false
44 | show_root_toc_entry: false
45 | show_root_members_full_path: true
46 | members:
47 | - OutputSetting
48 | - OutputSettingParams
49 | - CompleteOutputSettingParams
50 |
51 |
--------------------------------------------------------------------------------
/docs/usage/physics_methods/disruption_parameters_reference.md:
--------------------------------------------------------------------------------
1 |
2 | ## Disruption Parameter Descriptions
3 |
4 | | Parameter | Description | Units | Validity Range |
5 | |---|---|---|---|
6 | | Greenwald_fraction | Greenwald density fraction = n_e/n_G, where n_e is the line-averaged density and n_G is Greenwald density limit | - | [0, 1.5] |
7 | | Te_width | Half width half max of electron temperature profile from Thomson scattering | m | [0.04, 0.5] |
8 | | Wmhd | Total magnetic energy stored in the plasma | J | [0, 2e5] |
9 | | beta_p | Plasma poloidal beta, ratio between plasma pressure and magnetic pressure | - | [0, 1.1] |
10 | | beta_n | Normalized beta, ratio between plasma kinetic energy and magnetic energy | - | [0, 2] |
11 | | dipprog_dt | Time derivative of the programmed plasma current | A/s | - |
12 | | intentional_disruption | Whether a disruption was unintentional (0), intentional (1), or non-disrupted (NaN) | - | [0,1,NaN] |
13 | | ip | Plasma current | A | - |
14 | | ip_error | Error on the plasma current (ip-ipprog) | A | - |
15 | | kappa | Plasma elongation | - | [0.8, 2] |
16 | | li | Plasma normalized internal inductance | - | [0.2, 4.5] |
17 | | lower_gap | Lower gap | m | [0.025, 0.3] |
18 | | n_e | Line-averaged electron density of the plasma core | m^-3 | - |
19 | | n_equal_1_mode | n=1 component of the perturbed magnetic field | T | - |
20 | | n_over_ncrit | Vertical stability parameter | - | [-0.5, 2] |
21 | | p_icrf | Ion cyclotron power | W | [0, 6e6] |
22 | | p_lh | Lower hybrid power | W | [0, 1e6] |
23 | | p_oh | Ohmic power | W | [0, 20e6] |
24 | | p_rad | Radiated power from the plasma | W | [0, 20e6] |
25 | | q0 | Safety factor at the core plasma | - | [0, 10] |
26 | | q95 | Safety factor at 95% of poloidal flux surface | - | [0, 20] |
27 | | qstar | Cylindrical safety factor | - | [0, 30] |
| radiated_fraction | Radiated power from the plasma divided by the total injected power from the beams | - | [0,2-3] |
29 | | shot | Discharge identifier, replicated per each time slice | - | - |
30 | | ssep | Distance on midplane between 1st and 2nd separatrices | m | - |
31 | | time | Time during the discharge | s | - |
32 | | time_until_disrupt | Elapsing time before the disruption event. Target variable | s | NaN or numeric |
33 | | upper_gap | Upper gap | m | [0, 0.21] |
34 | | v_loop | Edge loop voltage; time derivative of a weighted average of flux loops obtained from MFLUXloop voltage | V | [-7, 26] |
35 | | z_error | Difference between the actual position of the current centroid and the requested one (Z_prog) | m | - |
| zcur | Actual vertical position of the current centroid, z_error + Z_prog | m | - |
37 | | Mirnov | Fluctuation amplitude of one magnetic probe. Measurement of MHD activity and plasma instability | T/s | [0, 50] |
38 |
--------------------------------------------------------------------------------
/disruption_py/core/physics_method/caching.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | This module provides decorators and utility functions for caching the results of
5 | expensive method calls in physics calculations.
6 | """
7 |
8 | import functools
9 | import threading
10 | from typing import Callable
11 |
12 | import numpy as np
13 |
14 | from disruption_py.core.physics_method.params import PhysicsMethodParams
15 |
16 |
def cache_method(method: Callable) -> Callable:
    """
    Decorate a function as a cached method and set up its cache.

    Cached methods run expensive operations on shot data and may be reused.
    Results are stored in the params object's `cached_results` mapping so
    each computation happens only once per shot for a given timebase.

    Parameters
    ----------
    method : Callable
        The method to be cached.

    Returns
    -------
    Callable
        The wrapped method with caching functionality.
    """

    @functools.wraps(method)
    def wrapper(*args, **kwargs):
        # The PhysicsMethodParams object arrives either as the "params"
        # keyword or as the last positional argument.
        if "params" in kwargs:
            physics_params = kwargs["params"]
        else:
            physics_params = args[-1]

        extra_kwargs = {key: value for key, value in kwargs.items() if key != "params"}
        cache_key = get_method_cache_key(method, physics_params.times, extra_kwargs)

        cache = physics_params.cached_results
        if cache_key not in cache:
            cache[cache_key] = method(*args, **kwargs)
        return cache[cache_key]

    # Preserve descriptor behavior when decorating static/class methods.
    for descriptor_type in (staticmethod, classmethod):
        if isinstance(method, descriptor_type):
            return descriptor_type(wrapper)
    return wrapper
59 |
60 |
def get_method_cache_key(
    method: Callable, times: np.ndarray, other_params: dict = None
):
    """
    Generate the cache key for one evaluation of a cached method.

    Parameters
    ----------
    method : Callable
        The method for which the cache key is being generated.
    times : np.ndarray
        Timebase array; only its first/last values and length enter the key.
    other_params : dict, optional
        Additional parameters that may affect the result. Default is None.

    Returns
    -------
    tuple
        A hashable tuple uniquely identifying the evaluation.
    """
    # Keyed per thread so concurrent workers never share cache entries.
    thread_id = threading.get_ident()
    frozen_params = frozenset((other_params or {}).items())
    return (thread_id, method, times[0], times[-1], len(times), frozen_params)
91 |
--------------------------------------------------------------------------------
/disruption_py/data/cmod_vde.csv:
--------------------------------------------------------------------------------
1 | shot,onset_time,notes
2 | 1120105021,1.455,
3 | 1120105027,1.515,
4 | 1120105029,1.335,
5 | 1120203005,1.48,
6 | 1120215005,1.125,
7 | 1120215009,1.475,
8 | 1120215012,1.47,
9 | 1120215018,1.06,
10 | 1120217021,1.42,
11 | 1120217025,1.425,
12 | 1120222011,1.44,long one that swings down before coming up
13 | 1120222013,0.925,
14 | 1120222014,1.48,another longer one that swings
15 | 1120503017,1.34,another longer one that swings
16 | 1120514008,0.44,
17 | 1120523033,1.34,
18 | 1120601015,0.64,"looks like VC was deliberately shot off at 640ms, but logbook doesn't mention anything"
19 | 1120621011,0.75,
20 | 1120621012,0.86,long swing
21 | 1120703011,1.4,big swing
22 | 1120710019,0.766,
23 | 1120710027,0.917,
24 | 1120713007,1.24,"looks like VC was deliberately shot off at 640ms, but logbook doesn't mention anything"
25 | 1120727026,0.98,"very long, with low fs during swing"
26 | 1120731005,0.94,
27 | 1120801033,1.12,small swing
28 | 1120809024,1.136,"doesn't look deliberate, but onset is very clear"
29 | 1120824017,1.06,"doesn't look deliberate, but onset is very clear"
30 | 1120831016,1.425,
31 | 1120831017,1.4,
32 | 1120904011,0.84,
33 | 1120905018,1.159,very clear onset
34 | 1120907004,1.223,very clear onset
35 | 1120914018,1.24,
36 | 1120917011,1.4,
37 | 1121002011,1.24,
38 | 1140205010,1.018,
39 | 1140226017,1.045,
40 | 1140327019,0.66,
41 | 1140328013,1.354,
42 | 1140328017,0.56,
43 | 1140328025,1.22,
44 | 1140402009,1.396,
45 | 1140411019,1.44,
46 | 1140416023,1.18,
47 | 1140515015,1.25,very clear onset
48 | 1140515017,1.395,
49 | 1140515023,1.344,
50 | 1140515024,1.24,
51 | 1140521001,1.22,
52 | 1140523015,0.834,
53 | 1140605001,1.38,
54 | 1140610006,0.76,
55 | 1140610007,0.76,
56 | 1140610016,0.76,
57 | 1140611001,1.078,
58 | 1140618005,0.768,
59 | 1140618007,0.76,
60 | 1140702012,1.38,
61 | 1140723001,0.46,
62 | 1140724022,1.4,
63 | 1140731030,1.26,
64 | 1140805004,1.477,
65 | 1140805011,1.372,
66 | 1140805026,0.82,
67 | 1140805032,1.406,very clear onset
68 | 1140808003,0.956,
69 | 1140808008,1.202,
70 | 1140808014,1.095,
71 | 1140808030,1.2,started assigning end at 0 instead of peak
72 | 1140814026,1.42,
73 | 1140819017,1.2,
74 | 1140819023,1.32,
75 | 1140826009,0.77,
76 | 1140826011,0.773,
77 | 1140826026,1,
78 | 1140827029,0.736,
79 | 1150506022,0.62,very long with multiple swings leading up to final crash
80 | 1150610006,1.42,very clear onset
81 | 1150610007,1.36,
82 | 1150610008,1.36,honestly kinda hard to nail down start
83 | 1150610009,1.408,
84 | 1150610010,1.412,
85 | 1150610011,1.436,
86 | 1150616004,1.437,very clear onset
87 | 1150616005,1.357,
88 | 1150616007,1.35,
89 | 1150623007,0.46,
90 | 1150625009,0.42,
91 | 1150630019,1.295,
92 | 1150709032,1.375,
93 | 1150709033,1.463,
94 | 1150714002,0.75,
95 | 1150714007,0.578,long swing
96 | 1150715008,0.8,
97 | 1150715015,0.46,long swing
98 | 1150715017,0.462,long swing
99 | 1150717010,0.98,very clear onset
100 | 1150721019,0.6,very clear onset
101 |
--------------------------------------------------------------------------------
/disruption_py/core/physics_method/decorator.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | This module provides a decorator to signify methods that calculate physics quantities.
5 | """
6 |
7 | import time
8 | from functools import wraps
9 | from typing import Callable, List, Union
10 |
11 | from disruption_py.core.physics_method.caching import cache_method
12 | from disruption_py.core.physics_method.metadata import MethodMetadata
13 | from disruption_py.machine.tokamak import Tokamak
14 |
15 |
def physics_method(
    cache: bool = True,
    tokamak: Union[Tokamak, List[Tokamak]] = None,
    columns: Union[List[str], Callable] = None,
) -> Callable:
    """
    Mark a method as a physics method runnable by DisruptionPy.

    A decorated method computes disruption parameters and returns a Dataset.
    It must accept the single argument `params` of type `PhysicsMethodParams`.
    The method runs when selected via the `run_methods` or `run_columns`
    attributes of `RetrievalSettings`, provided it appears either in the
    `custom_physics_methods` argument of the `retrieval_settings` or in the
    built-in method list; its result is forwarded to the `output_setting`.

    A common pattern for parameterized methods is to first fetch data from
    MDSplus through `mds_conn` and then derive the returned quantities.

    Parameters
    ----------
    cache : bool, optional
        Whether to cache the result of the method, by default True.
    tokamak : Union['Tokamak', List['Tokamak']], optional
        Tokamak(s) this method applies to; by default None, meaning the
        method may run for any tokamak.
    columns : Union[List[str], Callable], optional
        Names of the columns in the DataFrame returned by the method, or a
        callable yielding them at runtime. Defaults to an empty list,
        implying no columns are returned.

    Returns
    -------
    Callable
        A decorated method with caching and metadata attributes.
    """

    def outer_wrapper(method: Callable) -> Callable:
        # Optionally wrap with the per-shot result cache.
        target = cache_method(method) if cache else method

        # Log the elapsed wall-clock time of every invocation.
        @wraps(target)
        def timed_wrapper(params, *args, **kwargs):
            started = time.time()
            result = target(params, *args, **kwargs)
            params.logger.verbose(
                "{t:.3f}s : {name}",
                name=method.__name__,
                t=time.time() - started,
            )
            return result

        # Attach metadata used to select which methods to run and for which
        # tokamaks/columns they are valid.
        timed_wrapper.method_metadata = MethodMetadata(
            name=method.__name__,
            cache=cache,
            tokamaks=tokamak,
            columns=columns,
        )
        return timed_wrapper

    return outer_wrapper
83 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | build-backend = "poetry.core.masonry.api"
3 | requires = ["poetry-core>=2.0"]
4 |
5 | [dependency-groups]
6 | dev = [
7 | "black>=25.1.0",
8 | "mdsthin>=1.5.0",
9 | "pylint>=3.3.0",
10 | "pytest>=8.3.0",
11 | "ruff>=0.9.0",
12 | "toml-sort>=0.24.0",
13 | "yamllint>=1.36.0",
14 | ]
15 | docs = [
16 | "black>=25.1.0",
17 | "mkdocs>=1.6.0",
18 | "mkdocs-literate-nav>=0.6.0",
19 | "mkdocs-material>=9.6.0",
20 | "mkdocstrings[python]>=0.29.0",
21 | ]
22 | lab = [
23 | "ipykernel>=6.29.0",
24 | "ipympl>=0.9.0",
25 | "jupyterlab>=4.3.0",
26 | "matplotlib>=3.10.0",
27 | ]
28 |
29 | [project]
30 | authors = [
31 | {name = "Gregorio L. Trevisan", email = "gtrevisan@psfc.mit.edu"},
32 | {name = "Cristina Rea", email = "crea@psfc.mit.edu"},
33 | {name = "MIT PSFC Disruption Studies Group", email = "disruption-py@lists.psfc.mit.edu"},
34 | ]
35 | classifiers = [
36 | "Development Status :: 4 - Beta",
37 | "Environment :: Console",
38 | "Intended Audience :: Science/Research",
39 | "Natural Language :: English",
40 | "Operating System :: POSIX :: Linux",
41 | "Programming Language :: Python :: 3",
42 | "Topic :: Scientific/Engineering",
43 | "Topic :: Scientific/Engineering :: Physics",
44 | "Topic :: Scientific/Engineering :: Artificial Intelligence",
45 | "Topic :: Software Development :: Libraries :: Python Modules",
46 | "Topic :: Software Development :: Version Control :: Git",
47 | ]
48 | dependencies = [
49 | "dynaconf>=3.2.0",
50 | "loguru>=0.7.0",
51 | "netcdf4>=1.7.0,<1.7.3",
52 | "numpy>=1.26.0,<2.0.0",
53 | "pandas>=2.2.0,<2.3.3",
54 | "pyodbc>=5.2.0",
55 | "scipy>=1.15.0",
56 | "SQLAlchemy>=2.0.0",
57 | "tqdm>=4.67.0",
58 | "xarray>=2025.1.0",
59 | ]
60 | description = "An open-source physics-based Scientific Framework for Disruption Analysis of Fusion Plasmas for AI/ML applications"
61 | keywords = [
62 | "plasma physics",
63 | "nuclear fusion",
64 | "tokamak",
65 | "disruptions",
66 | ]
67 | license = "MIT"
68 | maintainers = [
69 | {name = "Gregorio L. Trevisan", email = "gtrevisan@psfc.mit.edu"},
70 | ]
71 | name = "disruption-py"
72 | readme = "README.md"
73 | requires-python = ">=3.11,<3.14"
74 | version = "0.12.0"
75 |
76 | [project.scripts]
77 | disruption-py = "disruption_py.workflow:cli"
78 |
79 | [project.urls]
80 | documentation = "https://mit-psfc.github.io/disruption-py/"
81 | homepage = "https://disruptions.mit.edu/"
82 | repository = "https://github.com/MIT-PSFC/disruption-py/"
83 |
84 | [tool.poetry.group.dev]
85 | optional = true
86 |
87 | [tool.poetry.group.docs]
88 | optional = true
89 |
90 | [tool.poetry.group.lab]
91 | optional = true
92 |
93 | [tool.pylint]
94 | disable = [
95 | "fixme",
96 | "too-few-public-methods",
97 | "too-many-arguments",
98 | "too-many-branches",
99 | "too-many-instance-attributes",
100 | "too-many-lines",
101 | "too-many-locals",
102 | "too-many-positional-arguments",
103 | "too-many-statements",
104 | ]
105 | extension-pkg-allow-list = ["pyodbc"]
106 | ignored-modules = ["MDSplus"]
107 | min-similarity-lines = 5
108 |
109 | [tool.tomlsort]
110 | ignore_case = true
111 | in_place = true
112 | sort_table_keys = true
113 | trailing_comma_inline_array = true
114 |
115 | [tool.uv]
116 | default-groups = []
117 |
--------------------------------------------------------------------------------
/disruption_py/settings/retrieval_settings.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | This module defines the RetrievalSettings class, which is used to configure
5 | settings for retrieving data for a single shot.
6 | """
7 |
8 | from dataclasses import dataclass, field
9 | from typing import List
10 |
11 | from disruption_py.settings.nickname_setting import (
12 | NicknameSetting,
13 | resolve_nickname_setting,
14 | )
15 | from disruption_py.settings.time_setting import TimeSetting, resolve_time_setting
16 |
17 |
@dataclass
class RetrievalSettings:
    """
    Configuration controlling how data is retrieved for a single shot.

    Attributes
    ----------
    efit_nickname_setting : NicknameSetting, optional
        Nickname setting used to pick the efit tree (default "disruption").
    run_methods : list of str, optional
        Physics methods to run (default None). If both this and run_columns
        are None, every method is run. Named methods are executed while
        retrieving MDSplus data for the shot; they must carry the
        physics_method decorator and be supplied either through the
        `custom_physics_methods` argument or the built-in method holders.
    run_columns : list of str, optional
        Columns to retrieve (default None). If both this and run_methods are
        None, every method is run. Otherwise every decorated method producing
        one of these columns is run, and all columns it returns are kept; set
        only_requested_columns to True in the retrieval_settings to restrict
        the output to the requested columns only.
    only_requested_columns : bool, optional
        Whether to keep only the requested columns in the result
        (default False).
    custom_physics_methods : list, optional
        Custom physics methods (default empty list). These are collected and
        run when retrieving MDSplus data if selected through run_methods or
        run_columns.
    time_setting : TimeSetting, optional
        Timebase for the shot (default "disruption_warning"); retrieved data
        is interpolated onto it. Any `TimeSettingType` that resolves to a
        TimeSetting is accepted. See TimeSetting for details.
    """

    # Shot creation settings
    efit_nickname_setting: NicknameSetting = "disruption"

    # Shot run settings
    run_methods: List[str] | None = None
    run_columns: List[str] | None = None
    only_requested_columns: bool = False
    custom_physics_methods: list = field(default_factory=list)

    # Timebase setting
    time_setting: TimeSetting = "disruption_warning"

    def __post_init__(self):
        """Resolve preset values as soon as the dataclass is constructed."""
        self.resolve()

    def resolve(self):
        """
        Resolve preset values into concrete setting objects.

        Strings, lists, and dictionaries passed for the time and nickname
        settings are converted into specific request types or enums, and
        requested column names are normalized to lowercase.
        """
        self.time_setting = resolve_time_setting(self.time_setting)
        self.efit_nickname_setting = resolve_nickname_setting(
            self.efit_nickname_setting
        )

        if self.run_columns is not None:
            self.run_columns = [column.lower() for column in self.run_columns]
82 |
--------------------------------------------------------------------------------
/disruption_py/data/README.md:
--------------------------------------------------------------------------------
1 |
2 | # C-MOD shotlists
3 |
4 | > [!TIP]
5 | > Shot numbers on Alcator C-Mod correspond to the date and order in which the shot was taken, with the following the pattern: `[1]YYMMDDNNN`, where `YY` is the last 2 digits of the year (`99` for 1999, `100` for 2000, and so on), `MM` is the month, `DD` is the day, and `NNN` is the N-th shot taken that day.
6 |
7 | ## Vertical Displacement Events (VDEs)
8 |
9 | info|value
10 | -|-
11 | _file_ | `cmod_vde.csv`
12 | _machine_ | **Alcator C-MOD**
13 | _total shots_ | **99**
14 | _years_ | **2012-2016**
15 | _maintainer_ | [Alex Saperstein](https://github.com/alexsaperstein)
16 |
17 | This is a list of disruptive shots with Vertical Displacement Events (VDEs) on Alcator C-Mod.
18 |
This list does not discriminate between hot and cold VDEs.
Here, 'hot' refers to the VDEs preceding the Thermal Quench (TQ), while 'cold' refers to the VDEs following it.
21 |
22 | ### Details
23 |
24 | Column description:
25 | - `shot` (integer): shot numbers,
26 | - `onset_time` (float, seconds): manually labeled onset times of the VDEs,
27 | - `notes` (string): any additional notes about the shot.
28 |
29 | All of these shots have been manually vetted by @AlexSaperstein for the presence of VDEs and their onset times.
30 | Onset times were chosen based on significant deviations (> ~2 cm) in the `z_error` feature.
31 |
32 | > [!NOTE]
33 | > It should be noted that onset times were chosen using a version of the disruption-py dataset that had poor time-resolution more than 20ms before the disruption.
34 | > As a result, onset times which precede the disruption by more than 20ms have an error of ~20ms associated with them.
35 |
36 | ## "Unidentified Flying Object" (UFO) disruptions
37 |
38 | info|value
39 | -|-
40 | _file_ | `cmod_ufo.csv`
41 | _machine_ | **Alcator C-MOD**
42 | _total shots_ | **122**
43 | _years_ | **2012-2016**
44 | _maintainer_ | [Henry Wietfeldt](https://github.com/hwietfeldt)
45 |
46 | This is a list of disruptive shots with likely UFO disruptions on Alcator C-Mod.
47 |
48 | This list was generated from an automatically generated database of high-Z injection events on C-Mod discharges.
49 | High-Z injection events were labeled using a Mo +31 charge state signal measured from a VUV spectrometer.
50 | Increases in the Mo +31 charge state signal were labeled as injections if:
51 | - global radiated power increased by 1 MW around the time of injection and before the thermal quench,
52 | - soft x-ray radiation increased around the time of injection and before the thermal quench,
- the error in the vertical position was low at the time of injection.
54 |
55 | This list contains likely UFO disruptions, defined as shots with a labeled high-Z injection within 10 ms prior to the thermal quench onset.
56 | Only 1/6th of these shots were manually validated as containing a UFO disruption by @HWietfeldt.
57 | Whether a shot has been validated is indicated in the "vetted" column.
58 | Other shots may have labeling errors since they have not yet been manually validated.
59 |
60 | ### Details
61 |
62 | Column description:
63 | - `shot` (integer): shot numbers,
64 | - `injection_time` (float, seconds): time of the injection, based on when the Mo +31 charge state signal began to increase,
65 | - `duration` (float, seconds): time between when the Mo +31 charge state signal began to increase and stopped increasing,
66 | - `thermal_quench_time` (float, seconds): time of the thermal quench, determined from core Te measurements using an ECE diagnostic system,
67 | - `vetted` (bool): whether the shot has been manually validated as a UFO disruption, True (1) or False (0),
68 | - `notes` (string): additional notes.
69 |
--------------------------------------------------------------------------------
/tests/test_time_setting.py:
--------------------------------------------------------------------------------
1 | """
2 | Unit tests for the time_setting
3 |
4 | Implemented tests:
5 | - 'ip_efit': tests SharedTimeSetting, IpTimeSetting, and EfitTimeSetting
6 | - SignalTimeSetting
7 |
8 | Time settings that are tested elsewhere:
9 | - EfitTimeSetting: tested in test_quick.py
10 | - DisruptionTimeSetting: tested in test_against_cache.py
11 | """
12 |
13 | import os
14 |
15 | import numpy as np
16 | import pytest
17 |
18 | from disruption_py.machine.tokamak import Tokamak
19 | from disruption_py.settings import LogSettings, RetrievalSettings
20 | from disruption_py.settings.time_setting import SignalTimeSetting, TimeSetting
21 | from disruption_py.workflow import get_shots_data
22 |
23 |
def run_test_time_setting(
    tokamak: Tokamak,
    time_setting: TimeSetting,
    efit_tree: str,
    shot: int,
    t_start: float,
    t_stop: float,
    length: float,
    test_folder: str,
):
    """
    Retrieve data for a single shot with the given time setting, then verify
    the resulting time array against the expected start, stop, and length.
    """
    # HBT-EP has no kappa_area column; fall back to plasma current.
    run_column = "ip" if tokamak == Tokamak.HBTEP else "kappa_area"
    # Fetch the data with the requested time setting.
    settings = RetrievalSettings(
        efit_nickname_setting=efit_tree,
        run_columns=[run_column],
        time_setting=time_setting,
    )
    data = get_shots_data(
        tokamak=tokamak,
        shotlist_setting=[shot],
        retrieval_settings=settings,
        output_setting=os.path.join(test_folder, "output.nc"),
        log_settings=LogSettings(
            console_level="WARNING",
            file_path=os.path.join(test_folder, "output.log"),
        ),
    )
    time_array = data["time"].to_numpy()
    # Endpoints and number of points must match the expected targets.
    assert time_array[0] == pytest.approx(t_start, abs=1e-4)
    assert time_array[-1] == pytest.approx(t_stop, abs=1e-4)
    assert len(time_array) == length
    # The time base must not contain duplicated points.
    assert len(time_array) == len(np.unique(time_array))
63 |
64 |
def test_shared_time_setting(tokamak: Tokamak, test_folder_f: str):
    """
    Test SharedTimeSetting by using the 'ip_efit' shortcut.

    Parameters
    ----------
    tokamak : Tokamak
        The tokamak under test, provided by the fixture.
    test_folder_f : str
        Function-scoped output folder, provided by the fixture.
    """
    if tokamak is Tokamak.HBTEP:
        # pytest.skip raises, so nothing after it can execute; a stray
        # unreachable `assert False` used to follow it and was removed.
        pytest.skip("HBT-EP signals share the same time base")
    # efit tree, shot, t_start, t_stop, expected number of time points
    test_setup = {
        Tokamak.CMOD: ["analysis", 1150805012, 0.0601, 1.2799, 6100],
        Tokamak.D3D: ["efit01", 161228, 0.1, 5.04, 9881],
        Tokamak.EAST: ["efit_east", 55012, 0.301, 5.7, 5401],
    }
    run_test_time_setting(tokamak, "ip_efit", *test_setup[tokamak], test_folder_f)
78 |
79 |
def test_signal_time_setting(tokamak: Tokamak, test_folder_f: str):
    """
    Test SignalTimeSetting using a signal that is not Ip or a EFIT signal.
    """
    # Per-machine arguments, in run_test_time_setting order:
    # time setting, efit tree, shot, t_start, t_stop, length.
    setups = {
        Tokamak.CMOD: (
            SignalTimeSetting("spectroscopy", r"\twopi_diode"),
            "analysis",
            1150805012,
            -1.4997,
            3.9559,
            16384,
        ),
        Tokamak.D3D: (
            SignalTimeSetting("rf", r"\top.ech.total:echpwrc"),
            "efit01",
            161228,
            -0.05,
            10,
            201000,
        ),
        Tokamak.EAST: (
            SignalTimeSetting("pcs_east", r"\pcvloop"),
            "efit_east",
            55012,
            -5.5,
            9.199,
            14702,
        ),
        Tokamak.HBTEP: (
            SignalTimeSetting("hbtep2", r"\top.sensors.magnetic:pa1_s01p"),
            "disruption",
            102709,
            -0.001998,
            0.028726,
            15363,
        ),
    }
    run_test_time_setting(tokamak, *setups[tokamak], test_folder_f)
119 |
--------------------------------------------------------------------------------
/makefile:
--------------------------------------------------------------------------------

# parameters #

# directories scanned by the python linters
PYLINT_DIRS := disruption_py examples tests
# transient artifacts removed by the clean targets
DELETE_OBJS := __pycache__ .pytest_cache

# environment #

# GitHub Actions exports GITHUB_ACTIONS=true; default to 0 for local runs
GITHUB_ACTIONS ?= 0
ifeq ($(GITHUB_ACTIONS), true)
# in CI: formatters run in check-only mode, linters emit GitHub annotations
CHECK_ARG := --check
FORMAT_ARG := --output-format=github
endif

# git #

.PHONY: status fetch

# show working-tree status after pruning and fetching all remotes
status: fetch
	git status

fetch:
	git fetch -p -a

# clean #

.PHONY: clean-list clean-delete

# list cache artifacts and empty directories without deleting anything
clean-list:
	echo $(DELETE_OBJS) | xargs -n1 find -name
	find -type d -empty

# delete cache artifacts, then remove any directories left empty
clean-delete:
	echo $(DELETE_OBJS) | xargs -n1 find -name | xargs rm -rfv
	find -type d -empty -delete

# poetry #

.PHONY: install install-all uninstall reinstall lock update show release

install:
	poetry install --with dev

install-all:
	poetry install --with dev,docs,lab

# remove every poetry-managed virtualenv for this project
uninstall:
	poetry env list | cut -d' ' -f1 | xargs poetry env remove

reinstall: uninstall install

# refresh both lockfiles; uv is optional, hence the `|| true`
lock:
	poetry lock
	uv lock || true
	git status

update:
	poetry update
	uv lock --upgrade || true
	git status

show:
	poetry show --latest --why --top-level --with dev,lab,docs

# bump the minor version, refresh lockfiles, and commit the bump
release:
	poetry version minor
	make lock
	git commit -am 'bump version to v$(shell poetry version -s)'
	git show

# test #

.PHONY: quick test test-fast

quick:
	poetry run pytest -v tests/test_quick.py

test:
	poetry run pytest -v --durations=0 tests

# GITHUB_ACTIONS=true presumably makes slow tests skip -- see tests/conftest
test-fast:
	GITHUB_ACTIONS=true poetry run pytest -v tests

# lint #

.PHONY: lint check isort black ruff pylint pylint-only pylint-todos shellcheck yamllint toml-sort

lint: isort black ruff pylint shellcheck yamllint toml-sort

# run the full lint suite in CI (check-only) mode
check:
	make lint GITHUB_ACTIONS=true

# each linter prints its version in CI, then runs (check-only in CI)
black:
	@[ "$(GITHUB_ACTIONS)" != "true" ] || \
	poetry run black --version
	poetry run black $(CHECK_ARG) .

isort:
	@[ "$(GITHUB_ACTIONS)" != "true" ] || \
	poetry run isort --version
	poetry run isort $(CHECK_ARG) --profile black .

pylint:
	@[ "$(GITHUB_ACTIONS)" != "true" ] || \
	poetry run pylint --version
	find $(PYLINT_DIRS) -type f -name '*.py' -not -empty \
	| xargs poetry run pylint -v --jobs 4 $(FORMAT_ARG)

# run pylint with only the checks given via CODE, e.g. CODE=fixme
pylint-only:
	find $(PYLINT_DIRS) -type f -name '*.py' -not -empty \
	| xargs poetry run pylint -v --disable=all --enable=$(CODE)

pylint-todos:
	CODE=fixme make pylint-only

ruff:
	@[ "$(GITHUB_ACTIONS)" != "true" ] || \
	poetry run ruff --version
	find $(PYLINT_DIRS) -type f -name '*.py' -not -empty \
	| xargs poetry run ruff check $(FORMAT_ARG)

# check every tracked file that starts with a bash shebang
shellcheck:
	@[ "$(GITHUB_ACTIONS)" != "true" ] || \
	shellcheck --version
	find -type f -not -path '*/.git/*' -not -path '*/.venv/*' \
	| xargs grep -l '^#!/bin/bash' \
	| while read -r F; \
	do \
	echo "--> $$F"; \
	shellcheck "$$F"; \
	done

yamllint:
	@[ "$(GITHUB_ACTIONS)" != "true" ] || \
	poetry run yamllint --version
	find -type f -iname '*.y*ml' -not -empty -not -path '*/.venv/*' \
	| while read -r F; \
	do \
	echo "--> $$F"; \
	poetry run yamllint "$$F"; \
	done

# top-level toml files are sorted at the table level only; nested ones fully (--all)
toml-sort:
	@[ "$(GITHUB_ACTIONS)" != "true" ] || \
	poetry run toml-sort --version
	find -maxdepth 1 -type f -iname '*.toml' -not -empty -not -path '*/.venv/*'\
	| while read -r F; \
	do \
	echo "--> $$F"; \
	poetry run toml-sort $(CHECK_ARG) "$$F"; \
	done
	find -mindepth 2 -type f -iname '*.toml' -not -empty -not -path '*/.venv/*' \
	| while read -r F; \
	do \
	echo "--> $$F"; \
	poetry run toml-sort $(CHECK_ARG) --all "$$F"; \
	done
158 |
--------------------------------------------------------------------------------
/tests/test_output_setting.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Unit tests for ensuring data can be outputted in multiple formats including
5 | lists, dictionaries, DataFrames, csv, hdf5, and to an SQL table.
6 | """
7 |
8 | import os
9 | from typing import Dict
10 |
11 | import pandas as pd
12 | import pytest
13 | import xarray as xr
14 |
15 | from disruption_py.machine.tokamak import Tokamak
16 | from disruption_py.settings.log_settings import LogSettings
17 | from disruption_py.settings.output_setting import DataTreeOutputSetting
18 | from disruption_py.settings.retrieval_settings import RetrievalSettings
19 | from disruption_py.workflow import get_shots_data
20 |
21 |
@pytest.fixture(scope="module", name="fresh_data")
def fresh_data_fixture(shotlist, tokamak, test_folder_m) -> Dict:
    """
    Retrieve the same data through several output settings at once:
    a per-shot folder, a netCDF dataset, a csv dataframe, and a datatree.
    """
    # HBT-EP has no kappa_area column; fall back to plasma current.
    requested_column = "ip" if tokamak == Tokamak.HBTEP else "kappa_area"
    outputs = [
        os.path.join(test_folder_m, "output/"),
        os.path.join(test_folder_m, "dataset.nc"),
        os.path.join(test_folder_m, "dataframe.csv"),
        DataTreeOutputSetting(path=os.path.join(test_folder_m, "datatree.nc")),
    ]
    settings = RetrievalSettings(
        efit_nickname_setting="disruption",
        run_columns=[requested_column],
        only_requested_columns=True,
    )
    return get_shots_data(
        tokamak=tokamak,
        shotlist_setting=shotlist,
        retrieval_settings=settings,
        output_setting=outputs,
        log_settings=LogSettings(
            console_level="WARNING",
            file_path=os.path.join(test_folder_m, "output.log"),
        ),
        num_processes=2,
    )
52 |
53 |
def test_output_exists(fresh_data, test_folder_m):
    """
    Test creation of all output formats.
    """

    # unpack the in-memory results, one object per output setting
    dict_out, ds_out, df_out, dt_out = fresh_data

    # paths for the on-disk outputs
    dict_path = os.path.join(test_folder_m, "output/")
    ds_path = os.path.join(test_folder_m, "dataset.nc")
    dt_path = os.path.join(test_folder_m, "datatree.nc")
    df_path = os.path.join(test_folder_m, "dataframe.csv")

    # every output must have been written to disk
    assert os.path.exists(dict_path), "Could not find dict folder"
    assert os.path.exists(ds_path), "Could not find dataset file"
    assert os.path.exists(dt_path), "Could not find datatree file"
    assert os.path.exists(df_path), "Could not find dataframe file"

    # re-read the outputs from disk
    ds_disk = xr.open_dataset(ds_path)
    dt_disk = xr.open_datatree(dt_path)
    df_disk = pd.read_csv(df_path, index_col=0)
    # csv values are read back as float64; downcast before comparing
    # against the in-memory frame
    float64_cols = [c for c in df_disk if df_disk[c].dtype == "float64"]
    for c in float64_cols:
        df_disk[c] = df_disk[c].astype("float32")

    # every object must have the expected type
    assert isinstance(dict_out, dict), "Wrong type for dict output"
    assert isinstance(ds_out, xr.Dataset), "Wrong type for Dataset output"
    assert isinstance(ds_disk, xr.Dataset), "Wrong type for Dataset output"
    assert isinstance(dt_out, xr.DataTree), "Wrong type for DataTree output"
    assert isinstance(dt_disk, xr.DataTree), "Wrong type for DataTree output"
    assert isinstance(df_out, pd.DataFrame), "Wrong type for DataFrame output"
    assert isinstance(df_disk, pd.DataFrame), "Wrong type for DataFrame output"

    # on-disk data must round-trip identically
    xr.testing.assert_identical(ds_out, ds_disk)
    xr.testing.assert_identical(dt_out, dt_disk)
    pd.testing.assert_frame_equal(df_out, df_disk)

    # all in-memory formats must agree with each other
    xr.testing.assert_identical(ds_out, xr.concat(dict_out.values(), dim="idx"))
    xr.testing.assert_identical(
        ds_out,
        xr.concat([node.to_dataset() for node in dt_out.values()], dim="idx"),
    )
    pd.testing.assert_frame_equal(df_out, ds_out.to_dataframe()[df_out.columns])
102 |
--------------------------------------------------------------------------------
/disruption_py/core/physics_method/metadata.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Module for defining metadata classes for physics methods.
5 | """
6 |
7 | from dataclasses import dataclass, fields
8 | from typing import Callable, List, Union
9 |
10 | from disruption_py.core.physics_method.params import PhysicsMethodParams
11 | from disruption_py.machine.tokamak import Tokamak
12 |
13 |
@dataclass(frozen=True)
class MethodMetadata:
    """
    Holder for the arguments to the decorator.
    """

    # Name of the physics method.
    name: str

    # Whether the method's result should be cached -- presumably consumed by
    # the decorator/caching machinery; confirm in decorator.py/caching.py.
    cache: bool
    # Tokamak(s) the method applies to; may also be a callable that is
    # resolved at bind time (see ALLOWED_UNRESOLVED).
    tokamaks: Union[Tokamak, List[Tokamak]]
    # Columns the method produces; may also be a callable resolved at bind time.
    columns: Union[List[str], Callable]

    # Fields allowed to stay unresolved (callable) until the method is bound.
    # Deliberately left without a type annotation: annotating it would turn
    # it into a dataclass field instead of a class-level constant.
    ALLOWED_UNRESOLVED = [
        "columns",
        "tokamaks",
    ]

    def __post_init__(self):
        # Normalize a falsy `columns` (e.g. None) to an empty list; the
        # dataclass is frozen, so mutation goes through object.__setattr__.
        object.__setattr__(self, "columns", self.columns or [])
33 |
34 |
@dataclass(frozen=True)
class BoundMethodMetadata(MethodMetadata):
    """
    Metadata for a bound method, extending `MethodMetadata`.

    Attributes
    ----------
    bound_method : Callable
        The method that is bound to this metadata.
    """

    bound_method: Callable

    @classmethod
    def bind(
        cls,
        method_metadata: MethodMetadata,
        bound_method: Callable,
        physics_method_params: PhysicsMethodParams,
    ):
        """
        Bind a method to its metadata and resolve any callable parameters.

        Parameters
        ----------
        method_metadata : MethodMetadata
            Metadata instance containing the method's unresolved parameters.
        bound_method : Callable
            The callable method to be bound.
        physics_method_params : PhysicsMethodParams
            Parameters required for resolving the method.

        Returns
        -------
        BoundMethodMetadata
            A new instance of `BoundMethodMetadata` with resolved parameters.
        """
        new_method_metadata_params = {}
        # `__self__` is the instance a bound method is attached to; the
        # attribute is absent (so this yields None) for plain functions and
        # static methods. BUGFIX: this value used to be wrapped in a stray
        # 1-tuple, which made the `bind_to is None` branch below unreachable
        # and passed a tuple instead of the instance to the callable.
        bind_to = getattr(bound_method, "__self__", None)
        for field in fields(method_metadata):
            field_value = getattr(method_metadata, field.name)
            # Fields listed in ALLOWED_UNRESOLVED may be callables that can
            # only be resolved once the method is bound.
            if field.name in method_metadata.ALLOWED_UNRESOLVED and callable(
                field_value
            ):
                new_val = (
                    field_value(physics_method_params)
                    if bind_to is None
                    else field_value(bind_to, physics_method_params)
                )
                new_method_metadata_params[field.name] = new_val
            else:
                new_method_metadata_params[field.name] = field_value

        return cls(bound_method=bound_method, **new_method_metadata_params)
89 |
90 |
91 | # Utility methods for decorated methods
92 |
93 |
def is_physics_method(method: Callable) -> bool:
    """
    Check whether a method was decorated with the `physics_method` decorator.

    Parameters
    ----------
    method : Callable
        The method to inspect.

    Returns
    -------
    bool
        Whether the method carries a `method_metadata` attribute.
    """
    # EAFP equivalent of hasattr: present (even if None) means decorated.
    try:
        method.method_metadata
    except AttributeError:
        return False
    return True
109 |
110 |
def get_method_metadata(method: Callable, should_throw: bool = False) -> MethodMetadata:
    """
    Fetch the `MethodMetadata` attached to a decorated method.

    Parameters
    ----------
    method : Callable
        The method decorated with the `physics_method` decorator.
    should_throw : bool
        If True, raise a ValueError when the method was not decorated.

    Returns
    -------
    MethodMetadata
        The metadata object, or None if the method is undecorated and
        `should_throw` is False.
    """
    metadata = getattr(method, "method_metadata", None)
    if metadata is None and should_throw:
        raise ValueError(f"The method {method} was not decorated with physics_method")
    return metadata
131 |
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | ---
2 | site_name: DisruptionPy Documentation
3 | theme:
4 | name: material
5 | features:
6 | - announce.dismiss
7 | - content.action.edit
8 | - content.action.view
9 | - content.code.annotate
10 | - content.code.copy
11 | - content.tooltips
12 | - navigation.footer
13 | - navigation.indexes
14 | - navigation.sections
15 | - navigation.tabs
16 | - navigation.tabs.sticky
17 | - navigation.top
18 | - search.highlight
19 | - search.suggest
20 | - toc.follow
21 | palette:
22 | scheme: slate
23 | primary: custom
24 | accent: custom
25 | extra_css:
26 | - stylesheets/disruptions.css
27 | - https://unpkg.com/katex@0/dist/katex.min.css
28 | nav:
29 | - Overview:
30 | - Background: index.md
31 | - Installation: INSTALL.md
32 | - References: REFERENCES.md
33 | - Usage:
34 | - Entry points:
35 | - Workflow: usage/workflow_reference.md
36 | - Settings:
37 | - Settings Classes:
38 | - Log Settings: usage/settings/log_settings.md
39 | - Nickname Setting: usage/settings/nickname_setting.md
40 | - Output Setting: usage/settings/output_setting.md
41 | - Retrieval Settings: usage/settings/retrieval_settings.md
42 | - Shotlist Setting: usage/settings/shotlist_setting.md
43 | - Time Setting: usage/settings/time_setting.md
44 | - Calculated Parameters:
45 | - Physics Methods:
46 | - Overview: usage/physics_methods/physics_method_reference.md
47 | - C-Mod Built-in Methods: "usage/physics_methods/\
48 | cmod_built_in_method_reference.md"
49 | - D3D Built-in Methods: "usage/physics_methods/\
50 | d3d_built_in_method_reference.md"
51 | - EAST Built-in Methods: "usage/physics_methods/\
52 | east_built_in_method_reference.md"
53 | - HBT-EP Built-in Methods: "usage/physics_methods/\
54 | hbtep_built_in_method_reference.md"
55 | - Physics Method Decorators: "usage/physics_methods/\
56 | decorator_reference.md"
57 | - Parameter Reference: "usage/physics_methods/\
58 | disruption_parameters_reference.md"
59 | - Data I/O:
60 | - SQL Database: usage/sql_database.md
61 | - MDSplus Connection: usage/mds_connection_reference.md
62 | - GitHub: https://github.com/MIT-PSFC/disruption-py
63 | markdown_extensions:
64 | - attr_list
65 | - admonition
66 | - footnotes
67 | - pymdownx.arithmatex:
68 | generic: true
69 | - pymdownx.details
70 | - pymdownx.emoji:
71 | emoji_index: !!python/name:material.extensions.emoji.twemoji
72 | emoji_generator: !!python/name:material.extensions.emoji.to_svg
73 | - pymdownx.magiclink
74 | - pymdownx.snippets:
75 | base_path: ['']
76 | dedent_subsections: true
77 | check_paths: true
78 | - pymdownx.superfences
79 | - pymdownx.tabbed:
80 | alternate_style: true
81 | slugify: !!python/object/apply:pymdownx.slugs.slugify
82 | kwds:
83 | case: lower
84 | - pymdownx.tasklist:
85 | custom_checkbox: true
86 | - pymdownx.tilde
87 | - toc:
88 | permalink: ¤
89 | plugins:
90 | - search
91 | - autorefs
92 | - mkdocstrings:
93 | default_handler: python
94 | handlers:
95 | python:
96 | options:
97 | # https://mkdocstrings.github.io/python/usage/configuration/
98 | docstring_style: numpy
99 | docstring_section_style: spacy
100 | show_source: true
101 | show_signature_annotations: true
102 | docstring_options:
103 | ignore_init_summary: true
104 | inherited_members: false
105 | merge_init_into_class: false
106 | separate_signature: true
107 | # members_order: source
108 | filters: []
109 |
110 | # Heading options
111 | heading_level: 3
112 | show_root_heading: true
113 | show_root_full_path: true
114 | show_root_members_full_path: false
115 | show_object_full_path: false
116 | extra_javascript:
117 | - javascripts/katex.js
118 | - https://unpkg.com/katex@0/dist/katex.min.js
119 | - https://unpkg.com/katex@0/dist/contrib/auto-render.min.js
120 |
--------------------------------------------------------------------------------
/disruption_py/core/utils/misc.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Module for utility functions related to class instantiation, data manipulation, and version control.
5 | """
6 |
7 | import os
8 | import subprocess
9 | import sys
10 | import time
11 | from functools import lru_cache
12 | from pathlib import Path
13 | from tempfile import mkdtemp
14 | from typing import Dict, List, Tuple, Type
15 |
16 | import numpy as np
17 | from loguru import logger
18 |
19 |
def without_duplicates(lst: List):
    """
    Return a copy of a list with duplicates removed, preserving order.

    Parameters
    ----------
    lst : List
        Input list; elements must be hashable.

    Returns
    -------
    List
        New list keeping only the first occurrence of each element.
    """
    # dict keys are unique and preserve insertion order, so they act as
    # an ordered set here.
    return list(dict.fromkeys(lst))
36 |
37 |
@lru_cache
def get_commit_hash() -> str:
    """
    Retrieve the current Git commit hash.

    Returns
    -------
    str
        The commit hash, or an empty string if it cannot be determined
        (not a git repository, or git is not installed).
    """
    try:
        commit_hash = (
            subprocess.check_output(["git", "rev-parse", "HEAD"])
            .decode("ascii")
            .strip()
        )
    except (subprocess.CalledProcessError, OSError):
        # CalledProcessError: git ran but failed (e.g. not a work tree);
        # OSError (incl. FileNotFoundError): the git executable is missing.
        # Previously only CalledProcessError was caught, so a missing git
        # binary crashed the caller.
        commit_hash = ""
    return commit_hash
57 |
58 |
@lru_cache
def get_temporary_folder() -> str:
    """
    Create and return a temporary folder.
    The result is cached to return the same path for different invocations.

    Returns
    -------
    str
        Resulting temporary folder.
    """

    # create temporary top folder, namespaced by user and date; fall back
    # to "unknown" when $USER is unset or empty (e.g. minimal CI containers),
    # which previously made os.path.join raise a TypeError
    top = os.path.join(
        "/tmp",
        os.getenv("USER") or "unknown",
        "disruption-py",
        # hide test-run folders behind a leading dot when under pytest
        ("." if "pytest" in sys.modules else "") + time.strftime("%Y-%m-%d"),
    )
    Path(top).mkdir(parents=True, exist_ok=True)

    # create a unique, time-stamped temporary sub folder
    return mkdtemp(dir=top, prefix=time.strftime("%H.%M.%S-"))
82 |
83 |
def shot_msg(message: str) -> str:
    """
    Prepend a shot-number placeholder to a log message.

    Parameters
    ----------
    message : str
        The message to modify.

    Returns
    -------
    str
        Message prefixed with the shot placeholder, to be formatted downstream.
    """
    prefix = "#{shot} | "
    return prefix + message
99 |
100 |
def shot_msg_patch(mylogger: Type[logger], shot: int):
    """
    Patch a logger so that every message is prefixed with the shot number.

    Parameters
    ----------
    mylogger:
        The loguru logger to patch.
    shot: int
        The shot id to prepend.
    """

    def _prepend_shot(record):
        # rewrite the record's message in place with the shot prefix
        record.update(message=shot_msg(record["message"]).format(shot=shot))

    return mylogger.patch(_prepend_shot)
115 |
116 |
def get_elapsed_time(elapsed: float) -> str:
    """
    Convert elapsed seconds into a human-readable string.

    Parameters
    ----------
    elapsed : float
        Elapsed number of seconds.

    Returns
    -------
    str
        Human-readable formatted message, e.g. "1h 2m 3s".
    """

    days, remainder = divmod(elapsed, 24 * 3600)
    hours, remainder = divmod(remainder, 3600)
    minutes, remainder = divmod(remainder, 60)
    seconds = int(remainder)
    millis = (remainder - seconds) * 1000
    parts = [
        f"{value:.0f}{unit}"
        for value, unit in ((days, "d"), (hours, "h"), (minutes, "m"), (seconds, "s"))
        if value > 0
    ]
    # sub-second durations are reported in milliseconds only
    return " ".join(parts) if parts else f"{millis:.0f}ms"
152 |
153 |
def to_tuple(
    data: Dict[str, np.ndarray], dim: str
) -> Dict[str, Tuple[str, np.ndarray]]:
    """
    Pair every value of a dictionary with a dimension label.

    Parameters
    ----------
    data : Dict[str, np.ndarray]
        Dictionary of array data.
    dim : str
        String to be added as first element of each tuple.

    Returns
    -------
    Dict[str, Tuple[str, np.ndarray]]
        Same keys, with each value replaced by ``(dim, value)``.
    """
    labeled = {}
    for key, value in data.items():
        labeled[key] = (dim, value)
    return labeled
172 |
--------------------------------------------------------------------------------
/.github/workflows/tests.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | name: Tests
4 |
5 | # yamllint disable-line rule:truthy
6 | on:
7 | push:
8 | branches:
9 | - main
10 | - dev
11 | pull_request:
12 | branches:
13 | - main
14 | - dev
15 |
16 | jobs:
17 |
18 |
19 | pytest:
20 | runs-on: ubuntu-22.04
21 | if: |
22 | github.event_name == 'push' ||
23 | github.event.pull_request.draft == false
24 | strategy:
25 | fail-fast: false
26 | matrix:
27 | tokamak:
28 | - C-MOD
29 | - DIII-D
30 | steps:
31 |
32 | - name: Checkout
33 | uses: actions/checkout@v5
34 |
35 | - name: Add MDSplus sources
36 | env:
37 | GPG: http://www.mdsplus.org/dist/mdsplus.gpg.key
38 | KEY: /usr/share/keyrings/mdsplus.asc
39 | REPO: http://www.mdsplus.org/dist/Ubuntu22/repo
40 | APT: /etc/apt/sources.list.d/mdsplus.list
41 | run: |
42 | curl -fsSL "$GPG" \
43 | | sudo tee "$KEY"
44 | echo "deb [arch=amd64 signed-by=$KEY] $REPO MDSplus alpha" \
45 | | sudo tee "$APT"
46 |
47 | - name: Update sources
48 | run: sudo apt-get update
49 |
50 | - name: Install prerequisites
51 | run: |
52 | sudo apt-get install msodbcsql17 # pre-installed
53 | sudo apt-get install tdsodbc unixodbc # freetds
54 | sudo apt-get install mdsplus-alpha-python # mdsplus
55 |
56 | - name: Configure MDSplus
57 | run: |
58 | {
59 | echo PYTHONPATH=/usr/local/mdsplus/python
60 | echo LD_LIBRARY_PATH=/usr/local/mdsplus/lib
61 | } \
62 | | tee -a "$GITHUB_ENV"
63 |
64 | - name: Setup SSH key
65 | run: |
66 | mkdir -p ~/.ssh
67 | echo "${{ secrets.SSH_TUNNEL }}" \
68 | | tee ~/.ssh/id_rsa \
69 | | sha256sum
70 | chmod 600 ~/.ssh/id_rsa
71 |
72 | - name: Setup C-MOD
73 | if: ${{ matrix.tokamak == 'C-MOD' }}
74 | run: |
75 | ssh "${{ secrets.CMOD_USER }}@${{ secrets.CMOD_HOST }}" \
76 | -p "${{ secrets.CMOD_PORT }}" \
77 | -L 8000:alcdata-archives:8000 \
78 | -L 1433:alcdb2:1433 \
79 | -o StrictHostKeyChecking=no \
80 | -N -v 2> ssh.log &
81 | echo "$!" \
82 | | tee ssh.pid
83 | echo "127.0.0.1 alcdata-archives alcdb2" \
84 | | sudo tee -a /etc/hosts
85 | echo "${{ secrets.CMOD_LOGIN }}" \
86 | | tee ~/logbook.sybase_login \
87 | | sha256sum
88 | echo DISPY_TOKAMAK=cmod \
89 | | tee -a "$GITHUB_ENV"
90 |
91 | - name: Setup DIII-D
92 | if: ${{ matrix.tokamak == 'DIII-D' }}
93 | env:
94 | TDS: /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so
95 | run: |
96 | ssh "${{ secrets.DIIID_USER }}@${{ secrets.DIIID_HOST }}" \
97 | -p "${{ secrets.DIIID_PORT }}" \
98 | -L 8000:atlas:8000 \
99 | -L 8001:d3drdb:8001 \
100 | -o StrictHostKeyChecking=no \
101 | -N -v 2> ssh.log &
102 | echo "$!" \
103 | | tee ssh.pid
104 | echo "127.0.0.1 atlas d3drdb" \
105 | | sudo tee -a /etc/hosts
106 | echo "${{ secrets.DIIID_LOGIN }}" \
107 | | tee ~/D3DRDB.sybase_login \
108 | | sha256sum
109 | echo -e "[FreeTDS]\nDescription = FreeTDS\nDriver = $TDS" \
110 | | sudo tee -a /etc/odbcinst.ini
111 | echo DISPY_TOKAMAK=d3d \
112 | | tee -a "$GITHUB_ENV"
113 |
114 | - name: Install poetry
115 | run: pipx install poetry
116 |
117 | - name: Setup Python
118 | uses: actions/setup-python@v6
119 | with:
120 | python-version: '3.12'
121 | cache: 'poetry'
122 |
123 | - name: Install requirements
124 | run: make install-all
125 |
126 | - name: Check tunnel
127 | run: |
128 | xargs -a ssh.pid ps -p || true
129 | if ! grep 'Authenticated.*publickey' ssh.log
130 | then
131 | curl -s ipinfo.io && echo
132 | cat -n ssh.log
133 | false
134 | fi
135 |
136 | - name: Test SQL
137 | run: poetry run python examples/sql.py
138 |
139 | - name: Test MDSplus
140 | run: poetry run python examples/mdsplus.py
141 |
142 | - name: Test EFIT
143 | run: poetry run python examples/efit.py
144 |
145 | - name: Run all tests
146 | run: make test
147 |
148 | - name: Close tunnel
149 | run: xargs -a ssh.pid kill -TERM
150 |
--------------------------------------------------------------------------------
/disruption_py/machine/cmod/efit.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Module for retrieving and processing EFIT parameters for CMOD.
5 | """
6 |
7 | import numpy as np
8 |
9 | from disruption_py.core.physics_method.decorator import physics_method
10 | from disruption_py.core.physics_method.params import PhysicsMethodParams
11 | from disruption_py.core.utils.math import interp1
12 | from disruption_py.inout.mds import mdsExceptions
13 | from disruption_py.machine.tokamak import Tokamak
14 |
15 |
class CmodEfitMethods:
    """
    Class for retrieving and processing EFIT parameters for CMOD.

    Attributes
    ----------
    efit_cols : dict
        A dictionary mapping parameter names to their corresponding EFIT data paths.
    efit_derivs : dict
        A dictionary mapping derivative parameter names to their corresponding base parameters.
    """

    # Output column name -> MDSplus expression in the EFIT tree.
    # Expressions ending in "/100" rescale the raw value, presumably
    # converting cm to m -- TODO confirm against the C-Mod tree docs.
    efit_cols = {
        "beta_p": r"\efit_aeqdsk:betap",
        "kappa": r"\efit_aeqdsk:eout",
        "li": r"\efit_aeqdsk:ali",
        "upper_gap": r"\efit_aeqdsk:otop/100",
        "lower_gap": r"\efit_aeqdsk:obott/100",
        "q0": r"\efit_aeqdsk:qqmagx",
        "qstar": r"\efit_aeqdsk:qsta",
        "q95": r"\efit_aeqdsk:qpsib",
        "v_loop_efit": r"\efit_aeqdsk:vloopt",
        "wmhd": r"\efit_aeqdsk:wplasm",
        "ssep": r"\efit_aeqdsk:ssep/100",
        "n_over_ncrit": r"-\efit_aeqdsk:xnnc",
        "tritop": r"\efit_aeqdsk:doutu",
        "tribot": r"\efit_aeqdsk:doutl",
        "a_minor": r"\efit_aeqdsk:aout/100",
        "rmagx": r"\efit_aeqdsk:rmagx/100",
        "chisq": r"\efit_aeqdsk:tsaisq",
    }

    # Derivative column -> base column it is differentiated from (np.gradient).
    efit_derivs = {"dbetap_dt": "beta_p", "dli_dt": "li", "dwmhd_dt": "wmhd"}

    @staticmethod
    @physics_method(
        columns=[
            *efit_cols.keys(),
            *efit_derivs.keys(),
        ],
        tokamak=Tokamak.CMOD,
    )
    def get_efit_parameters(params: PhysicsMethodParams):
        """
        Retrieve EFIT parameters for CMOD.

        Parameters
        ----------
        params : PhysicsMethodParams
            The parameters containing the MDS connection and shot information.

        Returns
        -------
        dict
            A dictionary containing the retrieved EFIT parameters,
            interpolated onto ``params.times`` when the time bases differ.
        """
        # EFIT time base; also the x-axis for the gradient columns below
        efit_time = params.mds_conn.get_data(
            r"\efit_aeqdsk:time", tree_name="_efit_tree"
        )  # [s]
        efit_data = {}

        # Get data from each of the columns in efit_cols one at a time
        for param, path in CmodEfitMethods.efit_cols.items():
            try:
                efit_data[param] = params.mds_conn.get_data(
                    path=path,
                    tree_name="_efit_tree",
                )
            except mdsExceptions.MdsException as e:
                # signal unavailable: log and fill the column with NaNs so
                # the output keeps a consistent shape
                params.logger.warning(repr(e))
                params.logger.opt(exception=True).debug(e)
                efit_data[param] = np.full(len(efit_time), np.nan)

        # first-order finite-difference time derivatives on the EFIT time base
        for deriv_param, param in CmodEfitMethods.efit_derivs.items():
            efit_data[deriv_param] = np.gradient(
                efit_data[param],
                efit_time,
                edge_order=1,
            )

        # interpolate onto the requested time base only when it differs
        if not np.array_equal(params.times, efit_time):
            for param in efit_data:
                efit_data[param] = interp1(efit_time, efit_data[param], params.times)

        return efit_data

    @staticmethod
    def efit_check(params: PhysicsMethodParams):
        """
        Check the validity of EFIT parameters for the given shot.
        # TODO: Get description from Jinxiang

        Parameters
        ----------
        params : PhysicsMethodParams
            The parameters containing the MDS connection and shot information.

        Returns
        -------
        tuple
            A tuple containing valid indices and corresponding times.
        """
        # TDI expressions evaluated server-side: _lf holds the EFIT error
        # flags, _l0 marks slices where all flags (excluding columns 20 and
        # 1) are clear, and the last expression keeps slices where the fit
        # also took more than 4 iterations (_n). NOTE(review): exact flag
        # semantics are not visible here -- confirm against the C-Mod
        # analysis tree documentation.
        values = [
            params.mds_conn.get(expr, tree_name="analysis")
            for expr in [
                r"_lf=\efit_aeqdsk:lflag",
                r"_l0=((sum(_lf,1) - _lf[*,20] - _lf[*,1])==0)",
                r"_n=\efit_fitout:nitera,(_l0 and (_n>4))",
            ]
        ]
        # indices where the combined validity condition is nonzero
        _n = values[2].data()
        valid_indices = np.nonzero(_n)
        (times,) = params.mds_conn.get_dims(r"\efit_aeqdsk:lflag", tree_name="analysis")
        return valid_indices, times[valid_indices]
130 |
--------------------------------------------------------------------------------
/tests/test_retrieval_settings.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Unit tests for the retrieval settings.
5 | """
6 |
7 | import os
8 |
9 | import pytest
10 | import xarray as xr
11 |
12 | from disruption_py.machine.tokamak import Tokamak
13 | from disruption_py.settings import RetrievalSettings
14 | from disruption_py.settings.log_settings import LogSettings
15 | from disruption_py.workflow import get_shots_data
16 | from tests.conftest import skip_on_fast_execution
17 |
18 |
@pytest.fixture(scope="module", name="data")
def data_fixture(tokamak, shotlist, test_folder_m) -> xr.Dataset:
    """
    Retrieve the full dataset once per module for reuse across tests.
    """
    log_settings = LogSettings(
        console_level="WARNING",
        file_path=os.path.join(test_folder_m, "output.log"),
    )
    return get_shots_data(
        tokamak=tokamak,
        shotlist_setting=shotlist,
        output_setting=os.path.join(test_folder_m, "output.nc"),
        log_settings=log_settings,
        num_processes=2,
    )
34 |
35 |
@skip_on_fast_execution
def test_only_requested_columns(tokamak, shotlist, test_folder_f):
    """
    Verify that `only_requested_columns` restricts the output dataset.

    `ip` is returned by `get_ip_parameters`, so sibling quantities such as
    `dip_dt` must be absent; likewise `q95` comes from efit, so no other
    efit quantity should appear.
    """
    if tokamak == Tokamak.HBTEP:
        # HBT-EP has no EFIT data
        requested = ["ip"]
    else:
        requested = ["ip", "q95"]
    settings = RetrievalSettings(
        run_columns=requested,
        only_requested_columns=True,
    )
    results = get_shots_data(
        tokamak=tokamak,
        shotlist_setting=shotlist,
        retrieval_settings=settings,
        output_setting=os.path.join(test_folder_f, "output.nc"),
        log_settings=LogSettings(
            console_level="WARNING",
            file_path=os.path.join(test_folder_f, "output.log"),
        ),
        num_processes=2,
    )
    assert set(requested) == set(results.data_vars)
63 |
64 |
@skip_on_fast_execution
@pytest.mark.parametrize(
    "run_methods, run_columns, expected_cols, forbidden_cols",
    [
        # Test run_methods with run_columns=None
        (None, None, None, []),
        ([], None, [], []),
        (["~get_kappa_area"], None, None, ["kappa_area"]),
        (["get_kappa_area"], None, {"kappa_area"}, []),
        # Test run_columns with run_methods=None
        (None, [], [], []),
        (None, ["kappa_area"], {"kappa_area"}, []),
        # Test run_methods and run_columns combo
        ([], [], [], []),
        (["get_kappa_area"], [], {"kappa_area"}, []),
        (
            ["get_kappa_area"],
            ["greenwald_fraction"],
            {"kappa_area", "n_e", "dn_dt", "greenwald_fraction"},
            [],
        ),
        (
            ["~get_kappa_area"],
            ["greenwald_fraction"],
            {"n_e", "dn_dt", "greenwald_fraction"},
            [],
        ),
        (["~get_kappa_area"], ["kappa_area"], [], []),
    ],
)
def test_run_methods_and_columns(
    tokamak,
    shotlist,
    run_methods,
    run_columns,
    expected_cols,
    forbidden_cols,
    data,
    test_folder_f,
):
    """
    Test the `run_methods` and `run_columns` parameters of RetrievalSettings.

    - If both are None, all methods are run
    - If one is None or [], the methods/columns specified by the other are run
    - If both are specified, the combined methods/columns are run
    - If `run_methods` excludes a method returning a column specified in `run_columns`,
      the method is not run
    """
    if tokamak is Tokamak.HBTEP:
        # pytest.skip() raises, so nothing below runs for HBT-EP.
        pytest.skip("Skip framework tests for HBT-EP")
    retrieval_settings = RetrievalSettings(
        run_methods=run_methods,
        run_columns=run_columns,
    )
    results = get_shots_data(
        tokamak=tokamak,
        shotlist_setting=shotlist,
        retrieval_settings=retrieval_settings,
        output_setting=os.path.join(test_folder_f, "output.nc"),
        log_settings=LogSettings(
            console_level="CRITICAL",
            file_path=os.path.join(test_folder_f, "output.log"),
        ),
        num_processes=2,
    )
    # Expected columns None means all columns (except forbidden cols) are returned
    if expected_cols is None:
        assert set(results.data_vars) == set(
            k for k in data.data_vars if k not in forbidden_cols
        )
    else:
        assert set(results.data_vars) == set(expected_cols)
    assert all(col not in results.data_vars for col in forbidden_cols)
140 |
--------------------------------------------------------------------------------
/disruption_py/machine/generic/physics.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Module for generic physics methods.
5 | """
6 | import numpy as np
7 |
8 | from disruption_py.config import config
9 | from disruption_py.core.physics_method.decorator import physics_method
10 | from disruption_py.core.physics_method.params import PhysicsMethodParams
11 | from disruption_py.machine.cmod import CmodPhysicsMethods
12 | from disruption_py.machine.d3d import D3DPhysicsMethods
13 | from disruption_py.machine.east import EastPhysicsMethods
14 | from disruption_py.machine.tokamak import Tokamak
15 |
16 |
class GenericPhysicsMethods:
    """
    Class to hold generic physics methods.
    """

    @staticmethod
    @physics_method(columns=["time_domain"])
    def get_time_domain(params: PhysicsMethodParams):
        r"""
        Get the domain (or phase) of every time point in a shot and return it
        as a categorical feature:

        - 1: ramp-up
        - 2: flat-top
        - 3: ramp-down

        Parameters
        ----------
        params : PhysicsMethodParams
            The parameters containing the MDSplus connection, shot id and more.

        Returns
        -------
        dict
            A dictionary containing the categorical feature `time_domain`.

        References
        -------
        - original source:
          - cmod: [get_flattop_indices.m](https://github.com/MIT-PSFC/disruption-py
            /blob/matlab/CMOD/matlab-core/get_flattop_indices.m)
          - d3d: [get_flattop_indices.m](https://github.com/MIT-PSFC/disruption-py
            /blob/matlab/DIII-D/get_flattop_indices.m)
          - east: [get_flattop_indices.m](https://github.com/MIT-PSFC/disruption-py/
            blob/matlab/EAST/utils/get_flattop_indices.m), [get_flattop_times.m](https://github
            .com/MIT-PSFC/disruption-py/blob/matlab/EAST/utils/get_flattop_times.m)
        - pull requests: #[433](https://github.com/MIT-PSFC/disruption-py/pull/433)
        - issues: #[408](https://github.com/MIT-PSFC/disruption-py/issues/408)
        """
        # Initialize dictionaries
        signals = {}
        thresholds = config(params.tokamak).physics.time_domain_thresholds
        # Per-signal predicates deciding which samples qualify as flat-top.
        conditions = {
            "dipprog_dt": lambda signal, threshold: np.abs(signal) <= threshold,
            "ip_prog": lambda signal, threshold: np.abs(signal) >= threshold,
            "power_supply_railed": lambda signal, railed: signal != railed,
        }
        # Get data and threshold parameters; the set of available signals
        # depends on the machine.
        if params.tokamak == Tokamak.CMOD:
            ip_parameters = CmodPhysicsMethods.get_ip_parameters(params=params)
            signals["dipprog_dt"] = ip_parameters["dipprog_dt"]
            signals["ip_prog"] = ip_parameters["ip_prog"]
        elif params.tokamak == Tokamak.D3D:
            ip_parameters = D3DPhysicsMethods.get_ip_parameters(params=params)
            signals["dipprog_dt"] = ip_parameters["dipprog_dt"]
            signals["ip_prog"] = ip_parameters["ip_prog"]
            signals["power_supply_railed"] = ip_parameters["power_supply_railed"]
        elif params.tokamak == Tokamak.EAST:
            ip_parameters = EastPhysicsMethods.get_ip_parameters(params=params)
            signals["dipprog_dt"] = ip_parameters["dipprog_dt"]
        else:
            # Unsupported machine: single-NaN placeholder.
            return {"time_domain": [np.nan]}

        # Check if all signals are available and valid
        for signal in signals.values():
            if np.isnan(signal).all():
                return {"time_domain": [np.nan]}

        time_domain = np.full(len(params.times), np.nan)
        # Get flattop domain indices: intersect the samples accepted by every
        # available condition.
        indices_flattop = np.arange(len(time_domain))
        for name in ["dipprog_dt", "ip_prog", "power_supply_railed"]:
            sig, thr = signals.get(name, None), thresholds.get(name, None)
            if all(v is not None for v in (sig, thr)):
                (indices,) = np.where(conditions[name](sig, thr))
                indices_flattop = np.intersect1d(
                    indices_flattop, indices, assume_unique=True
                )

        # Get the longest subsequence of indices_flattop (split at gaps in
        # the index sequence, keep the longest contiguous run).
        indices_flattop = max(
            np.split(indices_flattop, np.where(np.diff(indices_flattop) != 1)[0] + 1),
            key=len,
        )
        # Assign shot domains
        if len(indices_flattop) == 0:
            # Shot only has ramp up phase
            time_domain[:] = 1
        else:
            flattop_start, flattop_end = indices_flattop[0], indices_flattop[-1] + 1
            time_domain[:flattop_start] = 1
            time_domain[flattop_start:flattop_end] = 2
            time_domain[flattop_end:] = 3

        return {"time_domain": time_domain}
112 |
--------------------------------------------------------------------------------
/disruption_py/machine/d3d/efit.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Module for retrieving and processing EFIT parameters for DIII-D.
5 | """
6 |
7 | import numpy as np
8 |
9 | from disruption_py.core.physics_method.decorator import physics_method
10 | from disruption_py.core.physics_method.params import PhysicsMethodParams
11 | from disruption_py.core.utils.math import interp1
12 | from disruption_py.machine.tokamak import Tokamak
13 |
14 |
class D3DEfitMethods:
    """
    A class for retrieving and processing EFIT parameters from DIII-D.
    """

    # EFIT Variables
    efit_cols = {
        "beta_n": r"\efit_a_eqdsk:betan",
        "beta_p": r"\efit_a_eqdsk:betap",
        "kappa": r"\efit_a_eqdsk:kappa",
        "li": r"\efit_a_eqdsk:li",
        "upper_gap": r"\efit_a_eqdsk:gaptop",
        "lower_gap": r"\efit_a_eqdsk:gapbot",
        "q0": r"\efit_a_eqdsk:q0",
        "qstar": r"\efit_a_eqdsk:qstar",
        "q95": r"\efit_a_eqdsk:q95",
        "wmhd": r"\efit_a_eqdsk:wmhd",
        "chisq": r"\efit_a_eqdsk:chisq",
    }

    # Time-derivative columns computed from the efit_cols signals.
    efit_derivs = {"dbetap_dt": "beta_p", "dli_dt": "li", "dwmhd_dt": "wmhd"}
    # Real-time EFIT signals, read from the efitrt1 tree.
    rt_efit_cols = {
        "beta_p_rt": r"\efit_a_eqdsk:betap",
        "li_rt": r"\efit_a_eqdsk:li",
        "q95_rt": r"\efit_a_eqdsk:q95",
        "wmhd_rt": r"\efit_a_eqdsk:wmhd",
        "chisq_rt": r"\efit_a_eqdsk:chisq",
    }

    # EFIT reconstructions are sometimes invalid, particularly when very close
    # to a disruption. There are a number of EFIT parameters that can indicate
    # invalid reconstructions, such as 'terror' and 'chisq'. We use 'chisq'
    # above this threshold to exclude time slices from the database.
    CHISQ_LIMIT = 50

    @staticmethod
    def _mask_invalid_reconstructions(efit_data, chisq_key):
        """
        In place, set every signal to NaN at time slices whose chi-squared
        value (under `chisq_key`) exceeds CHISQ_LIMIT.
        """
        invalid_indices = np.where(efit_data[chisq_key] > D3DEfitMethods.CHISQ_LIMIT)
        for param in efit_data:
            efit_data[param][invalid_indices] = np.nan

    @staticmethod
    def _interp_to_timebase(efit_data, efit_time, times):
        """
        Interpolate every signal onto `times` when it differs from `efit_time`,
        and return the (possibly updated) data dictionary.
        """
        if not np.array_equal(times, efit_time):
            for param in efit_data:
                efit_data[param] = interp1(efit_time, efit_data[param], times)
        return efit_data

    @staticmethod
    @physics_method(
        columns=[*efit_cols.keys(), *efit_derivs.keys()],
        tokamak=Tokamak.D3D,
    )
    def get_efit_parameters(params: PhysicsMethodParams):
        """
        Retrieve EFIT parameters and their time derivatives.

        Parameters
        ----------
        params : PhysicsMethodParams
            Parameters containing MDS connection and shot information.

        Returns
        -------
        dict
            A dictionary containing the EFIT parameters and their derivatives.
        """
        efit_data = {
            k: params.mds_conn.get_data(v, tree_name="_efit_tree")
            for k, v in D3DEfitMethods.efit_cols.items()
        }
        efit_time = (
            params.mds_conn.get_data(r"\efit_a_eqdsk:atime", tree_name="_efit_tree")
            / 1.0e3
        )  # [ms] -> [s]

        # Drop time slices with bad fit quality before differentiating.
        D3DEfitMethods._mask_invalid_reconstructions(efit_data, "chisq")

        for deriv_param, param in D3DEfitMethods.efit_derivs.items():
            if len(efit_data[param]) < 2:
                # Too few points for a finite-difference gradient.
                efit_data[deriv_param] = [np.nan]
                continue
            efit_data[deriv_param] = np.gradient(efit_data[param], efit_time)

        return D3DEfitMethods._interp_to_timebase(efit_data, efit_time, params.times)

    @staticmethod
    @physics_method(
        columns=[*rt_efit_cols.keys()],
        tokamak=Tokamak.D3D,
    )
    def get_rt_efit_parameters(params: PhysicsMethodParams):
        """
        Retrieve real-time EFIT parameters.

        Parameters
        ----------
        params : PhysicsMethodParams
            Parameters containing MDS connection and shot information.

        Returns
        -------
        dict
            A dictionary containing the real-time EFIT parameters.
        """
        efit_data = {
            k: params.mds_conn.get_data(v, tree_name="efitrt1")
            for k, v in D3DEfitMethods.rt_efit_cols.items()
        }
        efit_time = (
            params.mds_conn.get_data(r"\efit_a_eqdsk:atime", tree_name="efitrt1")
            / 1.0e3
        )  # [ms] -> [s]

        # Drop time slices with bad fit quality, then re-sample.
        D3DEfitMethods._mask_invalid_reconstructions(efit_data, "chisq_rt")
        return D3DEfitMethods._interp_to_timebase(efit_data, efit_time, params.times)
131 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """Pytest configuration module for setting up fixtures."""
4 |
5 | import os
6 | import tempfile
7 | from unittest.mock import patch
8 |
9 | import pytest
10 |
11 | from disruption_py.core.utils.math import matlab_gradient_1d_vectorized
12 | from disruption_py.core.utils.misc import get_temporary_folder
13 | from disruption_py.machine.tokamak import resolve_tokamak_from_environment
14 | from tests.utils.factory import (
15 | get_tokamak_test_columns,
16 | get_tokamak_test_expected_failure_columns,
17 | get_tokamak_test_shotlist,
18 | )
19 |
20 |
def pytest_addoption(parser):
    """Register the custom `--verbose_output` command-line flag with pytest."""
    parser.addoption(
        "--verbose_output",
        action="store_true",
        help="More testing information.",
    )
26 |
27 |
@pytest.fixture(scope="session")
def verbose_output(pytestconfig):
    """Session fixture exposing the `--verbose_output` command-line flag."""
    flag = pytestconfig.getoption("verbose_output")
    return flag
32 |
33 |
def pytest_generate_tests(metafunc):
    """
    Parametrize `data_column` across the tokamak's test columns, marking
    expected-failure columns with xfail. Marked columns will xfail on
    assert False and xpass on assert True.
    """
    tokamak = resolve_tokamak_from_environment()

    # Only tests requesting the `data_column` fixture get parameterized.
    if "data_column" not in metafunc.fixturenames:
        return
    xfail_columns = get_tokamak_test_expected_failure_columns(tokamak)
    data_columns = [
        pytest.param(col, marks=pytest.mark.xfail) if col in xfail_columns else col
        for col in get_tokamak_test_columns(tokamak)
    ]
    metafunc.parametrize("data_column", data_columns)
52 |
53 |
@pytest.fixture(scope="session", name="tokamak")
def tokamak_fixture():
    """Session fixture: the tokamak resolved from the environment."""
    resolved = resolve_tokamak_from_environment()
    return resolved
58 |
59 |
@pytest.fixture(scope="module")
def shotlist(tokamak):
    """Module fixture: the list of test shots for the current tokamak."""
    shots = get_tokamak_test_shotlist(tokamak)
    return shots
64 |
65 |
@pytest.fixture(scope="module", name="data_columns")
def data_columns_fixture(tokamak):
    """Module fixture: the data columns under test for the current tokamak."""
    columns = get_tokamak_test_columns(tokamak)
    return columns
70 |
71 |
@pytest.fixture(scope="module")
def expected_failure_columns(tokamak):
    """Module fixture: columns expected to fail for the current tokamak."""
    columns = get_tokamak_test_expected_failure_columns(tokamak)
    return columns
76 |
77 |
@pytest.fixture(scope="session", autouse=True)
def mock_numpy_gradient():
    """
    Patch numpy.gradient with a MATLAB-compatible gradient implementation
    for the duration of the test session.
    """
    patcher = patch("numpy.gradient", new=matlab_gradient_1d_vectorized)
    patcher.start()
    try:
        yield
    finally:
        patcher.stop()
86 |
87 |
def test_folder(request) -> str:
    """
    Return (creating it if needed) a per-test temporary folder.

    Parameters
    ----------
    request : FixtureRequest | str
        The request object for the current test, or a folder name.

    Returns
    -------
    str
        The temporary folder path.
    """
    name = request if isinstance(request, str) else request.node.name
    folder = os.path.join(get_temporary_folder(), name)
    os.makedirs(folder, exist_ok=True)
    return folder
107 |
108 |
@pytest.fixture(scope="function")
def test_folder_f(request) -> str:
    """
    Function-scoped wrapper around `test_folder`: a fresh folder per test.
    """
    return test_folder(request)
115 |
116 |
@pytest.fixture(scope="module")
def test_folder_m(request) -> str:
    """
    Module-scoped wrapper around `test_folder`: one folder per test module.
    """
    return test_folder(request)
123 |
124 |
@pytest.fixture(scope="module")
def test_file_path_f(request):
    """
    Factory fixture producing unique file paths for test files.

    NOTE(review): despite the `_f` suffix this fixture is module-scoped;
    uniqueness comes from `tempfile.mkdtemp` per call — confirm the scope
    is intentional.

    Parameters
    ----------
    request : FixtureRequest
        The request object for the current test.

    Returns
    -------
    function
        A function that generates file paths with the specified name.
    """

    def make_path(name):
        base = os.path.join(get_temporary_folder(), request.node.name)
        os.makedirs(base, exist_ok=True)
        unique = tempfile.mkdtemp(dir=base, prefix="")
        return os.path.join(unique, name)

    return make_path
148 |
149 |
def skip_on_fast_execution(method):
    """
    Decorator that skips a test when running on GitHub Actions (detected via
    the GITHUB_ACTIONS environment variable); otherwise the test is returned
    unchanged.
    """
    if "GITHUB_ACTIONS" in os.environ:
        # Apply the skip mark directly to the original test function, instead
        # of replacing it with a nested dummy whose `(method)` parameter would
        # otherwise be collected as a pytest fixture.
        return pytest.mark.skip("fast execution")(method)
    return method
160 |
--------------------------------------------------------------------------------
/docs/usage/physics_methods/physics_method_reference.md:
--------------------------------------------------------------------------------
1 | ## What is a Physics Method? { .doc .doc-heading }
2 | In DisruptionPy, physics methods are methods that produce tabular data in a standardized time base. Physics methods must take a single argument `params` that is an instance of [`PhysicsMethodParams`][disruption_py.core.physics_method.params].
3 |
4 | ## Built-in Physics Methods { .doc .doc-heading }
5 | While you can define your own, existing built-in physics methods are defined inside of the `disruption_py.machine` package.
6 |
7 | For more information on available methods please see the built-in method documentation pages:
8 |
9 | - [CMod Physics Methods](cmod_built_in_method_reference.md)
10 | - [DIII-D Physics Methods](d3d_built_in_method_reference.md)
11 | - [EAST Physics Methods](east_built_in_method_reference.md)
12 | - [HBT-EP Physics Methods](hbtep_built_in_method_reference.md)
13 |
14 | ## Custom Physics Methods { .doc .doc-heading }
15 | Users of DisruptionPy can create their own custom physics methods by adding the [`@physics_method`][disruption_py.core.physics_method.decorator.physics_method] decorator to a method. These custom physics methods can then be passed as the `custom_physics_methods` parameter in [`RetrievalSettings`][disruption_py.settings.retrieval_settings] and their results will be included alongside those returned by the built-in methods. See [Physics Method Decorators](decorator_reference.md) for more details on decorators.
16 |
17 | ### Physics methods structure
18 |
19 | ::: docs.examples.custom_physics_method.decorated_physics_method
20 | handler: python
21 | options:
22 | heading_level: 4
23 | show_source: false
24 | show_root_heading: false
25 | show_root_toc_entry: false
26 |
27 | ### Walkthrough { .doc .doc-heading }
28 | The steps for creating a custom physics method are as follows:
29 |
30 | 1. Create a function that takes an argument named `params` of type [`PhysicsMethodParams`][disruption_py.core.physics_method.params.PhysicsMethodParams] and returns a Python dictionary. The method must be decorated with the [`physics_method`][disruption_py.core.physics_method.decorator.physics_method] decorator. The arguments passed to the decorator are important for DisruptionPy to run efficiently. See [`physics_method`][disruption_py.core.physics_method.decorator.physics_method] for more details about available arguments.
31 | ```python
32 | from disruption_py.core.physics_method.params import PhysicsMethodParams
33 | from disruption_py.core.physics_method.decorator import physics_method
34 |
35 | @physics_method(...)
36 | def ***_method(params: PhysicsMethodParams) -> dict:
37 | ...
38 | ```
39 |
40 | 2. To retrieve data from MDSplus use the `params` ([`PhysicsMethodParams`][disruption_py.core.physics_method.params.PhysicsMethodParams]) object. It contains many useful attributes, among which are the following:
41 | - `params.shot_id`: the shot id of the shot for which data is being retrieved.
42 | - `params.mds_conn`: a wrapper around the MDSplus connection for the process, that makes it easier to get data for a shot. See [`MDSConnection`][disruption_py.inout.mds.MDSConnection] for details.
43 | - `params.times`: the timebase of the shot for which data is being retrieved as a NumPy array of times.
44 | ??? example "Shot Data Request Examples"
45 |
46 | === "Kappa Area Parameter in C-Mod"
47 |
48 | ```python
49 | --8<--
50 | docs/examples/custom_physics_method.py:kappa_area_request_example
51 | --8<--
52 | ```
53 |
54 | !!! warning
55 | When two output columns have the same name, the column that appears in the final dataset is not guaranteed. This issue will be fixed in the near future.
56 |
57 | ## Running Physics Methods { .doc .doc-heading }
58 | Users can use a number of built-in physics methods and/or create their own methods.
59 |
60 | For a physics method to be run after calling [`get_shots_data`][disruption_py.workflow.get_shots_data] it must meet the following conditions:
61 |
62 | 1. The method must either:
63 | - be included inside of the `disruption_py.machine.method_holders.py` list (built-in method)
64 | - be included inside of the `custom_physics_methods` argument of [`RetrievalSettings`][disruption_py.settings.retrieval_settings] when getting shot data.
65 |
66 | 2. The method must have the `physics_method` decorator with its `tokamak` parameter either not set or set to the tokamak that you are retrieving data from.
67 |
3. The method must be included to run via either the `run_methods` or `run_columns` parameter of the retrieval settings.
69 | - To be included via `run_methods`, the method name must be listed inside of `run_methods`
70 | - To be included via `run_columns`, the method must have a column list in the `columns` parameter of the `physics_method` decorator that is included in `run_columns`
71 | - If neither `run_methods` nor `run_columns` is specified, all built-in methods will be run
72 |
73 |
74 | Once all designated methods have been collected, DisruptionPy optimizes their execution order to minimize resource usage by using the information supplied in the `physics_method` decorator. Once reordering is complete, the methods are run.
75 |
76 |
77 | ::: disruption_py.core.physics_method.params
78 | handler: python
79 | options:
80 | heading_level: 2
81 | members:
82 | - PhysicsMethodParams
83 |
--------------------------------------------------------------------------------
/tests/test_against_cache.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Unit tests for workflows involving get_dataset_df() for obtaining CMOD data.
5 |
6 | Expects to be run on the MFE workstations.
7 | Expects MDSplus to be installed and configured.
8 | Expects SQL credentials to be configured.
9 | """
10 |
11 | import argparse
12 | from typing import Dict, List
13 |
14 | import pandas as pd
15 | import pytest
16 |
17 | from disruption_py.machine.tokamak import Tokamak, resolve_tokamak_from_environment
18 | from tests.utils.eval_against_sql import (
19 | eval_against_cache,
20 | eval_shots_against_cache,
21 | get_cached_from_fresh,
22 | get_fresh_data,
23 | )
24 | from tests.utils.factory import (
25 | get_tokamak_test_expected_failure_columns,
26 | get_tokamak_test_shotlist,
27 | )
28 | from tests.utils.pytest_helper import extract_param
29 |
30 |
@pytest.fixture(scope="module", name="fresh_data")
def fresh_data_fixture(
    tokamak: Tokamak,
    shotlist: List[int],
    test_folder_m: str,
    pytestconfig,
) -> Dict[int, pd.DataFrame]:
    """
    Fetch fresh MDSplus data once per module for the test shotlist.

    Parameters
    ----------
    tokamak : Tokamak
        The tokamak under test.
    shotlist : List[int]
        Shot identifiers to retrieve data for.
    test_folder_m : str
        Output folder.
    pytestconfig : Config
        The pytest configuration object.

    Returns
    -------
    Dict[int, pd.DataFrame]
        Fresh DataFrames keyed by shot identifier.
    """
    requested_columns = extract_param(pytestconfig)
    return get_fresh_data(
        tokamak=tokamak,
        shotlist=shotlist,
        folder=test_folder_m,
        test_columns=requested_columns,
    )
63 |
64 |
@pytest.fixture(scope="module", name="cache_data")
def cache_data_fixture(
    tokamak: Tokamak,
    shotlist: List[int],
    fresh_data: Dict[int, pd.DataFrame],
    pytestconfig,
) -> Dict[int, pd.DataFrame]:
    """
    Build the cached-data counterpart of `fresh_data` for the same shots.

    Parameters
    ----------
    tokamak : Tokamak
        The tokamak under test.
    shotlist : List[int]
        Shot identifiers to retrieve data for.
    fresh_data : Dict[int, pd.DataFrame]
        Freshly retrieved data, keyed by shot.
    pytestconfig : Config
        The pytest configuration object.

    Returns
    -------
    Dict[int, pd.DataFrame]
        Cached DataFrames keyed by shot identifier.
    """
    requested_columns = extract_param(pytestconfig)
    return get_cached_from_fresh(
        tokamak=tokamak,
        shotlist=shotlist,
        fresh_data=fresh_data,
        test_columns=requested_columns,
    )
97 |
98 |
def test_data_columns(
    shotlist: List[int],
    fresh_data: Dict[int, pd.DataFrame],
    cache_data: Dict[int, pd.DataFrame],
    data_column,
    expected_failure_columns: List[str],
):
    """
    Check that a single data column agrees between fresh and cached sources
    across all shots.

    Data column is parameterized in pytest_generate_tests.
    """
    comparison = {
        "shotlist": shotlist,
        "fresh_data": fresh_data,
        "cache_data": cache_data,
        "data_columns": [data_column],
        "expected_failure_columns": expected_failure_columns,
    }
    eval_shots_against_cache(**comparison)
119 |
120 |
def main():
    """
    Command-line entry point: compare fresh data against cache for one shot
    or the default shotlist.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-c",
        "--data-column",
        type=str.lower,
        default=None,
        help="Data column to test, use all data columns if not specified",
    )
    parser.add_argument(
        "-s",
        "--shot-id",
        type=int,
        action="store",
        default=None,
        help="Shot number to test, uses the default shot list if not specified",
    )
    parser.add_argument(
        "-l",
        "--log-level",
        type=str.lower,
        action="store",
        default="INFO",
        help="Console log level",
    )
    args = parser.parse_args()

    tokamak = resolve_tokamak_from_environment()
    data_columns = [args.data_column] if args.data_column else None
    shotlist = (
        get_tokamak_test_shotlist(tokamak) if args.shot_id is None else [args.shot_id]
    )
    expected_failure_columns = get_tokamak_test_expected_failure_columns(tokamak)

    data_differences = eval_against_cache(
        tokamak=tokamak,
        shotlist=shotlist,
        expected_failure_columns=expected_failure_columns,
        test_columns=data_columns,
        console_level=args.log_level,
    )

    columns = {dd.data_column for dd in data_differences}
    print(
        f"Python tests complete. Checked {len(shotlist)} shots with {len(columns)} columns."
    )


if __name__ == "__main__":
    main()
181 |
--------------------------------------------------------------------------------
/tests/utils/data_difference.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Module for handling and analyzing data differences between fresh MDSplus data
5 | and cached SQL data.
6 | """
7 |
8 | from dataclasses import dataclass, field
9 |
10 | import numpy as np
11 | import pandas as pd
12 |
13 | from disruption_py.config import config
14 |
15 |
@dataclass
class DataDifference:
    """
    Data difference between fresh MDSplus data and cached SQL data.
    """

    # Shot and column this comparison refers to.
    shot_id: int
    data_column: str

    # Whether either source lacks the column entirely.
    missing_cache_data: bool
    missing_fresh_data: bool

    # Derived in __post_init__ from the two column series below.
    anomalies: np.ndarray = field(init=False)  # 1 if anomaly, 0 o.w.
    relative_difference: np.ndarray = field(init=False)
    # Column values from each source.
    fresh_column_data: pd.Series
    cache_column_data: pd.Series
    # True when this column is expected to mismatch.
    expect_failure: bool

    # Timebases of the two sources.
    fresh_time: pd.Series
    cache_time: pd.Series
36 |
    def __post_init__(self):
        # Compute the anomaly mask and relative differences once, at construction.
        self.anomalies, self.relative_difference = self.compute_numeric_anomalies()
39 |
40 | @property
41 | def num_anomalies(self) -> int:
42 | """Sum the number of anomalies"""
43 | return np.sum(self.anomalies)
44 |
45 | @property
46 | def timebase_length(self) -> int:
47 | """Get the timebase length"""
48 | return len(self.anomalies)
49 |
50 | @property
51 | def missing_data(self) -> bool:
52 | """Return True if either fresh or cache is missing data."""
53 | return self.missing_cache_data or self.missing_fresh_data
54 |
    @property
    def failed(self) -> bool:
        """Return True if missing data or if there are too many anomalies."""
        if self.missing_data:
            return True
        # Fail when the anomalous fraction exceeds the configured tolerance.
        return (
            self.num_anomalies / self.timebase_length
            > 1 - config().tests.match_fraction
        )
64 |
65 | @property
66 | def column_mismatch_string(self) -> str:
67 | """Create a string showing the difference between fresh and cache data."""
68 | # Missing data handled here because difference_df expects data to exist
69 | s = f"Shot {self.shot_id} column {self.data_column}"
70 | if self.missing_cache_data or self.missing_fresh_data:
71 | fresh_str = (
72 | "Missing fresh data" if self.missing_fresh_data else "Have fresh data"
73 | )
74 | cache_str = (
75 | "Missing cache data" if self.missing_cache_data else "Have cache data"
76 | )
77 | return f"{s}: {fresh_str} and {cache_str}"
78 | return s + f" with arrays:\n{self.difference_df.to_string()}"
79 |
80 | @property
81 | def difference_df(self) -> pd.DataFrame:
82 | """
83 | Create a dataframe with columns for time, fresh data, cache data, the
84 | ratio between the two data, the absolute difference between them, the relative
85 | difference, and whether the point is an anomaly.
86 | """
87 | indices = (
88 | np.arange(self.timebase_length)
89 | if config().tests.verbose_output
90 | else self.anomalies.flatten()
91 | )
92 | anomaly = self.anomalies[indices]
93 | fresh_data = self.fresh_column_data.iloc[indices]
94 | cache_data = self.cache_column_data.iloc[indices]
95 | return pd.DataFrame(
96 | {
97 | "Time": self.fresh_time[indices],
98 | "Fresh Data": fresh_data,
99 | "Cache Data": cache_data,
100 | "Fresh/Cache": fresh_data / cache_data,
101 | "Absolute difference": abs(fresh_data - cache_data),
102 | "Relative difference": self.relative_difference[indices],
103 | "Anomaly": anomaly,
104 | }
105 | )
106 |
107 | def compute_numeric_anomalies(self):
108 | """
109 | Get the indices of the data where numeric differences exist between the
110 | cached and fresh data.
111 | """
112 |
113 | # handle missing data case
114 | if self.missing_fresh_data or self.missing_cache_data:
115 | if self.missing_fresh_data and self.missing_cache_data:
116 | missing_timebase_length = 0
117 | elif self.missing_fresh_data:
118 | missing_timebase_length = len(self.cache_column_data)
119 | else:
120 | missing_timebase_length = len(self.fresh_column_data)
121 | return np.ones(missing_timebase_length, dtype=bool), np.zeros(
122 | missing_timebase_length
123 | )
124 |
125 | cache_is_nan = pd.isnull(self.cache_column_data)
126 | fresh_is_nan = pd.isnull(self.fresh_column_data)
127 |
128 | # handle case where both arrays are all null
129 | if cache_is_nan.all() and fresh_is_nan.all():
130 | return np.zeros(len(self.fresh_column_data), dtype=bool), np.zeros(
131 | len(self.fresh_column_data)
132 | )
133 |
134 | relative_difference = np.where(
135 | self.cache_column_data != 0,
136 | np.abs(
137 | (self.fresh_column_data - self.cache_column_data)
138 | / self.cache_column_data
139 | ),
140 | np.where(self.fresh_column_data != 0, np.inf, np.nan),
141 | )
142 |
143 | numeric_anomalies_mask = np.where(
144 | np.isnan(relative_difference),
145 | False,
146 | relative_difference > config().tests.val_tolerance,
147 | )
148 | nan_anomalies_mask = cache_is_nan != fresh_is_nan
149 | anomalies: pd.Series = numeric_anomalies_mask | nan_anomalies_mask
150 |
151 | return anomalies.to_numpy(), relative_difference
152 |
--------------------------------------------------------------------------------
/disruption_py/machine/east/efit.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Module for retrieving and processing EFIT parameters for EAST.
5 | """
6 |
7 | import numpy as np
8 |
9 | from disruption_py.core.physics_method.decorator import physics_method
10 | from disruption_py.core.physics_method.params import PhysicsMethodParams
11 | from disruption_py.core.utils.math import interp1
12 | from disruption_py.machine.tokamak import Tokamak
13 |
14 |
class EastEfitMethods:
    """
    Class for retrieving and processing EFIT parameters for EAST.

    Attributes
    ----------
    efit_cols : dict
        A dictionary mapping parameter names to their corresponding EFIT data paths.
    efit_derivs : dict
        A dictionary mapping derivative parameter names to the EFIT parameter
        they are differentiated from.
    pefit_cols : dict
        A dictionary mapping parameter names to their corresponding real-time
        P-EFIT data paths in the 'pefit_east' tree.
    """

    # TODO: confirm units
    efit_cols = {
        "beta_n": r"\efit_aeqdsk:betan",
        "beta_p": r"\efit_aeqdsk:betap",
        "kappa": r"\efit_aeqdsk:kappa",
        "li": r"\efit_aeqdsk:li",
        "q0": r"\efit_aeqdsk:q0",
        "qstar": r"\efit_aeqdsk:qstar",
        "q95": r"\efit_aeqdsk:q95",
        "wmhd": r"\efit_aeqdsk:wmhd",
        "area": r"\efit_aeqdsk:area*1e-4",  # [cm^2] -> [m^2], from get_kappa_area
        "aminor": r"\efit_aeqdsk:aout",
        "chisq": r"\efit_aeqdsk:chisq",
    }
    efit_derivs = {"dbetap_dt": "beta_p", "dli_dt": "li", "dwmhd_dt": "wmhd"}

    pefit_cols = {
        "pbeta_n": r"\efit_aeqdsk:betan",
        "pbeta_p": r"\efit_aeqdsk:betap",
        "pkappa": r"\efit_aeqdsk:kappa",
        "pli": r"\efit_aeqdsk:li",
        "pq0": r"\efit_aeqdsk:q0",
        "pqstar": r"\efit_aeqdsk:qstar",
        "pq95": r"\efit_aeqdsk:q95",
        "pwmhd": r"\efit_aeqdsk:wmhd",
        "parea": r"\efit_aeqdsk:area*1e-4",  # [cm^2] -> [m^2]
        "paminor": r"\efit_aeqdsk:aout",
        "pchisq": r"\efit_aeqdsk:chisq",
        "pconvergence": r"\efit_aeqdsk:error",
    }

    @staticmethod
    @physics_method(
        columns=[*efit_cols.keys(), *efit_derivs.keys()],
        tokamak=Tokamak.EAST,
    )
    def get_efit_parameters(params: PhysicsMethodParams):
        """
        Retrieve EFIT parameters for EAST.

        Parameters
        ----------
        params : PhysicsMethodParams
            The parameters containing the MDS connection and shot information.

        Returns
        -------
        dict
            A dictionary containing the retrieved EFIT parameters, interpolated
            onto the requested timebase.
        """
        # pylint: disable=duplicate-code

        efit_data = {
            k: params.mds_conn.get_data(v, tree_name="_efit_tree")
            for k, v in EastEfitMethods.efit_cols.items()
        }
        efit_time = params.mds_conn.get_data(
            r"\efit_aeqdsk:atime", tree_name="_efit_tree"
        )  # TODO: [unit?]

        # EFIT reconstructions are sometimes invalid, particularly when very close
        # to a disruption. There are a number of EFIT parameters that can indicate
        # invalid reconstructions, such as 'terror' and 'chisq'. Here we use
        # 'chisq' to determine which time slices should be excluded from our
        # disruption warning database.
        invalid_indices = np.where(efit_data["chisq"] > 50)

        # blank out invalid slices before differentiating/interpolating so
        # they cannot contaminate neighboring values
        for param in efit_data:
            efit_data[param][invalid_indices] = np.nan
        for deriv_param, param in EastEfitMethods.efit_derivs.items():
            efit_data[deriv_param] = np.gradient(efit_data[param], efit_time)
        # only interpolate when the EFIT timebase differs from the requested one
        if not np.array_equal(params.times, efit_time):
            for param in efit_data:
                efit_data[param] = interp1(efit_time, efit_data[param], params.times)
        return efit_data

    @staticmethod
    @physics_method(
        columns=[
            *pefit_cols.keys(),
        ],
        tokamak=Tokamak.EAST,
    )
    def get_pefit_parameters(params: PhysicsMethodParams):
        """
        Retrieve real-time P-EFIT (Parallel-EFIT) parameters from
        the 'pefit_east' tree.

        Parameters
        ----------
        params : PhysicsMethodParams
            The parameters containing the MDS connection and shot information.

        Returns
        -------
        dict
            A dictionary containing the retrieved P-EFIT parameters, interpolated
            onto the requested timebase.
        """
        # pylint: disable=duplicate-code

        efit_time = params.mds_conn.get_data(
            r"\efit_a_eqdsk:atime", tree_name="pefit_east"
        )  # TODO: [unit?]

        # Deal with bug: the stored timebase may contain duplicate entries, so
        # keep only the first occurrence of each time and subset data to match
        efit_time, unique_indices = np.unique(efit_time, return_index=True)

        efit_data = {
            k: params.mds_conn.get_data(v, tree_name="pefit_east")[unique_indices]
            for k, v in EastEfitMethods.pefit_cols.items()
        }

        # P-EFIT reconstructions are sometimes invalid, particularly when very close
        # to a disruption. There are a number of P-EFIT parameters that can indicate
        # invalid reconstructions, such as 'error' and 'chisq'. Here we use
        # 'chisq' to determine which time slices should be excluded from our
        # disruption warning database.
        # Yao Huang suggests to use data with:
        # - chisq < 20
        # - convergence error < 1 (data stored in MDS+ is multiplied by 1e3)
        # - ip > 180 kA
        # For now, we only check the first two conditions.
        # If ever we want to extend analysis to ramp up or down we need to check ip.
        # FIX: a slice is invalid when chisq is too large OR the convergence
        # error is too large. The previous mask, (pchisq > 50) & (pconvergence < 1),
        # contradicted the criteria above: it flagged well-converged slices and
        # used a chisq threshold of 50 instead of the suggested 20.
        invalid_indices = np.where(
            (efit_data["pchisq"] > 20) | (efit_data["pconvergence"] >= 1)
        )
        for param in efit_data:
            efit_data[param][invalid_indices] = np.nan
        if not np.array_equal(params.times, efit_time):
            for param in efit_data:
                efit_data[param] = interp1(efit_time, efit_data[param], params.times)
        return efit_data
157 |
--------------------------------------------------------------------------------
/REFERENCES.md:
--------------------------------------------------------------------------------
1 |
2 | # DisruptionPy References
3 |
4 | Here follows a non-exhaustive list of projects, publications, and conference contributions that leveraged or referenced DisruptionPy.
5 |
6 | ### Projects
7 |
8 | - C Rea, _et al._ (2023), _"Open and FAIR Fusion for Machine Learning Applications"_, [Project website](https://crea-psfc.github.io/open-fair-fusion/).
9 |
10 | ### Publications
11 |
12 | - AR Saperstein, _et al._ (2025), _"Design and development of an off-normal warning system for SPARC"_, Nucl. Fusion **65** 116007, DOI: [10.1088/1741-4326/ae074e](https://doi.org/10.1088/1741-4326/ae074e)
13 | - GL Trevisan, _et al._ (2025), _"DisruptionPy: An open-source physics-based scientific framework for disruption analysis of fusion plasmas"_, JOSS, [under review](https://github.com/openjournals/joss-reviews/issues/9364)
14 | - L Spangher, _et al._ (2025), _"DisruptionBench and Complimentary New Models: Two Advancements in Machine Learning Driven Disruption Prediction"_, J. Fusion. Energ. **44** 26, DOI: [10.1007/s10894-025-00495-2](https://doi.org/10.1007/s10894-025-00495-2)
15 | - J Stillerman, _et al._ (2025), _"MDSplusML: Optimizations for data access to facilitate machine learning pipelines"_, Fus. Eng. Des. **211** 114770, DOI: [10.1016/j.fusengdes.2024.114770](https://doi.org/10.1016/j.fusengdes.2024.114770)
16 | - AD Maris, _et al._ (2024), _"Correlation of the L-mode density limit with edge collisionality"_, Nucl. Fusion **65** 016051, DOI: [10.1088/1741-4326/ad90f0](https://doi.org/10.1088/1741-4326/ad90f0)
17 | - GL Trevisan, _et al._ (2024), _"DisruptionPy: An open-source physics-based scientific framework for disruption analysis of fusion plasmas"_, Zenodo, DOI: [10.5281/zenodo.13935223](https://doi.org/10.5281/zenodo.13935223)
18 | - Z Keith, _et al._ (2024), _"Risk-aware framework development for disruption prediction: Alcator C-Mod and DIII-D survival analysis"_, J. Fusion. Energ. **43** 21, DOI: [10.1007/s10894-024-00413-y](https://doi.org/10.1007/s10894-024-00413-y)
19 |
20 | ### Conferences
21 |
22 | - [67th APS-DPP Meeting (2025)](https://meetings.aps.org/Meeting/DPP25)
  - AD Maris, _et al._ (2025), _"Collisionality scaling of the tokamak density limit: data-driven analysis, cross-device prediction, and real-time avoidance"_, [BO04.5](https://schedule.aps.org/dpp/2025/events/BO04/5)
24 | - Z Keith, _et al._ (2025), _"Enabling data-driven NTM studies with advanced mode labeling"_, [BP13.167](https://schedule.aps.org/dpp/2025/events/BP13/167)
25 | - C Rea, _et al._ (2025), _"Research in support of the SPARC Off-Normal Warning System"_, [JO04.9](https://schedule.aps.org/dpp/2025/events/JO04/9)
26 | - AR Saperstein, _et al._ (2025), _"Validation of simulated radiative collapse events in TORAX"_, [NP13.161](https://schedule.aps.org/dpp/2025/events/NP13/161)
  - H Wietfeldt, _et al._ (2025), _"Characterization of UFOs on Alcator C-Mod and WEST to inform SPARC operation"_, [NP13.163](https://schedule.aps.org/dpp/2025/events/NP13/163)
  - EdD Zapata-Cornejo, _et al._ (2025), _"Time series classification algorithms for confinement regime identification in C-Mod"_, [NP13.169](https://schedule.aps.org/dpp/2025/events/NP13/169)
29 | - GL Trevisan, _et al._ (2025), _"A large-scale automated EFIT recomputation workflow for disruption studies at 1 kHz"_, [PP13.93](https://schedule.aps.org/dpp/2025/events/PP13/93)
30 |
31 | - [6th International Conference on Data-Driven Plasma Science (2025)](https://web.cvent.com/event/7de9d238-e170-4fbf-8de5-20abc5c6eb49/summary)
32 | - Z Keith, _et al._ (2025), _"A tearing mode database for Alcator C-Mod and DIII-D"_
33 | - Y Wei, _et al._ (2025), _"DisruptionPy: An open-source physics-based scientific package for disruption studies on magnetic fusion experiment devices"_
34 |
35 | - [45th ITPA-MDC Meeting (2025)](https://sites.google.com/pppl.gov/itpa-mdc2025)
36 | - AD Maris, _et al._ (2025), _"Cross-device prediction and real-time avoidance of the density limit"_
37 | - AR Saperstein, _et al._ (2025), _"Progress on the development of an off-normal warning system for SPARC"_
38 | - H Wietfeldt, _et al._ (2025), _"Characterizing UFO Disruptions on Alcator C-Mod"_
39 |
40 | - [3rd AI for Fusion CRP (2025)](https://nucleus.iaea.org/sites/ai4atoms/ai4fusion/SitePages/3rd-Meeting-of-the-AI-for-Fusion-(CRP).aspx)
41 | - C Rea, _et al._ (2025), _"Open and FAIR Fusion for Machine Learning Applications"_
42 |
43 | - [Open Source Software for Fusion Energy (2025)](https://ossfe.org/OSSFE_2025/#welcome-to-ossfe-2025)
44 | - Y Wei, _et al._ (2025), _"DisruptionPy: an open-source Python library for disruption study."_, [POS-10](https://ossfe.org/OSSFE_2025/y-disruptionpy)
45 |
46 | - [66th APS-DPP Meeting (2024)](https://meetings.aps.org/Meeting/DPP24)
47 | - AD Maris, _et al._ (2024), _"Correlation of the tokamak density limit with edge collisionality"_, [BI02.2](https://meetings.aps.org/Meeting/DPP24/Session/BI02.2)
48 | - H Wietfeldt, _et al._ (2024), _"Discerning Why Some High-Z UFOs in C-Mod Caused Immediate Disruptions while Others Did Not"_, [NP12.111](https://meetings.aps.org/Meeting/DPP24/Session/NP12.111)
49 | - GL Trevisan, _et al._ (2024), _"Functional Improvements and Technical Developments of a Community-driven and Physics-informed Numerical Library for Disruption Studies"_, [PP12.9](https://meetings.aps.org/Meeting/DPP24/Session/PP12.9)
50 | - Y Wei, _et al._ (2024), _"Physics validation of parameter methods in DisruptionPy"_, [PP12.10](https://meetings.aps.org/Meeting/DPP24/Session/PP12.10)
51 | - C Rea, _et al._ (2024), _"Open and FAIR Fusion for Machine Learning Applications"_, [PP12.27](https://meetings.aps.org/Meeting/DPP24/Session/PP12.27)
52 | - AR Saperstein, _et al._ (2024), _"Development and preliminary calibration of an off-normal warning system for SPARC"_, [TO06.9](https://meetings.aps.org/Meeting/DPP24/Session/TO06.9)
53 |
54 | - [3rd IAEA Technical Meeting on Plasma Disruptions and their Mitigation (2024)](https://conferences.iaea.org/event/380/)
55 | - AD Maris, _et al._ (2024), _"Correlation of the tokamak density limit with edge collisionality"_, [32301](https://conferences.iaea.org/event/380/contributions/32301/)
56 | - AR Saperstein, _et al._ (2024), _"Development and preliminary calibration of an off-normal warning system for SPARC"_, [32305](https://conferences.iaea.org/event/380/contributions/32305/)
57 |
58 | - [65th APS-DPP Meeting (2023)](https://meetings.aps.org/Meeting/DPP23)
59 | - A Maris, _et al._ (2023), _"Data-driven tokamak density limit boundary identification"_, [BP11.108](https://meetings.aps.org/Meeting/DPP23/Session/BP11.108)
60 | - L Spangher, _et al._ (2023), _"Do Fusion Plasma Time-Series Have a Persistent Memory that Machine Learning May Exploit?"_, [JP11.121](https://meetings.aps.org/Meeting/DPP23/Session/JP11.121)
61 | - AR Saperstein, _et al._ (2023), _"Off-normal warning threshold development on SPARC"_, [JP11.123](https://meetings.aps.org/Meeting/DPP23/Session/JP11.123)
62 | - Z Keith, _et al._ (2023), _"Risk-aware framework development for disruption prediction: Alcator C-Mod survival analysis"_, [JP11.124](https://meetings.aps.org/Meeting/DPP23/Session/JP11.124)
63 |
--------------------------------------------------------------------------------
/INSTALL.md:
--------------------------------------------------------------------------------
1 |
2 | # Installation
3 |
4 | Several _public_ installations are currently maintained automatically:
5 |
6 | - on the C-MOD MFE workstations,
7 | - on the DIII-D `omega` cluster,
8 | - on the EAST `cs` servers,
9 | - on the HBT-EP `landau` server.
10 |
11 | A _private_ installation is possible on any machine, either on a personal laptop, or on any experimental cluster.
12 |
13 | ## Public installations
14 |
15 | The MIT PSFC Disruption Studies Group hosts a `disruption-py` installation in a NFS folder.
The following steps apply to all public installations listed above (C-MOD, DIII-D, EAST, and HBT-EP).
17 |
18 | Snippets for quick addition to a user's `.bashrc` file are provided in the subsections below.
19 |
20 | #### Python
21 |
22 | A minimal Python 3.12 distribution is installed in the `"$DISPY_DIR"/cpython/3.12` folder, and can be used in order to create further virtual environments.
23 |
24 | #### Dependency management
25 |
26 | We use [Poetry](https://python-poetry.org/) and [uv](https://docs.astral.sh/uv/) for dependency management.
27 |
28 | The helper scripts automatically activate the appropriate virtual environment, so direct Poetry or uv usage is not needed.
29 | They may be used directly from the `"$DISPY_DIR"/poetry` or `"$DISPY_DIR"/uv` folders.
30 |
31 | #### Branches
32 |
33 | Two branches are installed publicly and kept up to date:
34 |
35 | - `main`, for stable workflows;
36 | - `dev`, for fresh features.
37 |
38 | The target branch can be controlled through the `DISPY_BRANCH` environment variable.
39 |
40 | #### Virtual environments
41 |
42 | For each branch, a Python 3.12 virtual environment is available for usage.
43 |
44 | The helper scripts will choose the appropriate virtual environment based on the `DISPY_BRANCH` environment variable.
45 | It may also be used directly from the `"$DISPY_DIR"/venv` folder.
46 |
47 | #### Setup and activation
48 |
49 | A setup script will set all the required environment variables to ensure functionality and reproducibility.
50 | It has to be _sourced_, rather than executed, through:
51 |
52 | ```bash
53 | source "$DISPY_DIR"/repo/auto/setup.sh
54 | ```
55 |
56 | More often, a user may choose to directly _activate_ the chosen virtual environment, through:
57 |
58 | ```bash
59 | source "$DISPY_DIR"/repo/auto/activate.sh
60 | # or the alias defined in our snippet
61 | disruption-activate
62 | ```
63 |
64 | The helper scripts rely on the user adopting the [Bash shell](https://www.gnu.org/software/bash/).
65 |
66 | #### Execution
67 |
68 | Even without setup or activation, execution can be invoked through `disruption-python`, which should work as a drop-in replacement for the usual `python` command.
69 | Since `disruption-python` is an executable, its shorthand execution is made possible by the presence of the `"$DISPY_DIR"/repo/auto` folder within the `PATH` environment variable, as suggested in the installation-specific snippets below.
70 |
71 | For example, one may begin a disruption-py interactive session through:
72 |
73 | ```bash
74 | disruption-python -ic "import disruption_py"
75 | ```
76 |
77 | Or execute a disruption-py-based script through:
78 |
79 | ```bash
80 | disruption-python workflow.py
81 | ```
82 |
83 | A script can therefore be executed seamlessly through `disruption-python` by specifying a custom shebang:
84 |
85 | ```
86 | user@host:~$ head -n1 workflow.py
87 | #!/usr/bin/env disruption-python
88 |
89 | user@host:~$ chmod +x workflow.py
90 |
91 | user@host:~$ ./workflow.py
92 | ```
93 |
94 | All these helper scripts are subject to change -- we welcome any suggestion to make the process even smoother.
95 |
96 | ### C-MOD
97 |
98 | Suggested snippet to be appended to the user's `~/.bashrc` file:
99 |
100 | ```bash
101 | # disruption-py
102 | export DISPY_DIR=/usr/local/mfe/disruptions/disruption-py
103 | export DISPY_BRANCH=main # default. or dev
104 | export PATH=$PATH:$DISPY_DIR/repo/auto:$DISPY_DIR/poetry/bin:$DISPY_DIR/uv
105 | alias disruption-activate='source "$DISPY_DIR"/repo/auto/activate.sh'
106 | ```
107 |
108 | ### DIII-D
109 |
110 | Suggested snippet to be appended to the user's `~/.bashrc` file:
111 |
112 | ```bash
113 | # disruption-py
114 | export DISPY_DIR=/fusion/projects/disruption_warning/disruption-py
115 | export DISPY_BRANCH=main # default. or dev
116 | export PATH=$PATH:$DISPY_DIR/repo/auto:$DISPY_DIR/poetry/bin:$DISPY_DIR/uv
117 | alias disruption-activate='source "$DISPY_DIR"/repo/auto/activate.sh'
118 | ```
119 |
120 | ### EAST
121 |
122 | Suggested snippet to be appended to the user's `~/.bashrc` file:
123 |
124 | ```bash
125 | # disruption-py
126 | export DISPY_DIR=/project/disruption-py
127 | export DISPY_BRANCH=main # default. or dev
128 | export PATH=$PATH:$DISPY_DIR/repo/auto:$DISPY_DIR/poetry/bin:$DISPY_DIR/uv
129 | alias disruption-activate='source "$DISPY_DIR"/repo/auto/activate.sh'
130 | ```
131 |
132 | ### HBT-EP
133 |
134 | Suggested snippet to be appended to the user's `~/.bashrc` file:
135 |
136 | ```bash
137 | # disruption-py
138 | export DISPY_DIR=/opt/hbt/disruptions/disruption-py
139 | export DISPY_BRANCH=main # default. or dev
140 | export PATH=$PATH:$DISPY_DIR/repo/auto:$DISPY_DIR/poetry/bin:$DISPY_DIR/uv
141 | alias disruption-activate='source "$DISPY_DIR"/repo/auto/activate.sh'
142 | ```
143 |
144 | ## Private installation
145 |
146 | As Free/Libre and Open-Source Software (FLOSS), disruption-py can also be installed on any machine.
147 | We currently provide an installation guide for Ubuntu-based boxes, but generic Unix machines or Windows systems should support similar or equivalent steps.
148 |
149 | ### Pre-requisites
150 |
151 | Disruption-py currently needs non-python software to be installed as a pre-requisite:
152 |
153 | 1. [MDSplus](https://www.mdsplus.org/): to connect to MDSplus data servers,
154 | 2. SQL drivers: to connect to SQL database servers.
155 |
156 | MDSplus can be installed using their [installation guide](https://www.mdsplus.org/index.php/Downloads).
157 |
158 | On Ubuntu-based systems, SQL drivers might be installed for example through the [Microsoft ODBC Driver](https://learn.microsoft.com/en-us/sql/connect/odbc/linux-mac/installing-the-microsoft-odbc-driver-for-sql-server?view=sql-server-ver16) `msodbcsql18` package, or [FreeTDS](https://www.freetds.org/) `{tds,unix}odbc` packages.
159 |
160 | Note:
161 |
162 | On C-MOD workstations, MDSplus is pre-installed system-wide but its configuration is only set up for the system python through a small path file (e.g.: `/usr/lib/python3/dist-packages/mdsplus.pth`).
163 | For virtual environments, even those created off the system python, this small path file needs to be copied over in the `site-packages` folder (e.g.: `venv/lib/python3.10/site-packages/`) as it is _not inherited_ upon creation of the virtual environment.
164 |
165 | Alternatively, one might adopt the more standard and more obvious solution of adding the equivalent path to the `PYTHONPATH` environment variable, which does get read by virtual environments and inherited by subshells.
166 |
167 | ### Requirements
168 |
169 | Python is obviously a requirement for disruption-py -- please make sure you are running the desired Python version before creating a new installation, e.g.:
170 |
171 | ```bash
172 | which python
173 | ```
174 |
175 | Python requirements should be installed from the Poetry-native lockfile `poetry.lock` committed to the repository in the main folder.
176 | If necessary, backward-compatible pip-style `requirements.txt` files can be produced through the [Poetry export command](https://python-poetry.org/docs/cli/#export).
177 |
178 | ### Virtual environments
179 |
180 | We _strongly encourage_ users to create a specific virtual environment for disruption-py usage.
181 | If using Poetry, one will be created automatically, e.g.:
182 |
183 | ```bash
184 | poetry install --with dev
185 | ```
186 |
--------------------------------------------------------------------------------
/disruption_py/settings/log_settings.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | This module defines the LogSettings class, which provides settings and setup for
5 | logging in both files and console with customizable levels and formats.
6 | """
7 |
8 | import importlib.metadata
9 | import os
10 | import sys
11 | from dataclasses import dataclass
12 | from functools import partialmethod
13 | from typing import Union
14 |
15 | from loguru import logger
16 | from tqdm.auto import tqdm
17 |
18 | from disruption_py.core.utils.misc import get_commit_hash, get_temporary_folder
19 |
20 | LogSettingsType = Union["LogSettings", str, int]
21 |
22 |
@dataclass
class LogSettings:
    """
    Settings for configuring logging.

    Attributes
    ----------
    file_path : str, optional
        Path to the log file. If None, no log file will be created.
        By default, a log file will be created in a temporary folder.
    file_level : str
        Logging level for the log file (default is "DEBUG").
        Possible values are:
        "TRACE", "DEBUG", "VERBOSE" (custom), "INFO", "SUCCESS", "WARNING", "ERROR", "CRITICAL".
        See: https://loguru.readthedocs.io/en/stable/api/logger.html#levels
    console_level : str or int, optional
        The log level for the console. Default is None, so log level will be determined
        dynamically based on the number of shots.
        Possible values are:
        "TRACE", "DEBUG", "VERBOSE" (custom), "INFO", "SUCCESS", "WARNING", "ERROR", "CRITICAL".
        See: https://loguru.readthedocs.io/en/stable/api/logger.html#levels
    warning_threshold : int
        If number of shots is greater than this threshold, the console log level will
        be "WARNING". Default is 1000.
    success_threshold : int
        If number of shots is greater than this threshold and less than the warning_threshold,
        the console log level will be "SUCCESS". Default is 500.
    info_threshold : int
        If number of shots is greater than this threshold and less than the success_threshold,
        the console log level will be "INFO". Default is 50.
    _logging_has_been_setup : bool
        Internal flag to prevent multiple setups (default is False).
    """

    # NOTE: the default path is computed once, at class-definition time, so all
    # default-constructed instances share the same log file location.
    file_path: str = os.path.join(get_temporary_folder(), "output.log")
    file_level: str = "DEBUG"
    # annotation widened to match the documented contract ("str or int, optional")
    console_level: Union[str, int, None] = None

    warning_threshold: int = 1000
    success_threshold: int = 500
    info_threshold: int = 50

    _logging_has_been_setup: bool = False

    def reset_handlers(self, num_shots: int = None):
        """
        Remove default logger and set up custom handlers.

        Parameters
        ----------
        num_shots : int, optional
            Number of shots to determine the console log level dynamically.
            Only used when console_level is None.
        """
        # Remove default logger
        logger.remove()

        # formats
        message_format = "[{level:^7s}] {message}"
        console_format = "{time:HH:mm:ss.SSS} " + message_format
        file_format = "{time:YYYY-MM-DD HH:mm:ss.SSS} " + message_format

        if self.console_level is None:
            # Determine console log level dynamically based on the number of shots:
            # the more shots, the quieter the console (see class docstring)
            console_level = "VERBOSE"
            if num_shots and num_shots > self.warning_threshold:
                console_level = "WARNING"
            elif num_shots and num_shots > self.success_threshold:
                console_level = "SUCCESS"
            elif num_shots and num_shots > self.info_threshold:
                console_level = "INFO"
        elif isinstance(self.console_level, str):
            # normalize user-provided level names, e.g. "info" -> "INFO"
            console_level = self.console_level.upper()
        else:
            # numeric severity, passed through to loguru as-is
            console_level = self.console_level

        # Add console handler
        # writing through tqdm.write keeps log lines from clobbering any
        # active tqdm progress bar
        logger.add(
            lambda msg: tqdm.write(msg, end=""),
            level=console_level,
            format=console_format,
            colorize=True,
            enqueue=True,
            backtrace=False,
            diagnose=True,
        )

        # Add file handler if log file path is provided
        if self.file_path is not None:
            logger.add(
                self.file_path,
                level=self.file_level,
                format=file_format,
                mode="w",  # overwrite any previous log at this path
                enqueue=True,
                backtrace=False,
                diagnose=True,
            )

    def setup_logging(self):
        """
        Set up logging with custom styles and levels.

        Idempotent: calls after the first successful setup are no-ops.
        """
        if self._logging_has_been_setup:
            return

        # Set custom style and add a VERBOSE level. This only needs to be done
        # once, so there is no need to add it to the reset_handlers method.
        logger.level("TRACE", color="")
        logger.level("DEBUG", color="")
        # A level's severity number (`no`) can only be assigned once, so first
        # try updating the color of an already-registered VERBOSE level (this
        # might happen if the logger is re-initialized); loguru raises
        # ValueError for an unknown level, in which case we create it.
        try:
            logger.level("VERBOSE", color="")
        except ValueError:
            # no=15 places VERBOSE between loguru's DEBUG (10) and INFO (20)
            logger.level("VERBOSE", color="", no=15)
        logger.level("INFO", color="")
        logger.level("SUCCESS", color="")
        logger.level("WARNING", color="")
        logger.level("ERROR", color="")
        # Bind the verbose level to the class so it can be used in any file even
        # after changing the logger instance
        logger.__class__.verbose = partialmethod(logger.__class__.log, "VERBOSE")

        self.reset_handlers(num_shots=None)

        # header: log package version, commit, and user@host for provenance
        package, *_ = __name__.split(".")
        commit = get_commit_hash()
        logger.info(
            "Starting: {p} ~ v{v}{t}{c} / {u}@{h}",
            p=package,
            v=importlib.metadata.version(package),
            t=" # " if commit else "",
            c=commit,
            u=os.getenv("USER"),
            h=os.uname().nodename,
        )
        if self.file_path is not None:
            logger.info("Logging: {l}", l=self.file_path)
        logger.debug(
            "Repository: {url}{append}{commit}",
            url="https://github.com/MIT-PSFC/disruption-py",
            append="/commit/" if commit else "",
            commit=commit,
        )
        logger.debug("Executable: {e}", e=sys.executable)

        self._logging_has_been_setup = True
171 |
172 |
def resolve_log_settings(
    log_settings: LogSettingsType,
) -> LogSettings:
    """
    Coerce any accepted log-setting specification into a LogSettings instance.

    Parameters
    ----------
    log_settings : LogSettingsType
        The log setting to resolve: an existing LogSettings (returned as-is),
        a str or int console log level, a dict of LogSettings keyword
        arguments, or None for all defaults.

    Returns
    -------
    LogSettings
        The resolved LogSettings instance.

    Raises
    ------
    ValueError
        If the argument is of an unsupported type.
    """
    if log_settings is None:
        return LogSettings()
    if isinstance(log_settings, LogSettings):
        return log_settings
    if isinstance(log_settings, dict):
        return LogSettings(**log_settings)
    if isinstance(log_settings, (str, int)):
        return LogSettings(console_level=log_settings)
    raise ValueError(f"Invalid log settings {log_settings}")
203 |
--------------------------------------------------------------------------------
/disruption_py/settings/shotlist_setting.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Handles retrieving shotlists from various sources including lists, files, and SQL
5 | databases.
6 | """
7 |
8 | from abc import ABC, abstractmethod
9 | from dataclasses import dataclass
10 | from importlib import resources
11 | from typing import Dict, List, Type, Union
12 |
13 | import numpy as np
14 | import pandas as pd
15 |
16 | import disruption_py.data
17 | from disruption_py.core.utils.enums import map_string_to_enum
18 | from disruption_py.inout.sql import ShotDatabase
19 | from disruption_py.machine.tokamak import Tokamak
20 |
21 |
@dataclass
class ShotlistSettingParams:
    """
    Params passed by disruption_py to _get_shotlist() method.

    Attributes
    ----------
    database : ShotDatabase
        Database object to use for getting shotlist.
        A different database connection is used by each process.
        Defaults to logbook.
    tokamak : Tokamak
        The tokamak that data is being retrieved for.
    """

    # SQL database handle used by settings that query for shots
    database: ShotDatabase
    # tokamak the retrieval run targets; used to pick per-tokamak overrides
    tokamak: Tokamak
39 |
40 |
class ShotlistSetting(ABC):
    """ShotlistSetting abstract class that should be inherited by all shotlist setting classes."""

    def get_shotlist(self, params: ShotlistSettingParams) -> List:
        """
        Retrieve the shotlist based on the provided parameters.

        Subclasses may declare a ``tokamak_overrides`` mapping from tokamak
        to callable; a matching override takes precedence over the generic
        ``_get_shotlist`` implementation.

        Parameters
        ----------
        params : ShotlistSettingParams
            The parameters containing the database and tokamak used
            to determine the shotlist.

        Returns
        -------
        List
            A list of shot IDs retrieved.
        """
        overrides = getattr(self, "tokamak_overrides", None)
        if overrides is not None and params.tokamak in overrides:
            override = overrides[params.tokamak]
            return override(params)
        return self._get_shotlist(params)

    @abstractmethod
    def _get_shotlist(self, params: ShotlistSettingParams) -> List:
        """
        Abstract method implemented by subclasses to get shotlist for the given setting params.

        Parameters
        ----------
        params : ShotlistSettingParams
            Params that can be used to determine shotlist.
        """
74 |
75 |
class FileShotlistSetting(ShotlistSetting):
    """
    Use `pandas.read_csv` to read a file, then extract and use values from any column.

    Directly passing a file path as a string to the shotlist setting with the file name suffixed
    by txt or csv will automatically create a new FileShotlistSetting object with that file path.

    Parameters
    ----------
    file_path : str
        The file path of the file that should be used for retrieving the shotlist.
    column_index : int
        The index of the column that should be read. Defaults to 0.
    **kwargs : Dict
        Optional keyword arguments dictionary to be passed to `pandas.read_csv`.
    """

    def __init__(self, file_path: str, column_index: int = 0, **kwargs: Dict):
        self.file_path = file_path
        self.column_index = column_index
        self.kwargs = kwargs
        # cached result of the first read; reused by subsequent calls
        self.shotlist = []

    def _get_shotlist(self, params: ShotlistSettingParams) -> List:
        """Read the configured column and return it as a list of ints (cached)."""
        if not self.shotlist:
            self.kwargs.setdefault("header", "infer")
            df = pd.read_csv(self.file_path, **self.kwargs)
            # Select the column via iloc instead of `.values[:, idx]`:
            # `.values` converts the whole frame to a single common dtype,
            # needlessly coercing the numeric shot column when the file also
            # contains text columns (e.g. the notes column in cmod_ufo.csv).
            column = df.iloc[:, self.column_index]
            self.shotlist = column.astype(int).tolist()
        return self.shotlist
106 |
107 |
class IncludedShotlistSetting(FileShotlistSetting):
    """
    Use the shotlist from one of the provided data files.

    Directly passing a key from the _get_shotlist_setting_mappings dictionary as a string will
    automatically create a new IncludedShotlistSetting object with that file_name.

    Parameters
    ----------
    file_name : str
        The name of the datafile that should be used to retrieve the shotlist.
    **kwargs : Dict
        Optional keyword arguments dictionary to be passed to `FileShotlistSetting`.
    """

    def __init__(self, file_name: str, **kwargs: Dict):
        data = resources.files(disruption_py.data)
        file = data.joinpath(file_name)
        with resources.as_file(file) as file_path:
            super().__init__(file_path, **kwargs)
            # Read the file eagerly, while the context is still open:
            # `resources.as_file` may materialize the resource as a temporary
            # file (e.g. when the package is installed as a zip) that is
            # deleted on context exit, so deferring the read to the first
            # `_get_shotlist` call could hit a dangling path. The params
            # argument is unused by FileShotlistSetting._get_shotlist.
            self._get_shotlist(None)
128 |
129 |
class DatabaseShotlistSetting(ShotlistSetting):
    """
    Use an sql query of the database to retrieve the shotlist.

    Parameters
    ----------
    sql_query : str
        The sql query that should be used for retrieving shotlist.
    use_pandas : bool
        Whether Pandas should be used to do the sql query. Defaults to true.
    """

    def __init__(self, sql_query, use_pandas=True):
        self.sql_query = sql_query
        self.use_pandas = use_pandas

    def _get_shotlist(self, params: ShotlistSettingParams) -> List:
        """Run the configured query and return the first column of the result."""
        if not self.use_pandas:
            # raw query: result is a sequence of rows, shot id first
            rows = params.database.query(query=self.sql_query, use_pandas=False)
            return [row[0] for row in rows]
        # pandas query: result is a DataFrame, shot ids in the first column
        frame = params.database.query(query=self.sql_query, use_pandas=True)
        return frame.iloc[:, 0].tolist()
154 |
155 |
# Named, ready-made shotlist settings selectable by passing their string key
# (e.g. "cmod_ufo") directly as a shotlist setting.
# --8<-- [start:get_shotlist_setting_dict]
_get_shotlist_setting_mappings: Dict[str, ShotlistSetting] = {
    "disruption_warning": DatabaseShotlistSetting(
        "select distinct shot from disruption_warning"
    ),
    "plasmas": DatabaseShotlistSetting(
        """
        if exists (select * from information_schema.tables where table_name = 'summary')
        begin
        select distinct shot from summary where ipmax > 100e3 and pulse_length > 0.1;
        end
        else if exists (select * from information_schema.tables where table_name = 'summaries')
        begin
        select distinct shot from summaries where ipmax > 100e3 and pulse_length > 0.1;
        end
        """
    ),
    "cmod_ufo": IncludedShotlistSetting("cmod_ufo.csv"),
    "cmod_vde": IncludedShotlistSetting("cmod_vde.csv"),
}
# --8<-- [end:get_shotlist_setting_dict]

# File-name suffixes mapped to the ShotlistSetting class used to load them;
# a plain string path ending in one of these suffixes is handled implicitly.
# --8<-- [start:file_suffix_to_shotlist_setting_dict]
_file_suffix_to_shotlist_setting: Dict[str, Type[ShotlistSetting]] = {
    ".txt": FileShotlistSetting,
    ".csv": FileShotlistSetting,
}
# --8<-- [end:file_suffix_to_shotlist_setting_dict]

# Recursive union of everything shotlist_setting_runner() accepts: a setting
# object, a single shot id (int or numeric string), a named/path string, a
# per-tokamak mapping, or a (possibly nested) list of any of the above.
ShotlistSettingType = Union[
    "ShotlistSetting",
    int,
    str,
    Dict[Tokamak, "ShotlistSettingType"],
    List["ShotlistSettingType"],
]
192 |
193 |
def shotlist_setting_runner(shotlist_setting, params: ShotlistSettingParams):
    """
    Retrieve list of shot ids for the given shotlist setting.

    The setting may be a ShotlistSetting instance, a single shot id (int,
    numpy integer, or digit string), the name of a built-in shotlist, a file
    path with a known suffix, a per-tokamak dictionary, or a (possibly
    nested) list of any of the above.

    Parameters
    ----------
    shotlist_setting : ShotlistSettingType
        The setting to resolve into a flat list of shot ids.
    params : ShotlistSettingParams
        Params (database, tokamak) used to resolve the setting.

    Returns
    -------
    List
        A flat list of shot ids.

    Raises
    ------
    ValueError
        If the setting cannot be interpreted.
    """
    if isinstance(shotlist_setting, ShotlistSetting):
        # Do not immediately return the list because it may be multidimensional
        # and would need to be handled as such below
        shotlist_setting = shotlist_setting.get_shotlist(params)

    # np.integer accepts every numpy integer width (int32, int64, ...),
    # not just np.int64, so shots taken from any integer array are handled.
    if isinstance(shotlist_setting, (int, np.integer)) or (
        isinstance(shotlist_setting, str) and shotlist_setting.isdigit()
    ):
        return [shotlist_setting]

    if isinstance(shotlist_setting, str):
        # first, check whether the string names a built-in shotlist
        shotlist_setting_object = _get_shotlist_setting_mappings.get(shotlist_setting)
        if shotlist_setting_object is not None:
            return shotlist_setting_object.get_shotlist(params)
        # otherwise, assume that it is a file path
        for suffix, shotlist_setting_type in _file_suffix_to_shotlist_setting.items():
            if shotlist_setting.endswith(suffix):
                return shotlist_setting_type(shotlist_setting).get_shotlist(params)

    if isinstance(shotlist_setting, dict):
        # normalize string keys to Tokamak enum members, then dispatch on
        # the tokamak currently being retrieved
        shotlist_setting = {
            map_string_to_enum(tokamak, Tokamak): shotlist_setting_mapping
            for tokamak, shotlist_setting_mapping in shotlist_setting.items()
        }
        chosen_setting = shotlist_setting.get(params.tokamak)
        if chosen_setting is not None:
            return shotlist_setting_runner(chosen_setting, params)

    if isinstance(shotlist_setting, (list, np.ndarray)):
        # resolve each element recursively and flatten the results
        all_results = []
        for setting in shotlist_setting:
            sub_result = shotlist_setting_runner(setting, params)
            if sub_result is not None:
                all_results.append(sub_result)

        return [shot_id for sub_list in all_results for shot_id in sub_list]

    raise ValueError(f"Invalid shot id setting: {shotlist_setting!r}")
238 |
--------------------------------------------------------------------------------
/disruption_py/core/retrieval_manager.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Module for managing retrieval of shot data from a tokamak.
5 | """
6 |
7 | import numpy as np
8 | import pandas as pd
9 | from loguru import logger
10 |
11 | from disruption_py.core.physics_method.params import PhysicsMethodParams
12 | from disruption_py.core.physics_method.runner import populate_shot
13 | from disruption_py.core.utils.misc import shot_msg
14 | from disruption_py.inout.mds import MDSConnection, ProcessMDSConnection, mdsExceptions
15 | from disruption_py.inout.sql import ShotDatabase
16 | from disruption_py.machine.tokamak import Tokamak
17 | from disruption_py.settings.nickname_setting import NicknameSettingParams
18 | from disruption_py.settings.retrieval_settings import RetrievalSettings
19 | from disruption_py.settings.time_setting import TimeSettingParams
20 |
21 |
class RetrievalManager:
    """
    Class for managing the retrieval of shot data from a tokamak.

    Orchestrates the per-shot lifecycle: setup (timebase and MDS connection),
    data retrieval via the physics methods, and cleanup.

    Attributes
    ----------
    tokamak : Tokamak
        The tokamak instance.
    process_database : ShotDatabase
        The SQL database
    process_mds_conn : ProcessMDSConnection
        The MDS connection
    """

    def __init__(
        self,
        tokamak: Tokamak,
        process_database: ShotDatabase,
        process_mds_conn: ProcessMDSConnection,
    ):
        """
        Parameters
        ----------
        tokamak : Tokamak
            The tokamak instance.
        process_database : ShotDatabase
            The SQL database.
        process_mds_conn : ProcessMDSConnection
            The MDS connection.
        """
        self.tokamak = tokamak
        self.process_database = process_database
        self.process_mds_conn = process_mds_conn

    def get_shot_data(
        self, shot_id, retrieval_settings: RetrievalSettings
    ) -> pd.DataFrame | None:
        """
        Get data for a single shot. May be run across different processes.

        Parameters
        ----------
        shot_id : int
            The ID of the shot to retrieve data for.
        retrieval_settings : RetrievalSettings
            The settings for data retrieval.

        Returns
        -------
        pd.DataFrame, or None
            The retrieved shot data as a DataFrame, or None if an error occurred
            during setup, retrieval, or cleanup.
        """

        logger.trace(shot_msg("Starting retrieval."), shot=shot_id)

        # shot setup
        try:
            physics_method_params = self.shot_setup(
                shot_id=int(shot_id),
                retrieval_settings=retrieval_settings,
            )
        # pylint: disable-next=broad-exception-caught
        except Exception as e:
            # setup failure is fatal for this shot: log and bail out early
            logger.critical(shot_msg("Failed setup! {e}"), shot=shot_id, e=repr(e))
            logger.opt(exception=True).debug(shot_msg("Failed setup!"), shot=shot_id)
            return None

        # shot retrieval
        try:
            retrieved_data = populate_shot(
                retrieval_settings=retrieval_settings,
                physics_method_params=physics_method_params,
            )
        # pylint: disable-next=broad-exception-caught
        except Exception as e:
            # exceptions should be caught by runner.py
            logger.critical(shot_msg("Failed retrieval! {e}"), shot=shot_id, e=repr(e))
            logger.opt(exception=True).debug(
                shot_msg("Failed retrieval!"), shot=shot_id
            )
            # a low-level MDSplus failure may leave the connection in a bad
            # state; reconnect so the next shot can reuse it
            if isinstance(e, mdsExceptions.MDSplusERROR):
                physics_method_params.mds_conn.reconnect()
            retrieved_data = None

        # shot cleanup
        try:
            self.shot_cleanup(physics_method_params)
        # pylint: disable-next=broad-exception-caught
        except Exception as e:
            logger.critical(shot_msg("Failed cleanup! {e}"), shot=shot_id, e=repr(e))
            logger.opt(exception=True).debug(shot_msg("Failed cleanup!"), shot=shot_id)
            if isinstance(e, mdsExceptions.MDSplusERROR):
                physics_method_params.mds_conn.reconnect()
            # NOTE(review): a cleanup failure discards data that was already
            # retrieved successfully — confirm this is intended.
            retrieved_data = None

        return retrieved_data

    def shot_setup(
        self, shot_id: int, retrieval_settings: RetrievalSettings, **kwargs
    ) -> PhysicsMethodParams | None:
        """
        Sets up the shot properties for the tokamak.

        Parameters
        ----------
        shot_id : int
            The ID of the shot to set up.
        retrieval_settings : RetrievalSettings
            The settings for data retrieval.
        **kwargs : dict
            Additional keyword arguments.

        Returns
        -------
        PhysicsMethodParams, or None
            Parameters containing MDS connection and shot information

        Raises
        ------
        ValueError
            If the resolved timebase contains fewer than two points.
        """

        disruption_time = self.process_database.get_disruption_time(shot_id=shot_id)

        mds_conn = self.process_mds_conn.get_shot_connection(shot_id=shot_id)

        # Register a factory for the "_efit_tree" nickname; the lambda is
        # presumably evaluated lazily by the connection when the nickname is
        # first requested — TODO confirm against MDSConnection.
        mds_conn.add_tree_nickname_funcs(
            tree_nickname_funcs={
                "_efit_tree": lambda: retrieval_settings.efit_nickname_setting.get_tree_name(
                    NicknameSettingParams(
                        shot_id=shot_id,
                        mds_conn=mds_conn,
                        database=self.process_database,
                        disruption_time=disruption_time,
                        tokamak=self.tokamak,
                    )
                )
            }
        )

        physics_method_params = self.setup_physics_method_params(
            shot_id=shot_id,
            mds_conn=mds_conn,
            disruption_time=disruption_time,
            retrieval_settings=retrieval_settings,
            **kwargs,
        )
        # fewer than two time points cannot form a usable timebase
        if len(physics_method_params.times) < 2:
            raise ValueError("Pathological timebase.")
        return physics_method_params

    @classmethod
    def shot_cleanup(
        cls,
        physics_method_params: PhysicsMethodParams,
    ):
        """
        Clean up the physics method parameters.

        Parameters
        ----------
        cls : type
            The class type.
        physics_method_params : PhysicsMethodParams
            Parameters containing MDS connection and shot information.
        """
        physics_method_params.cleanup()

    def setup_physics_method_params(
        self,
        shot_id: int,
        mds_conn: MDSConnection,
        disruption_time: float,
        retrieval_settings: RetrievalSettings,
    ) -> PhysicsMethodParams:
        """
        Set up the physics method parameters for the shot.

        Parameters
        ----------
        shot_id : int
            The ID of the shot.
        mds_conn : MDSConnection
            The MDS connection for the shot.
        disruption_time : float
            The disruption time of the shot.
        retrieval_settings : RetrievalSettings
            The settings for data retrieval.

        Returns
        -------
        PhysicsMethodParams
            The configured physics method parameters.
        """

        # resolve the shot timebase first; everything downstream depends on it
        times = self._init_times(
            shot_id=shot_id,
            mds_conn=mds_conn,
            disruption_time=disruption_time,
            retrieval_settings=retrieval_settings,
        )

        physics_method_params = PhysicsMethodParams(
            shot_id=shot_id,
            tokamak=self.tokamak,
            disruption_time=disruption_time,
            mds_conn=mds_conn,
            times=times,
        )

        return physics_method_params

    def _init_times(
        self,
        shot_id: int,
        mds_conn: MDSConnection,
        disruption_time: float,
        retrieval_settings: RetrievalSettings,
    ) -> np.ndarray:
        """
        Initialize the timebase of the shot.

        Delegates to the configured time setting, passing it everything it
        may need (connection, database, disruption time, tokamak).

        Parameters
        ----------
        shot_id : int
            The ID of the shot.
        mds_conn : MDSConnection
            The MDS connection for the shot.
        disruption_time : float
            The disruption time of the shot.
        retrieval_settings : RetrievalSettings
            The settings for data retrieval.

        Returns
        -------
        np.ndarray
            The initialized timebase as a NumPy array.
        """
        setting_params = TimeSettingParams(
            shot_id=shot_id,
            mds_conn=mds_conn,
            database=self.process_database,
            disruption_time=disruption_time,
            tokamak=self.tokamak,
        )
        return retrieval_settings.time_setting.get_times(setting_params)
264 |
--------------------------------------------------------------------------------
/disruption_py/data/cmod_ufo.csv:
--------------------------------------------------------------------------------
1 | shot,injection_time,duration,thermal_quench_time,vetted,notes
2 | 1120106009,1.419925259590149,0.0083008537292479,1.4284259092933025,0,
3 | 1120106022,1.022206114768982,0.0009999999999998,1.0234101349261076,1,Fe and Ca injections
4 | 1120118001,1.1411611309051517,0.0049541721343993,1.1467081640341859,0,
5 | 1120125008,0.7635446062088013,0.0035665163993835,0.7678791314730729,1,Mo injection
6 | 1120131008,1.445440100669861,0.0031638870239256,1.4537097074757726,0,
7 | 1120131010,1.1470923891067506,0.0015257129669188,1.1486340704560802,0,
8 | 1120207010,0.9375249614715576,0.0029770860671997,0.9454333283327954,0,
9 | 1120210016,0.8722823133468628,0.0089082250595092,0.8817024104653579,0,
10 | 1120210022,1.2261742820739747,0.001130772590637,1.2359312617529994,0,
11 | 1120216027,1.453524040222168,0.0009999999999998,1.4549583128415302,0,
12 | 1120222008,0.90589218044281,0.0055678615570068,0.9120179492499877,0,
13 | 1120222010,0.6488757004737854,0.0089081654548645,0.6581727769173287,0,
14 | 1120223007,1.0047446956634525,0.0067194232940672,1.011627767321869,0,
15 | 1120501008,1.3108001222610477,0.0013873109817503,1.3165180521503062,0,
16 | 1120607006,1.2400136461257936,0.0029770860671995,1.24313617096243,1,Mo injection between CaF2 Laser Blow-off (LBO) injections scheduled every 0.1s
17 | 1120615013,1.2281513681411744,0.0022366771697996,1.230405942715089,0,
18 | 1120626008,1.204426812171936,0.0070333013534544,1.212908916988897,0,
19 | 1120626020,1.3092104663848878,0.0010696182250975,1.3134339375695305,0,
20 | 1120705009,0.9454331865310668,0.0082388648986816,0.9548966673099006,0,
21 | 1120710027,0.9157774319648744,0.0015466341972351,0.9196755257663473,0,
22 | 1120711016,0.933570848941803,0.0069311985969543,0.942954121335919,0,
23 | 1120713018,1.0897578468322755,0.0029770860671995,1.0972984450881889,0,
24 | 1120713020,0.9236856570243837,0.0074504742622375,0.9312008346353438,0,
25 | 1120718012,0.6464601030349731,0.0014385113716125,0.6489228122272189,0,
26 | 1120724010,0.9750889410972596,0.001335156917572,0.9768788044531879,0,
27 | 1120727010,0.680508481502533,0.0057476291656494,0.6871653941273633,0,
28 | 1120731002,0.807039665222168,0.0013524422645568,0.8088778420308373,1,Fe injection
29 | 1120731004,0.7101642360687256,0.0054718384742736,0.7170166152787247,0,
30 | 1120801012,0.7635446062088013,0.0069311389923095,0.7704807055428728,0,
31 | 1120803008,0.5520002117156982,0.0036478767395019,0.5561693310675193,1,Fe injection
32 | 1120824007,1.186633275985718,0.0089082250595091,1.196631135693217,0,
33 | 1120824015,1.4377186765670775,0.0033974180221556,1.4416735202231594,0,
34 | 1120824024,0.6192199459075928,0.0069311389923095,0.6284576057109234,0,
35 | 1120828014,0.5737478008270264,0.0054522881507873,0.5793912509183865,0,
36 | 1120829007,0.6192199459075928,0.0083121190071105,0.6283883417327495,0,
37 | 1120830023,0.8505347838401794,0.0056733021736145,0.8603805720149459,0,
38 | 1120831001,0.6706232299804687,0.0037768611907958,0.6746363043389693,1,Fe injection
39 | 1120831013,0.3167312910556793,0.0029770562648773,0.31997140480809805,1,Mo injection
40 | 1120906002,0.6666691174507141,0.004974974155426,0.674633011625901,0,
41 | 1120906003,1.079872654914856,0.0049540529251097,1.0867226411113053,0,
42 | 1120907017,0.654806839466095,0.0023692378997802,0.6573938519915165,0,
43 | 1120907022,1.2637383213043214,0.001941753387451,1.2656984747754496,0,
44 | 1120907025,1.1823361387252809,0.0009999999999998,1.1837478676742843,0,
45 | 1120914018,1.2333560457229615,0.0017264614105223,1.237379885715889,0,
46 | 1120917014,0.755636381149292,0.0017997155189514,0.7574367080837685,0,
47 | 1120927005,0.4056985249519348,0.0060375461578369,0.412180545902117,0,
48 | 1120927006,0.3285935988426208,0.0049540827274322,0.334714611935532,0,
49 | 1120927011,1.2933940162658693,0.0028581151962279,1.2962545603169005,1,Mo injection
50 | 1120927014,1.2676923742294313,0.0029770860671995,1.2707121594524986,0,
51 | 1140328012,1.166862892150879,0.0069311389923094,1.1760068250350322,0,
52 | 1140415009,1.2980918159484864,0.0022333393096922,1.3036793207350672,0,
53 | 1140522001,0.6073576083183289,0.005402220249176,0.6129774293360591,0,
54 | 1140522003,0.9671807160377502,0.0052871832847595,0.9726726223687494,1,Shortly after ICRF turned on
55 | 1140522005,0.8090167512893677,0.0010231266021728,0.8101255881336238,0,
56 | 1140522007,1.1905874481201173,0.0014483461380003,1.192175640747502,0,
57 | 1140522018,0.8363918533325195,0.001,0.8376796276359789,1,Impurity unclear based on spectrum. Some Mo present before large injection
58 | 1140522020,0.8366954197883606,0.0031204352378845,0.8399574241542863,1,Impurity unclear based on spectrum. Some Mo present before large injection
59 | 1140523024,1.0126529207229615,0.0039349327087401,1.0194518406759838,0,
60 | 1140605022,1.3408432474136354,0.0039885768890379,1.345182265850002,1,Mo injection
61 | 1140606001,0.5381609072685242,0.0068589577674865,0.5454511249470774,0,
62 | 1140613025,0.7655216326713562,0.0089082250595092,0.7749445857932222,0,
63 | 1140625018,1.1035972108840943,0.0069427022933958,1.1109140540911724,0,
64 | 1140701029,1.4831908216476442,0.0068209896087645,1.4909496705922098,0,
65 | 1140717002,1.0264922847747804,0.0027675161361693,1.0299513017105804,0,
66 | 1140722018,0.5243215432167053,0.0013663301467895,0.5258849562091255,0,
67 | 1140731037,1.0502168407440189,0.0029770860671995,1.057170009345997,0,
68 | 1150616015,0.9671807160377502,0.0049540529251098,0.9762238050766213,0,
69 | 1150714014,0.9236856570243837,0.0089184174537658,0.9334469498586647,0,
70 | 1150714029,0.8011085262298584,0.0059995779991149,0.8071445797528044,0,
71 | 1150722005,1.3902694454193116,0.0059026012420653,1.3962220091855917,0,
72 | 1150722007,0.9810200800895692,0.0076600441932678,0.9889050435699798,0,
73 | 1150728029,0.9533413519859314,0.0035027394294738,0.9569324570027306,0,
74 | 1150729004,1.3487514724731446,0.0014086494445799,1.358405782799297,0,
75 | 1150729008,0.5559543242454529,0.0076977739334106,0.5636592848301419,0,
76 | 1150826017,0.59914808177948,0.001,0.6001501776032747,0,
77 | 1150827008,1.4733056297302247,0.0049540529251097,1.4824545155050692,0,
78 | 1150827009,0.2192760875225067,0.001579759478569,0.22218431550122322,0,
79 | 1150904009,1.2479218711853028,0.0069311389923094,1.2554737920324832,0,
80 | 1150904010,1.2578071823120118,0.0069311389923094,1.2651275392355148,0,
81 | 1150904012,1.2518759241104127,0.0074041614532469,1.2593807749813002,0,
82 | 1150904014,1.0660332908630372,0.0049540529251097,1.0714555939603876,0,
83 | 1150904015,1.1233677139282228,0.0060003528594969,1.1294778930643399,0,
84 | 1150904017,1.2419907321929933,0.0029770860671995,1.2454079295316793,0,
85 | 1150904020,1.1115054359436036,0.0057026872634886,1.1179699254699111,0,
86 | 1150916025,0.9572955241203308,0.0024085173606872,0.9597067611243599,1,Likely Mo injection. Based on spectrum there could be additional high-Z impurity types.
87 | 1150917001,0.4373313357830047,0.0055247375965118,0.4435033772237849,0,
88 | 1150923012,1.2518759241104127,0.0053241977691649,1.2572019559664396,0,
89 | 1150928025,1.4733056297302247,0.0029024610519408,1.4762090253074514,1,Mo injection
90 | 1150928026,0.3246394863128662,0.0049541125297546,0.3302082113154773,0,
91 | 1160503009,0.8999610414505005,0.0049750337600708,0.9060042077669405,0,
92 | 1160503020,0.7002789845466614,0.0029971132278442,0.7089239577777233,0,
93 | 1160504006,1.4001547565460206,0.0029932985305785,1.4078863724987558,0,
94 | 1160504009,1.2004726400375367,0.0024154911041258,1.20297389214806,0,
95 | 1160504012,1.2004726400375367,0.0023794898986815,1.2028766378173124,0,
96 | 1160512032,1.5365711917877198,0.0020968446731566,1.5389685551695944,1,Mo injection
97 | 1160520006,0.5203674306869507,0.0069311389923095,0.5292286946087499,0,
98 | 1160608008,0.8505347838401794,0.0071373114585876,0.8582734673892559,0,
99 | 1160608015,0.8544888963699341,0.0049540529251098,0.8631776661740019,0,
100 | 1160615019,1.3527055253982545,0.0029770860671995,1.3560320380205462,0,
101 | 1160617015,0.4610559215545654,0.0029770860671997,0.4676559709585288,0,
102 | 1160617027,0.5816559662818909,0.0049541125297546,0.5867414509950807,0,
103 | 1160712006,1.3922465314865111,0.0031855831146239,1.395467820930981,0,
104 | 1160728023,1.2281513681411744,0.0085206756591795,1.2366784411393479,0,
105 | 1160803030,0.9256626834869384,0.0049694309234619,0.9326443799688463,0,
106 | 1160805001,0.830764280796051,0.0012797842025756,0.8346817350393334,1,Mo injection
107 | 1160805017,0.5460690727233887,0.0061910281181335,0.5524804028124258,0,
108 | 1160809014,1.4555120935440065,0.0089082250595091,1.4652081818166172,0,
109 | 1160811011,1.2617612352371217,0.0063588151931761,1.2681981175339712,0,
110 | 1160816019,0.8643741478919983,0.0049540529251098,0.8706710232329837,0,
111 | 1160817002,0.8604200353622437,0.0044080743789672,0.8666663648990534,0,
112 | 1160819006,0.3879050781726837,0.0012350211143493,0.389455132317897,0,
113 | 1160826007,1.41003994846344,0.007572127342224,1.4176358774922988,0,
114 | 1160826009,0.7734298577308655,0.0029982457160949,0.7768897364212038,1,Mo injection; early locked mode but plasma recovers before injection
115 | 1160831018,0.4590788950920105,0.0069311389923095,0.4669068050982785,0,
116 | 1160908021,0.8838320841789246,0.003289593219757,0.892199629976638,0,
117 | 1160916007,0.9632266631126404,0.0049540529251098,0.9701788868774036,0,
118 | 1160921016,1.3922465314865111,0.0049540529251097,1.3994786319502979,0,
119 | 1160929007,1.208380865097046,0.0029432306289671,1.2114486811330767,1,Mo injection
120 | 1160929008,1.2617612352371217,0.0048588047027586,1.2667016239325242,1,Mo injection
121 | 1160929009,0.8287872543334961,0.002568853855133,0.831428903604218,1,Mo injection
122 | 1160929030,0.490711676120758,0.0049541125297546,0.4972811359435042,0,
123 | 1160930025,1.28153173828125,0.0049763450622557,1.2901531344780073,0,
124 |
--------------------------------------------------------------------------------