├── .flake8 ├── .github └── workflows │ ├── codeql.yml │ ├── publish-to-pypi.yml │ ├── pylint.yml │ └── tests.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yaml ├── CITATION.cff ├── CONTRIBUTING.rst ├── LICENSE.txt ├── MANIFEST.in ├── README.rst ├── docs ├── Makefile ├── _static │ ├── example_dispersion_one_fluid_nb_thumbnail.png │ ├── example_mms_b_e_j_nb_thumbnail.png │ ├── example_mms_ebfields_nb_thumbnail.png │ ├── example_mms_edr_signatures_nb_thumbnail.png │ ├── example_mms_eis_nb_thumbnail.png │ ├── logo-pyrfu.png │ ├── logo-pyrfu.svg │ └── quick-overview_nb_thumbnail.png ├── conf.py ├── contributing.rst ├── dev │ ├── index.rst │ └── modules.rst ├── examples │ ├── 00_overview │ │ ├── .ipynb_checkpoints │ │ │ └── quick-overview-checkpoint.ipynb │ │ ├── index.rst │ │ └── quick-overview.ipynb │ ├── 01_mms │ │ ├── example_mms_b_e_j.ipynb │ │ ├── example_mms_ebfields.ipynb │ │ ├── example_mms_edr_signatures.ipynb │ │ ├── example_mms_eis.ipynb │ │ ├── example_mms_electron_psd.ipynb │ │ ├── example_mms_feeps.ipynb │ │ ├── example_mms_hpca.ipynb │ │ ├── example_mms_ipshocks.ipynb │ │ ├── example_mms_ohmslaw.ipynb │ │ ├── example_mms_particle_deflux.ipynb │ │ ├── example_mms_particle_distributions.ipynb │ │ ├── example_mms_particle_pad.ipynb │ │ ├── example_mms_polarizationanalysis.ipynb │ │ ├── example_mms_reduced_electron_dist.ipynb │ │ ├── example_mms_reduced_ion_dist.ipynb │ │ ├── example_mms_walen_test.ipynb │ │ └── index.rst │ ├── 02_dispersion │ │ ├── .ipynb_checkpoints │ │ │ └── example_dispersion_one_fluid-checkpoint.ipynb │ │ ├── example_dispersion_one_fluid.ipynb │ │ └── index.rst │ └── index.rst ├── index.rst ├── installation.rst └── requirements.txt ├── pyproject.toml ├── pyrfu ├── __init__.py ├── dispersion │ ├── __init__.py │ ├── disp_surf_calc.py │ └── one_fluid_dispersion.py ├── lp │ ├── __init__.py │ ├── photo_current.py │ └── thermal_current.py ├── maven │ ├── __init__.py │ ├── config.json │ ├── db_init.py │ └── download_data.py 
├── mms │ ├── __init__.py │ ├── ancillary.json │ ├── calculate_epsilon.py │ ├── config.json │ ├── copy_files.py │ ├── copy_files_ancillary.py │ ├── correct_edp_probe_timing.py │ ├── db_get_ts.py │ ├── db_get_variable.py │ ├── db_init.py │ ├── def2psd.py │ ├── dft_time_shift.py │ ├── download_ancillary.py │ ├── download_data.py │ ├── dpf2psd.py │ ├── dsl2gse.py │ ├── dsl2gsm.py │ ├── eis_ang_ang.py │ ├── eis_combine_proton_pad.py │ ├── eis_combine_proton_skymap.py │ ├── eis_combine_proton_spec.py │ ├── eis_moments.py │ ├── eis_omni.py │ ├── eis_pad.py │ ├── eis_pad_combine_sc.py │ ├── eis_pad_spinavg.py │ ├── eis_proton_correction.py │ ├── eis_skymap.py │ ├── eis_skymap_combine_sc.py │ ├── eis_spec_combine_sc.py │ ├── eis_spin_avg.py │ ├── estimate_phase_speed.py │ ├── feeps_active_eyes.py │ ├── feeps_bad_data.json │ ├── feeps_correct_energies.py │ ├── feeps_energy_table.py │ ├── feeps_flat_field_corrections.py │ ├── feeps_omni.py │ ├── feeps_pad.py │ ├── feeps_pad_spinavg.py │ ├── feeps_pitch_angles.py │ ├── feeps_remove_bad_data.py │ ├── feeps_remove_sun.py │ ├── feeps_sector_spec.py │ ├── feeps_spin_avg.py │ ├── feeps_split_integral_ch.py │ ├── fft_bandpass.py │ ├── fk_power_spectrum_4sc.py │ ├── get_data.py │ ├── get_dist.py │ ├── get_eis_allt.py │ ├── get_feeps_alleyes.py │ ├── get_feeps_omni.py │ ├── get_hpca_dist.py │ ├── get_pitch_angle_dist.py │ ├── get_ts.py │ ├── get_variable.py │ ├── hpca_calc_anodes.py │ ├── hpca_energies.py │ ├── hpca_pad.py │ ├── hpca_spin_sum.py │ ├── lh_wave_analysis.py │ ├── list_files.py │ ├── list_files_ancillary.py │ ├── list_files_ancillary_sdc.py │ ├── list_files_aws.py │ ├── list_files_sdc.py │ ├── load_ancillary.py │ ├── load_brst_segments.py │ ├── make_model_kappa.py │ ├── make_model_vdf.py │ ├── mms_keys.json │ ├── probe_align_times.py │ ├── psd2def.py │ ├── psd2dpf.py │ ├── psd_moments.py │ ├── psd_rebin.py │ ├── read_feeps_sector_masks_csv.py │ ├── reduce.py │ ├── remove_edist_background.py │ ├── 
remove_idist_background.py │ ├── remove_imoms_background.py │ ├── rotate_tensor.py │ ├── scpot2ne.py │ ├── spectr_to_dataset.py │ ├── sun │ │ ├── MMS1_FEEPS_ContaminatedSectors_20151111.csv │ │ ├── MMS1_FEEPS_ContaminatedSectors_20160709.csv │ │ ├── MMS1_FEEPS_ContaminatedSectors_20161028.csv │ │ ├── MMS1_FEEPS_ContaminatedSectors_20170531.csv │ │ ├── MMS1_FEEPS_ContaminatedSectors_20171003.csv │ │ ├── MMS1_FEEPS_ContaminatedSectors_20181005.csv │ │ ├── MMS2_FEEPS_ContaminatedSectors_20151111.csv │ │ ├── MMS2_FEEPS_ContaminatedSectors_20160709.csv │ │ ├── MMS2_FEEPS_ContaminatedSectors_20161028.csv │ │ ├── MMS2_FEEPS_ContaminatedSectors_20170531.csv │ │ ├── MMS2_FEEPS_ContaminatedSectors_20171003.csv │ │ ├── MMS2_FEEPS_ContaminatedSectors_20181005.csv │ │ ├── MMS3_FEEPS_ContaminatedSectors_20151111.csv │ │ ├── MMS3_FEEPS_ContaminatedSectors_20160709.csv │ │ ├── MMS3_FEEPS_ContaminatedSectors_20161028.csv │ │ ├── MMS3_FEEPS_ContaminatedSectors_20170531.csv │ │ ├── MMS3_FEEPS_ContaminatedSectors_20171003.csv │ │ ├── MMS3_FEEPS_ContaminatedSectors_20181005.csv │ │ ├── MMS4_FEEPS_ContaminatedSectors_20151111.csv │ │ ├── MMS4_FEEPS_ContaminatedSectors_20160709.csv │ │ ├── MMS4_FEEPS_ContaminatedSectors_20161028.csv │ │ ├── MMS4_FEEPS_ContaminatedSectors_20170531.csv │ │ ├── MMS4_FEEPS_ContaminatedSectors_20171003.csv │ │ └── MMS4_FEEPS_ContaminatedSectors_20181005.csv │ ├── tokenize.py │ ├── vdf_elim.py │ ├── vdf_omni.py │ ├── vdf_projection.py │ ├── vdf_reduce.py │ ├── vdf_to_e64.py │ └── whistler_b2e.py ├── models │ ├── __init__.py │ ├── igrf.py │ ├── igrf13coeffs.csv │ └── magnetopause_normal.py ├── plot │ ├── __init__.py │ ├── add_position.py │ ├── annotate_heatmap.py │ ├── colorbar.py │ ├── logo │ │ └── IRF_logo_blue_on_white.jpg │ ├── make_labels.py │ ├── mms_pl_config.py │ ├── pl_scatter_matrix.py │ ├── pl_tx.py │ ├── plot_ang_ang.py │ ├── plot_clines.py │ ├── plot_contour.py │ ├── plot_heatmap.py │ ├── plot_line.py │ ├── plot_magnetosphere.py │ ├── 
plot_projection.py │ ├── plot_reduced_2d.py │ ├── plot_spectr.py │ ├── plot_surf.py │ ├── span_tint.py │ └── zoom.py ├── pyrf │ ├── __init__.py │ ├── autocorr.py │ ├── average_vdf.py │ ├── avg_4sc.py │ ├── c_4_grad.py │ ├── c_4_j.py │ ├── c_4_k.py │ ├── c_4_v.py │ ├── calc_ag.py │ ├── calc_agyro.py │ ├── calc_dng.py │ ├── calc_dt.py │ ├── calc_fs.py │ ├── calc_sqrtq.py │ ├── cart2sph.py │ ├── cart2sph_ts.py │ ├── cdfepoch2datetime64.py │ ├── compress_cwt.py │ ├── convert_fac.py │ ├── corr_deriv.py │ ├── cotrans.py │ ├── cross.py │ ├── date_str.py │ ├── datetime2iso8601.py │ ├── datetime642iso8601.py │ ├── datetime642ttns.py │ ├── datetime642unix.py │ ├── dec_par_perp.py │ ├── dist_append.py │ ├── dot.py │ ├── dynamic_press.py │ ├── e_vxb.py │ ├── eb_nrf.py │ ├── ebsp.py │ ├── edb.py │ ├── end.py │ ├── estimate.py │ ├── extend_tint.py │ ├── filt.py │ ├── find_closest.py │ ├── get_omni_data.py │ ├── gradient.py │ ├── gse2gsm.py │ ├── histogram.py │ ├── histogram2d.py │ ├── increments.py │ ├── int_sph_dist.py │ ├── integrate.py │ ├── iplasma_calc.py │ ├── iso86012datetime.py │ ├── iso86012datetime64.py │ ├── iso86012timevec.py │ ├── iso86012unix.py │ ├── l_shell.py │ ├── lowpass.py │ ├── magnetosphere.py │ ├── match_phibe_dir.py │ ├── match_phibe_v.py │ ├── mean.py │ ├── mean_bins.py │ ├── mean_field.py │ ├── medfilt.py │ ├── median_bins.py │ ├── movmean.py │ ├── mva.py │ ├── mva_gui.py │ ├── new_xyz.py │ ├── norm.py │ ├── normalize.py │ ├── optimize_nbins_1d.py │ ├── optimize_nbins_2d.py │ ├── pid_4sc.py │ ├── plasma_beta.py │ ├── plasma_calc.py │ ├── poynting_flux.py │ ├── pres_anis.py │ ├── psd.py │ ├── pvi.py │ ├── pvi_4sc.py │ ├── read_cdf.py │ ├── remove_repeated_points.py │ ├── resample.py │ ├── shock_models_parameters.json │ ├── shock_normal.py │ ├── shock_parameters.py │ ├── sliding_derivative.py │ ├── solid_angle.py │ ├── sph2cart.py │ ├── st_diff.py │ ├── start.py │ ├── struct_func.py │ ├── t_eval.py │ ├── time_clip.py │ ├── timevec2iso8601.py │ ├── 
trace.py │ ├── transformation_indices.json │ ├── ts_append.py │ ├── ts_scalar.py │ ├── ts_skymap.py │ ├── ts_spectr.py │ ├── ts_tensor_xyz.py │ ├── ts_time.py │ ├── ts_vec_xyz.py │ ├── ttns2datetime64.py │ ├── unix2datetime64.py │ ├── vht.py │ ├── wave_fft.py │ ├── wavelet.py │ ├── wavepolarize_means.py │ └── waverage.py ├── solo │ ├── __init__.py │ ├── config.json │ ├── db_init.py │ ├── read_lfr_density.py │ └── read_tnr.py ├── stylesheets │ ├── aps.mplstyle │ ├── metropolis.mplstyle │ └── pyrfu.mplstyle └── tests │ ├── __init__.py │ ├── test_dispersion.py │ ├── test_mms.py │ ├── test_models.py │ ├── test_plot.py │ ├── test_pyrf.py │ └── test_solo.py └── requirements.txt /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = E203, W503, DOC105, DOC203 3 | max-line-length = 88 4 | max-complexity = 60 -------------------------------------------------------------------------------- /.github/workflows/publish-to-pypi.yml: -------------------------------------------------------------------------------- 1 | name: Upload Python Package 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | jobs: 8 | deploy: 9 | runs-on: ubuntu-latest 10 | strategy: 11 | max-parallel: 4 12 | matrix: 13 | python-version: [ '3.10' ] 14 | steps: 15 | - uses: actions/checkout@v4 16 | - name: Set up Python ${{ matrix.python-version }} 17 | uses: actions/setup-python@v5 18 | with: 19 | python-version: ${{ matrix.python-version }} 20 | - name: Install dependencies 21 | run: | 22 | python -m pip install --upgrade pip 23 | pip install build wheel twine 24 | - name: Build and publish 25 | env: 26 | TWINE_USERNAME: __token__ 27 | TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} 28 | run: | 29 | python -m build . 
30 | twine upload dist/* 31 | -------------------------------------------------------------------------------- /.github/workflows/pylint.yml: -------------------------------------------------------------------------------- 1 | name: pylint 2 | 3 | on: [push] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | strategy: 9 | matrix: 10 | python-version: [ '3.10' ] 11 | steps: 12 | - uses: actions/checkout@v4 13 | - name: Set up Python ${{ matrix.python-version }} 14 | uses: actions/setup-python@v5 15 | with: 16 | python-version: ${{ matrix.python-version }} 17 | - name: Install dependencies 18 | run: | 19 | python -m pip install --upgrade pip 20 | python -m pip install -r docs/requirements.txt 21 | python -m pip install -r requirements.txt 22 | pip install pylint 23 | - name: pylint 24 | run: | 25 | pylint --rcfile=pyproject.toml $(git ls-files '*.py') 26 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: [push] 4 | 5 | jobs: 6 | build: 7 | runs-on: ${{ matrix.os }} 8 | strategy: 9 | fail-fast: false 10 | matrix: 11 | python-version: ['3.10', '3.11', '3.12'] 12 | os: [ macos-latest, windows-2019, ubuntu-latest ] 13 | 14 | steps: 15 | - uses: actions/checkout@v4 16 | - name: Set up Python ${{ matrix.python-version }} 17 | uses: actions/setup-python@v5 18 | with: 19 | python-version: ${{ matrix.python-version }} 20 | - name: Install dependencies 21 | run: | 22 | python -m pip install --upgrade pip 23 | python -m pip install flake8 pytest 24 | pip install -r requirements.txt 25 | pip install keyrings.alt 26 | - name: Lint with flake8 27 | run: | 28 | flake8 $(git ls-files '*.py') 29 | - name: Test with pytest 30 | run: | 31 | pip install pytest ddt pytest-cov sphinx pandoc 32 | pip install -r docs/requirements.txt 33 | pip install keyrings.alt 34 | pytest 35 | - name: Test with pytest (coverage + long 
tests) 36 | if: matrix.python-version == '3.12' && matrix.os == 'ubuntu-latest' 37 | run: | 38 | sudo apt update && sudo apt install -y texlive pandoc 39 | pip install pytest ddt pytest-cov sphinx pandoc 40 | pip install -r docs/requirements.txt 41 | pip install keyrings.alt 42 | pytest --cov=./ --cov-report=xml 43 | - name: Check that release process is not broken 44 | if: matrix.python-version == '3.12' && matrix.os == 'ubuntu-latest' 45 | env: 46 | TWINE_USERNAME: __token__ 47 | TWINE_PASSWORD: ${{ secrets.TESTPYPI_API_TOKEN }} 48 | run: | 49 | pip install build wheel twine 50 | python -m build . 51 | twine check dist/* 52 | twine upload --repository testpypi --skip-existing dist/* 53 | - name: Upload coverage reports to Codecov 54 | if: matrix.python-version == '3.12' && matrix.os == 'ubuntu-latest' 55 | uses: codecov/codecov-action@v4 56 | with: 57 | token: ${{ secrets.CODECOV_TOKEN }} 58 | file: ./coverage.xml 59 | flags: unittests 60 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | # The following code can be uncommented to automatically keep all pre-commit rev's updated 3 | - repo: local 4 | # First a local check if new "rev" exists... 
5 | hooks: 6 | - id: pre-commit-autoupdate 7 | name: Check for new rev with pre-commit autoupdate 8 | entry: "pre-commit autoupdate" 9 | language: system 10 | pass_filenames: false 11 | - repo: https://github.com/pre-commit/pre-commit-hooks 12 | rev: "v5.0.0" 13 | hooks: 14 | - id: check-yaml 15 | - id: check-symlinks 16 | - id: check-merge-conflict 17 | - id: requirements-txt-fixer 18 | - id: trailing-whitespace 19 | - id: pretty-format-json 20 | args: 21 | - --autofix 22 | - id: name-tests-test 23 | args: 24 | - --unittest 25 | - repo: https://github.com/pycqa/isort 26 | rev: 6.0.1 27 | hooks: 28 | - id: isort 29 | name: isort (python) 30 | - repo: https://github.com/psf/black-pre-commit-mirror 31 | rev: 25.1.0 32 | hooks: 33 | - id: black 34 | language_version: python3.12 35 | - repo: https://github.com/nbQA-dev/nbQA 36 | rev: 1.9.1 37 | hooks: 38 | - id: nbqa-black 39 | name: nbqa-black 40 | description: "Run 'black' on a Jupyter Notebook" 41 | entry: nbqa black 42 | language: python 43 | require_serial: true 44 | types: [ jupyter ] 45 | additional_dependencies: [ black ] 46 | - repo: https://github.com/pycqa/flake8 47 | rev: "7.2.0" 48 | hooks: 49 | - id: flake8 50 | - repo: https://github.com/PyCQA/pylint 51 | rev: v3.3.6 52 | hooks: 53 | - id: pylint 54 | args: 55 | - --rcfile 56 | - pyproject.toml 57 | - --disable 58 | - import-error 59 | - --output-format 60 | - colorized 61 | 62 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | version: 2 6 | 7 | build: 8 | os: ubuntu-22.04 9 | tools: 10 | python: "3.10" 11 | 12 | sphinx: 13 | configuration: docs/conf.py 14 | 15 | formats: 16 | - epub 17 | - pdf 18 | 19 | # Optionally declare the Python requirements required to build your 
docs 20 | python: 21 | install: 22 | - requirements: docs/requirements.txt 23 | - requirements: requirements.txt -------------------------------------------------------------------------------- /CITATION.cff: -------------------------------------------------------------------------------- 1 | cff-version: 1.2.0 2 | message: "If you use this software, please cite it as below." 3 | type: software 4 | authors: 5 | - name: "PyRFU team" 6 | - family-names: Richard 7 | given-names: Louis 8 | affiliation: IRF Uppsala, Sweden 9 | orcid: "https://orcid.org/0000-0003-3446-7322" 10 | - family-names: Khotyaintsev 11 | given-names: Yuri 12 | affiliation: IRF Uppsala, Sweden 13 | orcid: "https://orcid.org/0000-0001-5550-3113" 14 | - family-names: Vaivads 15 | given-names: Andris 16 | affiliation: KTH, Sweden 17 | orcid: "https://orcid.org/0000-0003-1654-841X" 18 | - family-names: Graham 19 | given-names: Daniel 20 | affiliation: IRF Uppsala, Sweden 21 | orcid: "https://orcid.org/0000-0002-1046-746X" 22 | - family-names: Norgren 23 | given-names: Cecilia 24 | affiliation: IRF Uppsala, Sweden 25 | orcid: "https://orcid.org/0000-0002-6561-2337" 26 | - family-names: Johlander 27 | given-names: Andreas 28 | affiliation: Swedish Defence Research Agency, Sweden 29 | orcid: "https://orcid.org/0000-0001-7714-1870" 30 | title: "pyrfu" 31 | abstract: "An Open-Source Python Package for Advanced In-Situ Space Plasma Analysis." 32 | license: MIT 33 | doi: 10.5281/zenodo.10678695 34 | url: "https://pyrfu.readthedocs.io/" 35 | repository-code: 'https://github.com/louis-richard/irfu-python' -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | Contributing to pyrfu 2 | ===================== 3 | .. start-marker-style-do-not-remove 4 | 5 | The following is a set of guidelines for contributing to ``pyrfu`` and its packages, which are hosted on GitHub. 
These are mostly guidelines, not rules. Use your best judgment, and feel free to propose changes to this document in a pull request. 6 | 7 | Introduction 8 | ------------ 9 | This guide defines the conventions for writing Python code for ``pyrfu``. 10 | The main ideas are: 11 | 12 | - ensuring a consistent code style 13 | - promoting good practices for testing 14 | - maintaining a good level of readability and maintainability 15 | - keeping it simple 16 | 17 | 18 | Python version 19 | -------------- 20 | Prefer if possible Python>=3.10 since there are major dependencies that do not support 21 | older python versions. 22 | 23 | 24 | Coding style 25 | ------------ 26 | The pyrfu package uses the `Black code style `__ . 28 | 29 | Use Linters 30 | ------------ 31 | Linters are tools for static code quality checking. For instance, you can 32 | use the following tools to test conformity with the common pythonic 33 | standards: 34 | 35 | - `pylint `__ is one of the oldest linters and tracks various problems such as good practice violations, coding standard violations, or programming issues. Pylint may be seen as slow, too verbose and complex to configure to get it working properly. You can run a complete static analysis with the following command: 36 | 37 | .. code:: python 38 | 39 | pylint pyrfu --rcfile=pyproject.toml 40 | 41 | All these linters can be simply installed with pip. Further details 42 | on the functionalities can be found 43 | `here `__ 44 | or `there `__. 45 | Also, a lot of features can also be provided natively or by installing 46 | plugins with your IDE (PyCharm, Spyder, Eclipse, etc.). 47 | 48 | To be accepted to ``pyrfu``, every new piece of code has to get a pylint score of 10/10. 49 | 50 | Documentation 51 | ------------- 52 | Documentation of all the files must be done in-line using Sphinx_. 53 | The docstring has to follow the numpydoc_ style. 54 | 55 | .. _Sphinx: http://www.sphinx-doc.org/en/master/ 56 | ..
_numpydoc: https://numpydoc.readthedocs.io/en/latest/format.html 57 | 58 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | 2 | MIT License 3 | 4 | Copyright (c) 2020 L. RICHARD 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in all 14 | copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | SOFTWARE. 
-------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | # Include the README 2 | include README.rst 3 | 4 | # Include the license file 5 | include LICENSE.txt 6 | 7 | # include IRF logo 8 | include pyrfu/plot/logo/IRF_logo_blue_on_white.jpg 9 | 10 | # include the MMS configuration files JSON 11 | include pyrfu/mms/*.json 12 | 13 | # include the MMS FEEPS sun contamination CSV files 14 | include pyrfu/mms/sun/*.csv 15 | 16 | # include the models CSV files 17 | include pyrfu/models/*.csv 18 | 19 | # include the PYRF configuration files JSON 20 | include pyrfu/pyrf/*.json 21 | 22 | # include the style sheets 23 | include pyrfu/stylesheets/*.mplstyle -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS = 7 | SPHINXBUILD = python -m sphinx 8 | SPHINXPROJ = pyrfu 9 | SOURCEDIR = . 10 | BUILDDIR = _build 11 | 12 | # Put it first so that "make" without argument is like "make help". 13 | help: 14 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 15 | 16 | .PHONY: help Makefile 17 | 18 | # Catch-all target: route all unknown targets to Sphinx using the new 19 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
20 | %: Makefile 21 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 22 | -------------------------------------------------------------------------------- /docs/_static/example_dispersion_one_fluid_nb_thumbnail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/louis-richard/irfu-python/4680222ea778747cc15967e75f9a9577ee76a585/docs/_static/example_dispersion_one_fluid_nb_thumbnail.png -------------------------------------------------------------------------------- /docs/_static/example_mms_b_e_j_nb_thumbnail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/louis-richard/irfu-python/4680222ea778747cc15967e75f9a9577ee76a585/docs/_static/example_mms_b_e_j_nb_thumbnail.png -------------------------------------------------------------------------------- /docs/_static/example_mms_ebfields_nb_thumbnail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/louis-richard/irfu-python/4680222ea778747cc15967e75f9a9577ee76a585/docs/_static/example_mms_ebfields_nb_thumbnail.png -------------------------------------------------------------------------------- /docs/_static/example_mms_edr_signatures_nb_thumbnail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/louis-richard/irfu-python/4680222ea778747cc15967e75f9a9577ee76a585/docs/_static/example_mms_edr_signatures_nb_thumbnail.png -------------------------------------------------------------------------------- /docs/_static/example_mms_eis_nb_thumbnail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/louis-richard/irfu-python/4680222ea778747cc15967e75f9a9577ee76a585/docs/_static/example_mms_eis_nb_thumbnail.png 
-------------------------------------------------------------------------------- /docs/_static/logo-pyrfu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/louis-richard/irfu-python/4680222ea778747cc15967e75f9a9577ee76a585/docs/_static/logo-pyrfu.png -------------------------------------------------------------------------------- /docs/_static/logo-pyrfu.svg: 1 | 2 | 3 | 4 | 5 | 6 | image/svg+xml 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | pyrfu 16 | 17 | 18 | -------------------------------------------------------------------------------- /docs/_static/quick-overview_nb_thumbnail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/louis-richard/irfu-python/4680222ea778747cc15967e75f9a9577ee76a585/docs/_static/quick-overview_nb_thumbnail.png -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | Contributing to pyrfu 2 | ======================== 3 | 4 | All contributions are welcome. For detailed information about the code style please 5 | read the following instructions. All the code must have an adequate number of tests 6 | that assures its credibility. Feel free to make ``pull`` requests. 7 | 8 | .. include:: ./../CONTRIBUTING.rst 9 | :start-after: start-marker-style-do-not-remove -------------------------------------------------------------------------------- /docs/dev/index.rst: -------------------------------------------------------------------------------- 1 | Pyrfu developer documentation 2 | ============================== 3 | 4 | ..
toctree:: 5 | :maxdepth: 2 6 | :caption: Contents: 7 | 8 | Indices and tables 9 | ------------------ 10 | * :ref:`genindex` 11 | * :ref:`modindex` 12 | * :ref:`search` -------------------------------------------------------------------------------- /docs/dev/modules.rst: -------------------------------------------------------------------------------- 1 | pyrfu 2 | ===== 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | pyrfu 8 | -------------------------------------------------------------------------------- /docs/examples/00_overview/index.rst: -------------------------------------------------------------------------------- 1 | Overview examples gallery 2 | ========================= 3 | 4 | .. nbgallery:: 5 | :glob: 6 | 7 | ./quick-overview -------------------------------------------------------------------------------- /docs/examples/01_mms/index.rst: -------------------------------------------------------------------------------- 1 | MMS examples gallery 2 | ==================== 3 | 4 | .. nbgallery:: 5 | :glob: 6 | 7 | ./example_mms_b_e_j 8 | ./example_mms_ebfields 9 | ./example_mms_edr_signatures 10 | ./example_mms_eis 11 | ./example_mms_electron_psd 12 | ./example_mms_feeps 13 | ./example_mms_hpca 14 | ./example_mms_ipshocks 15 | ./example_mms_ohmslaw 16 | ./example_mms_particle_deflux 17 | ./example_mms_particle_distributions 18 | ./example_mms_particle_pad 19 | ./example_mms_reduced_ion_dist 20 | ./example_mms_reduced_electron_dist 21 | ./example_mms_polarizationanalysis 22 | ./example_mms_walen_test -------------------------------------------------------------------------------- /docs/examples/02_dispersion/index.rst: -------------------------------------------------------------------------------- 1 | Dispersion relations examples gallery 2 | ===================================== 3 | 4 | .. 
nbgallery:: 5 | :glob: 6 | 7 | ./example_dispersion_one_fluid -------------------------------------------------------------------------------- /docs/examples/index.rst: -------------------------------------------------------------------------------- 1 | Pyrfu examples 2 | ============== 3 | 4 | In this section, we provide simple and practical examples on how to use 5 | the different subpackages of the ``pyrfu`` package. 6 | 7 | .. toctree:: 8 | :maxdepth: 1 9 | 10 | ./00_overview/index 11 | ./01_mms/index 12 | ./02_dispersion/index -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. pyrfu documentation master file, created by 2 | sphinx-quickstart on Thu Nov 26 15:54:25 2020. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to pyrfu's documentation! 7 | ================================= 8 | 9 | .. toctree:: 10 | :titlesonly: 11 | :hidden: 12 | :maxdepth: 2 13 | 14 | installation 15 | examples/index 16 | dev/index 17 | contributing 18 | 19 | .. include:: ../README.rst 20 | :start-after: start-marker-intro-do-not-remove 21 | :end-before: end-marker-intro-do-not-remove 22 | 23 | 24 | 25 | Examples 26 | ======== 27 | See :doc:`here ` for a complete list of examples. 28 | 29 | 30 | :doc:`Go to developers doc ` 31 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | .. highlight:: shell 2 | 3 | ============ 4 | Installation 5 | ============ 6 | 7 | 8 | Stable release 9 | -------------- 10 | 11 | To install Pyrfu, run this command in your terminal: 12 | 13 | .. 
code-block:: console 14 | 15 | $ python -m pip install pyrfu 16 | # or 17 | $ python -m pip install --user pyrfu 18 | 19 | This is the preferred method to install Pyrfu, as it will always install the most 20 | recent stable release. 21 | 22 | If you don't have `pip`_ installed, this `Python installation guide`_ can guide 23 | you through the process. 24 | 25 | .. _pip: https://pip.pypa.io 26 | .. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/ 27 | 28 | 29 | From sources 30 | ------------ 31 | 32 | The sources for Pyrfu can be downloaded from the `Github repo`_. 33 | 34 | You can either clone the public repository: 35 | 36 | .. code-block:: console 37 | 38 | $ git clone https://github.com/louis-richard/irfu-python 39 | 40 | Once you have a copy of the source, you can install it with: 41 | 42 | .. code-block:: console 43 | 44 | $ python -m pip install . 45 | 46 | 47 | .. _Github repo: https://github.com/louis-richard/irfu-python -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | nbsphinx==0.9.2 2 | numpydoc==1.5.0 3 | pydata-sphinx-theme==0.13.3 4 | sphinx==7.0.0 5 | sphinx-codeautolink==0.15.0 6 | sphinx-copybutton==0.5.2 7 | sphinx-gallery==0.13.0 8 | sphinxcontrib-apidoc==0.3.0 9 | -------------------------------------------------------------------------------- /pyrfu/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from pyrfu import dispersion, lp, maven, mms, models, plot, pyrf 5 | 6 | __author__ = "Louis Richard" 7 | __email__ = "louisr@irfu.se" 8 | __copyright__ = "Copyright 2020-2025" 9 | __license__ = "MIT" 10 | __version__ = "2.4.17" 11 | __status__ = "Prototype" 12 | 13 | __all__ = ["dispersion", "lp", "maven", "mms", "models", "plot", "pyrf"] 14 |
-------------------------------------------------------------------------------- /pyrfu/dispersion/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from .disp_surf_calc import disp_surf_calc 5 | 6 | # @Louis Richard 7 | from .one_fluid_dispersion import one_fluid_dispersion 8 | 9 | __author__ = "Louis Richard" 10 | __email__ = "louisr@irfu.se" 11 | __copyright__ = "Copyright 2020-2023" 12 | __license__ = "MIT" 13 | __version__ = "2.4.2" 14 | __status__ = "Prototype" 15 | 16 | __all__ = ["disp_surf_calc", "one_fluid_dispersion"] 17 | -------------------------------------------------------------------------------- /pyrfu/maven/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from .db_init import db_init 5 | from .download_data import download_data 6 | 7 | __author__ = "Louis Richard" 8 | __email__ = "louisr@irfu.se" 9 | __copyright__ = "Copyright 2020-2023" 10 | __license__ = "MIT" 11 | __version__ = "2.4.10" 12 | __status__ = "Prototype" 13 | 14 | __all__ = [ 15 | "db_init", 16 | "download_data", 17 | ] 18 | -------------------------------------------------------------------------------- /pyrfu/maven/config.json: -------------------------------------------------------------------------------- 1 | {"local_data_dir": "/Volumes/maven"} 2 | -------------------------------------------------------------------------------- /pyrfu/maven/db_init.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import json 5 | 6 | # Built-in imports 7 | import os 8 | 9 | __author__ = "Louis Richard" 10 | __email__ = "louisr@irfu.se" 11 | __copyright__ = "Copyright 2020-2023" 12 | __license__ = "MIT" 13 | __version__ = "2.4.10" 14 | __status__ = "Prototype" 15 | 16 | 17 | def 
def db_init(local_data_dir):
    r"""Set up the default path of MAVEN data.

    Parameters
    ----------
    local_data_dir : str
        Path to the data.

    Raises
    ------
    FileNotFoundError
        If ``local_data_dir`` does not exist.

    """
    # Normalize the path and make sure that it exists. Use an explicit
    # exception instead of `assert`, which is silently stripped when
    # Python runs with the -O flag.
    local_data_dir = os.path.normpath(local_data_dir)

    if not os.path.exists(local_data_dir):
        raise FileNotFoundError(f"{local_data_dir} doesn't exist!!")

    # Path to the configuration file (lives next to this module).
    pkg_path = os.path.dirname(os.path.abspath(__file__))
    config_path = os.path.join(pkg_path, "config.json")

    # Read the current version of the configuration.
    with open(config_path, "r", encoding="utf-8") as fs:
        config = json.load(fs)

    # Overwrite the configuration file with the new path.
    config["local_data_dir"] = local_data_dir

    with open(config_path, "w", encoding="utf-8") as fs:
        json.dump(config, fs)
"/Volumes/mms", "sdc": {"rights": "public", "username": "username"}, "aws": ""} -------------------------------------------------------------------------------- /pyrfu/mms/copy_files.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # Built-in imports 5 | import json 6 | import os 7 | import shutil 8 | from typing import Mapping, Optional, Union 9 | 10 | # Local imports 11 | from pyrfu.mms.db_init import MMS_CFG_PATH 12 | from pyrfu.mms.list_files import list_files 13 | 14 | __author__ = "Louis Richard" 15 | __email__ = "louisr@irfu.se" 16 | __copyright__ = "Copyright 2020-2024" 17 | __license__ = "MIT" 18 | __version__ = "2.4.13" 19 | __status__ = "Prototype" 20 | 21 | 22 | def copy_files( 23 | var: Mapping[str, str], 24 | tint: list[str], 25 | mms_id: Union[int, str], 26 | tar_path: str, 27 | data_path: Optional[str] = "", 28 | ) -> None: 29 | r"""Copy files from local as defined in config.json to the target path. 30 | 31 | Parameters 32 | ---------- 33 | var : dict 34 | Dictionary containing 4 keys 35 | * var["inst"] : name of the instrument. 36 | * var["tmmode"] : data rate. 37 | * var["lev"] : data level. 38 | * var["dtype"] : data type. 39 | tint : list 40 | Time interval. 41 | mms_id : int or str 42 | Index of the spacecraft. 43 | tar_path : str 44 | Target path to put files. 45 | data_path : str, Optional 46 | Local path to MMS data. Default uses that provided in pyrfu.mms.config.json 47 | 48 | """ 49 | # Normalize the target path and make sure it exists. 50 | tar_path = os.path.normpath(tar_path) 51 | assert os.path.exists(tar_path), f"{tar_path} doesn't exist!!" 
52 | 53 | if not data_path: 54 | # Read the current version of the MMS configuration file 55 | with open(MMS_CFG_PATH, "r", encoding="utf-8") as fs: 56 | config = json.load(fs) 57 | 58 | normed_path = os.path.normpath(config["local"]) 59 | else: 60 | normed_path = os.path.normpath(data_path) 61 | 62 | # Make sure the local path exists. 63 | assert os.path.exists(normed_path), f"{normed_path} doesn't exist!!" 64 | 65 | # List files that matches the requirements (instrument, date level, 66 | # data type, data rate) in the time interval for the target spacecraft. 67 | files = list_files(tint, mms_id, var, data_path=normed_path) 68 | 69 | for file in files: 70 | # Make paths 71 | relative_path = os.path.relpath(file, normed_path) 72 | directory_path = os.path.join(tar_path, os.path.dirname(relative_path)) 73 | target_file = os.path.join(directory_path, os.path.basename(file)) 74 | 75 | # Create directories in target path 76 | os.makedirs(directory_path, exist_ok=True) 77 | 78 | # Copy file 79 | shutil.copy2(file, target_file) 80 | -------------------------------------------------------------------------------- /pyrfu/mms/copy_files_ancillary.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # Built-in imports 5 | import json 6 | import os 7 | import shutil 8 | from typing import Literal, Optional, Union 9 | 10 | # Local imports 11 | from pyrfu.mms.db_init import MMS_CFG_PATH 12 | from pyrfu.mms.list_files_ancillary import list_files_ancillary 13 | 14 | __author__ = "Louis Richard" 15 | __email__ = "louisr@irfu.se" 16 | __copyright__ = "Copyright 2020-2024" 17 | __license__ = "MIT" 18 | __version__ = "2.4.13" 19 | __status__ = "Prototype" 20 | 21 | 22 | def copy_files_ancillary( 23 | product: Literal["predatt", "predeph", "defatt", "defeph"], 24 | tint: list[str], 25 | mms_id: Union[int, str], 26 | tar_path: str, 27 | data_path: Optional[str] = "", 28 | ) -> None: 29 | 
r"""Copy ancillary files from local as defined in config.json to the target path. 30 | 31 | Parameters 32 | ---------- 33 | product : {"predatt", "predeph", "defatt", "defeph"} 34 | Ancillary type. 35 | tint : list 36 | Time interval. 37 | mms_id : str or int 38 | Index of the spacecraft. 39 | tar_path : str 40 | Target path. 41 | data_path : str, Optional 42 | Local path to MMS data. Default uses that provided in pyrfu.mms.config.json 43 | 44 | """ 45 | # Normalize the target path and make sure it exists. 46 | tar_path = os.path.normpath(tar_path) 47 | assert os.path.exists(tar_path), f"{tar_path} doesn't exist!!" 48 | 49 | if not data_path: 50 | # Read the current version of the MMS configuration file 51 | with open(MMS_CFG_PATH, "r", encoding="utf-8") as fs: 52 | config = json.load(fs) 53 | 54 | root_path = os.path.normpath(config["local"]) 55 | else: 56 | root_path = os.path.normpath(data_path) 57 | 58 | # Make sure the local path exists. 59 | assert os.path.exists(root_path), f"{root_path} doesn't exist!!" 60 | 61 | # List files that matches the requirements (instrument, date level, 62 | # data type, data rate) in the time interval for the target spacecraft. 
63 | files = list_files_ancillary(tint, mms_id, product, data_path=root_path) 64 | 65 | for file in files: 66 | # Make paths 67 | relative_path = os.path.relpath(file, root_path) 68 | path = os.path.join(tar_path, os.path.dirname(relative_path)) 69 | target_file = os.path.join(path, os.path.basename(file)) 70 | 71 | # Create directories in target path 72 | os.makedirs(path, exist_ok=True) 73 | 74 | # Copy file 75 | shutil.copy2(file, target_file) 76 | -------------------------------------------------------------------------------- /pyrfu/mms/db_get_variable.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # Built-in imports 5 | import logging 6 | from typing import Optional 7 | 8 | # 3rd party imports 9 | from xarray.core.dataarray import DataArray 10 | 11 | # Local imports 12 | from pyrfu.mms.get_variable import get_variable 13 | from pyrfu.mms.list_files import list_files 14 | 15 | __author__ = "Louis Richard" 16 | __email__ = "louisr@irfu.se" 17 | __copyright__ = "Copyright 2020-2024" 18 | __license__ = "MIT" 19 | __version__ = "2.4.13" 20 | __status__ = "Prototype" 21 | 22 | 23 | logging.captureWarnings(True) 24 | logging.basicConfig( 25 | format="[%(asctime)s] %(levelname)s: %(message)s", 26 | datefmt="%d-%b-%y %H:%M:%S", 27 | level=logging.INFO, 28 | ) 29 | 30 | 31 | def db_get_variable( 32 | dataset_name: str, 33 | cdf_name: str, 34 | tint: list[str], 35 | verbose: Optional[bool] = True, 36 | data_path: Optional[str] = "", 37 | ) -> DataArray: 38 | r"""Get variable in the cdf file. 39 | 40 | Parameters 41 | ---------- 42 | dataset_name : str 43 | Name of the dataset. 44 | cdf_name : str 45 | Name of the target field in cdf file. 46 | tint : list 47 | Time interval. 48 | verbose : bool, Optional 49 | Status monitoring. Default is verbose = True 50 | data_path : str, Optional 51 | Path of MMS data. 
def db_get_variable(
    dataset_name: str,
    cdf_name: str,
    tint: list[str],
    verbose: Optional[bool] = True,
    data_path: Optional[str] = "",
) -> DataArray:
    r"""Get variable in the cdf file.

    Parameters
    ----------
    dataset_name : str
        Name of the dataset.
    cdf_name : str
        Name of the target field in cdf file.
    tint : list
        Time interval.
    verbose : bool, Optional
        Status monitoring. Default is verbose = True
    data_path : str, Optional
        Path of MMS data. Default uses `pyrfu.mms.mms_config.py`

    Returns
    -------
    out : DataArray
        Variable of the target variable.

    Raises
    ------
    FileNotFoundError
        If no files are found for the dataset.

    """
    # Dataset names look like "mmsX_inst_tmmode_lev[_dtype]".
    tokens = dataset_name.split("_")

    # Spacecraft index is the trailing digit of e.g. "mms1".
    probe = tokens[0][-1]

    var = {"inst": tokens[1], "tmmode": tokens[2], "lev": tokens[3]}

    # The data-type token is optional.
    if len(tokens) > 4:
        var["dtype"] = tokens[4]

    files = list_files(tint, probe, var, data_path=data_path)

    if not files:
        raise FileNotFoundError(f"No files found for {cdf_name} in {data_path}")

    if verbose:
        logging.info("Loading %s...", cdf_name)

    # Read the variable from the first matching file only.
    return get_variable(files[0], cdf_name)
def dft_time_shift(inp, tau):
    r"""Shifts the input signal ``inp`` by ``tau`` seconds using discrete
    fourier transform (DFT). Particularly useful when calculating the
    frequency-wavenumber spectrum of the mms' spin-plane or axial probes.

    Parameters
    ----------
    inp : xarray.DataArray
        Time series to be shifted (Note : Only tensor order 1).
    tau : float
        Applied shift in seconds.

    Returns
    -------
    out : xarray.DataArray
        Time series of the shifted input.

    See also
    --------
    pyrfu.mms.fk_power_spectrum : Calculates the frequency-wave number
                                  power spectrum.

    """
    time, sig = [inp.time.data, inp.data]

    # Sampling frequency
    f_sampling = calc_fs(inp)

    # Applied delay in samples.
    delay = np.floor(tau * f_sampling)

    # Forward FFT
    sig_fft = np.fft.fft(sig)
    n_p = len(sig)

    # Disregard the Nyquist component for even-sized DFT. In NumPy's
    # 0-based FFT ordering the Nyquist bin sits at index n_p // 2; the
    # previous index `int(n_p / 2 + 1)` was an off-by-one left over from
    # a 1-based (MATLAB) port and zeroed the wrong bin.
    if not n_p % 2:
        sig_fft[n_p // 2] = 0

    # Normalized frequencies [0, 1/n, ..., -1/n]; np.fft.fftfreq produces
    # exactly the (k + n/2) % n - n/2 ordering computed by hand before.
    freq = np.fft.fftfreq(n_p)

    # Backward FFT with a linear phase ramp implementing the delay.
    out_data = np.fft.ifft(sig_fft * np.exp(-2j * np.pi * delay * freq))

    # Shift the time axis accordingly.
    # NOTE(review): assumes time is datetime64[ns] so that the offset is
    # in nanoseconds — confirm against callers.
    out_time = time + int(tau * 1e9)

    out = ts_scalar(out_time, out_data, attrs=inp.attrs)

    return out
def eis_combine_proton_skymap(
    phxtof_allt,
    extof_allt,
    en_chan: list = None,
    to_psd: bool = True,
):
    r"""Combines ExTOF and PHxTOF proton energy spectra and generate proton
    skymap distribution.

    Parameters
    ----------
    phxtof_allt : xarray.Dataset
        Dataset containing the PHxTOF energy spectrum of the 6 telescopes.
    extof_allt : xarray.Dataset
        Dataset containing the ExTOF energy spectrum of the 6 telescopes.
    en_chan : array_like, Optional
        Energy channels to use. Default use all energy channels.
    to_psd : bool, Optional
        Flag to convert differential particle flux to phase space density.

    Returns
    -------
    eis_skymap : xarray.Dataset
        EIS skymap distribution

    """
    # Merge the Pulse-Height x TOF and Energy x TOF products into a single
    # proton energy spectrum per telescope.
    combined_spec = eis_combine_proton_spec(phxtof_allt, extof_allt)

    # Azimuth-elevation (angle-angle) distribution, optionally restricted
    # to a subset of energy channels.
    ang_ang = eis_ang_ang(combined_spec, en_chan=en_chan)

    # Repackage as a skymap, converting flux to PSD when requested.
    return eis_skymap(ang_ang, to_psd=to_psd)
def eis_omni(eis_allt, method: str = "mean"):
    r"""Calculates the omni-directional flux for all 6 telescopes.

    Parameters
    ----------
    eis_allt : xarray.Dataset
        Dataset of the fluxes of all 6 telescopes.
    method : {"mean", "sum"}, Optional
        Combine telescopes by averaging ("mean") or summing ("sum").
        Default is "mean".

    Returns
    -------
    flux_omni : xarray.DataArray
        Omni-directional flux for all 6 telescopes

    Raises
    ------
    ValueError
        If ``method`` is neither "mean" nor "sum".

    See Also
    --------
    pyrfu.mms.get_eis_allt

    Examples
    --------
    >>> from pyrfu import mms

    Define spacecraft index and time interval

    >>> tint = ["2017-07-23T16:10:00", "2017-07-23T18:10:00"]
    >>> ic = 2

    Get EIS ExTOF all 6 telescopes fluxes

    >>> extof_allt = mms.get_eis_allt("flux_extof_proton_srvy_l2", tint, ic)

    Compute the omni-directional flux for all 6 telescopes

    >>> extof_omni = mms.eis_omni(extof_allt)

    """
    # Explicit exception instead of `assert`, which disappears under -O.
    if method.lower() not in ["mean", "sum"]:
        raise ValueError("method must be 'mean' or 'sum'")

    # Telescope variables are the dataset keys starting with "t".
    scopes = list(filter(lambda x: x[0] == "t", eis_allt))

    # Accumulate fluxes over all telescopes. NaNs propagate, so samples
    # with missing telescope data stay NaN in the result.
    flux_omni = np.zeros_like(eis_allt[scopes[0]].data)

    for scope in scopes:
        flux_omni += eis_allt[scope].data.copy()

    if method.lower() == "mean":
        flux_omni /= len(scopes)

    # Get dimensions, coordinates and attributes based on first telescope
    dims = eis_allt[scopes[0]].dims
    coords = [eis_allt[scopes[0]][k] for k in dims]
    attrs = eis_allt[scopes[0]].attrs

    flux_omni = xr.DataArray(flux_omni, coords=coords, dims=dims, attrs=attrs)

    return flux_omni
def eis_pad_combine_sc(pads):
    r"""Generate composite Pitch Angle Distributions (PAD) from the EIS
    sensors across the MMS spacecraft.

    Parameters
    ----------
    pads : list of xarray.DataArray
        Pitch-angle distribution for all spacecraft.

    Returns
    -------
    allmms_pad_avg : xarray.DataArray
        Composite pitch angle distribution.

    See Also
    --------
    pyrfu.mms.get_eis_allt, pyrfu.mms.eis_pad, pyrfu.mms.eis_spec_combine_sc

    """
    # The spacecraft with the fewest time steps defines the common time line.
    n_times = [len(pad.time.data) for pad in pads]
    ref_pad = pads[np.argmin(n_times)]
    n_ref_times = len(ref_pad.time)

    # Common energy grid: truncate every instrument to the smallest number
    # of channels, then average the channel centers across spacecraft.
    n_chans = np.min([len(pad.energy.data) for pad in pads])
    truncated_energies = np.stack([pad.energy.data[:n_chans] for pad in pads])
    common_energy = np.nanmean(truncated_energies, axis=0)

    # Pitch-angle bin centers.
    n_pabins = len(ref_pad.theta.data)
    size_pabin = 180 / n_pabins
    pa_label = 180.0 * np.arange(n_pabins) / n_pabins + size_pabin / 2.0

    # Stack all spacecraft along a new trailing axis, truncating each to
    # the reference time line.
    stacked = np.zeros(
        (ref_pad.shape[0], ref_pad.shape[1], ref_pad.shape[2], len(pads)),
    )

    for idx, pad in enumerate(pads):
        stacked[..., idx] = pad.data[:n_ref_times, ...]

    # Average over spacecraft, ignoring NaNs.
    allmms_pad_avg = xr.DataArray(
        np.nanmean(stacked, axis=3),
        coords=[ref_pad.time.data, pa_label, common_energy],
        dims=["time", "theta", "energy"],
    )

    return allmms_pad_avg
31 | 32 | See Also 33 | -------- 34 | pyrfu.mms.get_eis_allt, pyrfu.mms.eis_pad 35 | 36 | """ 37 | _, spin_starts = np.unique(spin_nums.data, return_index=True) 38 | 39 | spin_times = np.zeros(len(spin_starts), dtype=">> from pyrfu import mms 39 | 40 | Define time interval 41 | 42 | >>> tint = ["2017-07-18T13:04:00.000", "2017-07-18T13:07:00.000"] 43 | 44 | Spacecraft index 45 | 46 | >>> mms_id = 2 47 | 48 | Load data from FEEPS 49 | 50 | >>> cps_i = mms.get_feeps_alleyes("CPSi_brst_l2", tint, mms_id) 51 | >>> cps_i_clean, _ = mms.feeps_split_integral_ch(cps_i) 52 | >>> cps_i_clean_sun_removed = mms.feeps_remove_sun(cps_i_clean) 53 | 54 | """ 55 | 56 | var = inp_dataset.attrs 57 | 58 | tint = list(np.datetime_as_string(inp_dataset.time.data[[0, -1]], "ns")) 59 | 60 | spin_sectors = inp_dataset["spinsectnum"] 61 | mask_sectors = read_feeps_sector_masks_csv(tint) 62 | 63 | out_dict = {} 64 | out_dict["spinsectnum"] = inp_dataset["spinsectnum"] 65 | 66 | for k in inp_dataset: 67 | out_dict[k] = inp_dataset[k] 68 | if mask_sectors.get(f"mms{var['mmsId']:d}_imask_{k}") is not None: 69 | bad_sectors = mask_sectors[f"mms{var['mmsId']:d}_imask_{k}"] 70 | 71 | for bad_sector in bad_sectors: 72 | this_bad_sector = np.where(spin_sectors == bad_sector)[0] 73 | if len(this_bad_sector) != 0: 74 | out_dict[k].data[this_bad_sector] = np.nan 75 | 76 | out = xr.Dataset(out_dict, attrs=var) 77 | 78 | out.attrs = var 79 | 80 | return out 81 | -------------------------------------------------------------------------------- /pyrfu/mms/feeps_sector_spec.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # 3rd party imports 5 | import numpy as np 6 | import xarray as xr 7 | 8 | __author__ = "Louis Richard" 9 | __email__ = "louisr@irfu.se" 10 | __copyright__ = "Copyright 2020-2023" 11 | __license__ = "MIT" 12 | __version__ = "2.4.2" 13 | __status__ = "Prototype" 14 | 15 | 16 | def 
def feeps_sector_spec(inp_alle):
    r"""Creates sector-spectrograms with FEEPS data (particle data organized
    by time and sector number)

    Parameters
    ----------
    inp_alle : xarray.Dataset
        Dataset of energy spectrum of all eyes.

    Returns
    -------
    out : xarray.Dataset
        Sector-spectrograms with FEEPS data for all eyes.

    """
    # Select the eye variables by their "top"/"bot" prefix. The previous
    # test `x[:3] in "top"` was substring membership, which also matches
    # short keys such as "t" or "to"; prefix equality is what is intended.
    sensors_eyes_top = [k for k in inp_alle if k[:3] == "top"]
    sensors_eyes_bot = [k for k in inp_alle if k[:3] == "bot"]
    sensors_eyes = [*sensors_eyes_top, *sensors_eyes_bot]

    sector_time = inp_alle["spinsectnum"].time.data
    sector_data = inp_alle["spinsectnum"].data

    # Carry over everything that is not an eye spectrum untouched.
    out_dict = {k: inp_alle[k] for k in inp_alle if k not in sensors_eyes}

    for sensors_eye in sensors_eyes:
        sensor_data = inp_alle[sensors_eye].data

        # A new spin starts wherever the sector counter wraps around.
        spin_starts = np.where(sector_data[:-1] > sector_data[1:])[0] + 1

        sector_spec = np.zeros((len(spin_starts), 64))

        # NOTE(review): samples before the first wrap are ignored and the
        # first output row stays zero (the first slice below is empty) —
        # presumably matches the heritage implementation; confirm.
        c_start = spin_starts[0]

        for i, spin in enumerate(spin_starts):
            # Sectors sampled during this spin.
            spin_sect = sector_data[c_start:spin]

            # Energy-averaged flux assigned to each visited sector.
            sector_spec[i, spin_sect] = np.nanmean(
                sensor_data[c_start:spin, :],
                axis=1,
            )

            c_start = spin

        out_dict[sensors_eye] = xr.DataArray(
            sector_spec,
            coords=[sector_time[spin_starts], np.arange(64)],
            dims=["time", "sectornum"],
        )

    out = xr.Dataset(out_dict)

    return out
def feeps_spin_avg(flux_omni, spin_sectors):
    r"""spin-average the omni-directional FEEPS energy spectra

    Parameters
    ----------
    flux_omni : xarray.DataArray
        Omni-direction flux.
    spin_sectors : xarray.DataArray
        Time series of the spin sectors.

    Returns
    -------
    spin_avg_flux : xarray.DataArray
        Spin averaged omni-directional flux.

    """

    # A spin boundary is wherever the sector counter stops increasing
    # (i.e. wraps around or repeats).
    spin_starts = np.where(spin_sectors[:-1] >= spin_sectors[1:])[0] + 1

    energies = flux_omni.energy.data
    data = flux_omni.data

    # One output row per detected spin start.
    # NOTE(review): the loop below iterates spin_starts[1:-1], so only rows
    # 0..len(spin_starts)-3 are filled and the trailing rows stay zero,
    # while averaging begins at spin_starts[0] rather than at sample 0.
    # Looks like a heritage port — confirm against the reference code.
    spin_avg = np.zeros([len(spin_starts), len(energies)])

    c_start = spin_starts[0]
    for i, spin_start in enumerate(spin_starts[1:-1]):
        # nanmean over an all-NaN slice emits a RuntimeWarning; silence it.
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=RuntimeWarning)
            spin_avg[i, :] = np.nanmean(
                data[c_start : spin_start + 1, :],
                axis=0,
            )
        c_start = spin_start + 1

    # Timestamps are those of the spin-start samples.
    spin_avg_flux = xr.DataArray(
        spin_avg,
        coords=[flux_omni.time.data[spin_starts], energies],
        dims=["time", "energy"],
    )
    return spin_avg_flux
def feeps_split_integral_ch(inp_dataset):
    r"""This function splits the last integral channel from the FEEPS spectra,
    creating 2 new DataArrays

    Parameters
    ----------
    inp_dataset : xarray.Dataset
        Energetic particles energy spectrum from FEEPS.

    Returns
    -------
    out : xarray.Dataset
        Energetic particles energy spectra with the integral channel removed.
    out_500kev : xarray.Dataset
        Integral channel that was removed.

    """
    spectra = {}
    integral = {}

    # Keep the spin sector counter with the spectra.
    spectra["spinsectnum"] = inp_dataset["spinsectnum"]

    for key in inp_dataset:
        data_var = inp_dataset[key]
        try:
            # All but the last (integral) energy channel.
            spectra[key] = data_var[:, :-1]

            # The integral channel alone.
            integral[key] = data_var[:, -1]
        except IndexError:
            # 1-d variables (e.g. "spinsectnum") cannot be sliced along
            # energy; leave them as handled above.
            pass

    out = xr.Dataset(spectra, attrs=inp_dataset.attrs)

    out_500kev = xr.Dataset(integral, attrs=inp_dataset.attrs)

    return out, out_500kev
def get_variable(file_path: str, cdf_name: str) -> DataArray:
    r"""Read field named cdf_name in file and convert to DataArray.

    Parameters
    ----------
    file_path : str
        Path of the cdf file.
    cdf_name : str
        Name of the target variable in the cdf file.

    Returns
    -------
    out : DataArray
        Target variable.

    """
    # Parse the CDF and pull out the requested variable.
    cdf_file = load(file_path)
    variable = cdf_file[cdf_name]

    values = variable.values
    attrs = _pycdfpp_attributes_to_dict(variable.attributes)

    # Wrap in a DataArray indexed by a plain integer coordinate "x".
    out = xr.DataArray(
        values,
        coords=[np.arange(len(values))],
        dims=["x"],
        attrs=attrs,
    )

    return out
def hpca_calc_anodes(inp, fov: list = None, method: str = "mean"):
    r"""Averages over anodes (or a given field of view) for HPCA ion data.

    Parameters
    ----------
    inp : xarray.DataArray
        Ion flux; [nt, npo16, ner63], looking direction
    fov : list of float, Optional
        Field of view, in angles, from 0-360. Default is [0., 360.].
    method : {"mean", "sum"}, Optional
        Method. Default is "mean".

    Returns
    -------
    out : xarray.DataArray
        HPCA ion flux averaged over the anodes within the selected field
        of view.

    """
    fov = [0.0, 360.0] if fov is None else fov

    assert method in ["mean", "sum"]

    # Anodes whose look direction falls inside the field of view.
    in_fov = np.where((anodes_theta >= fov[0]) & (anodes_theta <= fov[1]))[0]

    # Reduce over the anode axis with the requested operation.
    selected = inp.data[:, in_fov, :]
    if method == "mean":
        reduced = selected.mean(axis=1)
    else:
        reduced = selected.sum(axis=1)

    out = xr.DataArray(
        reduced,
        coords=[inp.time.data, inp.ccomp.data],
        dims=["time", "energy"],
    )

    return out
def hpca_energies():
    r"""Construct Hot Plasma Composition Analyser (HPCA) energy bins"""
    # 63 hard-coded channel energies, kept as an immutable tuple and
    # returned as a fresh list so callers can mutate their copy safely.
    channel_energies = (
        1.35500, 1.57180, 1.84280, 2.22220, 2.60160, 3.08940, 3.63140,
        4.28180, 5.04060, 5.96200, 6.99180, 8.23840, 9.75600, 11.4904,
        13.5500, 15.9890, 18.8616, 22.2762, 26.2328, 30.9482, 36.5308,
        43.0890, 50.7854, 59.9452, 70.6768, 83.4138, 98.3730, 116.042,
        136.855, 161.462, 190.459, 224.659, 264.984, 312.571, 368.723,
        434.955, 513.057, 605.197, 713.868, 842.051, 993.323, 1171.70,
        1382.10, 1630.28, 1923.07, 2268.43, 2675.80, 3156.28, 3723.11,
        4391.72, 5180.44, 6110.72, 7208.11, 8502.57, 10029.5, 11830.6,
        13955.2, 16461.4, 19417.5, 22904.6, 27017.9, 31869.8, 37593.1,
    )
    return list(channel_energies)
33 | 34 | """ 35 | 36 | az_times, start_az = [saz.time.data, saz.data] 37 | 38 | spin_starts = np.squeeze(np.argwhere(start_az == 0)) 39 | 40 | out_data = [] 41 | for i, spin in enumerate(spin_starts[:-1]): 42 | if method == "mean": 43 | out_data.append( 44 | inp[spin : spin_starts[i + 1]].mean(dim="time").data, 45 | ) 46 | elif method == "sum": 47 | out_data.append( 48 | inp[spin : spin_starts[i + 1]].sum(dim="time").data, 49 | ) 50 | else: 51 | raise ValueError("Invalid method") 52 | 53 | out_time = np.stack(az_times[spin_starts[:-1]]) 54 | out_data = np.stack(out_data) 55 | coords = [inp.coords[k].data for k in inp.dims[1:]] 56 | coords = [out_time, *coords] 57 | dims = list(inp.dims) 58 | 59 | out = xr.DataArray(out_data, coords=coords, dims=dims, attrs=inp.attrs) 60 | 61 | return out 62 | -------------------------------------------------------------------------------- /pyrfu/mms/list_files_ancillary_sdc.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # Built-in imports 5 | import warnings 6 | from datetime import datetime, timedelta 7 | 8 | # 3rd party imports 9 | import numpy as np 10 | 11 | # Local imports 12 | from .list_files_sdc import _login_lasp 13 | 14 | __author__ = "Louis Richard" 15 | __email__ = "louisr@irfu.se" 16 | __copyright__ = "Copyright 2020-2023" 17 | __license__ = "MIT" 18 | __version__ = "2.4.11" 19 | __status__ = "Prototype" 20 | 21 | 22 | def _construct_url_json_list(tint, mms_id, product, lasp_url): 23 | r"""Construct the url that return a json-formatted string of science 24 | filenames that are available for download according to: 25 | https://lasp.colorado.edu/mms/sdc/team/about/how-to/ 26 | """ 27 | 28 | tint = np.array(tint).astype(">> from pyrfu import mms 44 | >>> e_power = mms.whistler_b2e(b_power, freq, theta_k, b_mag, n_e) 45 | 46 | """ 47 | 48 | # Calculate plasma parameters 49 | pparam = plasma_calc(b_mag, n_e, n_e, n_e, n_e) 
50 | fpe, fce = [pparam.Fpe, pparam.Fce] 51 | 52 | # Check input 53 | if len(b2) != len(freq): 54 | raise IndexError("B2 and freq lengths do not agree!") 55 | 56 | # Calculate cold plasma parameters 57 | rr = 1 - fpe**2 / (freq * (freq - fce)) 58 | ll = 1 - fpe**2 / (freq * (freq + fce)) 59 | pp = 1 - fpe**2 / freq**2 60 | dd = 0.5 * (rr - ll) 61 | ss = 0.5 * (rr + ll) 62 | 63 | n2 = rr * ll * np.sin(theta_k) ** 2 64 | n2 += pp * ss * (1 + np.cos(theta_k) ** 2) 65 | n2 -= np.sqrt( 66 | (rr * ll - pp * ss) ** 2 * np.sin(theta_k) ** 4 67 | + 4 * (pp**2) * (dd**2) * np.cos(theta_k) ** 2, 68 | ) 69 | n2 /= 2 * (ss * np.sin(theta_k) ** 2 + pp * np.cos(theta_k) ** 2) 70 | 71 | e_temp1 = (pp - n2 * np.sin(theta_k) ** 2) ** 2.0 * ((dd / (ss - n2)) ** 2 + 1) + ( 72 | n2 * np.cos(theta_k) * np.sin(theta_k) 73 | ) ** 2 74 | e_temp2 = (dd / (ss - n2)) ** 2 * ( 75 | pp - n2 * np.sin(theta_k) ** 2 76 | ) ** 2 + pp**2 * np.cos(theta_k) ** 2 77 | 78 | e2 = (constants.speed_of_light**2 / n2) * e_temp1 / e_temp2 * b2.data 79 | e2 *= 1e-12 80 | 81 | return e2 82 | -------------------------------------------------------------------------------- /pyrfu/models/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # @Louis Richard 5 | from .igrf import igrf 6 | from .magnetopause_normal import magnetopause_normal 7 | 8 | __author__ = "Louis Richard" 9 | __email__ = "louisr@irfu.se" 10 | __copyright__ = "Copyright 2020-2023" 11 | __license__ = "MIT" 12 | __version__ = "2.4.2" 13 | __status__ = "Prototype" 14 | 15 | __all__ = ["igrf", "magnetopause_normal"] 16 | -------------------------------------------------------------------------------- /pyrfu/plot/add_position.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # 3rd party imports 5 | import numpy as np 6 | from matplotlib.axes import Axes 
7 | from matplotlib.dates import num2date 8 | from xarray.core.dataarray import DataArray 9 | 10 | # Local imports 11 | from ..pyrf.t_eval import t_eval 12 | 13 | __author__ = "Louis Richard" 14 | __email__ = "louisr@irfu.se" 15 | __copyright__ = "Copyright 2020-2023" 16 | __license__ = "MIT" 17 | __version__ = "2.4.2" 18 | __status__ = "Prototype" 19 | 20 | 21 | def add_position( 22 | ax: Axes, 23 | r_xyz: DataArray, 24 | spine: float = 20, 25 | position: str = "top", 26 | fontsize: float = 10, 27 | ) -> Axes: 28 | r"""Add extra axes to plot spacecraft position. 29 | 30 | Parameters 31 | ---------- 32 | ax : matplotlib.axes._axes.Axes 33 | Axis where to label the spacecraft position. 34 | r_xyz : xarray.DataArray 35 | Time series of the spacecraft position. 36 | spine : float, Optional 37 | Relative position of the axes. Default is 20. 38 | position : str, Optional 39 | Axis position wtr to the reference axis. Default is "top". 40 | fontsize : float, Optional 41 | xticks label font size. Default is 10. 42 | 43 | Returns 44 | ------- 45 | axr : matplotlib.axes._axes.Axes 46 | Twin axis with spacecraft position as x-axis label. 47 | 48 | """ 49 | 50 | x_lim = ax.get_xlim() 51 | 52 | t_ticks = [t_.replace(tzinfo=None) for t_ in num2date(ax.get_xticks())] 53 | t_ticks = np.array(t_ticks).astype(" Axes: 25 | r"""Line plot of 4 spacecraft time series. 26 | 27 | Parameters 28 | ---------- 29 | axis : matplotlib.axes._axes.Axes 30 | Axis 31 | inp_list : list of xarray.DataArray 32 | Time series to plot 33 | comp: int, Optional 34 | Index of the column to plot. Default is 0. 35 | colors: {'cluster', 'mms'}, Optional 36 | Color cycle to use. Default uses MMS 37 | 38 | Other Parameters 39 | ---------------- 40 | kwargs : dict 41 | Hash table of plot options. 42 | 43 | Returns 44 | ------- 45 | axis : matplotlib.axes._axes.Axes 46 | Axis with matplotlib.lines.Line2D. 
def plot_clines(axis, inp, yscale="log", cscale="log", cmap="jet", **kwargs):
    r"""Plot lines with color associated to the level.

    Parameters
    ----------
    axis : matplotlib.axes._axes.Axes
        Axis where to plot.
    inp : xarray.DataArray
        Time series as an energy spectrum to plot.
    yscale : str, Optional
        Scale of the yaxis. Default is "log"
    cscale : str, Optional
        Scale of the colormap. Default is "log".
    cmap : str, Optional
        Colormap. Default is "jet"
    kwargs : dict
        Plot options.

    Returns
    -------
    axis : matplotlib.axes._axes.Axes
        Updated axis.
    cax : matplotlib.axes._axes.Axes
        Axis holding the colorbar.

    Raises
    ------
    NotImplementedError
        If cscale is not "log".

    Other Parameters
    ----------------
    See pyrfu.plot.plot_line

    """

    # ``matplotlib.cm.get_cmap`` was deprecated in Matplotlib 3.7 and removed
    # in 3.9; the ``matplotlib.colormaps`` registry (available since 3.6) is
    # the supported replacement.
    from matplotlib import colormaps

    # Relative width/padding of the colorbar axis.
    pad = 0.01
    c_map = colormaps[cmap]

    # One line per energy channel, colored by its position in the colormap.
    for i, c in enumerate(c_map(np.linspace(0, 1, len(inp.energy.data)))):
        plot_line(axis, inp[:, i], color=c, **kwargs)

    # Attach a thin colorbar axis to the right edge of the plot axis.
    pos = axis.get_position()
    f = plt.gcf()
    cax = f.add_axes([pos.x0 + pos.width + pad, pos.y0, pad, pos.height])

    if cscale == "log":
        norm = LogNorm(vmin=inp.energy.data[0], vmax=inp.energy.data[-1])
    else:
        raise NotImplementedError

    # NOTE(review): ColorbarBase is a legacy alias of
    # matplotlib.colorbar.Colorbar; consider migrating when bumping the
    # Matplotlib minimum version.
    ColorbarBase(cax, cmap=c_map, norm=norm, orientation="vertical")

    if yscale == "log":
        axis.set_yscale("log")

    cax.set_axisbelow(False)

    return axis, cax
25 | inp : xarray.DataArray 26 | Input 2D data to plot. 27 | 28 | Other Parameters 29 | ---------------- 30 | **kwargs 31 | Keyword arguments for matplotlib.pyplot.contour. 32 | 33 | Returns 34 | ------- 35 | axis : matplotlib.pyplot.subplotsaxes 36 | Axis with contour plot. 37 | clines : matplotlib.contour.QuadContourSet 38 | Contour lines. 39 | 40 | """ 41 | 42 | if not isinstance(axis, mpl.axes.Axes): 43 | raise TypeError("ax must be a matplotlib.pyplot.subplotsaxes.") 44 | 45 | if not isinstance(inp, xr.DataArray): 46 | raise TypeError("inp must be a xarray.DataArray.") 47 | 48 | # Get dimensions of the input data 49 | dims = inp.dims 50 | x, y = [inp[dim].data for dim in dims] 51 | 52 | # Plot contour 53 | clines = axis.contour(x, y, inp.data.T, **kwargs) 54 | 55 | return axis, clines 56 | -------------------------------------------------------------------------------- /pyrfu/plot/plot_line.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # 3rd party imports 5 | import matplotlib as mpl 6 | import matplotlib.dates as mdates 7 | import matplotlib.pyplot as plt 8 | import matplotlib.ticker as mticker 9 | import numpy as np 10 | import xarray as xr 11 | 12 | __author__ = "Louis Richard" 13 | __email__ = "louisr@irfu.se" 14 | __copyright__ = "Copyright 2020-2023" 15 | __license__ = "MIT" 16 | __version__ = "2.4.2" 17 | __status__ = "Prototype" 18 | 19 | 20 | def plot_line(axis, inp, **kwargs): 21 | r"""Line plot of time series. 22 | 23 | Parameters 24 | ---------- 25 | axis : matplotlib.axes._axes.Axes 26 | Single axis where to plot inp. If None creates a new figure with a single axis. 27 | inp : xarray.DataArray 28 | Time series to plot 29 | 30 | Other Parameters 31 | ---------------- 32 | **kwargs 33 | Keyword arguments control the line properties. See matplotlib.lines.Line2D 34 | for reference. 
35 | 36 | Returns 37 | ------- 38 | axs : matplotlib.axes._axes.Axes 39 | Axis with matplotlib.lines.Line2D. 40 | 41 | """ 42 | 43 | if axis is None: 44 | _, axis = plt.subplots(1) 45 | else: 46 | if not isinstance(axis, mpl.axes.Axes): 47 | raise TypeError("axis must be a matplotlib.axes._axes.Axes") 48 | 49 | if not isinstance(inp, xr.DataArray): 50 | raise TypeError("inp must be an xarray.DataArray object!") 51 | 52 | if inp.data.ndim < 3: 53 | data = inp.data 54 | elif inp.data.ndim == 3: 55 | data = np.reshape( 56 | inp.data, 57 | (inp.shape[0], inp.shape[1] * inp.shape[2]), 58 | ) 59 | else: 60 | raise NotImplementedError( 61 | f"plot_line cannot handle {inp.data.ndim} dimensional data" 62 | ) 63 | 64 | time = inp.time 65 | axis.plot(time, data, **kwargs) 66 | 67 | if time.dtype == " DataArray: 25 | r"""Computes the input quantity at the center of mass of the MMS 26 | tetrahedron. 27 | 28 | Parameters 29 | ---------- 30 | b_list : Sequence of DataArray or Dataset 31 | List of the time series of the quantity for each spacecraft. 32 | 33 | Returns 34 | ------- 35 | b_avg : DataArray or Dataset 36 | Time series of the input quantity a the enter of mass of the 37 | MMS tetrahedron. 
38 | 39 | Raises 40 | ------ 41 | TypeError 42 | If b_list is not a list of DataArray or Dataset 43 | 44 | Examples 45 | -------- 46 | >>> from pyrfu.mms import get_data 47 | >>> from pyrfu.pyrf import avg_4sc 48 | 49 | Time interval 50 | 51 | >>> tint = ["2019-09-14T07:54:00.000", "2019-09-14T08:11:00.000"] 52 | 53 | Spacecraft indices 54 | 55 | >>> b_mms = [get_data("B_gse_fgm_srvy_l2", tint, i) for i in range(1, 5)] 56 | >>> b_xyz = avg_4sc(b_mms) 57 | 58 | """ 59 | # Check input type 60 | if not isinstance(b_list, list): 61 | raise TypeError("b_list must be a list") 62 | 63 | b_list_r = [] 64 | 65 | for b in b_list: 66 | if isinstance(b, DataArray): 67 | b_list_r.append(resample(b, b_list[0], f_s=calc_fs(b_list[0]))) 68 | else: 69 | raise TypeError("elements of b_list must be DataArray or Dataset") 70 | 71 | b_avg_data = np.zeros(b_list_r[0].shape) 72 | 73 | for b in b_list_r: 74 | b_avg_data += b.data 75 | 76 | # Average the resamples time series 77 | b_avg = xr.DataArray( 78 | b_avg_data / len(b_list_r), 79 | coords=b_list_r[0].coords, 80 | dims=b_list_r[0].dims, 81 | attrs=b_list_r[0].attrs, 82 | ) 83 | 84 | return b_avg 85 | -------------------------------------------------------------------------------- /pyrfu/pyrf/c_4_k.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Built-in imports 4 | from typing import Sequence 5 | 6 | # 3rd party imports 7 | import numpy as np 8 | from xarray.core.dataarray import DataArray 9 | 10 | # Local imports 11 | from pyrfu.pyrf.cross import cross 12 | from pyrfu.pyrf.dot import dot 13 | 14 | __author__ = "Louis Richard" 15 | __email__ = "louisr@irfu.se" 16 | __copyright__ = "Copyright 2020-2024" 17 | __license__ = "MIT" 18 | __version__ = "2.4.13" 19 | __status__ = "Prototype" 20 | 21 | 22 | def c_4_k(r_list: Sequence[DataArray]) -> Sequence[DataArray]: 23 | r"""Calculates reciprocal vectors in barycentric coordinates. 
def calc_agyro(p_xyz: DataArray) -> DataArray:
    r"""Compute agyrotropy coefficient as

    .. math::

        A\Phi = \frac{|P_{\perp 1} - P_{\perp 2}|}{P_{\perp 1}
        + P_{\perp 2}}


    Parameters
    ----------
    p_xyz : DataArray
        Time series of the pressure tensor.

    Returns
    -------
    DataArray
        Time series of the agyrotropy coefficient of the specie.

    Raises
    ------
    TypeError
        If input is not a xarray.DataArray.
    ValueError
        If input is not a time series of a tensor (n_time, 3, 3).

    Examples
    --------
    >>> from pyrfu import mms, pyrf

    Time interval

    >>> tint = ["2019-09-14T07:54:00.000","2019-09-14T08:11:00.000"]

    Load magnetic field and electron pressure tensor

    >>> b_xyz = mms.get_data("b_gse_fgm_srvy_l2", tint, 1)
    >>> p_xyz_e = mms.get_data("pe_gse_fpi_fast_l2", tint, 1)

    Rotate electron pressure tensor to field aligned coordinates

    >>> p_fac_e_qq = mms.rotate_tensor(p_xyz_e, "fac", b_xyz, "qq")

    Compute agyrotropy coefficient

    >>> agyro_e = pyrf.calc_agyro(p_fac_e_qq)

    """
    # Type guard: only xarray time series are supported.
    if not isinstance(p_xyz, xr.DataArray):
        raise TypeError("p_xyz must be a xarray.DataArray")

    # Shape guard: expect a (n_time, 3, 3) tensor series.
    if not (p_xyz.data.ndim == 3 and p_xyz.shape[1:] == (3, 3)):
        raise ValueError("p_xyz must be a time series of a tensor")

    # The two perpendicular pressures are the (1, 1) and (2, 2) diagonal
    # entries of the (field-aligned) tensor.
    perp_1 = p_xyz.data[:, 1, 1]
    perp_2 = p_xyz.data[:, 2, 2]

    # Normalized absolute difference of the two perpendicular pressures.
    coeff = np.abs(perp_1 - perp_2) / (perp_1 + perp_2)

    return ts_scalar(p_xyz.time.data, coeff)
def calc_dt(inp: Union[Dataset, DataArray]) -> float:
    r"""Compute time step of the input time series.

    Parameters
    ----------
    inp : DataArray or Dataset
        Time series of the input variable.

    Returns
    -------
    float
        Time step in seconds.

    """
    # Only xarray time series carry the ``time`` coordinate relied on below.
    if not isinstance(inp, (Dataset, DataArray)):
        raise TypeError("Input must be a time series")

    # Median of the successive time differences (datetime64[ns]), converted
    # from nanoseconds to seconds.
    deltas_ns = np.diff(inp.time.data)

    return float(np.median(deltas_ns).astype(np.float64) * 1e-9)
def cart2sph(x, y, z):
    r"""Cartesian to spherical coordinate transform.

    .. math::

        \alpha = \arctan \left( \frac{y}{x} \right) \\
        \beta = \arccos \left( \frac{z}{r} \right) \\
        r = \sqrt{x^2 + y^2 + z^2}

    with :math:`\alpha \in [0, 2\pi], \beta \in [0, \pi], r \geq 0`

    Parameters
    ----------
    x : float or array_like
        x-component of Cartesian coordinates
    y : float or array_like
        y-component of Cartesian coordinates
    z : float or array_like
        z-component of Cartesian coordinates

    Returns
    -------
    alpha : float or array_like
        Azimuth angle in radians
    beta : float or array_like
        Elevation angle in radians (with 0 denoting North pole)
    r : float or array_like
        Radius
    """

    # Radial distance from the origin.
    radius = np.sqrt(x**2 + y**2 + z**2)

    # Azimuth, measured in the x-y plane from the x-axis.
    azimuth = np.arctan2(y, x)

    # Elevation, measured from the North pole (z-axis).
    elevation = np.arccos(z / radius)

    return azimuth, elevation, radius
def cart2sph_ts(inp, direction_flag: int = 1):
    r"""Computes magnitude, theta and phi angle from column vector xyz
    (first column is x ....) theta is 0 at equator.
    direction_flag = -1 -> to make transformation in opposite direction

    Parameters
    ----------
    inp : xarray.DataArray
        Time series to convert.

    direction_flag : {1, -1}, Optional
        Set to 1 (default) to transform from cartesian to spherical
        coordinates. Set to -1 to transform from spherical to cartesian
        coordinates.

    Returns
    -------
    out : xarray.DataArray
        Input field in spherical/cartesian coordinate system.

    Raises
    ------
    TypeError
        If inp is not a xarray.DataArray.
    ValueError
        If inp is not a vector time series or direction_flag is not +/-1.

    """

    # Explicit raises instead of ``assert`` (asserts are stripped under
    # ``python -O``), consistent with the other pyrf validators.
    if not isinstance(inp, xr.DataArray):
        raise TypeError("inp must be a xarray.DataArray")

    # inp must be a (n_time, 3) vector time series.
    if inp.data.ndim != 2 or inp.shape[1] != 3:
        raise ValueError("inp must be a vector time series")

    if direction_flag not in (-1, 1):
        raise ValueError("direction_flag must be +/-1")

    if direction_flag == -1:
        # Spherical [r, theta, phi] (degrees, theta from the equator) to
        # cartesian [x, y, z].
        r_data = inp.data[:, 0]

        sin_the = np.sin(np.deg2rad(inp.data[:, 1]))
        cos_the = np.cos(np.deg2rad(inp.data[:, 1]))
        sin_phi = np.sin(np.deg2rad(inp.data[:, 2]))
        cos_phi = np.cos(np.deg2rad(inp.data[:, 2]))

        z_data = r_data * sin_the
        x_data = r_data * cos_the * cos_phi
        y_data = r_data * cos_the * sin_phi

        out_data = np.transpose(np.vstack([x_data, y_data, z_data]))

    else:
        # Cartesian [x, y, z] to spherical [r, theta, phi] with theta
        # measured from the equatorial (x-y) plane, angles in degrees.
        xy2 = inp.data[:, 0] ** 2 + inp.data[:, 1] ** 2

        r_data = np.sqrt(xy2 + inp.data[:, 2] ** 2)
        theta = np.rad2deg(np.arctan2(inp.data[:, 2], np.sqrt(xy2)))
        phi = np.rad2deg(np.arctan2(inp.data[:, 1], inp.data[:, 0]))

        out_data = np.transpose(np.vstack([r_data, theta, phi]))

    out = ts_vec_xyz(inp.time.data, out_data, inp.attrs)

    return out
def date_str(tint: list[str], fmt: int = 1) -> str:
    r"""Creates a string corresponding to time interval for output plot naming.

    Parameters
    ----------
    tint : list of str
        Time interval.
    fmt : int
        Format of the output :
            * 1 : "%Y%m%d_%H%M"
            * 2 : "%y%m%d%H%M%S"
            * 3 : "%Y%m%d_%H%M%S"_"%H%M%S"
            * 4 : "%Y%m%d_%H%M%S"_"%Y%m%d_%H%M%S"

    Returns
    -------
    out : str
        String corresponding to the time interval in the desired format.

    """

    # Input validation.
    assert isinstance(tint, list), "tint must be a list"
    assert isinstance(tint[0], str), "1st element of tint must be a string"
    assert isinstance(tint[1], str), "2nd element of tint must be a string"
    assert fmt in range(1, 5), "fmt must be 1, 2, 3, or 4"

    assert len(tint[0]) > 25, "tint[0] must be in %Y-%m-%dT%H:%M:%S.%f format"
    assert len(tint[1]) > 25, "tint[1] must be in %Y-%m-%dT%H:%M:%S.%f format"

    # Truncate to microsecond precision (26 characters) before parsing.
    start, stop = (
        datetime.strptime(t_[:26], "%Y-%m-%dT%H:%M:%S.%f") for t_ in tint
    )

    if fmt == 1:
        return start.strftime("%Y%m%d_%H%M")

    if fmt == 2:
        return start.strftime("%y%m%d%H%M%S")

    if fmt == 3:
        # Start date and time, end time only.
        return "_".join([start.strftime("%Y%m%d_%H%M%S"), stop.strftime("%H%M%S")])

    # fmt == 4 : full start and end timestamps.
    return "_".join(
        [start.strftime("%Y%m%d_%H%M%S"), stop.strftime("%Y%m%d_%H%M%S")],
    )
def datetime2iso8601(
    time: Union[list[datetime], NDArray[datetime], datetime]
) -> Union[list[str], str]:
    r"""Transforms datetime to TT2000 string format.

    Parameters
    ----------
    time : datetime
        Time to convert to tt2000 string.

    Returns
    -------
    tt2000 : str
        Time in TT20000 iso_8601 format.

    """

    # Sequences are converted element-wise.
    if isinstance(time, (np.ndarray, list)):
        return [datetime2iso8601(t_) for t_ in time]

    if not isinstance(time, datetime):
        raise TypeError("time must be array_like or datetime")

    # pandas.Timestamp exposes the nanosecond field that datetime lacks.
    stamp = pd.Timestamp(time)

    # Microsecond-precision string, then append the nanosecond remainder so
    # the result carries nine fractional digits.
    micro_str = stamp.strftime("%Y-%m-%dT%H:%M:%S.%f")

    return f"{micro_str}{stamp.nanosecond:03d}"
28 | 29 | Returns 30 | ------- 31 | time_iso8601 : numpy.ndarray 32 | Time in ISO 8601 format. 33 | 34 | See Also 35 | -------- 36 | pyrfu.pyrf.datetime642iso8601 37 | 38 | """ 39 | if isinstance(time, np.datetime64): 40 | time_datetime64 = np.atleast_1d(time).astype("datetime64[ns]") 41 | elif isinstance(time, np.ndarray) and isinstance(time[0], np.datetime64): 42 | time_datetime64 = time.astype("datetime64[ns]") 43 | else: 44 | raise TypeError("time must be numpy.datetime64 or numpy.ndarray") 45 | 46 | # Convert to string 47 | time_iso8601 = time_datetime64.astype(str) 48 | time_iso8601 = np.atleast_1d(np.squeeze(np.stack([time_iso8601]))) 49 | 50 | return time_iso8601 51 | -------------------------------------------------------------------------------- /pyrfu/pyrf/datetime642ttns.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # 3rd party imports 5 | import numpy as np 6 | from cdflib import cdfepoch 7 | 8 | # local imports 9 | from .datetime642iso8601 import datetime642iso8601 10 | 11 | __author__ = "Louis Richard" 12 | __email__ = "louisr@irfu.se" 13 | __copyright__ = "Copyright 2020-2023" 14 | __license__ = "MIT" 15 | __version__ = "2.4.2" 16 | __status__ = "Prototype" 17 | 18 | 19 | def datetime642ttns(time): 20 | r"""Converts datetime64 in ns units to epoch_tt2000 21 | (nanoseconds since J2000). 22 | 23 | Parameters 24 | ---------- 25 | time : ndarray 26 | Times in datetime64 format. 27 | 28 | Returns 29 | ------- 30 | time_ttns : ndarray 31 | Times in epoch_tt2000 format (nanoseconds since J2000). 
32 | 33 | """ 34 | 35 | # Convert to datetime64 in ns units 36 | time_iso8601 = datetime642iso8601(time) 37 | 38 | # Convert to ttns 39 | time_ttns = np.array([cdfepoch.parse(t_) for t_ in time_iso8601]) 40 | 41 | return time_ttns 42 | -------------------------------------------------------------------------------- /pyrfu/pyrf/datetime642unix.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # Built-in imports 5 | from typing import Union 6 | 7 | # 3rd party imports 8 | import numpy as np 9 | from numpy.typing import NDArray 10 | 11 | __author__ = "Louis Richard" 12 | __email__ = "louisr@irfu.se" 13 | __copyright__ = "Copyright 2020-2024" 14 | __license__ = "MIT" 15 | __version__ = "2.4.13" 16 | __status__ = "Prototype" 17 | 18 | 19 | def datetime642unix( 20 | time: Union[list[np.datetime64], NDArray[np.datetime64]] 21 | ) -> NDArray[np.float64]: 22 | r"""Converts datetime64 in ns units to unix time. 23 | 24 | Parameters 25 | ---------- 26 | time : ndarray 27 | Time in datetime64 format. 28 | 29 | Returns 30 | ------- 31 | time_unix : ndarray 32 | Time in unix format. 
def dot(inp1: DataArray, inp2: DataArray) -> DataArray:
    r"""Compute the scalar (dot) product of two time series of fields.

    The second field is resampled onto the time line of the first one
    before the product is taken.

    Parameters
    ----------
    inp1 : DataArray
        Time series of the first field X.
    inp2 : DataArray
        Time series of the second field Y.

    Returns
    -------
    out : DataArray
        Time series of the dot product Z = X.Y.

    Examples
    --------
    >>> from pyrfu import mms, pyrf

    Time interval

    >>> tint = ["2019-09-14T07:54:00.000", "2019-09-14T08:11:00.000"]

    Load magnetic field, electric field and spacecraft position

    >>> r_mms, b_mms, e_mms = [], [], []
    >>> for mms_id in range(1, 5):
    ...     r_mms.append(mms.get_data("R_gse", tint, mms_id))
    ...     b_mms.append(mms.get_data("B_gse_fgm_srvy_l2", tint, mms_id))
    ...     e_mms.append(mms.get_data("E_gse_edp_fast_l2", tint, mms_id))

    Compute current density using curlometer technique

    >>> j_xyz, _, _, _, _, _ = pyrf.c_4_j(r_mms, b_mms)

    Compute the electric field at the center of mass of the tetrahedron

    >>> e_xyz = pyrf.avg_4sc(e_mms)

    Compute J.E dissipation

    >>> je = pyrf.dot(j_xyz, e_xyz)

    """
    # Align the second input on the time line of the first one.
    inp2_res = resample(inp2, inp1)

    # Component-wise product summed over the vector components.
    product = (inp1.data * inp2_res.data).sum(axis=1)

    # Pack the result as a scalar time series on inp1's time line.
    return ts_scalar(inp1.time.data, product)
23 | 24 | Parameters 25 | ---------- 26 | v_xyz : xarray.DataArray 27 | Time series of the velocity/electric field. 28 | b_xyz : xarray.DataArray 29 | Time series of the magnetic field. 30 | flag : {"vxb", "exb"}, Optional 31 | Method flag : 32 | * "vxb" : computes convection electric field (Default). 33 | * "exb" : computes ExB drift velocity. 34 | 35 | Returns 36 | ------- 37 | out : xarray.DataArray 38 | Time series of the convection electric field/ExB drift velocity. 39 | 40 | Examples 41 | -------- 42 | >>> from pyrfu import mms, pyrf 43 | 44 | Time interval 45 | 46 | >>> tint = ["2019-09-14T07:54:00.000", "2019-09-14T08:11:00.000"] 47 | 48 | Spacecraft index 49 | 50 | >>> mms_id = 1 51 | 52 | Load magnetic field and electric field 53 | 54 | >>> b_xyz = mms.get_data("b_gse_fgm_srvy_l2", tint, mms_id) 55 | >>> e_xyz = mms.get_data("e_gse_edp_fast_l2", tint, mms_id) 56 | 57 | Compute ExB drift velocity 58 | 59 | >>> v_xyz_exb = pyrf.e_vxb(e_xyz, b_xyz,"ExB") 60 | 61 | """ 62 | 63 | assert isinstance(flag, str) and flag.lower() in ["exb", "vxb"], "Invalid flag" 64 | assert isinstance(b_xyz, xr.DataArray), "b_xyz must be a xarray.DataArray" 65 | 66 | if isinstance(v_xyz, xr.DataArray): 67 | b_xyz = resample(b_xyz, v_xyz) 68 | else: 69 | raise TypeError("v_xyz must be xarray.DataArray or array_like constant vector") 70 | 71 | if flag.lower() == "exb": 72 | res = 1e3 * np.cross(v_xyz.data, b_xyz.data, axis=1) 73 | res /= np.linalg.norm(b_xyz.data, axis=1)[:, None] ** 2 74 | 75 | attrs = {"UNITS": "km/s", "FIELDNAM": "Velocity", "LABLAXIS": "V"} 76 | 77 | else: 78 | res = -1e-3 * np.cross(v_xyz.data, b_xyz.data) 79 | 80 | attrs = { 81 | "UNITS": "mV/s", 82 | "FIELDNAM": "Electric field", 83 | "LABLAXIS": "E", 84 | } 85 | 86 | out = ts_vec_xyz(b_xyz.time.data, res, attrs) 87 | 88 | return out 89 | -------------------------------------------------------------------------------- /pyrfu/pyrf/end.py: 
def end(inp):
    """Gives the last time of the time series in unix format.

    Parameters
    ----------
    inp : xarray.DataArray or xarray.Dataset
        Time series of the input variable.

    Returns
    -------
    out : float
        Value of the last time in unix format.

    """
    message = "inp must be a xarray.DataArray or xarray.Dataset"
    assert isinstance(inp, (xr.DataArray, xr.Dataset)), message

    # Last timestamp: datetime64[ns] -> integer nanoseconds -> seconds.
    last_ns = inp.time.data[-1].astype(np.int64)

    return last_ns / 1e9
def find_closest(inp1, inp2):
    r"""Finds pairs that are closest to each other in two time series.

    Points of either series that are not the nearest neighbour of any
    point of the other series are iteratively removed, until every
    remaining point is the closest match of its counterpart.

    Parameters
    ----------
    inp1 : ndarray
        Vector with time instants.
    inp2 : ndarray
        Vector with time instants.

    Returns
    -------
    t1new : ndarray
        Times of inp1 that are closest to a point of inp2.
    t2new : ndarray
        Times of inp2 that are closest to a point of inp1.
    ind1new : ndarray
        Indices of t1new into the original inp1.
    ind2new : ndarray
        Indices of t2new into the original inp2.

    """
    t1_orig, t2_orig = inp1, inp2

    while True:
        # Nearest-neighbour interpolators map values of one series onto
        # indices of the other. Lengths are recomputed on every pass
        # because the arrays shrink as points are deleted.
        tckt1 = interpolate.interp1d(
            inp1,
            np.arange(len(inp1)),
            kind="nearest",
            fill_value="extrapolate",
        )
        tckt2 = interpolate.interp1d(
            inp2,
            np.arange(len(inp2)),
            kind="nearest",
            fill_value="extrapolate",
        )

        # Flag points that are the nearest neighbour of some point of the
        # other series. interp1d returns floats, so cast to integers
        # before using them as indices.
        flag_t1 = np.zeros(inp1.shape)
        flag_t1[tckt1(inp2).astype(np.int64)] = 1

        flag_t2 = np.zeros(inp2.shape)
        flag_t2[tckt2(inp1).astype(np.int64)] = 1

        ind_zeros_t1 = np.where(flag_t1 == 0)[0]
        ind_zeros_t2 = np.where(flag_t2 == 0)[0]

        # Remove unmatched points one series at a time; stop when both
        # series contain only mutual nearest neighbours.
        if ind_zeros_t1.size:
            inp1 = np.delete(inp1, ind_zeros_t1)
        elif ind_zeros_t2.size:
            inp2 = np.delete(inp2, ind_zeros_t2)
        else:
            break

    # Map the surviving points back onto indices of the original series.
    tckt1_orig = interpolate.interp1d(t1_orig, np.arange(len(t1_orig)), kind="nearest")
    tckt2_orig = interpolate.interp1d(t2_orig, np.arange(len(t2_orig)), kind="nearest")

    ind1new = tckt1_orig(inp1).astype(np.int64)
    ind2new = tckt2_orig(inp2).astype(np.int64)

    return inp1, inp2, ind1new, ind2new
def gse2gsm(inp, flag: str = "gse>gsm"):
    r"""Converts between GSE and GSM coordinates.

    Parameters
    ----------
    inp : xarray.DataArray
        Time series of the input vector in GSE (GSM) coordinates.
    flag : {"gse>gsm", "gsm>gse"}, Optional
        Flag for conversion direction. Default is "gse>gsm"

    Returns
    -------
    out : xarray.DataArray
        Time series of the input vector in GSM (GSE) coordinates.

    See also
    --------
    pyrfu.pyrf.geocentric_coordinate_transformation

    """
    # Sanity checks on the input time series and the direction flag.
    assert isinstance(inp, xr.DataArray), "inp must be a xarray.DataArray"
    assert inp.ndim == 2 and inp.shape[1] == 3, "inp must be a vector"

    flag_is_valid = isinstance(flag, str) and flag.lower() in ["gse>gsm", "gsm>gse"]
    assert flag_is_valid, "flag must be a string gse>gsm or gsm>gse"

    # Delegate the actual rotation to the generic transformation routine.
    return cotrans(inp, flag)
While bin width is computed to be optimal 31 | based on the actual data within `range`, the bin count will fill 32 | the entire range including portions containing no data. 33 | weights : array_like, Optional 34 | An array of weights, of the same shape as `inp`. Each value in 35 | `inp` only contributes its associated weight towards the bin count 36 | (instead of 1). If `density` is True, the weights are 37 | normalized, so that the integral of the density over the range 38 | remains 1. 39 | density : bool, Optional 40 | If ``False``, the result will contain the number of samples in each 41 | bin. If ``True``, the result is the value of the probability *density* 42 | function at the bin, normalized such that the *integral* over the 43 | range is 1. Note that the sum of the histogram values will not be 44 | equal to 1 unless bins of unity width are chosen; it is not a 45 | probability mass function. 46 | 47 | Returns 48 | ------- 49 | out : xarray.DataArray 50 | 1D distribution of the input time series. 
def increments(inp, scale: int = 10):
    r"""Returns the increments of a time series.

    .. math:: y = x_{i+s} - x_i

    where :math:`s` is the scale.

    Parameters
    ----------
    inp : xarray.DataArray
        Input time series.
    scale : int, Optional
        Scale at which to compute the increments. Default is 10.

    Returns
    -------
    kurt : ndarray
        Kurtosis of the increments, one per product, using Pearson's
        definition (value of 3 for a normal distribution).
    result : xarray.DataArray
        An xarray containing the time series increments, one per
        product in the original time series.

    """
    assert isinstance(inp, xr.DataArray), "inp must be a xarray.DataArray"
    assert inp.ndim < 4, "inp must be a scalar, vector or tensor"

    # Work on a 2d view so scalar series are handled like one-component
    # vectors.
    if inp.ndim == 1:
        data = inp.data[:, np.newaxis]
    else:
        data = inp.data

    # Compute the (signed) increments at the requested scale.
    delta_inp = data[scale:, ...] - data[:-scale, ...]

    # Kurtosis of the increments. fisher=False selects Pearson's
    # definition (normal distribution ==> 3.0).
    kurt = kurtosis(delta_inp, axis=0, fisher=False)

    times, *comp = [inp.coords[dim].data for dim in inp.dims]

    # The increment series is shorter than the input by `scale` samples;
    # keep the leading portion of the time coordinate.
    result = xr.DataArray(
        np.squeeze(delta_inp),
        coords=[times[0 : len(delta_inp)], *comp],
        dims=inp.dims,
        attrs=inp.attrs,
    )

    return kurt, result
def iso86012datetime(
    time: Union[list[str], NDArray[np.str_]]
) -> list[datetime.datetime]:
    r"""Convert ISO 8601 time to datetime.

    Parameters
    ----------
    time : array_like
        Time

    Returns
    -------
    time_datetime : list of datetime.datetime
        Time in datetime format.

    """
    # Only lists and numpy arrays of strings are accepted.
    if not isinstance(time, (list, np.ndarray)):
        raise TypeError("time must be list or numpy.ndarray")

    time_array = np.asarray(time)

    # Normalize the strings by round-tripping through datetime64[ns] so
    # every entry carries the full nanosecond field.
    iso_strings = time_array.astype("datetime64[ns]").astype(str)

    # datetime resolves at most microseconds: drop the last three
    # (nanosecond) digits before parsing.
    fmt = "%Y-%m-%dT%H:%M:%S.%f"

    return [datetime.datetime.strptime(iso_[:-3], fmt) for iso_ in iso_strings]
def iso86012timevec(time):
    r"""Convert ISO 8601 time string into time vector.

    Parameters
    ----------
    time : ndarray or list or str
        Time in ISO 8601 format YYYY-MM-DDThh:mm:ss.mmmuuunnn.

    Returns
    -------
    time_vec : numpy.ndarray
        Time vectors [year, month, day, hour, minute, second,
        millisecond, microsecond, nanosecond], one row per input time.

    See Also
    --------
    pyrfu.pyrf.iso86012datetime64

    """
    # Named groups for each field of the YYYY-MM-DDThh:mm:ss.mmmuuunnn
    # layout; the fractional second is split into ms/us/ns triplets.
    iso_8601 = (
        r"(?P<year>[0-9]{4})-(?P<month>[0-9]{2})-(?P<day>[0-9]{2})"
        r"T(?P<hour>[0-9]{2}):(?P<minute>[0-9]{2})"
        r":(?P<second>[0-9]{2})\.(?P<millisecond>[0-9]{3})"
        r"(?P<microsecond>[0-9]{3})(?P<nanosecond>[0-9]{3})"
    )

    # Define parser
    fmt = re.compile(iso_8601)

    # Make sure time is a 1d array
    time = np.atleast_1d(time)

    # One row of integer fields per input string.
    time_vec = [[int(p_) for p_ in fmt.match(t_).groups()] for t_ in time]

    return np.array(time_vec)
def l_shell(r_xyz):
    r"""Compute spacecraft position L Shell for a dipole magnetic field
    according to IRGF.

    Parameters
    ----------
    r_xyz : xarray.DataArray
        Time series of the spacecraft position. Must have a
        "COORDINATES_SYSTEM" attributes.

    Returns
    -------
    out : xarray.DataArray
        Time series of the spacecraft position L-Shell.

    """
    # Rotate the spacecraft position into the solar magnetic (SM) frame.
    r_sm = cotrans(r_xyz, "sm")

    # Geomagnetic latitude from the SM components:
    # tan(lambda) = z / rho, with rho the distance to the dipole axis.
    rho = np.linalg.norm(r_sm[:, :2], axis=1)
    mlat = np.arctan(r_sm[:, 2] / rho)

    # Dipole field-line equation: r = L cos^2(lambda).
    radius = np.linalg.norm(r_sm, axis=1)

    return radius / np.cos(mlat) ** 2
def match_phibe_v(b_0, b_z, int_e_dt, n, v):
    r"""Get propagation velocity by matching dBpar and phi.

    Used together with irf_match_phibe_dir. Finds best match in
    amplitude given B0, dB_par and phi, with propagation direction
    implied, for specified densities and velocities given as vectors.
    Returns a matrix of correlations and the two potentials that were
    correlated.

    Parameters
    ----------
    b_0 : array_like
        Average background magnetic field.
    b_z : array_like
        Parallel wave magnetic field.
    int_e_dt : array_like
        Potential.
    n : array_like
        Vector of densities (assumed cm^-3, converted to m^-3 below).
    v : array_like
        Vector of velocities.

    Returns
    -------
    corr_mat : numpy.ndarray
        Correlation matrix(nn x nv).
    phi_b : numpy.ndarray
        B0 * dB_par / n_e * e * mu0
    phi_e : numpy.ndarray
        int(E) dt * v(dl=-vdt = > -dl = vdt)

    """
    # Define constants
    mu0 = constants.mu_0
    q_e = constants.elementary_charge

    # Density in #/m^3. Work on an explicit converted copy so the
    # caller's array (or time series) is NOT modified in place; also
    # accepts plain ndarrays, not only objects exposing a .data ndarray.
    n_si = 1e6 * np.asarray(getattr(n, "data", n), dtype=np.float64)

    # Allocate correlations matrix rows: n, cols: v
    nn_, nv_ = len(n_si), len(v)
    corr_mat = np.zeros((nn_, nv_))

    # Setup potentials
    phi_e = int_e_dt * v  # depends on v
    phi_b = np.transpose(b_z[:, 0] * b_0 * 1e-18 / (mu0 * q_e * n_si[:, None]))

    # Get correlation
    for k, p in itertools.product(range(nn_), range(nv_)):
        corr_mat[k, p] = np.sum(np.log10(abs(phi_e[:, p]) / phi_b[:, k]))

    return corr_mat, phi_b, phi_e
36 | dipole_axis : xarray.DataArray, Optional 37 | Earth magnetic dipole axis. 38 | 39 | Returns 40 | ------- 41 | out : xarray.DataArray 42 | Input field in mean field coordinates. 43 | 44 | """ 45 | 46 | if dipole_axis is not None: 47 | assert isinstance(dipole_axis, xr.DataArray) 48 | flag_dipole = True 49 | 50 | dipole_axis = resample(dipole_axis, inp) 51 | 52 | else: 53 | flag_dipole = False 54 | 55 | # Make sure that spacecraft position and magnetic field sampling matches 56 | # input sampling 57 | r_xyz = resample(r_xyz, inp) 58 | b_xyz = resample(b_xyz, inp) 59 | 60 | b_hat = normalize(b_xyz) 61 | 62 | if not flag_dipole: 63 | bxr = cross(b_hat, r_xyz) 64 | bxr /= np.linalg.norm(bxr, axis=1)[:, None] 65 | else: 66 | fact = -1 * np.ones(len(b_xyz)) 67 | fact[np.sum(b_xyz * r_xyz) > 0] = 1 68 | bxr = np.cross(dipole_axis, b_xyz) * fact[:, None] 69 | bxr /= np.linalg.norm(bxr, axis=1)[:, None] 70 | 71 | bxrxb = np.cross(bxr, b_hat) 72 | 73 | out_data = np.zeros(inp.data.shape) 74 | out_data[:, 0] = np.sum(bxrxb * inp, axis=1) 75 | out_data[:, 1] = np.sum(bxr * inp, axis=1) 76 | out_data[:, 2] = np.sum(b_hat * inp, axis=1) 77 | 78 | out = ts_vec_xyz(inp.time.data, out_data, inp.attrs) 79 | 80 | return out 81 | -------------------------------------------------------------------------------- /pyrfu/pyrf/mean_field.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # Built-in imports 5 | from typing import Tuple, Union 6 | 7 | # 3rd party imports 8 | import numpy as np 9 | import xarray as xr 10 | from numpy.typing import NDArray 11 | from xarray.core.dataarray import DataArray 12 | 13 | # Local imports 14 | from pyrfu.pyrf.ts_vec_xyz import ts_vec_xyz 15 | 16 | __author__ = "Louis Richard" 17 | __email__ = "louisr@irfu.se" 18 | __copyright__ = "Copyright 2020-2024" 19 | __license__ = "MIT" 20 | __version__ = "2.4.13" 21 | __status__ = "Prototype" 22 | 23 | 
NDArrayFloats = NDArray[Union[np.float32, np.float64]]


def mean_field(inp: DataArray, deg: int) -> Tuple[DataArray, DataArray]:
    r"""Estimate the mean and wave fields.

    The mean field is computed by fitting a polynomial of degree `deg` to
    each component of the input data. The wave field is then computed as the
    difference between the input data and the mean field.

    Parameters
    ----------
    inp : DataArray
        Input data (time series of a vector field).
    deg : int
        Degree of the polynomial.

    Returns
    -------
    Tuple
        Mean field and wave field.

    Raises
    ------
    TypeError
        If input is not a xarray.DataArray.

    """
    # Checking input
    if not isinstance(inp, xr.DataArray):
        raise TypeError("Input must be a xarray.DataArray")

    # Extracting time and data
    time: NDArray[np.datetime64] = inp.time.data
    data: NDArray[np.float64] = inp.data.astype(np.float64)  # force to double precision

    # Abscissa for the fit. int64 (not uint16) so that series longer than
    # 65535 samples do not silently wrap around and corrupt the fit.
    time_ints: NDArray[np.int64] = np.arange(len(time), dtype=np.int64)

    # Preallocating output
    inp_mean: NDArray[np.float64] = np.zeros_like(data, dtype=np.float64)
    inp_wave: NDArray[np.float64] = np.zeros_like(data, dtype=np.float64)

    for i in range(data.shape[1]):
        # Least-squares polynomial fit of the i-th component
        polynomial_coeffs: NDArray[np.float64] = np.polyfit(time_ints, data[:, i], deg)

        # Mean field is the fitted polynomial, wave field the residual
        inp_mean[:, i] = np.polyval(polynomial_coeffs, time_ints)
        inp_wave[:, i] = data[:, i] - inp_mean[:, i]

    # Wrap back into time series
    inp_mean_ts: DataArray = ts_vec_xyz(inp.time.data, inp_mean)
    inp_wave_ts: DataArray = ts_vec_xyz(inp.time.data, inp_wave)

    return inp_mean_ts, inp_wave_ts
def new_xyz(inp, trans_mat):
    r"""Transform the input field to the new frame.

    Parameters
    ----------
    inp : xarray.DataArray
        Time series of the input field in the original coordinate system.
    trans_mat : array_like
        Transformation matrix.

    Returns
    -------
    out : xarray.DataArray
        Time series of the input in the new frame.

    Examples
    --------
    >>> from pyrfu import mms, pyrf
    >>> tint = ["2019-09-14T07:54:00.000", "2019-09-14T08:11:00.000"]
    >>> mms_id = 1
    >>> b_xyz = mms.get_data("B_gse_fgm_srvy_l2", tint, mms_id)
    >>> e_xyz = mms.get_data("E_gse_edp_fast_l2", tint, mms_id)
    >>> b_lmn, l, mva = pyrf.mva(b_xyz)
    >>> e_lmn = pyrf.new_xyz(e_xyz, mva)

    """
    values = inp.data

    # 3-D (tensor) series are rotated as M^T A M; otherwise the samples
    # are rotated as (M^T a^T)^T.
    if values.ndim == 3:
        rotated = np.matmul(np.matmul(trans_mat.T, values), trans_mat)
    else:
        rotated = np.matmul(trans_mat.T, values.T).T

    return xr.DataArray(rotated, coords=inp.coords, dims=inp.dims, attrs=inp.attrs)
def norm(inp):
    r"""Computes the magnitude of the input field.

    Parameters
    ----------
    inp : xarray.DataArray
        Time series of the input field.

    Returns
    -------
    out : xarray.DataArray
        Time series of the magnitude of the input field.

    Examples
    --------
    >>> from pyrfu import mms, pyrf
    >>> tint = ["2019-09-14T07:54:00.000", "2019-09-14T08:11:00.000"]
    >>> mms_id = 1
    >>> b_xyz = mms.get_data("B_gse_fgm_srvy_l2", tint, mms_id)
    >>> b_mag = pyrf.norm(b_xyz)

    """
    # Row-wise Euclidean norm of the samples
    magnitude = np.linalg.norm(inp.data, axis=1)

    # Wrap into a scalar time series, keeping the original metadata
    return ts_scalar(inp.time.data, magnitude, attrs=inp.attrs)
def optimize_nbins_1d(x, n_min: int = 1, n_max: int = 100):
    r"""Estimates the number of bins for 1d histogram that minimizes the
    risk function in [1]_ , obtained by direct decomposition of the MISE
    following the method described in [2]_ .

    Parameters
    ----------
    x : xarray.DataArray
        Input time series
    n_min : int, Optional
        Minimum number of bins. Default is 1.
    n_max : int, Optional
        Maximum number of bins (inclusive). Default is 100.

    Returns
    -------
    opt_n_x : int
        Number of bins that minimizes the cost function.

    References
    ----------
    .. [1] Rudemo, M. (1982) Empirical Choice of Histograms and Kernel Density
        Estimators. Scandinavian Journal of Statistics, 9, 65-78.

    .. [2] Shimazaki H. and Shinomoto S., A method for selecting the bin size
        of a time histogram Neural Computation (2007) Vol. 19(6), 1503-1527
    """

    x_min, x_max = np.min(x.data), np.max(x.data)

    # Candidate bin counts. n_max is documented as the maximum number of
    # bins, so the range is inclusive (arange(n_min, n_max) silently
    # dropped n_max).
    ns_x = np.arange(n_min, n_max + 1)

    # Bin size for each candidate count
    ds_x = (x_max - x_min) / ns_x

    cs_x = np.zeros(ds_x.shape)

    # Computation of the cost function for each candidate
    for i, n_x in enumerate(ns_x):
        # The mean and the variance are computed from the event counts in
        # all the bins of the 1-dimensional histogram.
        k_i, _ = np.histogram(x, bins=n_x)
        k_ = np.mean(k_i)  # Mean of event count
        v_ = np.var(k_i)  # Variance of event count (biased, ddof=0)

        # The cost function
        cs_x[i] = (2 * k_ - v_) / ds_x[i] ** 2

    # Optimal bin size selection: first candidate with minimal cost
    idx_min = np.argmin(cs_x)

    opt_n_x = int(ns_x[idx_min])

    return opt_n_x
def pres_anis(p_fac, b_xyz):
    r"""Compute pressure anisotropy factor:

    .. math::

        \mu_0 \frac{P_\parallel - P_\perp}{|\mathbf{B}|^2}

    Parameters
    ----------
    p_fac : xarray.DataArray
        Time series of the pressure tensor in field aligned coordinates.
    b_xyz : xarray.DataArray
        Time series of the background magnetic field.

    Returns
    -------
    p_anis : xarray.DataArray
        Time series of the pressure anisotropy.

    See also
    --------
    pyrfu.mms.rotate_tensor : Rotates pressure or temperature tensor
        into another coordinate system.

    Examples
    --------
    >>> from pyrfu import mms, pyrf
    >>> tint = ["2015-10-30T05:15:20.000", "2015-10-30T05:16:20.000"]
    >>> mms_id = 1
    >>> b_xyz = mms.get_data("B_gse_fgm_srvy_l2", tint, mms_id)
    >>> p_xyz_i = mms.get_data("Pi_gse_fpi_fast_l2", tint, mms_id)
    >>> p_fac_i = mms.rotate_tensor(p_xyz_i, "fac", b_xyz)
    >>> p_anis = pyrf.pres_anis(p_fac_i, b_xyz)

    """
    # Parallel pressure and perpendicular pressure (average of the two
    # perpendicular diagonal terms of the FAC tensor)
    p_para = p_fac.data[:, 0, 0]
    p_perp = (p_fac.data[:, 1, 1] + p_fac.data[:, 2, 2]) / 2

    # Magnetic pressure at the pressure tensor sampling
    # (1e-18 converts nT^2 to T^2)
    b_res = resample(b_xyz, p_fac)
    b_mag = np.linalg.norm(b_res.data, axis=1)
    p_mag = 1e-18 * b_mag**2 / (2 * constants.mu_0)

    # Pressure anisotropy (1e-9 presumably converts nPa to Pa — consistent
    # with the nT treatment above)
    anisotropy = (1e-9 * (p_para - p_perp) / 2) / p_mag

    return ts_scalar(p_fac.time.data, anisotropy)
def read_cdf(path: str) -> dict:
    r"""Reads a .cdf file and returns a dictionary with the fields contained in
    the file.

    Parameters
    ----------
    path : str
        Path to the .cdf file.

    Returns
    -------
    dict
        Hash table with fields contained in the .cdf file.

    """
    # Load the CDF file
    cdf_file = pycdfpp.load(path)

    # zVariable names available in the file
    zvar_names = [item[0] for item in cdf_file.items()]

    out_dict = {}

    for name in zvar_names:
        # Drop singleton dimensions and build simple index coordinates
        values = np.squeeze(cdf_file[name].values)
        index_coords = [np.arange(size) for size in values.shape]

        # Store under the lower-cased variable name
        out_dict[name.lower()] = xr.DataArray(values, coords=index_coords)

    return out_dict
34 | 35 | """ 36 | 37 | # Points separated in time by less than 100ns are treated as repeats 38 | threshold = 100 39 | 40 | if isinstance(inp, xr.DataArray): 41 | diffs = np.diff(inp.time.data.astype(np.int64) * 1e-9) 42 | 43 | no_repeat = np.ones(len(inp)) 44 | no_repeat[diffs < threshold] = 0 45 | 46 | new_time = inp.time.data[no_repeat == 1] 47 | new_inp = inp.data[no_repeat == 1, :] 48 | 49 | if new_inp.ndim == 1: 50 | new_data = ts_scalar(new_time, new_inp) 51 | elif new_inp.ndim == 2: 52 | new_data = ts_vec_xyz(new_time, new_inp) 53 | elif new_inp.ndim == 3: 54 | new_data = ts_vec_xyz(new_time, new_inp) 55 | else: 56 | raise TypeError("Invalid data dimension") 57 | 58 | elif isinstance(inp, dict) and ("time" in inp): 59 | if inp["time"].dtype == " DataArray: 27 | r"""Computes trace of the time series of 2nd order tensors. 28 | 29 | Parameters 30 | ---------- 31 | inp : DataArray 32 | Time series of the input 2nd order tensor. 33 | 34 | Returns 35 | ------- 36 | DataArray 37 | Time series of the trace of the input tensor. 38 | 39 | Raises 40 | ------ 41 | TypeError 42 | If inp is not a xarray.DataArray. 43 | ValueError 44 | If inp is not a time series of a tensor. 
45 | 46 | Examples 47 | -------- 48 | >>> from pyrfu import mms, pyrf 49 | 50 | Time interval 51 | 52 | >>> tint = ["2015-10-30T05:15:20.000", "2015-10-30T05:16:20.000"] 53 | 54 | Spacecraft index 55 | 56 | >>> mms_id = 1 57 | 58 | Load magnetic field and ion temperature 59 | 60 | >>> b_xyz = mms.get_data("B_gse_fgm_srvy_l2", tint, mms_id) 61 | >>> t_xyz_i = mms.get_data("Ti_gse_fpi_fast_l2", tint, mms_id) 62 | 63 | Rotate to ion temperature tensor to field aligned coordinates 64 | 65 | >>> t_xyzfac_i = mms.rotate_tensor(t_xyz_i, "fac", b_xyz, "pp") 66 | 67 | Compute scalar temperature 68 | 69 | >>> t_i = pyrf.trace(t_xyzfac_i) 70 | 71 | """ 72 | 73 | # Check input type 74 | if not isinstance(inp, xr.DataArray): 75 | raise TypeError("inp must be a xarray.DataArray") 76 | 77 | # Check that inp is a tensor 78 | if inp.ndim != 3 or inp.shape[1] != 3 or inp.shape[2] != 3: 79 | raise ValueError("inp must be a time series of a tensor") 80 | 81 | # Get diagonal elements 82 | inp_xx: NDArrayFloats = inp.data[:, 0, 0] 83 | inp_yy: NDArrayFloats = inp.data[:, 1, 1] 84 | inp_zz: NDArrayFloats = inp.data[:, 2, 2] 85 | 86 | # Compute trace 87 | out_data = inp_xx + inp_yy + inp_zz 88 | 89 | # Construct time series 90 | out = ts_scalar(inp.time.data, out_data, inp.attrs) 91 | 92 | return out 93 | -------------------------------------------------------------------------------- /pyrfu/pyrf/transformation_indices.json: -------------------------------------------------------------------------------- 1 | { 2 | "gse>gsm": [3], 3 | "gsm>gse": [-3], 4 | "gse>gei": [-2], 5 | "gse>geo": [1, -2], 6 | "gse>sm": [4, 3], 7 | "gse>mag": [5, 1, -2], 8 | "gsm>gei": [-2, -3], 9 | "gsm>geo": [1, -2, -3], 10 | "gsm>sm": [4], 11 | "gsm>mag": [5, 1, -2, -3], 12 | "sm>gei": [-2, -3, -4], 13 | "sm>geo": [1, -2, -3, -4], 14 | "sm>gse": [-3, -4], 15 | "sm>gsm": [-4], 16 | "sm>mag": [5, 1, -2, -3, -4], 17 | "mag>gei": [-1, -5], 18 | "mag>geo": [-5], 19 | "mag>gse": [2, -1, -5], 20 | "mag>gsm": [3, 2, -1, 
def ts_scalar(
    time: NDArray[np.datetime64],
    data: NDArray[Union[np.float32, np.float64]],
    attrs: Optional[Mapping[str, object]] = None,
) -> DataArray:
    r"""Create a time series containing a 0th order tensor

    Parameters
    ----------
    time : numpy.ndarray
        Array of times.
    data : numpy.ndarray
        Data corresponding to the time list.
    attrs : dict, Optional
        Attributes of the data list.

    Returns
    -------
    DataArray
        0th order tensor time series.

    Raises
    ------
    TypeError
        If time or data is not a numpy.ndarray, or attrs is not a dict.
    ValueError
        If data is not 1-D or time and data lengths differ.

    """
    # Validate argument types
    if not isinstance(time, np.ndarray):
        raise TypeError("time must be a numpy.ndarray")

    if not isinstance(data, np.ndarray):
        raise TypeError("data must be a numpy.ndarray")

    # A scalar series is strictly 1-D
    if data.ndim != 1:
        raise ValueError("Input must be a scalar")

    # Time axis and data must line up
    if len(time) != len(data):
        raise ValueError("Time and data must have the same length")

    # Normalize attributes and tag the tensor order
    if attrs is None:
        attrs = {}
    elif not isinstance(attrs, dict):
        raise TypeError("attrs must be a dict")
    attrs["TENSOR_ORDER"] = 0

    return xr.DataArray(data, coords=[time[:]], dims="time", attrs=attrs)
def ts_tensor_xyz(
    time: NDArray[np.datetime64],
    data: NDArray[Union[np.float32, np.float64]],
    attrs: Optional[Mapping[str, object]] = None,
) -> DataArray:
    r"""Create a time series containing a 2nd order tensor.

    Parameters
    ----------
    time : numpy.ndarray
        Array of times.
    data : numpy.ndarray
        Data corresponding to the time list.
    attrs : dict, Optional
        Attributes of the data list.

    Returns
    -------
    DataArray
        2nd order tensor time series.

    Raises
    ------
    TypeError
        If time or data is not a numpy.ndarray.
    ValueError
        * If time and data do not have the same length.
        * If data does not have shape (n, 3, 3).

    """
    # Validate argument types
    if not isinstance(time, np.ndarray):
        raise TypeError("time must be a numpy.ndarray")

    if not isinstance(data, np.ndarray):
        raise TypeError("data must be a numpy.ndarray")

    # Time axis and data must line up
    if len(time) != len(data):
        raise ValueError("Time and data must have the same length")

    # A 2nd order tensor series has shape (n, 3, 3)
    if data.ndim != 3 or data.shape[1:] != (3, 3):
        raise ValueError("data must have shape (n, 3, 3)")

    # Fall back to empty attributes when none (or a non-dict) is provided
    clean_attrs = attrs if isinstance(attrs, dict) else {}

    axes = ["x", "y", "z"]
    out = xr.DataArray(
        data,
        coords=[time[:], axes, axes],
        dims=["time", "rcomp", "ccomp"],
        attrs=clean_attrs,
    )

    # Tag the tensor order on the wrapped series
    out.attrs["TENSOR_ORDER"] = 2

    return out
def ts_vec_xyz(
    time: NDArray[np.datetime64],
    data: NDArray[Union[np.float32, np.float64]],
    attrs: Optional[Mapping[str, object]] = None,
) -> DataArray:
    r"""Create a time series containing a 1st order tensor.

    Parameters
    ----------
    time : numpy.ndarray
        Array of times.
    data : numpy.ndarray
        Data corresponding to the time list.
    attrs : dict, Optional
        Attributes of the data list.

    Returns
    -------
    out : DataArray
        1st order tensor time series.

    Raises
    ------
    TypeError
        If time or data is not a numpy.ndarray or if attrs is not a dict.
    ValueError
        If data does not have shape (n, 3) or if time and data do not have the same
        length.

    """
    # Validate argument types
    if not isinstance(time, np.ndarray):
        raise TypeError("time must be a numpy.ndarray")

    if not isinstance(data, np.ndarray):
        raise TypeError("data must be a numpy.ndarray")

    # A vector series has shape (n, 3)
    if data.ndim != 2 or data.shape[1] != 3:
        raise ValueError("data must have shape (n, 3)")

    # Time axis and data must line up
    if len(time) != len(data):
        raise ValueError("Time and data must have the same length")

    # Normalize attributes and tag the tensor order
    if attrs is None:
        attrs = {}
    elif not isinstance(attrs, dict):
        raise TypeError("attrs must be a dict")
    attrs["TENSOR_ORDER"] = 1

    out: DataArray = xr.DataArray(
        data,
        coords=[time[:], ["x", "y", "z"]],
        dims=["time", "comp"],
        attrs=attrs,
    )

    return out
32 | 33 | """ 34 | 35 | message = "time must be float, int, or array_like" 36 | assert isinstance(time, (float, int, list, np.ndarray)), message 37 | 38 | # 39 | time_tt2000 = cdfepoch.breakdown_tt2000(time) 40 | 41 | # Convert to ISO 8601 string 'YYYY-MM-DDThh:mm:ss.mmmuuunnn' 42 | time_iso8601 = timevec2iso8601(time_tt2000) 43 | 44 | # 45 | time_datetime64 = time_iso8601.astype("datetime64[ns]") 46 | 47 | return time_datetime64 48 | -------------------------------------------------------------------------------- /pyrfu/pyrf/unix2datetime64.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # Built-in imports 5 | from typing import Any, Union 6 | 7 | # 3rd party imports 8 | import numpy as np 9 | from numpy.typing import NDArray 10 | 11 | __author__ = "Louis Richard" 12 | __email__ = "louisr@irfu.se" 13 | __copyright__ = "Copyright 2020-2024" 14 | __license__ = "MIT" 15 | __version__ = "2.4.13" 16 | __status__ = "Prototype" 17 | 18 | 19 | def unix2datetime64(time: Union[list[float], NDArray[Any]]) -> NDArray[np.datetime64]: 20 | r"""Converts unix time to datetime64 in ns units. 21 | 22 | Parameters 23 | ---------- 24 | time : numpy.ndarray 25 | Time in unix format. 26 | 27 | Returns 28 | ------- 29 | time_datetime64 : numpy.ndarray 30 | Time in datetime64 format. 31 | 32 | Raises 33 | ------ 34 | TypeError 35 | If time is not a list or numpy.ndarray. 
36 | 37 | See Also 38 | -------- 39 | pyrfu.pyrf.datetime642unix 40 | 41 | """ 42 | # Check input type 43 | if isinstance(time, (list, np.ndarray)): 44 | time_array = np.array(time) 45 | else: 46 | raise TypeError("time must be list or numpy.ndarray") 47 | 48 | # Make sure that time is in ns format 49 | time_unix = (time_array * 1e9).astype(np.int64) 50 | 51 | # Convert to unix 52 | time_datetime64 = time_unix.astype("datetime64[ns]") 53 | 54 | return time_datetime64 55 | -------------------------------------------------------------------------------- /pyrfu/pyrf/wave_fft.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # 3rd party imports 5 | import numpy as np 6 | from scipy import signal 7 | 8 | __author__ = "Louis Richard" 9 | __email__ = "louisr@irfu.se" 10 | __copyright__ = "Copyright 2020-2023" 11 | __license__ = "MIT" 12 | __version__ = "2.4.2" 13 | __status__ = "Prototype" 14 | 15 | 16 | def wave_fft( 17 | inp, 18 | window, 19 | frame_overlap: float = 10.0, 20 | frame_length: float = 20.0, 21 | f_sampling: float = None, 22 | ): 23 | r"""Short-Time Fourier Transform. 24 | 25 | Parameters 26 | ---------- 27 | inp : xarray.DataArray 28 | Time series of the one dimension data. 29 | window : str 30 | Window function such as rectwin, hamming (default). 31 | frame_overlap : float, Optional 32 | Length of each frame overlaps in second. 33 | frame_length : float, Optional 34 | Length of each frame in second. 35 | f_sampling : float, Optional 36 | Sampling frequency. 37 | 38 | Returns 39 | ------- 40 | spectrogram : ndarray 41 | Spectrogram of x. 42 | time : ndarray 43 | Value corresponds to the center of each frame (x-axis) in sec. 44 | frequencies : ndarray 45 | Vector of frequencies (y-axis) in Hz. 
46 | 47 | """ 48 | 49 | if f_sampling is None: 50 | delta_t = np.median(np.diff(inp.time.data).astype(np.float64)) * 1e-9 51 | f_sampling = 1 / delta_t 52 | 53 | # convert ms to points 54 | n_per_seg = np.round(frame_length * f_sampling).astype(np.int64) 55 | n_overlap = np.round(frame_overlap * f_sampling).astype(np.int64) 56 | 57 | options = { 58 | "fs": f_sampling, 59 | "window": window, 60 | "nperseg": n_per_seg, 61 | "noverlap": n_overlap, 62 | "mode": "complex", 63 | } 64 | frequencies, time, spectrogram = signal.spectrogram(inp, **options) 65 | 66 | return frequencies, time, spectrogram 67 | -------------------------------------------------------------------------------- /pyrfu/pyrf/waverage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # 3rd party imports 5 | import numpy as np 6 | 7 | __author__ = "Louis Richard" 8 | __email__ = "louisr@irfu.se" 9 | __copyright__ = "Copyright 2020-2023" 10 | __license__ = "MIT" 11 | __version__ = "2.4.2" 12 | __status__ = "Prototype" 13 | 14 | 15 | def waverage(inp, f_sampl: float = None, n_pts: int = 7): 16 | r"""Computes weighted average. 17 | 18 | Parameters 19 | ---------- 20 | inp : array_like 21 | Input data 22 | f_sampl : float, Optional 23 | Sampling frequency. 24 | n_pts : int, Optional 25 | Number of point to average over 5 ot 7. 
Default is 7 26 | 27 | Returns 28 | ------- 29 | out : ndarray 30 | Weighted averaged of inp 31 | 32 | """ 33 | 34 | assert n_pts in [5, 7], "n_pts must be 5 or 7" 35 | 36 | if f_sampl is None: 37 | f_sampl = 1e9 / (inp.time.data[1] - inp.time.data[0]).view("i8") 38 | 39 | n_data = np.round( 40 | 1e-9 * (inp.time.data[-1] - inp.time.data[0]).view("i8") * f_sampl, 41 | ) 42 | 43 | inp_data = inp.data 44 | try: 45 | n_cols = inp_data.shape[1] 46 | except IndexError: 47 | inp_data = inp_data[:, None] 48 | n_cols = inp_data.shape[1] 49 | 50 | f_sampl = 1e9 * n_data / (inp.time.data[-1] - inp.time.data[0]).view("i8") 51 | delta_t = 1 / f_sampl 52 | 53 | out = np.zeros((n_data + 1, n_cols)) 54 | out[:, 0] = np.linspace(inp_data[0, 0], inp_data[-1, 0], n_data + 1) 55 | indices = np.round((inp_data[:, 0] - inp_data[0, 0]) / delta_t + 1) 56 | out[indices, :] = inp_data[:, 1:] 57 | out[np.isnan(out)] = 0 # set NaNs to zeros 58 | 59 | for col in range(n_cols): 60 | if n_pts == 5: 61 | new_data = np.hstack([0, 0, out[:col], 0, 0]) 62 | elif n_pts == 7: 63 | new_data = np.hstack([0, 0, 0, out[:col], 0, 0, 0]) 64 | else: 65 | raise ValueError("n_pts must be 5 or 7") 66 | 67 | for j in range(n_data + 1): 68 | out[j, col] = _wave(new_data[j : j + n_pts - 1], n_pts) 69 | 70 | # Make sure we do return matrix of the same size 71 | out = out[indices, :] 72 | 73 | return out 74 | 75 | 76 | def _wave(inp_window, n_pts): 77 | """computes weighted average""" 78 | 79 | if n_pts == 5: 80 | m = [0.1, 0.25, 0.3, 0.25, 0.1] 81 | elif n_pts == 7: 82 | m = [0.07, 0.15, 0.18, 0.2, 0.18, 0.15, 0.07] 83 | else: 84 | raise ValueError("n_pts must be 5 or 7") 85 | 86 | # find missing points == 0 87 | if np.sum(m[inp_window == 0]) == 1: 88 | average = 0 89 | else: 90 | average = np.sum(inp_window * m) / (1 - np.sum(m[inp_window == 0])) 91 | 92 | return average 93 | -------------------------------------------------------------------------------- /pyrfu/solo/__init__.py: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from .db_init import db_init 5 | from .read_lfr_density import read_lfr_density 6 | from .read_tnr import read_tnr 7 | 8 | __author__ = "Louis Richard" 9 | __email__ = "louisr@irfu.se" 10 | __copyright__ = "Copyright 2020-2023" 11 | __license__ = "MIT" 12 | __version__ = "2.4.2" 13 | __status__ = "Prototype" 14 | 15 | __all__ = ["db_init", "read_tnr", "read_lfr_density"] 16 | -------------------------------------------------------------------------------- /pyrfu/solo/config.json: -------------------------------------------------------------------------------- 1 | {"local_data_dir": "/Volumes/solo/remote/data"} 2 | -------------------------------------------------------------------------------- /pyrfu/solo/db_init.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import json 5 | 6 | # Built-in imports 7 | import os 8 | 9 | __author__ = "Louis Richard" 10 | __email__ = "louisr@irfu.se" 11 | __copyright__ = "Copyright 2020-2023" 12 | __license__ = "MIT" 13 | __version__ = "2.4.2" 14 | __status__ = "Prototype" 15 | 16 | 17 | def db_init(local_data_dir): 18 | r"""Setup the default path of SolO data. 19 | 20 | Parameters 21 | ---------- 22 | local_data_dir : str 23 | Path to the data. 24 | 25 | """ 26 | 27 | # Normalize the path and make sure that it exists 28 | local_data_dir = os.path.normpath(local_data_dir) 29 | assert os.path.exists( 30 | local_data_dir, 31 | ), f"{local_data_dir} doesn't exists!!" 32 | 33 | # Path to the configuration file. 
34 | pkg_path = os.path.dirname(os.path.abspath(__file__)) 35 | 36 | # Read the current version of the configuration 37 | with open(os.path.join(pkg_path, "config.json"), "r", encoding="utf-8") as fs: 38 | config = json.load(fs) 39 | 40 | # Overwrite the configuration file with the new path 41 | with open(os.path.join(pkg_path, "config.json"), "w", encoding="utf-8") as fs: 42 | config["local_data_dir"] = local_data_dir 43 | json.dump(config, fs) 44 | -------------------------------------------------------------------------------- /pyrfu/stylesheets/aps.mplstyle: -------------------------------------------------------------------------------- 1 | ## ************************************************************************************* 2 | ## * Matplotlib style sheet for APS publications * 3 | ## author : Louis Richard 4 | ## email : louisr@irfu.se 5 | ## ************************************************************************************* 6 | 7 | ## Fonts 8 | font.family: serif 9 | font.style: normal 10 | font.variant: normal 11 | font.weight: normal 12 | font.stretch: normal 13 | font.size: 8.0 14 | #font.serif: Times, Computer Modern Roman 15 | #font.sans-serif: Helvetica, Computer Modern Sans serif 16 | 17 | ## LaTeX 18 | #text.usetex: True 19 | #text.latex.preamble: \usepackage{bm} 20 | pgf.texsystem: xelatex 21 | 22 | ## Axes 23 | axes.labelsize: 8 24 | axes.linewidth: .5 25 | axes.xmargin: 0 26 | axes.ymargin: 0 27 | 28 | ## Save 29 | savefig.dpi: 600 30 | savefig.format: eps 31 | # savefig.bbox: tight 32 | 33 | ## Legend 34 | legend.loc: best 35 | legend.frameon: False # if True, draw the legend on a background patch 36 | legend.framealpha: 0.8 # legend patch transparency 37 | legend.facecolor: inherit # inherit from axes.facecolor; or color spec 38 | legend.edgecolor: 0.8 # background patch boundary color 39 | legend.fancybox: False # if True, use a rectangle box for the legend background 40 | legend.shadow: False # if True, give background a shadow effect 41 
| legend.numpoints: 1 # the number of marker points in the legend line 42 | legend.scatterpoints: 1 # number of scatter points 43 | legend.markerscale: 0.9 # the relative size of legend markers vs. original 44 | legend.fontsize: 8 45 | legend.title_fontsize: None # None sets to the same as the default axes. 46 | 47 | ## Dimensions as fraction of fontsize: 48 | legend.borderpad: 0.2 # border whitespace 49 | legend.labelspacing: 0.2 # the vertical space between the legend entries 50 | legend.handlelength: 0.1 # the length of the legend lines 51 | #legend.handleheight: 0.4 # the height of the legend handle 52 | legend.handletextpad: 0.1 # the space between the legend line and legend text 53 | legend.borderaxespad: 0.4 # the border between the axes and legend edge 54 | legend.columnspacing: 0.8 # column separation 55 | 56 | ## Ticks 57 | xtick.labelsize: 8 58 | xtick.major.size: 2 59 | xtick.minor.size: 1.2 60 | xtick.direction: in 61 | xtick.top: True 62 | 63 | ytick.labelsize: 8 64 | ytick.major.size: 2 65 | ytick.minor.size: 1.2 66 | ytick.direction : in 67 | ytick.right: True 68 | 69 | ## Lines 70 | lines.linewidth: 0.6 71 | lines.markersize: 3 72 | 73 | ## Errorbars 74 | errorbar.capsize: 2 75 | -------------------------------------------------------------------------------- /pyrfu/stylesheets/metropolis.mplstyle: -------------------------------------------------------------------------------- 1 | # Matplotlib style file to create plots that integrate nicely 2 | # with the metropolis beamer template 3 | 4 | # Colours pulled from beamermetropolis.sty 5 | # Background colour in beamermetropolis.sty is as black!2 6 | # not very useful for matplotlib. 
Approximate as FAFAFA 7 | 8 | axes.axisbelow: True 9 | axes.edgecolor: 23373B 10 | axes.facecolor: EAEAF2 11 | axes.grid: False 12 | axes.labelcolor: 23373B 13 | axes.labelsize: 22 14 | axes.linewidth: 1.5 15 | 16 | # Set up colour cycle from metropolis 17 | axes.prop_cycle: cycler('color', ['4C72B0', 'EB811B', '14B03D', '604c38']) 18 | 19 | # Default size of single figure spanning textwidth of thesis 20 | figure.figsize: 4.2519699737097, 2.627861962896592 21 | figure.titlesize: 22 22 | figure.facecolor: none 23 | 24 | figure.dpi: 100 25 | 26 | font.family: sans-serif 27 | font.size: 20 28 | 29 | grid.color: white 30 | grid.linestyle: - 31 | grid.linewidth: 1 32 | 33 | image.cmap: Greys 34 | 35 | legend.fontsize: 18 36 | legend.frameon: False 37 | legend.numpoints: 1 38 | legend.scatterpoints: 1 39 | 40 | lines.antialiased: True 41 | lines.linewidth: 1.25 42 | lines.markeredgewidth: 0 43 | lines.markersize: 7 44 | lines.solid_capstyle: round 45 | 46 | patch.facecolor: 4C72B0 47 | patch.linewidth: .3 48 | 49 | savefig.facecolor: none 50 | savefig.format: pdf 51 | 52 | text.usetex: True 53 | text.latex.preamble: \usepackage[T1]{fontenc} \usepackage[lf]{FiraSans} \boldmath 54 | text.color: 23373B 55 | 56 | xtick.color: 23373B 57 | xtick.labelsize: 20 58 | xtick.direction: out 59 | xtick.major.pad: 10 60 | xtick.major.size: 5 61 | xtick.major.width: 1.5 62 | xtick.minor.size: 0 63 | xtick.minor.width: .5 64 | 65 | ytick.color: 23373B 66 | ytick.labelsize: 20 67 | ytick.direction: out 68 | ytick.major.pad: 10 69 | ytick.major.size: 5 70 | ytick.major.width: 1.5 71 | ytick.minor.size: 0 72 | ytick.minor.width: .5 -------------------------------------------------------------------------------- /pyrfu/stylesheets/pyrfu.mplstyle: -------------------------------------------------------------------------------- 1 | ## ************************************************************************************* 2 | ## * Matplotlib style sheet for default figures * 3 | ## author : 
Louis Richard 4 | ## email : louisr@irfu.se 5 | ## ************************************************************************************* 6 | 7 | ## Fonts 8 | font.family: serif 9 | font.style: normal 10 | font.variant: normal 11 | font.weight: normal 12 | font.stretch: normal 13 | 14 | ## LaTeX 15 | #text.usetex: True 16 | #text.latex.preamble: \usepackage{bm} 17 | pgf.texsystem: xelatex 18 | 19 | ## Axes 20 | axes.labelsize: 8 21 | axes.linewidth: .5 22 | axes.xmargin: 0 23 | axes.ymargin: 0 24 | 25 | ## Save 26 | savefig.dpi: 600 27 | savefig.format: eps 28 | # savefig.bbox: tight 29 | 30 | ## Legend 31 | legend.loc: best 32 | legend.frameon: False # if True, draw the legend on a background patch 33 | legend.framealpha: 0.8 # legend patch transparency 34 | legend.facecolor: inherit # inherit from axes.facecolor; or color spec 35 | legend.edgecolor: 0.8 # background patch boundary color 36 | legend.fancybox: False # if True, use a rectangle box for the legend background 37 | ## Ticks 38 | xtick.direction: in 39 | xtick.top: True 40 | 41 | ytick.direction : in 42 | ytick.right: True 43 | -------------------------------------------------------------------------------- /pyrfu/tests/test_dispersion.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # Built-in imports 5 | import random 6 | import unittest 7 | 8 | # 3rd party imports 9 | import numpy as np 10 | import xarray as xr 11 | from ddt import data, ddt 12 | 13 | # Local imports 14 | from .. 
import dispersion 15 | 16 | __author__ = "Louis Richard" 17 | __email__ = "louisr@irfu.se" 18 | __copyright__ = "Copyright 2020-2023" 19 | __license__ = "MIT" 20 | __version__ = "2.4.4" 21 | __status__ = "Prototype" 22 | 23 | 24 | class DispSurfCalcTestCase(unittest.TestCase): 25 | def test_disp_surf_calc_output(self): 26 | kx, kz, wf, extra_param = dispersion.disp_surf_calc( 27 | random.random(), random.random(), random.random(), random.random() 28 | ) 29 | self.assertIsInstance(kx, np.ndarray) 30 | self.assertIsInstance(kz, np.ndarray) 31 | self.assertIsInstance(wf, np.ndarray) 32 | self.assertIsInstance(extra_param, dict) 33 | 34 | 35 | @ddt 36 | class OneFluidDispersionTestCase(unittest.TestCase): 37 | @data( 38 | ( 39 | random.random(), 40 | random.random(), 41 | {"n": random.random(), "t": random.random(), "gamma": random.random()}, 42 | {"n": random.random(), "t": random.random(), "gamma": random.random()}, 43 | random.randint(10, 1000), 44 | ) 45 | ) 46 | def test_one_fluid_dispersion_output(self, value): 47 | b_0, theta, ions, electrons, n_k = value 48 | result = dispersion.one_fluid_dispersion(b_0, theta, ions, electrons, n_k) 49 | self.assertIsInstance(result[0], xr.DataArray) 50 | self.assertIsInstance(result[1], xr.DataArray) 51 | self.assertIsInstance(result[2], xr.DataArray) 52 | 53 | 54 | if __name__ == "__main__": 55 | unittest.main() 56 | -------------------------------------------------------------------------------- /pyrfu/tests/test_solo.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # Built-in imports 5 | import os 6 | import random 7 | import unittest 8 | 9 | # 3rd party imports 10 | import numpy as np 11 | from ddt import data, ddt, unpack 12 | 13 | # Local imports 14 | from .. 
import solo 15 | 16 | __author__ = "Louis Richard" 17 | __email__ = "louisr@irfu.se" 18 | __copyright__ = "Copyright 2020-2023" 19 | __license__ = "MIT" 20 | __version__ = "2.4.4" 21 | __status__ = "Prototype" 22 | 23 | 24 | class DbInitTestCase(unittest.TestCase): 25 | def test_db_init_inpput(self): 26 | with self.assertRaises(AssertionError): 27 | solo.db_init("/Volumes/solo/remote/data") 28 | 29 | def test_db_init_output(self): 30 | self.assertIsNone(solo.db_init(os.getcwd())) 31 | 32 | 33 | @ddt 34 | class ReadLFRDensityTestCase(unittest.TestCase): 35 | @data( 36 | ([], ".", False), 37 | ([np.datetime64("2023-01-01T00:00:00"), "2023-01-01T00:10:00"], ".", False), 38 | (["2023-01-01T00:00:00", np.datetime64("2023-01-01T00:10:00")], ".", False), 39 | (["2023-01-01T00:00:00", "2023-01-01T00:10:00"], "/bazinga", False), 40 | (["2023-01-01T00:00:00", "2023-01-01T00:10:00"], ".", "i am groot"), 41 | ) 42 | @unpack 43 | def test_read_lfr_density_input(self, tint, data_path, tree): 44 | with self.assertRaises(AssertionError): 45 | solo.read_lfr_density(tint, data_path, tree) 46 | 47 | def test_read_lfr_density_output(self): 48 | tint = ["2023-01-01T00:00:00", "2023-01-01T00:10:00"] 49 | self.assertIsNone(solo.read_lfr_density(tint)) 50 | 51 | 52 | @ddt 53 | class ReadTNRTestCase(unittest.TestCase): 54 | @data( 55 | ([], 1, "."), 56 | ([np.datetime64("2023-01-01T00:00:00"), "2023-01-01T00:10:00"], 1, "."), 57 | (["2023-01-01T00:00:00", np.datetime64("2023-01-01T00:10:00")], 1, "."), 58 | (["2023-01-01T00:00:00", "2023-01-01T00:10:00"], random.random(), "."), 59 | (["2023-01-01T00:00:00", "2023-01-01T00:10:00"], 1, "/bazinga"), 60 | ) 61 | @unpack 62 | def test_read_tnr_input(self, tint, sensor, data_path): 63 | with self.assertRaises(AssertionError): 64 | solo.read_tnr(tint, sensor, data_path) 65 | 66 | @data( 67 | (["2023-01-01T00:00:00", "2023-01-01T00:10:00"], 1, ""), 68 | (["2023-01-01T00:00:00", "2023-01-01T00:10:00"], 2, ""), 69 | ) 70 | @unpack 71 | def 
test_read_tnr_output(self, tint, sensor, data_path): 72 | self.assertIsNone(solo.read_tnr(tint, sensor, data_path)) 73 | 74 | 75 | if __name__ == "__main__": 76 | unittest.main() 77 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | boto3>=1.35.0 2 | botocore>=1.35.0 3 | cdflib>=1.3.0 4 | geopack>=1.0.10 5 | keyring>=25.5.0 6 | matplotlib>=3.9.0 7 | numba==0.60.0 8 | numpy<2.0,>=1.26.4 9 | pandas>=2.2.3 10 | pycdfpp>=0.7.0 11 | python-dateutil>=2.9.0 12 | requests>=2.32.0 13 | scipy>=1.14.0 14 | tqdm>=4.66.0 15 | xarray>=2024.10.0 16 | --------------------------------------------------------------------------------