├── lidarwind ├── wind_retrieval │ ├── __init__.py │ └── fft_wind_retrieval.py ├── preprocessing │ ├── __init__.py │ └── wind_cube.py ├── postprocessing │ ├── __init__.py │ ├── post_rpg_radar.py │ └── post_wind_cube.py ├── __init__.py ├── io.py ├── lidar_code.py ├── data_attributes.py ├── lidarwind_config.py ├── wind_prop_retrieval_6_beam.py ├── utilities.py ├── visualization.py └── filters.py ├── docs ├── authors.rst ├── history.rst ├── contributing.rst ├── figures │ ├── dbs.png │ ├── radar.png │ ├── diagram.png │ └── six_beam.png ├── modules.rst ├── requirements.pip ├── environment.yml ├── notebooks.rst ├── _autosummary │ ├── lidarwind.lidar_code.GetLidarData.rst │ ├── lidarwind.filters.Filtering.rst │ ├── lidarwind.data_operator.DataOperations.rst │ ├── lidarwind.data_operator.DbsOperations.rst │ ├── lidarwind.data_operator.ReadProcessedData.rst │ ├── lidarwind.data_attributes.LoadAttributes.rst │ ├── lidarwind.wind_prop_retrieval.RetriveWindFFT.rst │ ├── lidarwind.filters.WindCubeCloudRemoval.rst │ ├── lidarwind.data_operator.GetRestructuredData.rst │ ├── lidarwind.filters.SecondTripEchoFilter.rst │ ├── lidarwind.wind_prop_retrieval_6_beam.SixBeamMethod.rst │ ├── lidarwind.wind_prop_retrieval.GetWindProperties5Beam.rst │ └── lidarwind.lidarwind_config.Configurations.rst ├── index.rst ├── readme.rst ├── Makefile ├── make.bat ├── diagram.rst ├── installation.rst ├── api.rst ├── lidarwind.rst ├── conf.py ├── overview.rst └── usage.rst ├── tests ├── __init__.py ├── test_IO.py ├── test_windPropRetrieval.py ├── test_lidar_suit.py ├── postprocessing │ ├── test_post_rpg_radar.py │ └── test_post_wind_cube.py ├── data.py ├── test_get_wind_Properties_5_beam.py ├── test_wind_prop_retrieval_6_beam.py ├── test_retrieve_wind_fft.py ├── preprocessing │ ├── test_preprocessing.py │ └── test_rpg_preprocessing.py ├── test_dataOperator.py ├── test_fourier_transf_wind_method.py ├── test_get_restructured_data.py └── wind_retrieval │ └── test_wind_retrieval.py ├── joss ├── 
wind_panel.png ├── paper.bib └── paper.md ├── HISTORY.rst ├── AUTHORS.rst ├── requirements_dev.txt ├── setup.cfg ├── setup.py ├── MANIFEST.in ├── .editorconfig ├── .github ├── ISSUE_TEMPLATE.md └── workflows │ ├── ci.yml │ └── tox.yml ├── environment.yml ├── CITATION.cff ├── readthedocs.yml ├── tox.ini ├── .pre-commit-config.yaml ├── LICENSE ├── .gitignore ├── Makefile ├── pyproject.toml ├── README.rst └── CONTRIBUTING.rst /lidarwind/wind_retrieval/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/authors.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../AUTHORS.rst 2 | -------------------------------------------------------------------------------- /docs/history.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../HISTORY.rst 2 | -------------------------------------------------------------------------------- /lidarwind/preprocessing/__init__.py: -------------------------------------------------------------------------------- 1 | from .wind_cube import * -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Unit test package for lidarwind.""" 2 | -------------------------------------------------------------------------------- /lidarwind/postprocessing/__init__.py: -------------------------------------------------------------------------------- 1 | from .post_wind_cube import * 2 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CONTRIBUTING.rst 2 | 3 | .. 
_contributing: 4 | -------------------------------------------------------------------------------- /docs/figures/dbs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jdiasn/lidarwind/HEAD/docs/figures/dbs.png -------------------------------------------------------------------------------- /joss/wind_panel.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jdiasn/lidarwind/HEAD/joss/wind_panel.png -------------------------------------------------------------------------------- /docs/figures/radar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jdiasn/lidarwind/HEAD/docs/figures/radar.png -------------------------------------------------------------------------------- /docs/figures/diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jdiasn/lidarwind/HEAD/docs/figures/diagram.png -------------------------------------------------------------------------------- /docs/figures/six_beam.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jdiasn/lidarwind/HEAD/docs/figures/six_beam.png -------------------------------------------------------------------------------- /docs/modules.rst: -------------------------------------------------------------------------------- 1 | lidarwind 2 | ========= 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | lidarwind 8 | -------------------------------------------------------------------------------- /HISTORY.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | History 3 | ======= 4 | 5 | 0.0.1 (2022-01-02) 6 | ------------------ 7 | 8 | * First release on PyPI. 
9 | -------------------------------------------------------------------------------- /docs/requirements.pip: -------------------------------------------------------------------------------- 1 | numpy 2 | xarray 3 | pandas 4 | matplotlib 5 | xarray-datatree 6 | sphinx 7 | nbsphinx 8 | sphinx-gallery 9 | sphinx_rtd_theme 10 | pooch 11 | gdown 12 | xrft 13 | #lidarwind 14 | -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | Credits 3 | ======= 4 | 5 | Development Lead 6 | ---------------- 7 | 8 | * Jose Dias Neto 9 | 10 | Contributors 11 | ------------ 12 | 13 | * Guilherme Castelao 14 | -------------------------------------------------------------------------------- /requirements_dev.txt: -------------------------------------------------------------------------------- 1 | pip>=23.3 2 | bump2version==0.5.11 3 | wheel==0.38.1 4 | watchdog==0.9.0 5 | flake8==3.7.8 6 | tox==3.14.0 7 | coverage==4.5.4 8 | Sphinx==1.8.5 9 | twine==1.14.0 10 | Click==7.1.2 11 | pytest==6.2.4 12 | black>=24.3.0 13 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | universal = 1 3 | 4 | [flake8] 5 | max-complexity = 22 6 | max-line-length = 79 7 | ignore = E203, W503, B011 8 | exclude = 9 | .git, 10 | __pycache__, 11 | build, 12 | dist, 13 | docs/conf.py 14 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """The setup script.""" 4 | 5 | from setuptools import find_packages, setup 6 | 7 | setup( 8 | include_package_data=True, 9 | packages=find_packages(include=["lidarwind", "lidarwind.*"]), 10 | zip_safe=False, 11 | ) 12 | 
-------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include AUTHORS.rst 2 | include CONTRIBUTING.rst 3 | include HISTORY.rst 4 | include LICENSE 5 | include README.rst 6 | 7 | recursive-include tests * 8 | recursive-exclude * __pycache__ 9 | recursive-exclude * *.py[co] 10 | 11 | recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif 12 | -------------------------------------------------------------------------------- /docs/environment.yml: -------------------------------------------------------------------------------- 1 | name: lidarwind 2 | 3 | channels: 4 | - defaults 5 | - conda-forge 6 | 7 | dependencies: 8 | - numpy 9 | - xarray 10 | - pandas 11 | - matplotlib 12 | - nbsphinx 13 | - sphinx-gallery 14 | - pip 15 | - pip: 16 | - pooch 17 | - gdown 18 | - xrft 19 | - lidarwind 20 | - xarray-datatree==0.0.11 21 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # http://editorconfig.org 2 | 3 | root = true 4 | 5 | [*] 6 | indent_style = space 7 | indent_size = 4 8 | trim_trailing_whitespace = true 9 | insert_final_newline = true 10 | charset = utf-8 11 | end_of_line = lf 12 | 13 | [*.bat] 14 | indent_style = tab 15 | end_of_line = crlf 16 | 17 | [LICENSE] 18 | insert_final_newline = false 19 | 20 | [Makefile] 21 | indent_style = tab 22 | -------------------------------------------------------------------------------- /docs/notebooks.rst: -------------------------------------------------------------------------------- 1 | ================== 2 | Rendered notebooks 3 | ================== 4 | 5 | .. 
nbgallery:: 6 | examples/quicklooks_rendered.ipynb 7 | examples/merging_6beam_rendered.ipynb 8 | examples/turbulence_6beam_data_rendered.ipynb 9 | examples/dbs_scans_rendered.ipynb 10 | examples/reading_long_dbs_rendered.ipynb 11 | examples/lidarwind_4_rpg_radar_serial.ipynb 12 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | * LIDAR Suit version: 2 | * Python version: 3 | * Operating System: 4 | 5 | ### Description 6 | 7 | Describe what you were trying to get done. 8 | Tell us what happened, what went wrong, and what you expected to happen. 9 | 10 | ### What I Did 11 | 12 | ``` 13 | Paste the command(s) you ran and the output. 14 | If there was a crash, please include the traceback here. 15 | ``` 16 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: lidarwind 2 | 3 | channels: 4 | - defaults 5 | - conda-forge 6 | 7 | dependencies: 8 | - python=3.8 9 | - xarray>=2022.3.0 10 | - xrft=0.4.1 11 | - pandas=1.4.2 12 | - numpy=1.22.3 13 | - netCDF4=1.5.8 14 | - matplotlib=3.5.1 15 | - click=8.1.2 16 | - xarray-datatree>=0.0.11 17 | - pip 18 | - pip: 19 | - gdown==4.5.1 20 | - lidarwind==0.2.4 21 | - pooch==1.6.0 22 | -------------------------------------------------------------------------------- /docs/_autosummary/lidarwind.lidar_code.GetLidarData.rst: -------------------------------------------------------------------------------- 1 | lidarwind.lidar\_code.GetLidarData 2 | ================================== 3 | 4 | .. currentmodule:: lidarwind.lidar_code 5 | 6 | .. autoclass:: GetLidarData 7 | 8 | 9 | .. automethod:: __init__ 10 | 11 | 12 | .. rubric:: Methods 13 | 14 | .. 
autosummary:: 15 | 16 | ~GetLidarData.__init__ 17 | ~GetLidarData.open_lidar_file 18 | 19 | 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /docs/_autosummary/lidarwind.filters.Filtering.rst: -------------------------------------------------------------------------------- 1 | lidarwind.filters.Filtering 2 | =========================== 3 | 4 | .. currentmodule:: lidarwind.filters 5 | 6 | .. autoclass:: Filtering 7 | 8 | 9 | .. automethod:: __init__ 10 | 11 | 12 | .. rubric:: Methods 13 | 14 | .. autosummary:: 15 | 16 | ~Filtering.__init__ 17 | ~Filtering.get_radial_obs_comp 18 | ~Filtering.get_vertical_obs_comp 19 | 20 | 21 | 22 | 23 | 24 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to lidarwind's documentation! 2 | ====================================== 3 | 4 | .. toctree:: 5 | :maxdepth: 2 6 | :caption: Contents: 7 | 8 | readme 9 | installation 10 | overview 11 | usage 12 | notebooks 13 | api 14 | modules 15 | diagram 16 | contributing 17 | authors 18 | 19 | **Help & reference** 20 | 21 | 22 | * :doc:`history` 23 | 24 | Indices and tables 25 | ================== 26 | * :ref:`genindex` 27 | * :ref:`modindex` 28 | * :ref:`search` 29 | -------------------------------------------------------------------------------- /docs/readme.rst: -------------------------------------------------------------------------------- 1 | .. _intro: 2 | .. include:: ../README.rst 3 | 4 | .. note:: 5 | All publicly available classes are listed on the :any:`API reference `. 6 | 7 | ------------ 8 | Contribution 9 | ------------ 10 | 11 | Any contribution to further development is appreciated, and guidelines for the contributions are available in the :any:`contributing section `. 
import pytest

import lidarwind as lst

from .data import data_filenames  # , get_sample_data


@pytest.fixture
# @pytest.mark.skip("deactivating to isolate sef fault error")
def test_DataOperations(data_filenames):
    """Fixture building merged lidar data via ``DataOperations``.

    NOTE(review): despite the ``test_`` prefix this is a pytest *fixture*
    (the decorator wins over the naming convention); renaming it to e.g.
    ``merged_data`` would avoid confusion — confirm before changing, since
    the test below requests it by this exact name.
    """

    tmp_object = lst.DataOperations(data_filenames)

    # Only the merged dataset is needed downstream, not the operator object.
    return tmp_object.merged_data


# @pytest.mark.skip("deactivating to isolate sef fault error")
def test_GetRestructuredData(test_DataOperations):
    """Smoke test: restructuring the merged data must not raise."""

    lst.GetRestructuredData(test_DataOperations)
automethod:: __init__ 10 | 11 | 12 | .. rubric:: Methods 13 | 14 | .. autosummary:: 15 | 16 | ~DbsOperations.__init__ 17 | ~DbsOperations.add_mean_time 18 | ~DbsOperations.mean_time_derivation 19 | ~DbsOperations.merge_2_ds 20 | ~DbsOperations.merge_data 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /docs/_autosummary/lidarwind.data_operator.ReadProcessedData.rst: -------------------------------------------------------------------------------- 1 | lidarwind.data\_operator.ReadProcessedData 2 | ========================================== 3 | 4 | .. currentmodule:: lidarwind.data_operator 5 | 6 | .. autoclass:: ReadProcessedData 7 | 8 | 9 | .. automethod:: __init__ 10 | 11 | 12 | .. rubric:: Methods 13 | 14 | .. autosummary:: 15 | 16 | ~ReadProcessedData.__init__ 17 | ~ReadProcessedData.merge_data 18 | ~ReadProcessedData.merge_data_method_1 19 | ~ReadProcessedData.merge_data_method_2 20 | 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /docs/_autosummary/lidarwind.data_attributes.LoadAttributes.rst: -------------------------------------------------------------------------------- 1 | lidarwind.data\_attributes.LoadAttributes 2 | ========================================= 3 | 4 | .. currentmodule:: lidarwind.data_attributes 5 | 6 | .. autoclass:: LoadAttributes 7 | 8 | 9 | .. automethod:: __init__ 10 | 11 | 12 | .. rubric:: Methods 13 | 14 | .. 
autosummary:: 15 | 16 | ~LoadAttributes.__init__ 17 | ~LoadAttributes.variables_attrs 18 | ~LoadAttributes.write_coords_attrs 19 | ~LoadAttributes.write_global_attrs 20 | ~LoadAttributes.write_variables_attrs 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /docs/_autosummary/lidarwind.wind_prop_retrieval.RetriveWindFFT.rst: -------------------------------------------------------------------------------- 1 | lidarwind.wind\_prop\_retrieval.RetriveWindFFT 2 | ============================================== 3 | 4 | .. currentmodule:: lidarwind.wind_prop_retrieval 5 | 6 | .. autoclass:: RetriveWindFFT 7 | 8 | 9 | .. automethod:: __init__ 10 | 11 | 12 | .. rubric:: Methods 13 | 14 | .. autosummary:: 15 | 16 | ~RetriveWindFFT.__init__ 17 | ~RetriveWindFFT.get_beta 18 | ~RetriveWindFFT.load_attrs 19 | ~RetriveWindFFT.ret_hor_wind_data 20 | ~RetriveWindFFT.ret_vert_wind_data 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /docs/_autosummary/lidarwind.filters.WindCubeCloudRemoval.rst: -------------------------------------------------------------------------------- 1 | lidarwind.filters.WindCubeCloudRemoval 2 | ====================================== 3 | 4 | .. currentmodule:: lidarwind.filters 5 | 6 | .. autoclass:: WindCubeCloudRemoval 7 | 8 | 9 | .. automethod:: __init__ 10 | 11 | 12 | .. rubric:: Methods 13 | 14 | .. 
autosummary:: 15 | 16 | ~WindCubeCloudRemoval.__init__ 17 | ~WindCubeCloudRemoval.get_height_interface 18 | ~WindCubeCloudRemoval.get_interp_interf_height 19 | ~WindCubeCloudRemoval.get_noise_free_beta 20 | ~WindCubeCloudRemoval.remove_cloud 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /docs/_autosummary/lidarwind.data_operator.GetRestructuredData.rst: -------------------------------------------------------------------------------- 1 | lidarwind.data\_operator.GetRestructuredData 2 | ============================================ 3 | 4 | .. currentmodule:: lidarwind.data_operator 5 | 6 | .. autoclass:: GetRestructuredData 7 | 8 | 9 | .. automethod:: __init__ 10 | 11 | 12 | .. rubric:: Methods 13 | 14 | .. autosummary:: 15 | 16 | ~GetRestructuredData.__init__ 17 | ~GetRestructuredData.data_transform 18 | ~GetRestructuredData.data_transform_90 19 | ~GetRestructuredData.get_coord_non_90 20 | ~GetRestructuredData.vertical_component_check 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /docs/_autosummary/lidarwind.filters.SecondTripEchoFilter.rst: -------------------------------------------------------------------------------- 1 | lidarwind.filters.SecondTripEchoFilter 2 | ====================================== 3 | 4 | .. currentmodule:: lidarwind.filters 5 | 6 | .. autoclass:: SecondTripEchoFilter 7 | 8 | 9 | .. automethod:: __init__ 10 | 11 | 12 | .. rubric:: Methods 13 | 14 | .. 
autosummary:: 15 | 16 | ~SecondTripEchoFilter.__init__ 17 | ~SecondTripEchoFilter.cal_mean_and_anom_90 18 | ~SecondTripEchoFilter.cal_mean_and_anom_slant 19 | ~SecondTripEchoFilter.cleaning 20 | ~SecondTripEchoFilter.cleaning90 21 | ~SecondTripEchoFilter.get_time_edges 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /CITATION.cff: -------------------------------------------------------------------------------- 1 | cff-version: 1.1.0 2 | message: If you use this software, please cite it using these metadata. 3 | title: 'A python package for retrieving wind profiles from Doppler lidar observations' 4 | authors: 5 | - given-names: José 6 | family-names: Dias Neto 7 | affiliation: Delft University of Technology - The Netherlands 8 | orcid: https://orcid.org/0000-0002-8488-8486 9 | - given-names: Guilherme 10 | family-names: Castelao 11 | affiliation: Scripps Institution of Oceanography - UC San Diego 12 | orcid: https://orcid.org/0000-0002-6765-0708 13 | version: 0.1.4 14 | doi: 10.5281/zenodo.7026548 15 | repository-code: https://github.com/jdiasn/lidarwind 16 | license: BSD-3-Clause 17 | -------------------------------------------------------------------------------- /readthedocs.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the OS, Python version, and any other needed tools 9 | build: 10 | os: ubuntu-22.04 11 | tools: 12 | python: "3.10" 13 | 14 | # Build HTML & PDF formats 15 | formats: 16 | - htmlzip 17 | # - pdf 18 | 19 | # Build documentation in the docs/ directory with Sphinx 20 | sphinx: 21 | configuration: docs/conf.py 22 | 23 | # Version of Python and requirements required to build the docs 24 | python: 25 | install: 26 | - requirements: docs/requirements.pip 27 | - method: pip 28 | 
path: . 29 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py{39,310}-numpy{123,124}-pandas{15,20}-xarray{2022,2023}-xrft{04,10}-netcdf{15,16} 3 | minversion = 3.12 4 | isolated_build = true 5 | skip_missing_interpreters = true 6 | 7 | [testenv] 8 | deps = 9 | pytest>=6.0 10 | netcdf15: netCDF4~=1.5 11 | netcdf16: netCDF4>=1.6.0 12 | numpy123: numpy>=1.23,<1.24 13 | numpy124: numpy>=1.24.0 14 | pandas15: pandas~=1.5.0 15 | pandas20: pandas>=2.0 16 | xarray2022: xarray~=2022.0 17 | xarray2023: xarray>=2023.0 18 | xrft04: xrft>=0.4,<0.5 19 | xrft10: xrft>=1.0.0 20 | commands = 21 | pip install -U pip 22 | pytest 23 | 24 | [gh-actions] 25 | python = 26 | 3.8: py38 27 | 3.9: py39 28 | 3.10: py310 29 | 3.11: py311 30 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = python -msphinx 7 | SPHINXPROJ = lidarwind 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | sphinx-apidoc -f -o . 
../lidarwind 21 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 22 | -------------------------------------------------------------------------------- /docs/_autosummary/lidarwind.wind_prop_retrieval_6_beam.SixBeamMethod.rst: -------------------------------------------------------------------------------- 1 | lidarwind.wind\_prop\_retrieval\_6\_beam.SixBeamMethod 2 | ====================================================== 3 | 4 | .. currentmodule:: lidarwind.wind_prop_retrieval_6_beam 5 | 6 | .. autoclass:: SixBeamMethod 7 | 8 | 9 | .. automethod:: __init__ 10 | 11 | 12 | .. rubric:: Methods 13 | 14 | .. autosummary:: 15 | 16 | ~SixBeamMethod.__init__ 17 | ~SixBeamMethod.calc_variances 18 | ~SixBeamMethod.get_m_matrix 19 | ~SixBeamMethod.get_m_matrix_inv 20 | ~SixBeamMethod.get_s_matrix 21 | ~SixBeamMethod.get_sigma 22 | ~SixBeamMethod.get_variance 23 | ~SixBeamMethod.get_variance_ds 24 | 25 | 26 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /docs/_autosummary/lidarwind.wind_prop_retrieval.GetWindProperties5Beam.rst: -------------------------------------------------------------------------------- 1 | lidarwind.wind\_prop\_retrieval.GetWindProperties5Beam 2 | ====================================================== 3 | 4 | .. currentmodule:: lidarwind.wind_prop_retrieval 5 | 6 | .. autoclass:: GetWindProperties5Beam 7 | 8 | 9 | .. automethod:: __init__ 10 | 11 | 12 | .. rubric:: Methods 13 | 14 | .. 
autosummary:: 15 | 16 | ~GetWindProperties5Beam.__init__ 17 | ~GetWindProperties5Beam.calc_hor_wind_comp_continuous 18 | ~GetWindProperties5Beam.calc_hor_wind_comp_single_dbs 19 | ~GetWindProperties5Beam.calc_hor_wind_dir 20 | ~GetWindProperties5Beam.calc_hor_wind_speed 21 | ~GetWindProperties5Beam.correct_vert_wind_comp 22 | ~GetWindProperties5Beam.correct_wind_comp 23 | 24 | 25 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /docs/_autosummary/lidarwind.lidarwind_config.Configurations.rst: -------------------------------------------------------------------------------- 1 | lidarwind.lidarwind\_config.Configurations 2 | ========================================== 3 | 4 | .. currentmodule:: lidarwind.lidarwind_config 5 | 6 | .. autoclass:: Configurations 7 | 8 | 9 | .. automethod:: __init__ 10 | 11 | 12 | .. rubric:: Methods 13 | 14 | .. autosummary:: 15 | 16 | ~Configurations.__init__ 17 | ~Configurations.generate_conf 18 | ~Configurations.load_comments 19 | ~Configurations.load_conf_file 20 | ~Configurations.load_contact 21 | ~Configurations.load_email 22 | ~Configurations.load_institution 23 | ~Configurations.load_instrument 24 | ~Configurations.load_reference 25 | ~Configurations.load_site 26 | ~Configurations.load_version 27 | 28 | 29 | 30 | 31 | 32 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=python -msphinx 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | set SPHINXPROJ=lidarwind 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The Sphinx module was not found. 
"""Top-level package for lidarwind package"""

__author__ = "José Dias Neto"
__email__ = "jdiasn@gmail.com"
__affiliation__ = "Delft University of Technology"


# Resolve the installed package version. ``pkg_resources`` is deprecated
# (and slow to import); ``importlib.metadata`` is its stdlib replacement
# and is available from Python 3.8, the minimum version this project pins.
from importlib.metadata import PackageNotFoundError, version

try:
    __version__ = version(__name__)
except PackageNotFoundError:
    # Package metadata not found (e.g. running from a source checkout):
    # fall back to the autogenerated version module, if present.
    try:
        from .version import version as __version__
    except ImportError:
        raise ImportError(
            "Failed to find (autogenerated) version.py. "
            "This might be because you are installing from GitHub's tarballs, "
            "use the PyPI ones."
        )


from .data_attributes import *
from .data_operator import *
from .filters import *
from .lidar_code import *
from .lidarwind_config import *
from .utilities import *
from .visualization import *
from .wind_prop_retrieval import *
from .wind_prop_retrieval_6_beam import *
| - id: check-yaml 13 | - id: check-toml 14 | - id: debug-statements 15 | - id: trailing-whitespace 16 | - id: end-of-file-fixer 17 | - id: check-added-large-files 18 | - repo: https://github.com/psf/black 19 | rev: 22.8.0 20 | hooks: 21 | - id: black 22 | args: [ --safe ] 23 | exclude: docs/conf.py 24 | - repo: https://github.com/PyCQA/flake8 25 | rev: 5.0.4 26 | hooks: 27 | - id: flake8 28 | - repo: https://github.com/charliermarsh/ruff-pre-commit 29 | # Ruff version. 30 | rev: 'v0.0.244' 31 | hooks: 32 | - id: ruff 33 | args: [ --fix ] 34 | -------------------------------------------------------------------------------- /docs/diagram.rst: -------------------------------------------------------------------------------- 1 | =============== 2 | Package Diagram 3 | =============== 4 | 5 | The package is structured with four module groups. The diagram below illustrates how those groups interact with each other. The main modules, indicated by the green colour, are responsible for preprocessing and deriving information related to the wind. The yellow, blue and red colours indicate the auxiliary modules. The yellow modules are used during the data preprocessing, and the red modules are used for preparing the final data for storage. The blue modules add extra functionalities to the package. 6 | 7 | 8 | .. figure:: /figures/diagram.png 9 | :scale: 60% 10 | :align: center 11 | 12 | The name of each module is indicated in the coloured part of each box, and the classes available in each module are listed inside the boxes. The starting point of the dashed lines indicates the classes that are calling other classes or functions from the auxiliary modules. The bold black lines indicate the data flow while using the package for retrieving wind information. The bold red lines indicate that some classes can be used to act on the preprocessed data before continuing with the wind retrieval. 
def open_sweep(file_name):
    """Windcube's data reader

    It opens one of the Windcube lidar's original NetCDF outputs and
    returns the sweep group as a decoded dataset.

    Parameters
    ----------

    file_name : str
        path to the file that will be opened

    Returns
    -------
    ds : xarray.DataSet

        a dataset from the original NetCDF files
    """

    tree = datatree.open_datatree(file_name, decode_times=False)

    assert (
        "sweep_group_name" in tree
    ), "missing sweep group variable in input file"

    sweep_name = tree["sweep_group_name"].values[0]
    ds = tree[f"/{sweep_name}"].to_dataset()
    del tree

    # The raw time axis is plain numbers; anchoring its units to the
    # recorded reference timestamp lets decode_cf turn it into datetimes.
    if "time_reference" in ds:
        reference_time = pd.to_datetime(
            ds["time_reference"].values
        ).isoformat()
        ds["time"].attrs["units"] = f"seconds since {reference_time}"

    return xr.decode_cf(ds)
class GetLidarData:
    """Windcube's data reader

    ATTENTION: please move to io.open_sweep(). This GetLidarData
    class will eventually be removed.

    It opens and reads the original NetCDF output from the Windcube lidar

    Parameters
    ----------

    file_name : str
        name of the file that will be open

    """

    def __init__(self, file_name):
        # Only the path is stored; reading happens in open_lidar_file().
        self.file_name = file_name

    def open_lidar_file(self):
        """Read the lidar NetCDF file.

        Returns
        -------
        ds: xarray.DataSet
            a dataset from the original NetCDF files

        Note
        ----
        Alias to io.open_sweep() so upper functions don't break until
        the transition to the new I/O module is finished.
        """
        warnings.warn(
            "GetLidarData will be removed eventually. "
            "Please use io module instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return open_sweep(self.file_name)
code-block:: console 40 | 41 | $ curl -OJL https://github.com/jdiasn/lidarwind/tarball/master 42 | 43 | Once you have a copy of the source, you can install it with: 44 | 45 | .. code-block:: console 46 | 47 | $ python setup.py install 48 | 49 | 50 | .. _Github repo: https://github.com/jdiasn/lidarwind 51 | .. _tarball: https://github.com/jdiasn/lidarwind/tarball/master 52 | -------------------------------------------------------------------------------- /docs/api.rst: -------------------------------------------------------------------------------- 1 | .. currentmodule:: lidarwind 2 | 3 | ############# 4 | API reference 5 | ############# 6 | 7 | .. _api: 8 | 9 | The public API resources are listed below. 10 | 11 | Basic data reader 12 | ================= 13 | 14 | .. autosummary:: 15 | :toctree: _autosummary 16 | 17 | lidar_code.GetLidarData 18 | 19 | Data Manager 20 | ============ 21 | 22 | .. autosummary:: 23 | :toctree: _autosummary/ 24 | 25 | data_operator.DataOperations 26 | data_operator.ReadProcessedData 27 | data_operator.GetRestructuredData 28 | data_operator.DbsOperations 29 | 30 | Filters 31 | ======= 32 | 33 | .. autosummary:: 34 | :toctree: _autosummary/ 35 | 36 | filters.Filtering 37 | filters.SecondTripEchoFilter 38 | filters.WindCubeCloudRemoval 39 | 40 | Wind Retrieval 41 | ============== 42 | 43 | .. autosummary:: 44 | :toctree: _autosummary/ 45 | 46 | wind_prop_retrieval.RetriveWindFFT 47 | wind_prop_retrieval.GetWindProperties5Beam 48 | 49 | Turbulence estimation 50 | ===================== 51 | 52 | .. autosummary:: 53 | :toctree: _autosummary/ 54 | 55 | wind_prop_retrieval_6_beam.SixBeamMethod 56 | 57 | Data Attributes 58 | =============== 59 | 60 | .. autosummary:: 61 | :toctree: _autosummary/ 62 | 63 | data_attributes.LoadAttributes 64 | 65 | Global Attributes Definitions 66 | ============================= 67 | 68 | .. 
def get_horizontal_wind(ds: xr.Dataset) -> xr.Dataset:
    """Horizontal wind dataset

    Builds the final horizontal wind dataset: the FFT wind retrieval is
    applied to the preprocessed radial velocities and the auxiliary
    scan-description variables are attached to the result.

    Parameters:
    -----------
    ds : xr.Dataset
        A preprocessed dataset: output from
        rpg_slanted_radial_velocity_4_fft

    Returns:
    --------
    xr.Dataset
        Final horizontal wind dataset
        including auxiliary information
        to help to characterise the
        dataset.

    """

    if not isinstance(ds, xr.Dataset):
        raise TypeError(f"{ds} is not an instance of xr.Dataset")

    required_variables = [
        "start_scan",
        "end_scan",
        "zdr_max",
        "nan_percentual",
        "chirp_start",
        "chirp_end",
        "chirp_azimuth_bias",
        "azm_seq",
    ]

    # "mean_time" is needed as a dimension below, so it is validated
    # together with the variables that get merged in.
    missing = [
        name
        for name in required_variables + ["mean_time"]
        if name not in ds
    ]
    if missing:
        raise KeyError(
            f"{missing[0]} is not available in the provided dataset"
        )

    wind_ds = get_wind_properties(ds.MeanVel)
    wind_ds = wind_ds.merge(ds[required_variables])
    return wind_ds.expand_dims(["mean_time"])
7 | 8 | Redistribution and use in source and binary forms, with or without modification, 9 | are permitted provided that the following conditions are met: 10 | 11 | * Redistributions of source code must retain the above copyright notice, this 12 | list of conditions and the following disclaimer. 13 | 14 | * Redistributions in binary form must reproduce the above copyright notice, this 15 | list of conditions and the following disclaimer in the documentation and/or 16 | other materials provided with the distribution. 17 | 18 | * Neither the name of the copyright holder nor the names of its 19 | contributors may be used to endorse or promote products derived from this 20 | software without specific prior written permission. 21 | 22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 23 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 24 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 25 | IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, 26 | INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 27 | BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 28 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY 29 | OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 30 | OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED 31 | OF THE POSSIBILITY OF SUCH DAMAGE. 32 | -------------------------------------------------------------------------------- /docs/lidarwind.rst: -------------------------------------------------------------------------------- 1 | lidarwind package 2 | ================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | lidarwind.data\_attributes module 8 | --------------------------------- 9 | 10 | .. 
automodule:: lidarwind.data_attributes 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | lidarwind.data\_operator module 16 | ------------------------------- 17 | 18 | .. automodule:: lidarwind.data_operator 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | lidarwind.filters module 24 | ------------------------ 25 | 26 | .. automodule:: lidarwind.filters 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | lidarwind.io module 32 | ------------------- 33 | 34 | .. automodule:: lidarwind.io 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | lidarwind.lidar\_code module 40 | ---------------------------- 41 | 42 | .. automodule:: lidarwind.lidar_code 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | lidarwind.lidarwind\_config module 48 | ---------------------------------- 49 | 50 | .. automodule:: lidarwind.lidarwind_config 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | 55 | lidarwind.utilities module 56 | -------------------------- 57 | 58 | .. automodule:: lidarwind.utilities 59 | :members: 60 | :undoc-members: 61 | :show-inheritance: 62 | 63 | lidarwind.version module 64 | ------------------------ 65 | 66 | .. automodule:: lidarwind.version 67 | :members: 68 | :undoc-members: 69 | :show-inheritance: 70 | 71 | lidarwind.visualization module 72 | ------------------------------ 73 | 74 | .. automodule:: lidarwind.visualization 75 | :members: 76 | :undoc-members: 77 | :show-inheritance: 78 | 79 | lidarwind.wind\_prop\_retrieval module 80 | -------------------------------------- 81 | 82 | .. automodule:: lidarwind.wind_prop_retrieval 83 | :members: 84 | :undoc-members: 85 | :show-inheritance: 86 | 87 | lidarwind.wind\_prop\_retrieval\_6\_beam module 88 | ----------------------------------------------- 89 | 90 | .. 
automodule:: lidarwind.wind_prop_retrieval_6_beam 91 | :members: 92 | :undoc-members: 93 | :show-inheritance: 94 | 95 | Module contents 96 | --------------- 97 | 98 | .. automodule:: lidarwind 99 | :members: 100 | :undoc-members: 101 | :show-inheritance: 102 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | version.py 30 | 31 | # PyInstaller 32 | # Usually these files are written by a python script from a template 33 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
34 | *.manifest 35 | *.spec 36 | 37 | # Installer logs 38 | pip-log.txt 39 | pip-delete-this-directory.txt 40 | 41 | # Unit test / coverage reports 42 | htmlcov/ 43 | .tox/ 44 | .nox/ 45 | .coverage 46 | .coverage.* 47 | .cache 48 | nosetests.xml 49 | coverage.xml 50 | *.cover 51 | *.py,cover 52 | .hypothesis/ 53 | .pytest_cache/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | db.sqlite3-journal 64 | 65 | # Flask stuff: 66 | instance/ 67 | .webassets-cache 68 | 69 | # Scrapy stuff: 70 | .scrapy 71 | 72 | # Sphinx documentation 73 | docs/_build/ 74 | 75 | # PyBuilder 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | .python-version 87 | 88 | # pipenv 89 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 90 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 91 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 92 | # install all needed dependencies. 93 | #Pipfile.lock 94 | 95 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 96 | __pypackages__/ 97 | 98 | # Celery stuff 99 | celerybeat-schedule 100 | celerybeat.pid 101 | 102 | # SageMath parsed files 103 | *.sage.py 104 | 105 | # Environments 106 | .env 107 | .venv 108 | env/ 109 | venv/ 110 | ENV/ 111 | env.bak/ 112 | venv.bak/ 113 | 114 | # Spyder project settings 115 | .spyderproject 116 | .spyproject 117 | 118 | # Rope project settings 119 | .ropeproject 120 | 121 | # mkdocs documentation 122 | /site 123 | 124 | # mypy 125 | .mypy_cache/ 126 | .dmypy.json 127 | dmypy.json 128 | 129 | # Pyre type checker 130 | .pyre/ 131 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: lidarwind_test 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | branches: [ main ] 8 | schedule: 9 | - cron: "7 18 * * 0" 10 | 11 | jobs: 12 | build: 13 | 14 | runs-on: ubuntu-latest 15 | 16 | strategy: 17 | max-parallel: 2 18 | matrix: 19 | python-version: ["3.9", "3.10"] 20 | 21 | steps: 22 | - name: checkout lidarwind 23 | uses: actions/checkout@v4 24 | with: 25 | fetch-depth: 0 26 | 27 | - name: Set up Python ${{ matrix.python-version }} 28 | uses: actions/setup-python@v4 29 | with: 30 | python-version: ${{ matrix.python-version }} 31 | 32 | - name: Install dependencies 33 | run: | 34 | python -m pip install --upgrade pip 35 | pip install flake8 pytest 36 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 37 | pip install -e .[plots,test] 38 | 39 | - name: Cache sample data 40 | id: cache-samples 41 | uses: actions/cache@v3 42 | with: 43 | path: ~/.cache/lidarwind 44 | key: ${{ runner.os }}-samples-${{ hashFiles('**/pyproject.toml') }} 45 | 46 | - name: Test with pytest 47 | run: | 48 | pytest tests 49 | 50 | coverage: 51 | runs-on: ubuntu-latest 52 | 53 | steps: 54 | - name: checkout lidarwind 55 | uses: actions/checkout@v4 56 | with: 57 | 
fetch-depth: 0 58 | 59 | - name: Set up Python 60 | uses: actions/setup-python@v4 61 | with: 62 | python-version: "3.10" 63 | cache: "pip" 64 | cache-dependency-path: 'pyproject.toml' 65 | 66 | - name: Coverage report 67 | run: | 68 | python -m pip install --upgrade pip 69 | pip install -e .[test] 70 | pytest --cov=./ --cov-report=xml 71 | # pytest --cov=./ --cov-append --cov-report=xml --current-env --nbval-lax docs/examples/turbulence_6beam_data_rendered.ipynb 72 | # pytest --cov=./ --cov-append --cov-report=xml --current-env --nbval-lax docs/examples/merging_6beam_rendered.ipynb 73 | # pytest --cov=./ --cov-append --cov-report=xml --current-env --nbval-lax docs/examples/dbs_scans_rendered.ipynb 74 | # pytest --cov=./ --cov-append --cov-report=xml --current-env --nbval-lax docs/examples/reading_long_dbs_rendered.ipynb 75 | 76 | 77 | - name: Upload coverage to Codecov 78 | uses: codecov/codecov-action@v4 79 | env: 80 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 81 | with: 82 | fail_ci_if_error: true 83 | verbose: true 84 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean clean-build clean-pyc clean-test coverage dist docs help install lint lint/flake8 lint/black 2 | .DEFAULT_GOAL := help 3 | 4 | define BROWSER_PYSCRIPT 5 | import os, webbrowser, sys 6 | 7 | from urllib.request import pathname2url 8 | 9 | webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1]))) 10 | endef 11 | export BROWSER_PYSCRIPT 12 | 13 | define PRINT_HELP_PYSCRIPT 14 | import re, sys 15 | 16 | for line in sys.stdin: 17 | match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line) 18 | if match: 19 | target, help = match.groups() 20 | print("%-20s %s" % (target, help)) 21 | endef 22 | export PRINT_HELP_PYSCRIPT 23 | 24 | BROWSER := python -c "$$BROWSER_PYSCRIPT" 25 | 26 | help: 27 | @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) 28 | 29 
| clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts 30 | 31 | clean-build: ## remove build artifacts 32 | rm -fr build/ 33 | rm -fr dist/ 34 | rm -fr .eggs/ 35 | find . -name '*.egg-info' -exec rm -fr {} + 36 | find . -name '*.egg' -exec rm -f {} + 37 | 38 | clean-pyc: ## remove Python file artifacts 39 | find . -name '*.pyc' -exec rm -f {} + 40 | find . -name '*.pyo' -exec rm -f {} + 41 | find . -name '*~' -exec rm -f {} + 42 | find . -name '__pycache__' -exec rm -fr {} + 43 | 44 | clean-test: ## remove test and coverage artifacts 45 | rm -fr .tox/ 46 | rm -f .coverage 47 | rm -fr htmlcov/ 48 | rm -fr .pytest_cache 49 | 50 | lint/flake8: ## check style with flake8 51 | flake8 lidarwind tests 52 | lint/black: ## check style with black 53 | black --check lidarwind tests 54 | 55 | lint: lint/flake8 lint/black ## check style 56 | 57 | test: ## run tests quickly with the default Python 58 | pytest 59 | 60 | test-all: ## run tests on every Python version with tox 61 | tox 62 | 63 | coverage: ## check code coverage quickly with the default Python 64 | coverage run --source lidarwind -m pytest 65 | coverage report -m 66 | coverage html 67 | $(BROWSER) htmlcov/index.html 68 | 69 | docs: ## generate Sphinx HTML documentation, including API docs 70 | rm -f docs/lidarwind.rst 71 | rm -f docs/modules.rst 72 | sphinx-apidoc -o docs/ lidarwind 73 | $(MAKE) -C docs clean 74 | $(MAKE) -C docs html 75 | $(BROWSER) docs/_build/html/index.html 76 | 77 | servedocs: docs ## compile the docs watching for changes 78 | watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D . 
79 | 80 | release: dist ## package and upload a release 81 | twine upload dist/* 82 | 83 | dist: clean ## builds source and wheel package 84 | python setup.py sdist 85 | python setup.py bdist_wheel 86 | ls -l dist 87 | 88 | install: clean ## install the package to the active Python's site-packages 89 | python setup.py install 90 | -------------------------------------------------------------------------------- /tests/postprocessing/test_post_rpg_radar.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | import xarray as xr 4 | 5 | from lidarwind.postprocessing import post_rpg_radar 6 | 7 | 8 | def get_test_ds(): 9 | test_ds = xr.Dataset( 10 | { 11 | "start_scan": [0], 12 | "end_scan": [0], 13 | "zdr_max": [0], 14 | "nan_percentual": [0], 15 | "chirp_start": [0], 16 | "chirp_end": [0], 17 | "chirp_azimuth_bias": [0], 18 | "azm_seq": [0], 19 | "mean_time": [0], 20 | } 21 | ) 22 | return test_ds 23 | 24 | 25 | def test_get_horizontal_wind_ds_type(): 26 | 27 | with pytest.raises(TypeError): 28 | post_rpg_radar.get_horizontal_wind(ds=np.array([1, 2])) 29 | 30 | 31 | def test_get_horizontal_wind_start_scan(): 32 | 33 | ds = get_test_ds() 34 | del ds["start_scan"] 35 | 36 | with pytest.raises(KeyError): 37 | post_rpg_radar.get_horizontal_wind(ds) 38 | 39 | 40 | def test_get_horizontal_wind_end_scan(): 41 | 42 | ds = get_test_ds() 43 | del ds["end_scan"] 44 | 45 | with pytest.raises(KeyError): 46 | post_rpg_radar.get_horizontal_wind(ds) 47 | 48 | 49 | def test_get_horizontal_wind_zdr_max(): 50 | 51 | ds = get_test_ds() 52 | del ds["zdr_max"] 53 | 54 | with pytest.raises(KeyError): 55 | post_rpg_radar.get_horizontal_wind(ds) 56 | 57 | 58 | def test_get_horizontal_wind_nan_percentual(): 59 | 60 | ds = get_test_ds() 61 | del ds["nan_percentual"] 62 | 63 | with pytest.raises(KeyError): 64 | post_rpg_radar.get_horizontal_wind(ds) 65 | 66 | 67 | def test_get_horizontal_wind_chirp_start(): 68 | 69 | ds = 
import glob
import os
import shutil
from typing import Optional

import gdown
import pytest

from lidarwind.io import open_sweep

GDRIVE_ID = "1i6iX6KuZOkP_WLuPZHG5uCcvRjlWS-SU"


def lidarwindrc(subdir: Optional[str] = None):
    """Standard path for lidarwind configurations

    Returns the configuration root directory ($LIDARWIND_DIR if set,
    otherwise ~/.lidarwindrc), optionally extended with ``subdir``.

    This might be moved into lidarwind.utils if used somewhere else
    """
    path = os.getenv("LIDARWIND_DIR", os.path.join("~", ".lidarwindrc"))
    path = os.path.expanduser(path)

    if subdir is not None:
        path = os.path.join(path, subdir)

    return path


def get_sample_data(sample_path, file_type):
    """Download and unpack one sample-data archive into ``sample_path``.

    Parameters
    ----------
    sample_path : str
        Directory the archive is unpacked into (the temporary zip is
        written next to it, reproducing the original path construction).
    file_type : str
        Identifier of the sample set; only "12-00" has a real download
        URL so far ("dbs" is still a placeholder).

    Raises
    ------
    ValueError
        For an unknown ``file_type`` (the old code left ``url`` unbound
        and crashed with a NameError instead).
    """
    if file_type == "12-00":
        url = f"https://drive.google.com/uc?export=download&id={GDRIVE_ID}"
    elif file_type == "dbs":
        # Placeholder: no public archive is available for DBS samples yet.
        url = "path"
    else:
        raise ValueError(f"unknown sample file_type: {file_type}")

    output = f"{sample_path}{file_type}.zip"
    gdown.download(url, output, quiet=False)

    print(f"Extracting: {output}")
    shutil.unpack_archive(output, sample_path)
    os.remove(output)


def download_samples():
    """Create the sample-data directory (if needed) and fetch the samples."""
    file_type = "12-00"  # change to 6 beam in the future
    sample_path = lidarwindrc("sample_data")

    if not os.path.isdir(sample_path):
        os.makedirs(sample_path)
    # NOTE(review): always downloading here is intentional — this function
    # is only called when the sample data is known to be missing.
    get_sample_data(sample_path, file_type)


@pytest.fixture
def data_filenames():
    """List of sample NetCDF files, downloading the sample set if needed.

    Collapses the original's duplicated ``os.path.isdir`` checks into a
    single ensure-directory / ensure-files sequence with the same net
    effect: afterwards the sample directory exists and its *.nc files
    are returned sorted.
    """
    file_type = "12-00"  # change to 6 beam in the future
    sample_path = os.path.join(lidarwindrc("sample_data"), file_type)

    if not os.path.isdir(sample_path):
        download_samples()

    file_list = sorted(glob.glob(f"{sample_path}/*.nc"))
    if not file_list:
        # Directory exists (or was just created) but holds no data yet.
        get_sample_data(sample_path, file_type)
        file_list = sorted(glob.glob(f"{sample_path}/*.nc"))

    return file_list


def sample_dataset(key: str):
    """Single xr.Dataset for testing

    For a given identifier, for now it is the filename, download if needed,
    and return already as an xarray Dataset ready to be used.

    ATTENTION: the data_filenames fixture as a prerequisite is a
    requirement while the download of sample data is not refactored. This
    function only accesses a file that is expected to have been
    downloaded previously.
    """
    path = os.path.join(lidarwindrc("sample_data"), "12-00", key)

    return open_sweep(path)
67 | documentation = "https://lidarwind.readthedocs.io" 68 | 69 | [project.scripts] 70 | "lidarwind" = "lidarwind.cli:main" 71 | 72 | [tool.black] 73 | line-length = 79 74 | 75 | [tool.setuptools_scm] 76 | write_to = "lidarwind/version.py" 77 | git_describe_command = "git describe --dirty --tags --long --match 'v*' --first-parent" 78 | 79 | [tool.pytest.ini_options] 80 | testpaths = ["tests"] 81 | 82 | [tool.ruff] 83 | select = ["A", "I", "UP", "W"] 84 | ignore = [] 85 | 86 | # Allow autofix for all enabled rules (when `--fix`) is provided. 87 | fixable = ["I"] 88 | unfixable = ["F401"] 89 | 90 | # Exclude a variety of commonly ignored directories. 91 | exclude = [ 92 | ".eggs", 93 | ".git", 94 | ".mypy_cache", 95 | ".nox", 96 | ".ruff_cache", 97 | ".tox", 98 | "__pypackages__", 99 | "_build", 100 | "build", 101 | "dist", 102 | "docs/conf.py", 103 | "docs/_autosummary", 104 | ] 105 | per-file-ignores = {} 106 | 107 | line-length = 79 108 | 109 | target-version = "py38" 110 | 111 | [tool.ruff.pydocstyle] 112 | convention = "numpy" 113 | -------------------------------------------------------------------------------- /tests/test_get_wind_Properties_5_beam.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | import xarray as xr 4 | 5 | import lidarwind as lst 6 | 7 | 8 | def get_dummy_dbs(): 9 | 10 | wind = ( 11 | np.sin(np.deg2rad(np.array([0, 180, 270, 90]))) 12 | * 1 13 | / np.sin(np.deg2rad(45)) 14 | ) 15 | 16 | elv = np.array([45, 45, 45, 45, 90]) 17 | data_elv = xr.DataArray( 18 | elv, dims=("time"), coords={"time": np.arange(len(elv))} 19 | ) 20 | 21 | azm = np.array([0, 180, 270, 90, 0]) 22 | data_azm = xr.DataArray( 23 | azm, dims=("time"), coords={"time": np.arange(len(elv))} 24 | ) 25 | 26 | data = xr.DataArray( 27 | np.append(wind, 0)[:, np.newaxis], 28 | dims=("time", "gate_index"), 29 | coords={"time": np.arange(len(elv)), "gate_index": [1]}, 30 | ) 31 | 32 | data_status = 
xr.DataArray( 33 | np.array([1, 1, 1, 1, 1])[:, np.newaxis], 34 | dims=("time", "gate_index"), 35 | coords={"time": np.arange(len(elv)), "gate_index": [1]}, 36 | ) 37 | 38 | data_mean_time = xr.DataArray( 39 | np.array([1, 1, 1, 1, 1]), 40 | dims=("time"), 41 | coords={"time": np.arange(len(elv))}, 42 | ) 43 | 44 | test_ds = xr.Dataset( 45 | { 46 | "elevation": data_elv, 47 | "azimuth": data_azm, 48 | "cnr": data_status, 49 | "measurement_height": data_status, 50 | "radial_wind_speed": data, 51 | "radial_wind_speed_status": data_status, 52 | "scan_mean_time": data_mean_time, 53 | } 54 | ) 55 | 56 | return test_ds 57 | 58 | 59 | @pytest.fixture 60 | def get_wind_profiles(): 61 | return lst.GetWindProperties5Beam(get_dummy_dbs()) 62 | 63 | 64 | def test_get_wind_properties_5_beam_vert_wind_dim(get_wind_profiles): 65 | assert "gate_index" not in get_wind_profiles.ver_wind_speed.dims 66 | 67 | 68 | def test_get_wind_properties_5_beam_comp_u_dim(get_wind_profiles): 69 | assert "gate_index" not in get_wind_profiles.comp_u.dims 70 | 71 | 72 | def test_get_wind_properties_5_beam_comp_v_dim(get_wind_profiles): 73 | assert "gate_index" not in get_wind_profiles.comp_v.dims 74 | 75 | 76 | def test_get_wind_properties_5_beam_comp_u_value(get_wind_profiles): 77 | assert np.round(get_wind_profiles.comp_u.values, 1) == 2.0 78 | 79 | 80 | def test_get_wind_properties_5_beam_comp_v_value(get_wind_profiles): 81 | assert np.round(get_wind_profiles.comp_v.values, 1) == 0 82 | 83 | 84 | def test_get_wind_properties_5_beam_wind_speed_value(get_wind_profiles): 85 | assert np.round(get_wind_profiles.hor_wind_speed.values, 1) == 2 86 | 87 | 88 | def test_get_wind_properties_5_beam_wind_dir_value(get_wind_profiles): 89 | assert np.round(get_wind_profiles.hor_wind_dir.values, 1) == 270 90 | 91 | 92 | def test_get_wind_properties_5_beam_vert_wind_value(get_wind_profiles): 93 | assert np.round(get_wind_profiles.ver_wind_speed.values, 1) == 0 94 | 
# CI workflow: runs the tox test matrix and a coverage job.
# Triggered manually only (push/schedule triggers are kept but disabled).
name: lidarwind_test

on:
  #push:
  #  branches: [ main ]
  # schedule:
  #   - cron: "7 18 * * 0"

  workflow_dispatch:

jobs:
  build:

    runs-on: ubuntu-latest

    strategy:
      max-parallel: 3
      matrix:
        python-version: ["3.9", "3.10"]
      fail-fast: false

    steps:
    - name: checkout lidarwind
      uses: actions/checkout@v4
      with:
        fetch-depth: 0

    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v4
      with:
        python-version: ${{ matrix.python-version }}

    - name: Get pip cache dir
      id: pip-cache
      # "::set-output" workflow commands are deprecated by GitHub Actions;
      # step outputs must be written to the $GITHUB_OUTPUT file instead.
      run: |
        echo "dir=$(pip cache dir)" >> "$GITHUB_OUTPUT"

    - name: pip cache
      uses: actions/cache@v3
      with:
        path: ${{ steps.pip-cache.outputs.dir }}
        key: ${{ runner.os }}-pip-v4-${{ hashFiles('**/pyproject.toml') }}
        restore-keys: |
          ${{ runner.os }}-pip-v4-

    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install tox tox-gh-actions flake8 pytest
        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
        pip install -e .[plots,test]

    - name: Cache TOX
      uses: actions/cache@v3
      with:
        path: .tox/
        key: ${{ runner.os }}-tox-${{ hashFiles('**/pyproject.toml') }}
        restore-keys: |
          ${{ runner.os }}-tox-

    - name: Test with TOX
      run: |
        tox

  coverage:
    runs-on: ubuntu-latest

    steps:
    - name: checkout lidarwind
      uses: actions/checkout@v4
      with:
        fetch-depth: 0

    - name: Set up Python
      uses: actions/setup-python@v4
      with:
        python-version: "3.10"
        cache: "pip"
        cache-dependency-path: 'pyproject.toml'

    - name: Coverage report
      run: |
        python -m pip install --upgrade pip
        pip install -e .[test]
        pytest --cov=./ --cov-report=xml
        # pytest --cov=./ --cov-append --cov-report=xml --current-env --nbval-lax notebooks/turbulence_6beam_data.ipynb
        # pytest --cov=./ --cov-append --cov-report=xml --current-env --nbval-lax notebooks/merging_6beam_data.ipynb
        # pytest --cov=./ --cov-append --cov-report=xml --current-env --nbval-lax notebooks/dbs_scans.ipynb
        # pytest --cov=./ --cov-append --cov-report=xml --current-env --nbval-lax notebooks/reading_long_dbs.ipynb

    - name: Upload coverage to Codecov
      uses: codecov/codecov-action@v4
      env:
        CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      with:
        fail_ci_if_error: true
        verbose: true
"relative_beta90": data90, 41 | "cnr": data, 42 | "gate_index": data, 43 | "radial_wind_speed": data, 44 | "radial_wind_speed_status": data, 45 | "relative_beta": data, 46 | } 47 | ) 48 | 49 | return lst.GetRestructuredData(test_ds) 50 | 51 | 52 | def test_six_beam_method_input(): 53 | 54 | with pytest.raises(TypeError): 55 | lst.SixBeamMethod(data=xr.DataArray(np.array([0, 1]))) 56 | 57 | 58 | @pytest.fixture 59 | def test_get_six_beam_obj(): 60 | 61 | six_beam_obj = lst.SixBeamMethod( 62 | get_dummy_six_beam_obj(), freq=6, freq90=6 63 | ) 64 | 65 | return six_beam_obj 66 | 67 | 68 | def test_six_beam_method_m_matrix(test_get_six_beam_obj): 69 | 70 | assert np.all(np.isfinite(test_get_six_beam_obj.m_matrix)) 71 | 72 | 73 | def test_six_beam_method_m_matrix_inv(test_get_six_beam_obj): 74 | 75 | assert np.all(np.isfinite(test_get_six_beam_obj.m_matrix_inv)) 76 | 77 | 78 | def test_six_beam_method_variance_dic(test_get_six_beam_obj): 79 | 80 | assert len(test_get_six_beam_obj.radial_variances.keys()) == 2 81 | 82 | 83 | def test_six_beam_method_radial_variances90(test_get_six_beam_obj): 84 | 85 | assert np.all( 86 | test_get_six_beam_obj.radial_variances["rVariance90"].values == 0 87 | ) 88 | 89 | 90 | def test_six_beam_method_radial_variances(test_get_six_beam_obj): 91 | 92 | assert np.all( 93 | test_get_six_beam_obj.radial_variances["rVariance"].values == 0 94 | ) 95 | 96 | 97 | def test_six_beam_method_sigma_matrix(test_get_six_beam_obj): 98 | 99 | assert np.all(test_get_six_beam_obj.sigma_matrix == 0) 100 | 101 | 102 | def test_six_beam_method_variance_dim_time(test_get_six_beam_obj): 103 | 104 | assert len(test_get_six_beam_obj.var_comp_ds.time.values) == 1 105 | 106 | 107 | def test_six_beam_method_variance_dim_range(test_get_six_beam_obj): 108 | 109 | assert len(test_get_six_beam_obj.var_comp_ds.range.values) == 1 110 | -------------------------------------------------------------------------------- /lidarwind/postprocessing/post_wind_cube.py: 
import numpy as np
import xarray as xr

from ..preprocessing.wind_cube import wc_slanted_radial_velocity_4_fft
from ..wind_retrieval.fft_wind_retrieval import get_wind_properties


def get_horizontal_wind(ds: xr.Dataset) -> xr.Dataset:
    """Apply the FFT wind retrieval to a preprocessed dataset.

    The slanted (non-90-degree) beams are selected, converted to the
    radial-velocity layout expected by the FFT method, and the retrieved
    horizontal wind is merged back into the input dataset.

    Parameters
    ----------
    ds : xr.Dataset
        A dataset of preprocessed observations.

    Returns
    -------
    xr.Dataset
        The input dataset, but including the horizontal wind profiles.
    """

    # Elevation of the slanted beams: every unique elevation except the
    # vertical (90 deg) one.  np.unique is computed once, not twice.
    elevations = np.unique(ds.elevation)
    slanted_elevation = elevations[elevations != 90]
    ds_slanted = ds.where(ds.elevation == slanted_elevation, drop=True)

    radial_velocities = wc_slanted_radial_velocity_4_fft(ds_slanted)
    horizontal_wind = get_wind_properties(radial_velocities.radial_wind_speed)

    return ds.merge(horizontal_wind)


# Post processing
def wc_extract_wind(ds: xr.Dataset, method="full") -> xr.Dataset:
    """Extract the wind profiles from a processed dataset.

    Parameters
    ----------
    ds : xr.Dataset
        A dataset processed and containing the retrieved horizontal wind.

    method : str
        Method for extracting the wind dataset.
        If "full", the data is merged, preserving all information.
        If "compact", the data is merged, but only the horizontal wind
        profiles that are almost simultaneous to the vertical
        observations are merged.

    Returns
    -------
    xr.Dataset
        The wind dataset.

    Raises
    ------
    ValueError
        If ``method`` is neither "full" nor "compact".
    """

    # Fail early with a clear message; previously an unknown method left
    # ``wind`` undefined and surfaced as a confusing NameError below.
    if method not in ("full", "compact"):
        raise ValueError(
            f"unknown method {method!r}; expected 'full' or 'compact'"
        )

    # Vertical-beam radial velocity is the vertical wind speed.
    vertical_velocity = ds.radial_wind_speed.where(
        ds["elevation"] == 90, drop=True
    )
    vertical_velocity.name = "vertical_wind_speed"
    # Re-index the vertical profile on the physical range instead of the
    # gate index.  drop_vars replaces the deprecated DataArray.drop.
    vertical_velocity = (
        vertical_velocity.drop_vars(["gate_index", "range"])
        .assign_coords(
            {"gate_index": vertical_velocity.range.isel(time=0).values}
        )
        .rename({"gate_index": "range"})
    )

    variables = [
        "horizontal_wind_speed",
        "horizontal_wind_direction",
        "meridional_wind",
        "zonal_wind",
    ]

    horizontal_wind = ds[variables]

    # Keep only the slanted beams with a valid (finite) range.
    horizontal_wind = horizontal_wind.where(
        horizontal_wind["elevation"] != 90, drop=True
    )
    horizontal_wind = horizontal_wind.where(
        np.isfinite(horizontal_wind.range), drop=True
    )
    # Project the slanted range onto the vertical to get height.
    new_range = (
        np.sin(np.deg2rad(horizontal_wind.elevation)) * horizontal_wind.range
    )
    horizontal_wind = (
        horizontal_wind.drop_vars(["gate_index", "range"])
        .assign_coords({"gate_index": new_range.isel(time=0).values})
        .rename({"gate_index": "range"})
    )

    # Put the horizontal wind on the vertical-beam range grid.
    horizontal_wind = horizontal_wind.interp(
        {"range": vertical_velocity.range}, method="nearest"
    )

    if method == "full":
        wind = horizontal_wind.merge(vertical_velocity)
    else:
        # "compact": keep only the horizontal profiles nearly
        # simultaneous with the vertical observations.
        wind = (
            horizontal_wind.interp(
                {"time": vertical_velocity["time"]}, method="nearest"
            )
            .drop_vars(["elevation"])
            .merge(vertical_velocity)
        )

    # Remove the scan-geometry helper variables from the final product.
    wind = wind.drop_vars(["azimuth", "azimuth_length", "freq_azimuth"])

    return wind
image:: https://joss.theoj.org/papers/28430a0c6a79e6d1ff33579ff13458f7/status.svg 6 | :target: https://doi.org/10.21105/joss.04852 7 | 8 | .. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.7026548.svg 9 | :target: https://doi.org/10.5281/zenodo.7026548 10 | 11 | .. image:: https://readthedocs.org/projects/lidarwind/badge/?version=latest 12 | :target: https://lidarwind.readthedocs.io/en/latest/?badge=latest 13 | :alt: Documentation Status 14 | 15 | .. image:: https://mybinder.org/badge_logo.svg 16 | :target: https://mybinder.org/v2/gh/jdiasn/lidarwind/main?labpath=docs%2Fexamples 17 | 18 | .. image:: https://img.shields.io/pypi/v/lidarwind.svg 19 | :target: https://pypi.python.org/pypi/lidarwind/ 20 | 21 | .. image:: https://codecov.io/gh/jdiasn/lidarwind/branch/main/graph/badge.svg?token=CEZM17YY3I 22 | :target: https://codecov.io/gh/jdiasn/lidarwind 23 | 24 | 25 | .. note:: 26 | Now lidarwind supports data from RPG cloud radars. If you are interested, have a look at Radar usage section. 27 | 28 | 29 | lidarwind is an open-source Python project that retrieves wind speed and direction profiles from Doppler velocity observations recorded by Doppler-capable instruments. Initially, this package was developed to work with observations from the WindCube-200s lidar, but with this new release, lidarwind starts to support data from RPG Cloud Radars. Currently, It can retrieve wind profiles from the PPI, 6-beam and DBS scanning strategies and calculate the Reynolds stress tensor matrix elements from the 6-beam observation. The package can be further extended to process data from other Doppler lidar and radar and from other scanning strategies. 30 | 31 | lidarwind results from an effort to create a flexible and easy-to-use package to process observations recorded by the WindCube Doppler lidar. The package development started in 2021 when I had to retrieve wind profiles from the 6-beam observations. 
32 | 33 | 34 | -------- 35 | Citation 36 | -------- 37 | 38 | If you use lidarwind, or replicate part of it, in your work/package, please consider including the reference: 39 | 40 | Neto, J. D. and Castelão, G. P., (2023). lidarwind: A Python package for retrieving wind profiles from Doppler lidar observations. Journal of Open Source Software, 8(82), 4852, https://doi.org/10.21105/joss.04852 41 | 42 | :: 43 | 44 | @article{Neto2023, 45 | doi = {10.21105/joss.04852}, 46 | url = {https://doi.org/10.21105/joss.04852}, 47 | year = {2023}, publisher = {Journal of Open Source Software}, 48 | volume = {8}, number = {82}, pages = {4852}, 49 | author = {José Dias Neto and Guilherme P. Castelao}, 50 | title = {lidarwind: A Python package for retrieving wind profiles from Doppler lidar observations}, 51 | journal = {Journal of Open Source Software} 52 | } 53 | 54 | 55 | 56 | ------------- 57 | Documentation 58 | ------------- 59 | 60 | The lidarwind's documentation is available at https://lidarwind.readthedocs.io. There you can find the steps needed for installing the package. You can also find a short description of how lidarwind derives the wind speed and direction from observations. 61 | 62 | 63 | Notebooks 64 | ========= 65 | 66 | An introductory set of rendered notebooks are available at https://nbviewer.org/github/jdiasn/lidarwind/tree/main/docs/examples/ or at https://github.com/jdiasn/lidarwind/tree/main/docs/examples. If you want to try the package without installing it locally, click on the binder badge above. You will be redirected to a virtual environment where you can also access the same notebooks and test the package. 67 | 68 | .. warning:: 69 | 70 | Beware that between versions 0.1.6 and 0.2.0, the package underwent significant refactoring. Now the classes' names 71 | follow the Pascal case, while module names, functions and attributes follow the snake case. Codes developed using the previous 72 | version will need revision. 
73 | -------------------------------------------------------------------------------- /joss/paper.bib: -------------------------------------------------------------------------------- 1 | @manual{windcube2020, 2 | title = {WindCube Scan software suite User Manual}, 3 | author = {{Leosphere, a Vaisala company}}, 4 | organization = {Vaisala}, 5 | edition = {Version 20.a}, 6 | year = {2020}, 7 | } 8 | 9 | @article {lhermitte1962, 10 | author = "Roger M. Lhermitte", 11 | title = "Note on Wind Variability with Doppler Radar", 12 | journal = "Journal of Atmospheric Sciences", 13 | year = "1962", 14 | publisher = "American Meteorological Society", 15 | address = "Boston MA, USA", 16 | volume = "19", 17 | number = "4", 18 | doi = "10.1175/1520-0469(1962)019<0343:NOWVWD>2.0.CO;2", 19 | pages= "343 - 346", 20 | url = "https://journals.ametsoc.org/view/journals/atsc/19/4/1520-0469_1962_019_0343_nowvwd_2_0_co_2.xml" 21 | } 22 | 23 | @article{eberhard1989, 24 | title={Doppler lidar measurement of profiles of turbulence and momentum flux}, 25 | author={Eberhard, Wynn L and Cupp, Richard E and Healy, Kathleen R}, 26 | journal={Journal of Atmospheric and Oceanic Technology}, 27 | volume={6}, 28 | number={5}, 29 | pages={809--819}, 30 | doi = {10.1175/1520-0426(1989)006<0809:DLMOPO>2.0.CO;2}, 31 | year={1989} 32 | } 33 | 34 | @article{sathe2015, 35 | title={A six-beam method to measure turbulence statistics using ground-based wind lidars}, 36 | author={Sathe, Ameya and Mann, Jakob and Vasiljevic, Nikola and Lea, Guillaume}, 37 | journal={Atmospheric Measurement Techniques}, 38 | volume={8}, 39 | number={2}, 40 | pages={729--740}, 41 | year={2015}, 42 | doi={10.5194/amt-8-729-2015}, 43 | publisher={Copernicus GmbH} 44 | } 45 | 46 | @dataset{diasneto2022a, 47 | author = {Dias Neto, José}, 48 | title = {{The Tracing Convective Momentum Transport in 49 | Complex Cloudy Atmospheres Experiment - Level 1}}, 50 | month = {jul}, 51 | year = {2022}, 52 | publisher = {Zenodo}, 53 | version = 
{1.0.0}, 54 | doi = {10.5281/zenodo.6926483}, 55 | url = {https://doi.org/10.5281/zenodo.6926483} 56 | } 57 | 58 | @article{vanZandt2000, 59 | author = {Van Zandt, T. E.}, 60 | doi = {10.1007/s00585-000-0740-4}, 61 | issn = {14319268}, 62 | journal = {Annales Geophysicae}, 63 | title = {A Brief History of the Development of Wind-Profiling or {{MST}} Radars}, 64 | year = {2000},} 65 | 66 | 67 | @MastersThesis{Ishwardat2017, 68 | author = {Ishwardat, N.K.S.}, 69 | title = {Radar based horizontal wind profile retrieval techniques: DFT applied to scanning Doppler radar measurements}, 70 | school = {Delft University of Technology}, 71 | address = {the Netherlands}, 72 | year = {2017}, 73 | url={http://resolver.tudelft.nl/uuid:a659654b-e76a-4513-a656-ecad761bdbc8} 74 | } 75 | 76 | @article{rew1990, 77 | author={Rew, R. and Davis, G.}, 78 | journal={IEEE Computer Graphics and Applications}, 79 | title={NetCDF: an interface for scientific data access}, 80 | year={1990}, 81 | volume={10}, 82 | number={4}, 83 | pages={76-82}, 84 | doi={10.1109/38.56302}} 85 | 86 | 87 | 88 | @article{diasneto2022b, 89 | author = {Dias Neto, J. and Nuijens, L. and Unal, C. 
and Knoop, S.}, 90 | doi = {10.5194/essd-2022-268}, 91 | journal = {Earth System Science Data Discussions}, 92 | pages = {1-30}, 93 | title = {Combined Wind Lidar and Cloud Radar for Wind Profiling}, 94 | url = {https://essd.copernicus.org/preprints/essd-2022-268/}, 95 | volume = {2022}, 96 | year = {2022}, 97 | bdsk-url-1 = {https://essd.copernicus.org/preprints/essd-2022-268/}, 98 | bdsk-url-2 = {https://doi.org/10.5194/essd-2022-268}} 99 | 100 | 101 | @BOOK{Stull2003, 102 | title = "An Introduction to Boundary Layer Meteorology ", 103 | author = "Stull, Roland B.", 104 | publisher = "Springer Dordrecht", 105 | edition = "1", 106 | year = "2003", 107 | address = "Dordrecht", 108 | isbn = "978-90-277-2769-5", 109 | } 110 | -------------------------------------------------------------------------------- /tests/test_retrieve_wind_fft.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | import xarray as xr 4 | 5 | import lidarwind as lst 6 | 7 | 8 | def get_dummy_six_beam_obj(): 9 | 10 | wind = ( 11 | np.sin(np.deg2rad(np.array([0, 72, 144, 216, 288]) + 0)) 12 | * 1 13 | / np.sin(np.deg2rad(45)) 14 | ) 15 | 16 | elv = np.array([45, 45, 45, 45, 45, 90]) 17 | data_elv = xr.DataArray( 18 | elv, dims=("time"), coords={"time": np.arange(len(elv))} 19 | ) 20 | 21 | azm = np.array([0, 72, 144, 216, 288, 0]) 22 | data_azm = xr.DataArray( 23 | azm, dims=("time"), coords={"time": np.arange(len(elv))} 24 | ) 25 | 26 | data = xr.DataArray( 27 | np.append(wind, 0)[:, np.newaxis], 28 | dims=("time", "range"), 29 | coords={"time": np.arange(len(elv)), "range": [1]}, 30 | ) 31 | 32 | data90 = xr.DataArray( 33 | np.array([1, 1, 1, 1, 1, 1])[:, np.newaxis], 34 | dims=("time", "range90"), 35 | coords={"time": np.arange(len(elv)), "range90": [1]}, 36 | ) 37 | 38 | data_status = xr.DataArray( 39 | np.array([1, 1, 1, 1, 1, 1])[:, np.newaxis], 40 | dims=("time", "range"), 41 | coords={"time": np.arange(len(elv)), 
"range": [1]}, 42 | ) 43 | 44 | test_ds = xr.Dataset( 45 | { 46 | "elevation": data_elv, 47 | "azimuth": data_azm, 48 | "cnr90": data90, 49 | "gate_index90": data90, 50 | "radial_wind_speed90": data90, 51 | "radial_wind_speed_status90": data90, 52 | "relative_beta90": data90, 53 | "cnr": data, 54 | "gate_index": data, 55 | "radial_wind_speed": data, 56 | "radial_wind_speed_status": data_status, 57 | "relative_beta": data, 58 | } 59 | ) 60 | 61 | return lst.GetRestructuredData(test_ds) 62 | 63 | 64 | @pytest.fixture 65 | def get_wind_profiles(): 66 | 67 | return lst.RetriveWindFFT(get_dummy_six_beam_obj()) 68 | 69 | 70 | def test_retrieve_wind_fft_dim_elv(get_wind_profiles): 71 | assert "elv" not in get_wind_profiles.wind_prop.dims 72 | 73 | 74 | def test_retrieve_wind_fft_variabe_realtive_beta(get_wind_profiles): 75 | assert "lidar_relative_beta" in get_wind_profiles.wind_prop 76 | 77 | 78 | def test_retrieve_wind_fft_variable_vertical_wind(get_wind_profiles): 79 | assert "vertical_wind_speed" in get_wind_profiles.wind_prop 80 | 81 | 82 | def test_retrieve_wind_fft_variable_wind_direction(get_wind_profiles): 83 | assert "horizontal_wind_direction" in get_wind_profiles.wind_prop 84 | 85 | 86 | def test_retrieve_wind_fft_variable_wind_speed(get_wind_profiles): 87 | assert "horizontal_wind_speed" in get_wind_profiles.wind_prop 88 | 89 | 90 | def test_retrieve_wind_fft_variable_zonal_wind(get_wind_profiles): 91 | assert "zonal_wind" in get_wind_profiles.wind_prop 92 | 93 | 94 | def test_retrieve_wind_fft_variable_meridional_wind(get_wind_profiles): 95 | assert "meridional_wind" in get_wind_profiles.wind_prop 96 | 97 | 98 | def test_retrieve_wind_fft_relative_beta_val(get_wind_profiles): 99 | assert get_wind_profiles.wind_prop["lidar_relative_beta"] == 1 100 | 101 | 102 | def test_retrieve_wind_fft_vertical_wind_val(get_wind_profiles): 103 | assert get_wind_profiles.wind_prop["vertical_wind_speed"] == 1 104 | 105 | 106 | def 
test_retrieve_wind_fft_global_attrs(get_wind_profiles): 107 | assert len(get_wind_profiles.wind_prop.attrs) > 0 108 | 109 | 110 | def test_retrieve_wind_fft_time_len(get_wind_profiles): 111 | assert len(get_wind_profiles.wind_prop.time) == 5 112 | 113 | 114 | def test_retrieve_wind_fft_time90_len(get_wind_profiles): 115 | assert len(get_wind_profiles.wind_prop.time90) == 1 116 | 117 | 118 | def test_retrieve_wind_fft_range_len(get_wind_profiles): 119 | assert len(get_wind_profiles.wind_prop.range) == 1 120 | -------------------------------------------------------------------------------- /tests/preprocessing/test_preprocessing.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | import xarray as xr 4 | 5 | import lidarwind 6 | from lidarwind import preprocessing 7 | 8 | files = lidarwind.sample_data("wc_6beam") 9 | files = sorted(files) 10 | 11 | 12 | def get_sintetic_6_beam_data(): 13 | 14 | elv = np.array([75, 75, 90, 75, 75, 75, 75]) 15 | data_elv = xr.DataArray( 16 | elv, dims=("time"), coords={"time": np.arange(len(elv))} 17 | ) 18 | 19 | azm = np.array([0, 72, 0, 144, 216, 288, 0]) 20 | data_azm = xr.DataArray( 21 | azm, dims=("time"), coords={"time": np.arange(len(elv))} 22 | ) 23 | 24 | gate = np.array([104, 104, 100, 104, 104, 104, 104]) 25 | data_range = xr.DataArray( 26 | gate, dims=("time"), coords={"time": np.arange(len(elv))} 27 | ) 28 | 29 | data = xr.DataArray( 30 | np.array([1, 1, 1, 1, 1, 1, 1])[:, np.newaxis], 31 | dims=("time", "gate_index"), 32 | coords={ 33 | "time": np.arange(len(elv)), 34 | "gate_index": [1], 35 | }, 36 | ) 37 | 38 | sintetic_data = xr.Dataset( 39 | { 40 | "range": data_range, 41 | "elevation": data_elv, 42 | "azimuth": data_azm, 43 | "cnr": data, 44 | "radial_wind_speed": data, 45 | "radial_wind_speed_status": data, 46 | } 47 | ) 48 | 49 | sintetic_data = sintetic_data.set_coords(["azimuth", "elevation", "range"]) 50 | 51 | return sintetic_data 52 
| 53 | 54 | def test_wc_fixed_files_restruc_dataset_ds_type(): 55 | 56 | with pytest.raises(TypeError): 57 | preprocessing.wc_fixed_files_restruc_dataset(ds=np.array([0, 1])) 58 | 59 | 60 | def test_wc_fixed_files_restruc_dataset(file_name=files[0]): 61 | 62 | ds = lidarwind.open_sweep(file_name) 63 | ds = preprocessing.wc_fixed_files_restruc_dataset(ds) 64 | 65 | assert "time" in ds.dims 66 | assert "gate_index" in ds.dims 67 | 68 | assert "azimuth" in ds.coords 69 | assert "elevation" in ds.coords 70 | assert "gate_index" in ds.coords 71 | 72 | assert len(ds.variables) > 0 73 | 74 | ds.close() 75 | 76 | 77 | def test_wc_fixed_merge_files_empty_file_names(): 78 | 79 | with pytest.raises(FileNotFoundError): 80 | preprocessing.wc_fixed_merge_files(file_names=[]) 81 | 82 | 83 | # @pytest.fixture 84 | def test_wc_fixed_merge_files_ds_structure(file_names=files[0:6]): 85 | 86 | ds = preprocessing.wc_fixed_merge_files(file_names) 87 | 88 | assert len(ds.time) > 1 89 | assert len(ds.gate_index) > 1 90 | 91 | ds.close() 92 | 93 | 94 | # @pytest.mark.skip("skip 1") 95 | def test_wc_slanted_radial_velocity_4_fft_90_deg_elevation(): 96 | 97 | ds = get_sintetic_6_beam_data() 98 | ds = preprocessing.wc_azimuth_elevation_correction(ds) 99 | ds = ds.where(ds["elevation"] == 90, drop=True) 100 | 101 | assert ds["elevation"].size > 0 102 | 103 | with pytest.raises(ValueError): 104 | preprocessing.wc_slanted_radial_velocity_4_fft(ds) 105 | 106 | ds.close() 107 | 108 | 109 | # @pytest.mark.skip("skip 1") 110 | def test_wc_slanted_radial_velocity_4_fft_multiple_elevation(): 111 | 112 | ds = get_sintetic_6_beam_data() 113 | ds = preprocessing.wc_azimuth_elevation_correction(ds) 114 | 115 | with pytest.raises(TypeError): 116 | preprocessing.wc_slanted_radial_velocity_4_fft(ds) 117 | 118 | ds.close() 119 | 120 | 121 | # @pytest.mark.skip("skip 1") 122 | def test_wc_slanted_radial_velocity_4_fft_few_data(): 123 | 124 | ds = get_sintetic_6_beam_data() 125 | ds = 
preprocessing.wc_azimuth_elevation_correction(ds) 126 | unique_elevation = np.unique(ds.elevation)[np.unique(ds.elevation) != 90] 127 | 128 | with pytest.raises(ValueError): 129 | preprocessing.wc_slanted_radial_velocity_4_fft( 130 | ds.where(ds.elevation == unique_elevation, drop=True) 131 | ) 132 | 133 | ds.close() 134 | -------------------------------------------------------------------------------- /tests/test_dataOperator.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | import xarray as xr 4 | 5 | import lidarwind as lst 6 | from lidarwind.data_operator import wc_fixed_preprocessing 7 | 8 | from .data import sample_dataset 9 | 10 | 11 | # 12 | def test_data_operator_DataOperations_data_paths(): 13 | 14 | with pytest.raises(FileNotFoundError): 15 | lst.DataOperations(data_paths=None) 16 | 17 | 18 | # 19 | def test_data_operator_ReadProcessedData_file_list(): 20 | 21 | with pytest.raises(FileNotFoundError): 22 | lst.ReadProcessedData(file_list=None) 23 | 24 | 25 | def test_data_operator_ReadProcessedData_wrong_data(): 26 | 27 | ds = lst.ReadProcessedData( 28 | file_list=lst.sample_data("wc_short_dbs")[0] 29 | ).merge_data() 30 | assert len(ds.coords) > 0 31 | assert len(ds.variables) > 0 32 | 33 | 34 | # 35 | def test_data_operator_GetRestructuredData_data(): 36 | 37 | with pytest.raises(TypeError): 38 | lst.GetRestructuredData(data=xr.DataArray(np.array([0, 1]))) 39 | 40 | 41 | # 42 | def test_data_operator_getResampled_xr_data_array_none(): 43 | 44 | with pytest.raises(TypeError): 45 | lst.GetResampledData(xr_data_array=np.array([0, 1])) 46 | 47 | 48 | # 49 | def test_data_operator_DbsOperations_file_list_none(): 50 | 51 | with pytest.raises(FileNotFoundError): 52 | lst.DbsOperations(file_list=None, var_list=["range"]) 53 | 54 | 55 | # 56 | def test_data_operator_DbsOperations_varList_none(): 57 | 58 | with pytest.raises(KeyError): 59 | lst.DbsOperations(file_list=["file_path"], 
var_list=None) 60 | 61 | 62 | def test_data_operator_DbsOperations_data_wrong_path(): 63 | 64 | with pytest.raises(ValueError): 65 | lst.DbsOperations(file_list=["file_path"], var_list=["str"]) 66 | 67 | 68 | def test_data_operator_DbsOperations_data_wrong_varlist(): 69 | 70 | with pytest.raises(ValueError): 71 | lst.DbsOperations( 72 | file_list=lst.sample_data("wc_short_dbs")[0], var_list=["str"] 73 | ) 74 | 75 | 76 | def test_data_operator_DbsOperations_data_structure(): 77 | 78 | var_list = [ 79 | "azimuth", 80 | "elevation", 81 | "radial_wind_speed", 82 | "radial_wind_speed_status", 83 | "measurement_height", 84 | "cnr", 85 | ] 86 | 87 | # with pytest.raises(ValueError): 88 | ds = lst.DbsOperations( 89 | file_list=lst.sample_data("wc_short_dbs")[0:1], var_list=var_list 90 | ).merged_ds 91 | assert len(ds.coords) > 0 92 | assert len(ds.variables) > 0 93 | 94 | 95 | # @pytest.mark.skip("deactivating to isolate sef fault error") 96 | def test_wc_fixed_preprocessing_vertical(): 97 | """Pre-process a vertical WC dataset""" 98 | ds = sample_dataset("WLS200s-218_2021-05-13_12-00-14_fixed_399_50m.nc") 99 | # Sanity check. Is it indeed a vertical example? 100 | assert (ds.elevation == 90).all() 101 | 102 | ds = wc_fixed_preprocessing(ds) 103 | assert "elevation" in ds.dims 104 | 105 | 106 | # @pytest.mark.skip("deactivating to isolate sef fault error") 107 | def test_wc_fixed_preprocessing_slanted(): 108 | """Pre-process a slanted WC dataset""" 109 | ds = sample_dataset("WLS200s-218_2021-05-13_12-00-08_fixed_381_50m.nc") 110 | # Sanity check. Is it indeed a slanted example? 
111 | assert (ds.elevation != 90).all() 112 | 113 | ds = wc_fixed_preprocessing(ds) 114 | assert "elevation" in ds.dims 115 | 116 | 117 | # @pytest.mark.skip("deactivating to isolate sef fault error") 118 | def test_wc_fixed_preprocessing_without_elevation(): 119 | """It should raise an error with a Dataset missing elevation 120 | 121 | It would be best to raise a specific error in wc_fixed_preprocessing(). 122 | """ 123 | ds = sample_dataset("WLS200s-218_2021-05-13_12-00-08_fixed_381_50m.nc") 124 | ds = ds.drop("elevation") 125 | 126 | with pytest.raises(AssertionError): 127 | ds = wc_fixed_preprocessing(ds) 128 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | .. highlight:: shell 2 | 3 | ============ 4 | Contributing 5 | ============ 6 | 7 | 8 | Contributions are welcome, and they are greatly appreciated! Every little bit 9 | helps, and credit will always be given. 10 | 11 | You can contribute in many ways: 12 | 13 | Types of Contributions 14 | ---------------------- 15 | 16 | Report Bugs 17 | ~~~~~~~~~~~ 18 | 19 | Report bugs at https://github.com/jdiasn/lidarwind/issues. 20 | 21 | If you are reporting a bug, please include: 22 | 23 | * Your operating system name and version. 24 | * Any details about your local setup that might be helpful in troubleshooting. 25 | * Detailed steps to reproduce the bug. 26 | 27 | Fix Bugs 28 | ~~~~~~~~ 29 | 30 | Look through the GitHub issues for bugs. Anything tagged with "bug" and "help 31 | wanted" is open to whoever wants to implement it. 32 | 33 | Implement Features 34 | ~~~~~~~~~~~~~~~~~~ 35 | 36 | Look through the GitHub issues for features. Anything tagged with "enhancement" 37 | and "help wanted" is open to whoever wants to implement it. 
38 | 39 | Write Documentation 40 | ~~~~~~~~~~~~~~~~~~~ 41 | 42 | lidarwind could always use more documentation, whether as part of the 43 | official lidarwind docs, in docstrings, or even on the web in blog posts, 44 | articles, and such. 45 | 46 | Submit Feedback 47 | ~~~~~~~~~~~~~~~ 48 | 49 | The best way to send feedback is to file an issue at https://github.com/jdiasn/lidarwind/issues. 50 | 51 | If you are proposing a feature: 52 | 53 | * Explain in detail how it would work. 54 | * Keep the scope as narrow as possible, to make it easier to implement. 55 | * Remember that this is a volunteer-driven project, and that contributions 56 | are welcome :) 57 | 58 | Get Started! 59 | ------------ 60 | 61 | Ready to contribute? Here's how to set up `lidarwind` for local development. 62 | 63 | 1. Fork the `lidarwind` repo on GitHub. 64 | 2. Clone your fork locally:: 65 | 66 | $ git clone git@github.com:your_name_here/lidarwind.git 67 | 68 | 3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development:: 69 | 70 | $ mkvirtualenv lidarwind 71 | $ cd lidarwind/ 72 | $ python setup.py develop 73 | 74 | 4. Create a branch for local development:: 75 | 76 | $ git checkout -b name-of-your-bugfix-or-feature 77 | 78 | Now you can make your changes locally. 79 | 80 | 5. When you're done making changes, check that your changes pass flake8 and the 81 | tests, including testing other Python versions with tox:: 82 | 83 | $ flake8 lidarwind tests 84 | $ python setup.py test or pytest 85 | $ tox 86 | 87 | To get flake8 and tox, just pip install them into your virtualenv. 88 | 89 | 6. Commit your changes and push your branch to GitHub:: 90 | 91 | $ git add . 92 | $ git commit -m "Your detailed description of your changes." 93 | $ git push origin name-of-your-bugfix-or-feature 94 | 95 | 7. Submit a pull request through the GitHub website.
96 | 97 | Pull Request Guidelines 98 | ----------------------- 99 | 100 | Before you submit a pull request, check that it meets these guidelines: 101 | 102 | 1. The pull request should include tests. 103 | 2. If the pull request adds functionality, the docs should be updated. Put 104 | your new functionality into a function with a docstring, and add the 105 | feature to the list in README.rst. 106 | 3. The pull request should work for Python 3.5, 3.6, 3.7 and 3.8, and for PyPy. Check 107 | https://travis-ci.com/jdiasn/lidarwind/pull_requests 108 | and make sure that the tests pass for all supported Python versions. 109 | 110 | Tips 111 | ---- 112 | 113 | To run a subset of tests:: 114 | 115 | $ pytest tests.test_lidarwind 116 | 117 | 118 | Deploying 119 | --------- 120 | 121 | A reminder for the maintainers on how to deploy. 122 | Make sure all your changes are committed (including an entry in HISTORY.rst). 123 | Then run:: 124 | 125 | $ bump2version patch # possible: major / minor / patch 126 | $ git push 127 | $ git push --tags 128 | 129 | Travis will then deploy to PyPI if tests pass. 
def sintetic_data(step=90, elevation=75) -> xr.Dataset:
    """Synthetic data

    Function to create synthetic data for testing the package.

    A 20 m/s horizontal wind is projected onto slanted beams whose
    azimuths are ``step`` degrees apart, plus one vertical (90 deg)
    beam and one extra slanted beam, each with two range gates.

    Parameters
    ----------
    step : int
        separation in degree between the data points

    elevation : int
        elevation of the observation

    Returns
    -------
    A synthetic dataset

    """

    azimuths = np.arange(0, 360, step)

    # Radial component of a 20 m/s horizontal wind as seen by the
    # slanted beams.  NOTE: previously the elevation was hard-coded to
    # 75 here; it now follows the `elevation` parameter (default 75, so
    # existing callers are unaffected).
    wind = np.cos(np.deg2rad(azimuths)) * 20 * np.cos(np.deg2rad(elevation))

    # Append one vertical beam and one extra slanted beam (was a
    # hard-coded 75; now tied to the `elevation` parameter).
    elv = np.append(np.ones_like(azimuths) * elevation, [90, elevation])

    time = pd.to_datetime("11/11/2017") + pd.to_timedelta(
        np.arange(len(elv)), unit="seconds"
    )

    data_elv = xr.DataArray(elv, dims=("time"), coords={"time": time})

    # Azimuth 0 for the two appended beams.
    azm = np.append(azimuths, [0, 0])
    data_azm = xr.DataArray(azm, dims=("time"), coords={"time": time})

    # Number of beams; generalizes the previously hard-coded 7
    # (which only matched the default step=90).
    n_beams = len(elv)

    data = xr.DataArray(
        (np.ones((2, n_beams)) * np.append(wind, [0, wind[0]])).T,
        dims=("time", "gate_index"),
        coords={"time": time, "gate_index": [1, 2]},
    )

    # Slant range: along-beam distance to gates at 100 m and 150 m height.
    ranges = xr.DataArray(
        np.ones((2, n_beams)).T
        * np.array([100, 150])
        / np.sin(np.deg2rad(elevation)),
        dims=("time", "gate_index"),
        coords={"time": time, "gate_index": [1, 2]},
    )

    data_status = xr.DataArray(
        np.ones_like(data.values),
        dims=("time", "gate_index"),
        coords={"time": time, "gate_index": [1, 2]},
    )

    test_ds = xr.Dataset(
        {
            "elevation": data_elv,
            "azimuth": data_azm,
            "cnr": data,
            "range": ranges,
            "radial_wind_speed": data,
            "radial_wind_speed_status": data_status,
            "relative_beta": data,
        }
    )

    test_ds = test_ds.set_coords(
        {
            "elevation": test_ds.elevation,
            "azimuth": test_ds.azimuth,
            "range": ranges,
        }
    )

    # The vertical beam observes the true (non-slanted) range.
    test_ds["range"].values[test_ds.elevation == 90] = np.array([100, 150])

    return test_ds
"horizontal_wind_direction" in tmp_ds.variables 142 | 143 | 144 | def test_post_wind_cube_get_horizontal_wind_zonal(): 145 | 146 | tmp_ds = postprocessing.get_horizontal_wind(ds_for_test()) 147 | assert "zonal_wind" in tmp_ds.variables 148 | 149 | 150 | def test_post_wind_cube_get_horizontal_wind_meridional(): 151 | 152 | tmp_ds = postprocessing.get_horizontal_wind(ds_for_test()) 153 | assert "meridional_wind" in tmp_ds.variables 154 | -------------------------------------------------------------------------------- /tests/test_fourier_transf_wind_method.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | import xarray as xr 4 | 5 | import lidarwind 6 | import lidarwind as lst 7 | from lidarwind.wind_prop_retrieval import first_harmonic_amplitude 8 | 9 | 10 | def get_dummy_six_beam_obj(): 11 | 12 | wind = ( 13 | np.sin(np.deg2rad(np.array([0, 72, 144, 216, 288]) + 0)) 14 | * 1 15 | / np.sin(np.deg2rad(45)) 16 | ) 17 | 18 | elv = np.array([45, 45, 45, 45, 45, 90]) 19 | data_elv = xr.DataArray( 20 | elv, dims=("time"), coords={"time": np.arange(len(elv))} 21 | ) 22 | 23 | azm = np.array([0, 72, 144, 216, 288, 0]) 24 | data_azm = xr.DataArray( 25 | azm, dims=("time"), coords={"time": np.arange(len(elv))} 26 | ) 27 | 28 | data = xr.DataArray( 29 | np.append(wind, 0)[:, np.newaxis], 30 | dims=("time", "range"), 31 | coords={"time": np.arange(len(elv)), "range": [1]}, 32 | ) 33 | 34 | data90 = xr.DataArray( 35 | np.array([1, 1, 1, 1, 1, 1])[:, np.newaxis], 36 | dims=("time", "range90"), 37 | coords={"time": np.arange(len(elv)), "range90": [1]}, 38 | ) 39 | 40 | data_status = xr.DataArray( 41 | np.array([1, 1, 1, 1, 1, 1])[:, np.newaxis], 42 | dims=("time", "range"), 43 | coords={"time": np.arange(len(elv)), "range": [1]}, 44 | ) 45 | 46 | test_ds = xr.Dataset( 47 | { 48 | "elevation": data_elv, 49 | "azimuth": data_azm, 50 | "cnr90": data90, 51 | "gate_index90": data90, 52 | 
"radial_wind_speed90": data90, 53 | "radial_wind_speed_status90": data90, 54 | "relative_beta90": data90, 55 | "cnr": data, 56 | "gate_index": data, 57 | "radial_wind_speed": data, 58 | "radial_wind_speed_status": data_status, 59 | "relative_beta": data, 60 | } 61 | ) 62 | 63 | return lst.GetRestructuredData(test_ds) 64 | 65 | 66 | @pytest.fixture 67 | def test_get_fft_obj(): 68 | 69 | test_ds = get_dummy_six_beam_obj() 70 | fft_obj = lst.FourierTransfWindMethod(test_ds.data_transf) 71 | 72 | return fft_obj 73 | 74 | 75 | def test_fourier_transf_wind_method_pase(test_get_fft_obj): 76 | 77 | assert np.all(np.round(test_get_fft_obj.phase.values, 2) == 90) 78 | 79 | 80 | def test_fourier_transf_wind_method_wind_dir(test_get_fft_obj): 81 | 82 | assert np.all(np.round(test_get_fft_obj.wind_dir.values, 2) == 270) 83 | 84 | 85 | def test_fourier_transf_wind_method_rad_wind_speed(test_get_fft_obj): 86 | print(test_get_fft_obj.rad_wind_speed.values) 87 | assert np.all( 88 | np.round(test_get_fft_obj.rad_wind_speed.values, 2) 89 | == np.round(1 / np.sin(np.deg2rad(45)), 2) 90 | ) 91 | 92 | 93 | def test_fourier_transf_wind_method_hor_wind_speed(test_get_fft_obj): 94 | 95 | assert np.all(np.round(test_get_fft_obj.hor_wind_speed.values, 2) == 2) 96 | 97 | 98 | def test_fourier_transf_wind_method_comp_u(test_get_fft_obj): 99 | 100 | assert np.all(np.round(test_get_fft_obj.comp_u.values, 2) == 2) 101 | 102 | 103 | def test_fourier_transf_wind_method_comp_v(test_get_fft_obj): 104 | 105 | assert np.all(np.round(test_get_fft_obj.comp_v.values, 2) == 0) 106 | 107 | 108 | def test_fourier_transf_wind_method_dim_tim(test_get_fft_obj): 109 | 110 | assert "time" in test_get_fft_obj.wind_prop().dims 111 | 112 | 113 | def test_fourier_transf_wind_method_dim_range(test_get_fft_obj): 114 | 115 | assert "range" in test_get_fft_obj.wind_prop().dims 116 | 117 | 118 | def test_fourier_transf_wind_method_dim_elv(test_get_fft_obj): 119 | 120 | assert "elv" in test_get_fft_obj.wind_prop().dims 121 
class LoadAttributes:

    """Level 1 Attribute generator

    It defines and writes the attributes from the Level 1

    Parameters
    ----------
    data : xarray.Dataset
        A xarray dataset containing the Level 1 data
        generated by the lidarwind

    Returns
    -------
    data : xarray.Dataset

        A dataset with corrected attributes

    """

    def __init__(self, data):

        self.data = data
        self.write_global_attrs()
        self.variables_attrs()
        self.write_coords_attrs()
        self.write_variables_attrs()

    def write_global_attrs(self):

        """Global attribute writer

        It loads and writes the global attributes from the dataset.
        The global attributes are defined in the configuration file.
        See lidarwind_config.generate_conf documentation for information
        about generating the configuration file.

        """

        config_info = Configurations(lidarwind=None).load_conf_file()

        self.data.attrs = {
            "Conventions": "Cf/Radial 2.0",
            "title": "Wind properties",
            "references": config_info.references,
            "institution": config_info.institution,
            "instrument_name": config_info.instrument,
            "comments": config_info.comments,
            "site_name": config_info.site,
            "contact_person": config_info.contact,
            "email": config_info.email,
        }

        return self

    def variables_attrs(self):

        """Variable attributes definitions

        All variables and coordinates attributes are defined here.
        Spelling of the user-facing metadata strings has been corrected
        ("retrieved", "horizontal").

        """

        attrs_dic = {}

        attrs_dic["range"] = {
            "standard_name": "range",
            "units": "m",
            "comments": "Distance between the instrument "
            "and the center of each range gate",
        }

        attrs_dic["time"] = {
            "standard_name": "time",
            "reference": "seconds since 1970-01-01 00:00:00",
            "comments": "time of the horizontal observations",
        }

        attrs_dic["time90"] = {
            "standard_name": "time90",
            "reference": "seconds since 1970-01-01 00:00:00",
            "comments": "time of the vertical observations",
        }

        attrs_dic["horizontal_wind_speed"] = {
            "standard_name": "wind_speed",
            "units": "m/s",
            "comments": "horizontal wind speed retrieved using the FFT method",
        }

        attrs_dic["horizontal_wind_direction"] = {
            "standard_name": "wind_direction",
            "units": "degrees",
            "comments": "horizontal wind direction retrieved "
            "using the FFT method with respect to true north",
            "info": "0=wind coming from the north, "
            "90=east, 180=south, 270=west",
        }

        attrs_dic["zonal_wind"] = {
            "standard_name": "zonal_wind",
            "units": "m/s",
            "comments": "zonal wind retrieved using the FFT method",
        }

        attrs_dic["meridional_wind"] = {
            "standard_name": "meridional_wind",
            "units": "m/s",
            "comments": "meridional wind retrieved using the FFT method",
        }

        attrs_dic["vertical_wind_speed"] = {
            "standard_name": "vertical_wind_speed",
            "units": "m/s",
            "comments": "observed vertical wind speed "
            "(negative towards the ground)",
        }

        attrs_dic["lidar_relative_beta"] = {
            "standard_name": "volume_attenuated_backwards"
            "_scattering_function_in_air",
            "units": "m-1 sr-1",
            "comments": "Attenuated relative backscatter "
            "coefficient from the vertical beam",
        }

        self.attrs_dic = attrs_dic

        return self

    def write_coords_attrs(self):

        """Coordinate attribute writer

        It loops over the dataset coordinates and writes their attributes.
        Coordinates without a definition in attrs_dic are reported and
        left untouched.

        """

        for key in self.data.coords:

            try:
                self.data[key].attrs = self.attrs_dic[key]

            except KeyError:
                print(f"coord not found: {key}")

        return self

    def write_variables_attrs(self):

        """Variable attribute writer

        It loops over the dataset variables and writes their attributes.
        Variables without a definition in attrs_dic are reported and
        left untouched.

        """

        for key in self.data.keys():

            try:
                self.data[key].attrs = self.attrs_dic[key]

            except KeyError:
                print(f"variable not found: {key}")

        return self
class Configurations:
    """Global attributes definition

    This class defines all global attributes
    that will be written on the dataset

    Parameters
    ----------
    lidarwind : object
        an instance of the lidarwind package

    """

    def __init__(self, lidarwind=None):

        self.load_version(lidarwind)
        self.load_reference()
        self.load_institution()
        self.load_instrument()
        self.load_site()
        self.load_contact()
        self.load_email()
        self.load_comments()

    def load_version(self, lidarwind):
        """
        It identifies the lidarwind version
        and writes it to the configuration file

        Parameters
        ----------
        lidarwind : object
            an instance of the lidarwind package

        """

        if lidarwind is None:
            self.lidarwind_version = "temporary config file"
        else:
            self.lidarwind_version = lidarwind.__version__

        return self

    def load_institution(self, institution="institution name"):
        """
        It defines the institution affiliation name

        Parameters
        ----------
        institution : str
            institution name

        """

        self.institution = institution

        return self

    def load_instrument(self, instrument="instrument name"):
        """
        It defines the instrument name

        Parameters
        ----------
        instrument : str
            name of the instrument used during the experiment

        """

        self.instrument = instrument

        return self

    def load_site(self, site="site name"):
        """
        It defines the name of the experimental site

        Parameters
        ----------
        site : str
            name of the experimental site

        """

        self.site = site

        return self

    def load_contact(self, contact="contact person"):
        """
        It defines the author's name

        Parameters
        ----------
        contact : str
            name of the contact person

        """

        self.contact = contact

        return self

    def load_email(self, email="contact email"):
        """
        It defines the contacting email

        Parameters
        ----------
        email : str
            contact email

        """

        self.email = email

        return self

    def load_reference(self, reference="Generated by lidarwind version: {0}"):
        """
        It loads the lidarwind's version used for
        processing the data

        Parameters
        ----------
        reference : str
            lidarwind version used to process the data

        """

        self.references = reference.format(self.lidarwind_version)

        return self

    def load_comments(self, comments="General comments"):
        """
        It defines additional comments

        Parameters
        ----------
        comments : str
            additional comments

        """

        self.comments = comments

        return self

    def generate_conf(self):
        """
        It writes and saves all defined global attributes
        to config.json in the current directory.

        """

        config_dic = {
            "references": self.references,
            "institution": self.institution,
            "instrument_name": self.instrument,
            "site_name": self.site,
            "comments": self.comments,
            "contact_person": self.contact,
            "email": self.email,
        }

        # Context manager guarantees the handle is closed even if the
        # write fails (the previous open/write/close leaked on error).
        with open("config.json", "w") as config_file:
            json.dump(config_dic, config_file)

    def load_conf_file(self, file_path="config.json"):
        """
        It loads the pre-defined global attributes
        from the config.json, if it exists; otherwise a temporary
        configuration file is generated and loaded.

        Parameters
        ----------
        file_path : str
            the path to the configuration file (config.json)

        """

        try:
            # `with` closes the handle; json.load(open(...)) leaked it.
            with open(file_path) as config_file:
                config_dic = json.load(config_file)

        except FileNotFoundError:

            print("You do not have a config file yet")
            print("a temporary config file was generated")
            print("See the documentation for generating it")
            self.generate_conf()
            with open(file_path) as config_file:
                config_dic = json.load(config_file)

        self.load_reference(config_dic["references"])
        self.load_institution(config_dic["institution"])
        self.load_instrument(config_dic["instrument_name"])
        self.load_comments(config_dic["comments"])
        self.load_site(config_dic["site_name"])
        self.load_contact(config_dic["contact_person"])
        self.load_email(config_dic["email"])

        return self
@pytest.fixture
def get_dummy_six_beam_data():
    """Fixture: dummy six-beam dataset (five slanted beams at 45 deg
    elevation, azimuths 72 deg apart, plus one vertical beam), each
    beam holding a single range gate."""

    slant_azimuths = np.array([0, 72, 144, 216, 288])

    # Radial velocities: unit-amplitude sinusoid over azimuth scaled
    # by 1/sin(elevation) for the 45-deg beams.
    radial_wind = np.sin(np.deg2rad(slant_azimuths)) / np.sin(np.deg2rad(45))

    elevations = np.array([45, 45, 45, 45, 45, 90])
    times = np.arange(len(elevations))

    data_elv = xr.DataArray(
        elevations, dims=("time"), coords={"time": times}
    )

    data_azm = xr.DataArray(
        np.append(slant_azimuths, 0),
        dims=("time"),
        coords={"time": times},
    )

    slanted_obs = xr.DataArray(
        np.append(radial_wind, 0)[:, np.newaxis],
        dims=("time", "range"),
        coords={"time": times, "range": [1]},
    )

    # Constant unit values for the vertical-beam variables and the
    # slanted-beam status flags.
    ones = np.ones(len(elevations), dtype=int)[:, np.newaxis]

    vertical_obs = xr.DataArray(
        ones,
        dims=("time", "range90"),
        coords={"time": times, "range90": [1]},
    )

    status_obs = xr.DataArray(
        ones,
        dims=("time", "range"),
        coords={"time": times, "range": [1]},
    )

    return xr.Dataset(
        {
            "elevation": data_elv,
            "azimuth": data_azm,
            "cnr90": vertical_obs,
            "gate_index90": vertical_obs,
            "radial_wind_speed90": vertical_obs,
            "radial_wind_speed_status90": vertical_obs,
            "relative_beta90": vertical_obs,
            "cnr": slanted_obs,
            "gate_index": slanted_obs,
            "radial_wind_speed": slanted_obs,
            "radial_wind_speed_status": status_obs,
            "relative_beta": slanted_obs,
        }
    )
137 | get_dummy_six_beam_data, 138 | ): 139 | 140 | broken_ds = get_dummy_six_beam_data.copy() 141 | del broken_ds["cnr90"] 142 | 143 | with pytest.raises(AttributeError): 144 | lst.GetRestructuredData(broken_ds, snr=1) 145 | 146 | 147 | def test_get_resctructured_data_input_ds_missing_radial_wind_speed_status90( 148 | get_dummy_six_beam_data, 149 | ): 150 | 151 | broken_ds = get_dummy_six_beam_data.copy() 152 | del broken_ds["radial_wind_speed_status90"] 153 | 154 | with pytest.raises(AttributeError): 155 | lst.GetRestructuredData(broken_ds) 156 | 157 | 158 | def test_get_resctructured_data_input_ds_missing_radial_wind_speed_status( 159 | get_dummy_six_beam_data, 160 | ): 161 | 162 | broken_ds = get_dummy_six_beam_data.copy() 163 | del broken_ds["radial_wind_speed_status"] 164 | 165 | with pytest.raises(AttributeError): 166 | lst.GetRestructuredData(broken_ds) 167 | 168 | 169 | def test_get_resctructured_data_input_ds_missing_elevation( 170 | get_dummy_six_beam_data, 171 | ): 172 | 173 | broken_ds = get_dummy_six_beam_data.copy() 174 | del broken_ds["elevation"] 175 | 176 | with pytest.raises(AttributeError): 177 | lst.GetRestructuredData(broken_ds) 178 | 179 | 180 | def test_get_resctructured_data_input_ds_missing_azimuth( 181 | get_dummy_six_beam_data, 182 | ): 183 | 184 | broken_ds = get_dummy_six_beam_data.copy() 185 | del broken_ds["azimuth"] 186 | 187 | with pytest.raises(AttributeError): 188 | lst.GetRestructuredData(broken_ds) 189 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # lidarwind documentation build configuration file, created by 4 | # sphinx-quickstart on Fri Jun 9 13:47:02 2017. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 
# Sphinx configuration for the lidarwind documentation.
#
# Make the package importable so autodoc/autosummary can introspect it.
import os
import sys
sys.path.insert(0, os.path.abspath('..'))

import lidarwind

# -- General configuration ----------------------------------------------

extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.viewcode',
    'sphinx.ext.autosummary',
    'sphinx.ext.intersphinx',
    'sphinx.ext.imgmath',
    'sphinx.ext.napoleon',
    'nbsphinx',
    "sphinx_gallery.load_style",
]

# Generate stub pages for autosummary entries; keep type hints out of
# the rendered signatures.
autosummary_generate = True
autodoc_typehints = "none"

# Docstrings follow the NumPy convention.
napoleon_google_docstring = False
napoleon_numpy_docstring = True
napoleon_preprocess_types = True

templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'

project = 'lidarwind'
copyright = "2022, Jose Dias"
author = "Jose Dias"

# Short and full version strings both track the package version.
version = lidarwind.__version__
release = lidarwind.__version__

language = 'en'

# Ignored when collecting source files (also affects html_static_path
# and html_extra_path).
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', '.ipynb*']

pygments_style = 'sphinx'
todo_include_todos = False

# -- HTML output --------------------------------------------------------

html_theme = 'sphinx_rtd_theme'
html_static_path = ['_static']

# -- HTMLHelp output ----------------------------------------------------

htmlhelp_basename = 'lidarwinddoc'

# -- LaTeX output -------------------------------------------------------

# All LaTeX options keep their defaults; no LaTeX targets are built.
latex_elements = {}
latex_documents = []

# -- Manual page output -------------------------------------------------

man_pages = [
    (master_doc,
     'lidarwind',
     'lidarwind Documentation',
     [author],
     1)
]
def sintetic_data(step=90, elevation=75) -> xr.Dataset:
    """Synthetic data

    Function to create synthetic data for testing the package.

    A 20 m/s horizontal wind is projected onto slanted beams whose
    azimuths are ``step`` degrees apart, plus one vertical (90 deg)
    beam and one extra slanted beam, each with two range gates.

    Parameters
    ----------
    step : int
        separation in degree between the data points

    elevation : int
        elevation of the observation

    Returns
    -------
    A synthetic dataset

    """

    azimuths = np.arange(0, 360, step)

    # Radial component of a 20 m/s horizontal wind as seen by the
    # slanted beams.  NOTE: previously the elevation was hard-coded to
    # 75 here; it now follows the `elevation` parameter (default 75, so
    # existing callers are unaffected).
    wind = np.cos(np.deg2rad(azimuths)) * 20 * np.cos(np.deg2rad(elevation))

    # Append one vertical beam and one extra slanted beam (was a
    # hard-coded 75; now tied to the `elevation` parameter).
    elv = np.append(np.ones_like(azimuths) * elevation, [90, elevation])

    time = pd.to_datetime("11/11/2017") + pd.to_timedelta(
        np.arange(len(elv)), unit="seconds"
    )

    data_elv = xr.DataArray(elv, dims=("time"), coords={"time": time})

    # Azimuth 0 for the two appended beams.
    azm = np.append(azimuths, [0, 0])
    data_azm = xr.DataArray(azm, dims=("time"), coords={"time": time})

    # Number of beams; generalizes the previously hard-coded 7
    # (which only matched the default step=90).
    n_beams = len(elv)

    data = xr.DataArray(
        (np.ones((2, n_beams)) * np.append(wind, [0, wind[0]])).T,
        dims=("time", "gate_index"),
        coords={"time": time, "gate_index": [1, 2]},
    )

    # Slant range: along-beam distance to gates at 100 m and 150 m height.
    ranges = xr.DataArray(
        np.ones((2, n_beams)).T
        * np.array([100, 150])
        / np.sin(np.deg2rad(elevation)),
        dims=("time", "gate_index"),
        coords={"time": time, "gate_index": [1, 2]},
    )

    data_status = xr.DataArray(
        np.ones_like(data.values),
        dims=("time", "gate_index"),
        coords={"time": time, "gate_index": [1, 2]},
    )

    test_ds = xr.Dataset(
        {
            "elevation": data_elv,
            "azimuth": data_azm,
            "cnr": data,
            "range": ranges,
            "radial_wind_speed": data,
            "radial_wind_speed_status": data_status,
            "relative_beta": data,
        }
    )

    test_ds = test_ds.set_coords(
        {
            "elevation": test_ds.elevation,
            "azimuth": test_ds.azimuth,
            "range": ranges,
        }
    )

    # The vertical beam observes the true (non-slanted) range.
    test_ds["range"].values[test_ds.elevation == 90] = np.array([100, 150])

    return test_ds
128 | ) 129 | assert np.isclose(fft_wind_retrieval.wind_speed(tmp_amp).values[1, 1], 20) 130 | 131 | 132 | def test_first_harmonic_amplitude_meridional_wind(): 133 | 134 | tmp_amp = fft_wind_retrieval.first_harmonic_amplitude( 135 | get_radial_velocities_4_test().radial_wind_speed 136 | ) 137 | assert np.isclose( 138 | fft_wind_retrieval.meridional_wind(tmp_amp).values[1, 1], 20 139 | ) 140 | 141 | 142 | def test_first_harmonic_amplitude_zonal_wind(): 143 | 144 | tmp_amp = fft_wind_retrieval.first_harmonic_amplitude( 145 | get_radial_velocities_4_test().radial_wind_speed 146 | ) 147 | assert np.isclose(fft_wind_retrieval.zonal_wind(tmp_amp).values[1, 1], 0) 148 | 149 | 150 | def test_first_harmonic_amplitude_projected_wind(): 151 | 152 | tmp_amp = fft_wind_retrieval.first_harmonic_amplitude( 153 | get_radial_velocities_4_test().radial_wind_speed 154 | ) 155 | assert np.isclose( 156 | fft_wind_retrieval.wind_projected_to_azimuth(tmp_amp, 0).values[1, 1], 157 | -20, 158 | ) 159 | 160 | 161 | def test_first_harmonic_amplitude_horizontal_wind_speed_var(): 162 | 163 | tmp_ds = fft_wind_retrieval.get_wind_properties( 164 | get_radial_velocities_4_test().radial_wind_speed 165 | ) 166 | assert "horizontal_wind_speed" in tmp_ds 167 | 168 | 169 | def test_first_harmonic_amplitude_horizontal_wind_dir_var(): 170 | 171 | tmp_ds = fft_wind_retrieval.get_wind_properties( 172 | get_radial_velocities_4_test().radial_wind_speed 173 | ) 174 | assert "horizontal_wind_direction" in tmp_ds 175 | 176 | 177 | def test_first_harmonic_amplitude_meridional_wind_dir_var(): 178 | 179 | tmp_ds = fft_wind_retrieval.get_wind_properties( 180 | get_radial_velocities_4_test().radial_wind_speed 181 | ) 182 | assert "meridional_wind" in tmp_ds 183 | 184 | 185 | def test_first_harmonic_amplitude_zonal_wind_dir_var(): 186 | 187 | tmp_ds = fft_wind_retrieval.get_wind_properties( 188 | get_radial_velocities_4_test().radial_wind_speed 189 | ) 190 | assert "zonal_wind" in tmp_ds 191 | 
"""Input-validation tests for lidarwind.preprocessing.rpg_radar.

Every function in rpg_radar is expected to reject invalid input early:
a TypeError for arguments that are not an xarray.Dataset, and an
AssertionError for datasets that are missing a required variable,
coordinate or dimension. Each test below exercises exactly one of
those guards with a minimal counterexample.
"""

import numpy as np
import pytest
import xarray as xr

from lidarwind.preprocessing import rpg_radar


# --- time_decoding: needs a Dataset with Time and Timems --------------


def test_time_decoding_ds_type():

    with pytest.raises(TypeError):
        rpg_radar.time_decoding(ds=np.array([1, 2]))


def test_time_decoding_time():

    with pytest.raises(AssertionError):
        rpg_radar.time_decoding(ds=xr.Dataset({"Timems": [0]}))


def test_time_decoding_timems():

    with pytest.raises(AssertionError):
        rpg_radar.time_decoding(ds=xr.Dataset({"Time": [0]}))


# --- selecting_variables: needs Time, Azm and Elv ---------------------


def test_selecting_variables_ds_type():

    with pytest.raises(TypeError):
        rpg_radar.selecting_variables(ds=np.array([1, 2]))


def test_selecting_variables_time():

    with pytest.raises(AssertionError):
        rpg_radar.selecting_variables(ds=xr.Dataset({"Azm": [0], "Elv": [0]}))


def test_selecting_variables_azm():

    with pytest.raises(AssertionError):
        rpg_radar.selecting_variables(ds=xr.Dataset({"Time": [0], "Elv": [0]}))


def test_selecting_variables_elv():

    with pytest.raises(AssertionError):
        rpg_radar.selecting_variables(ds=xr.Dataset({"Time": [0], "Azm": [0]}))


# --- azimuth_offset: needs Azm ----------------------------------------


def test_azimuth_offset_ds():

    with pytest.raises(TypeError):
        rpg_radar.azimuth_offset(ds=np.array([1, 2]))


def test_azimuth_offset_azm():

    with pytest.raises(AssertionError):
        rpg_radar.azimuth_offset(ds=xr.Dataset({"Time": [0], "Elv": [0]}))


# --- height_estimation: needs range_layers and elevation --------------


def test_height_estimation_ds():

    with pytest.raises(TypeError):
        rpg_radar.height_estimation(ds=np.array([1, 2]))


def test_height_estimation_range_layers():

    with pytest.raises(AssertionError):
        rpg_radar.height_estimation(ds=xr.Dataset({"elevation": [0]}))


def test_height_estimation_elevation():

    with pytest.raises(AssertionError):
        rpg_radar.height_estimation(ds=xr.Dataset({"range_layers": [0]}))


# --- fill_nan_values: needs time --------------------------------------


def test_fill_nan_values_ds():

    with pytest.raises(TypeError):
        rpg_radar.fill_nan_values(ds=np.array([1, 2]))


def test_fill_nan_values_time():

    with pytest.raises(AssertionError):
        rpg_radar.fill_nan_values(ds=xr.Dataset({"range_layers": [0]}))


# --- update_structure: needs time, azimuth, elevation, mean_time and
# --- nan_percentual; each test drops exactly one of them --------------


def test_update_structure_ds():

    with pytest.raises(TypeError):
        rpg_radar.update_structure(ds=np.array([1, 2]))


def test_update_structure_time():

    with pytest.raises(AssertionError):
        rpg_radar.update_structure(
            ds=xr.Dataset(
                {
                    "azimuth": [0],
                    "elevation": [0],
                    "mean_time": [0],
                    "nan_percentual": [0],
                }
            )
        )


def test_update_structure_azimuth():

    with pytest.raises(AssertionError):
        rpg_radar.update_structure(
            ds=xr.Dataset(
                {
                    "time": [0],
                    "elevation": [0],
                    "mean_time": [0],
                    "nan_percentual": [0],
                }
            )
        )


def test_update_structure_elevation():

    with pytest.raises(AssertionError):
        rpg_radar.update_structure(
            ds=xr.Dataset(
                {
                    "time": [0],
                    "azimuth": [0],
                    "mean_time": [0],
                    "nan_percentual": [0],
                }
            )
        )


def test_update_structure_nan_percentual():

    with pytest.raises(AssertionError):
        rpg_radar.update_structure(
            ds=xr.Dataset(
                {
                    "time": [0],
                    "azimuth": [0],
                    "elevation": [0],
                    "mean_time": [0],
                }
            )
        )


# --- get_chirp_information: needs the chirp dimension -----------------


def test_get_chirp_information_ds():

    with pytest.raises(TypeError):
        rpg_radar.get_chirp_information(ds=np.array([1, 2]))


def test_get_chirp_information_chirp_dim():

    with pytest.raises(AssertionError):
        rpg_radar.get_chirp_information(ds=xr.Dataset({"range": [0]}))


# --- update_range: needs range ----------------------------------------


def test_update_range_ds():

    with pytest.raises(TypeError):
        rpg_radar.update_range(ds=np.array([1, 2]))


def test_update_range_range():

    with pytest.raises(AssertionError):
        rpg_radar.update_range(ds=xr.Dataset({"time": [0]}))


# --- count_nan_values: needs time and MeanVel -------------------------


def test_count_nan_values_ds():

    with pytest.raises(TypeError):
        rpg_radar.count_nan_values(ds=np.array([0]))


def test_count_nan_values_time():

    with pytest.raises(AssertionError):
        rpg_radar.count_nan_values(ds=xr.Dataset({"MeanVel": [0]}))


def test_count_nan_values_MeanVel():

    with pytest.raises(AssertionError):
        rpg_radar.count_nan_values(ds=xr.Dataset({"time": [0]}))


# --- azimuth_regular_grid_interp: needs the azimuth coordinate --------


def test_azimuth_regular_grid_interp_ds():

    with pytest.raises(TypeError):
        rpg_radar.azimuth_regular_grid_interp(ds=np.array([0]))


def test_azimuth_regular_grid_interp_azm_coord():

    with pytest.raises(AssertionError):
        rpg_radar.azimuth_regular_grid_interp(ds=xr.Dataset({"time": [0]}))


# --- nan_leftover_to_mean: needs azimuth and MeanVel ------------------


def test_nan_leftover_to_mean_ds():

    with pytest.raises(TypeError):
        rpg_radar.nan_leftover_to_mean(ds=np.array([1]))


def test_nan_leftover_to_mean_azimuth():

    with pytest.raises(AssertionError):
        rpg_radar.nan_leftover_to_mean(ds=xr.Dataset({"MeanVel": [0]}))


def test_nan_leftover_to_mean_meanvel():

    with pytest.raises(AssertionError):
        rpg_radar.nan_leftover_to_mean(ds=xr.Dataset({"azimuth": [0]}))


# --- rpg_slanted_radial_velocity_4_fft: Dataset input only ------------


def test_rpg_slanted_radial_velocity_4_fft_ds():

    with pytest.raises(TypeError):
        rpg_radar.rpg_slanted_radial_velocity_4_fft(ds=np.array([0]))
"""Module for estimating turbulence

Implements the 6-beam retrieval of the Reynolds stress tensor
components (Sathe et al. 2015).
"""
import logging

import numpy as np
import xarray as xr

from .data_operator import GetRestructuredData

module_logger = logging.getLogger("lidarwind.wind_prop_retrieval_6_beam")
module_logger.debug("loading wind_prop_retrieval_6_beam")


class SixBeamMethod:

    """6 beam method

    Implementation of the 6 beam method to retrieve the Reynolds
    stress tensor components, based on the 6 beam method developed
    by Sathe et al. 2015.
    See: https://doi.org/10.5194/amt-8-729-2015

    Parameters
    ----------
    data : object
        an instance of the object generated by
        lidarwind.GetRestructuredData()

    freq : int
        number of slanted profiles used to calculate
        the variance

    freq90 : int
        number of zenith (90 deg) profiles used to calculate
        the variance

    Returns
    -------
    var_comp_ds : xarray.DataSet
        a dataset of the Reynolds stress tensor
        matrix elements

    """

    def __init__(self, data, freq=10, freq90=10):

        self.logger = logging.getLogger(
            "lidarwind.wind_prop_retrieval_6_beam.SixBeamMethod"
        )
        self.logger.info("creating an instance of SixBeamMethod")

        # The retrieval needs the restructured observations (slanted
        # and zenith beams already separated); reject anything else.
        if not isinstance(data, GetRestructuredData):
            self.logger.error(
                "wrong data type: expecting a instance of GetRestructuredData"
            )
            raise TypeError

        # Beam geometry of the slanted observations.
        self.elv = data.data_transf.elv.values
        self.azm = data.data_transf.azm.values

        # Pipeline: build M, invert it, compute the radial-velocity
        # variances (S), solve SIGMA = M^-1 x S and pack the result.
        self.get_m_matrix()
        self.get_m_matrix_inv()
        self.radial_variances = {}
        self.calc_variances(data, freq, freq90)

        self.get_s_matrix()
        self.get_sigma()
        self.get_variance_ds()

    def get_m_matrix(self):

        """Populate the coefficient matrix (M).

        Each element of M is one of the coefficients from
        equation 3 from Newman et al. 2016. The lines 0 to 4
        in M are the radial velocities coefficients from the
        non 90 deg elevation and different azimuths. The last
        line in M has the coefficients from the radial velocity
        at 90 deg elevation.
        See: https://doi.org/10.5194/amt-9-1993-2016


        M x SIGMA = S
        """

        # One slanted beam per azimuth at the common elevation, plus a
        # single zenith beam (90 deg, azimuth 0).
        phis = np.append(np.ones_like(self.azm) * self.elv, np.array([90]))
        phis_rad = np.deg2rad(phis)

        thetas = np.append(self.azm, np.array([0]))
        thetas_rad = np.deg2rad(thetas)

        m_matrix = np.ones((len(phis), len(thetas))) * np.nan

        for i, theta in enumerate(thetas_rad):

            phi = phis_rad[i]

            # Coefficients multiplying the six stress components
            # (u'u', v'v', w'w', u'v', u'w', v'w) for beam i.
            ci1 = np.cos(phi) ** 2 * np.sin(theta) ** 2
            ci2 = np.cos(phi) ** 2 * np.cos(theta) ** 2

            ci3 = np.sin(phi) ** 2
            ci4 = np.cos(phi) ** 2 * np.cos(theta) * np.sin(theta)

            ci5 = np.cos(phi) * np.sin(phi) * np.sin(theta)
            ci6 = np.cos(phi) * np.sin(phi) * np.cos(theta)

            # The cross-term coefficients carry a factor 2
            # (cf. equation 3 of Newman et al. 2016).
            m_matrix_line = np.array(
                [ci1, ci2, ci3, ci4 * 2, ci5 * 2, ci6 * 2]
            )

            m_matrix[i] = m_matrix_line

        self.m_matrix = m_matrix

        return self

    def get_m_matrix_inv(self):

        """
        This method calculates the inverse matrix of M.
        M has to be square (6 beams x 6 stress components) and
        non-singular for the inversion to succeed.
        """

        self.m_matrix_inv = np.linalg.inv(self.m_matrix)

        return self

    # new approach to calculate the variances ##############

    def calc_variances(self, data, freq, freq90):

        """Compute the slanted and zenith radial-velocity variances.

        Parameters
        ----------
        data : GetRestructuredData
            restructured observations (slanted and zenith beams)

        freq : int
            rolling-window length for the slanted variance

        freq90 : int
            rolling-window length for the zenith variance

        """

        # The slanted observations are matched (nearest neighbour) to
        # the zenith time stamps so both variances share a time axis.
        interp_data_transf = data.data_transf.interp(
            time=data.data_transf_90.time, method="nearest"
        )
        self.get_variance(interp_data_transf, freq=freq)
        # NOTE(review): variance is invariant under a sign flip, so the
        # -1 coefficient looks redundant — confirm the sign convention.
        self.get_variance(
            -1 * data.data_transf_90, freq=freq90, name="rVariance90"
        )  # think about the -1 coefficient

        return self

    def get_variance(self, data, freq=10, name="rVariance"):

        """
        This method calculates the variance from the
        observed radial velocities within a time window.
        The default size of this window is 10 minutes.

        Parameters
        ----------
        data : xarray.DataArray
            a dataarray of the slanted azimuthal observations

        freq : int
            number of profiles used to calculate
            the variance

        name : str
            key under which the variance is stored in
            self.radial_variances

        """

        # min_periods allows windows with up to 70% missing profiles.
        variance = data.rolling(
            time=freq, center=True, min_periods=int(freq * 0.3)
        ).var()

        self.radial_variances[name] = variance

        return self

    # new approach to calculate the variances ##############

    def get_s_matrix(self):

        """
        This method fills the observation variance matrix (S) by
        stacking the slanted variances with the zenith variance
        (broadcast over the azimuth/beam axes).
        """

        s_matrix = np.dstack(
            (
                self.radial_variances["rVariance"].values,
                self.radial_variances["rVariance90"].values[
                    :, :, np.newaxis, np.newaxis
                ],
            )
        )

        self.s_matrix = s_matrix

    def get_sigma(self):

        """
        This method calculates the components of the
        Reynolds stress tensor (SIGMA).

        SIGMA = M^-1 x S
        """

        self.sigma_matrix = np.matmul(self.m_matrix_inv, self.s_matrix)

        return self

    def get_variance_ds(self):

        """
        This method converts the SIGMA into a xarray dataset with one
        variable per stress component (var_u ... var_vw).
        """

        var_comp_ds = xr.Dataset()
        var_comp_name = ["u", "v", "w", "uv", "uw", "vw"]

        for i, var_comp in enumerate(var_comp_name):

            tmp_data = xr.DataArray(
                self.sigma_matrix[:, :, i, 0],
                dims=("time", "range"),
                coords={
                    "time": self.radial_variances["rVariance90"].time,
                    "range": self.radial_variances["rVariance"].range,
                },
                name=f"var_{var_comp}",
            )

            var_comp_ds = xr.merge([var_comp_ds, tmp_data])

        self.var_comp_ds = var_comp_ds
Due to the urgent need to replace 36 | fossil fuel-based power plants with systems based on renewable energy sources, wind has become one of 37 | the primary energy sources. Understanding wind variability is imperative for improving numerical weather 38 | prediction models and for planning wind farms. However, the number of currently available wind observations 39 | is still limited, and most of the available observations are from radiosondes or meteorological towers (met-towers). 40 | Those observational systems are restricted to a particular time (radiosondes) or height (met-towers). 41 | Wind lidar has been used to minimise those limitations, allowing monitoring of wind constantly from near 42 | the surface up to, for example, 5 km. 43 | 44 | Several wind lidar models are commercially available, but their operating software is often proprietary. 45 | Although different scanning strategies for retrieving wind exist, for example, the Doppler beam swing 46 | [DBS, [@vanZandt2000]], the velocity azimuth display [VAD, [@lhermitte1962; @eberhard1989]] and the 47 | 6-beam introduced by [@sathe2015] most of the lidar operating software provides horizontal wind speed 48 | and direction profile retrievals for the DBS and VAD scanning strategies. However, the retrievals of 49 | horizontal wind profiles are unavailable if the lidar is set to scan using the 6-beam strategy. In addition, 50 | in the case of the WindCube 200s, the different scanning strategies also generate NetCDF files with 51 | different structures. 52 | 53 | # Statement of need 54 | 55 | lidarwind is a Python package for retrieving horizontal wind speed and direction profiles from the 56 | 6-beam scanning strategy but also includes routines for retrieving wind profiles from the DBS. This 57 | package was developed for retrieving wind profiles from WindCube's NetCDF output, but it can be 58 | extended to process NetCDF [@rew1990] output from other Doppler lidar systems. 
lidarwind reproduces the data 59 | filtering described in WindCube's manual [@windcube2020] and allows the user to define the signal-to-noise 60 | ratio threshold for filtering noisy measurements or using the status variable. In addition, two experimental 61 | filters to minimise the presence of second trip echoes on the observations are included in the package. 62 | 63 | 64 | lidarwind was developed to be easy to use and flexible, allowing it to be used operationally to retrieve 65 | wind profiles continuously. With this package, the user can read and merge a list of WindCube's files 66 | and choose to retrieve wind using the DBS or the 6-beam dedicated modules. In particular, for the 6-beam 67 | observations, the wind is retrieved using the Fast Fourier Wind Vector Algorithm [@Ishwardat2017]. Since 68 | the 6-beam strategy is idealised for studying turbulence, lidarwind also contains a module for calculating 69 | the Reynolds stress tensor according to the methodology introduced by @sathe2015. lidarwind also includes 70 | a basic visualisation module, allowing a quick inspection of the retrieved wind speed and direction profiles. 71 | Thanks to its flexibility, this package was used to process WindCube's data from The Tracing Convective 72 | Momentum Transport in Complex Cloudy Atmospheres Experiment [@diasneto2022a]. 73 | 74 | 75 | # Visualizing the 6-beam retrieved wind 76 | 77 | 78 | As an example of the usage of this package, the vertical wind speed measured by the WindCube and the 79 | horizontal wind speed and direction retrieved using the Fast Fourier Wind Vector Algorithm are shown 80 | in panels a, b, and c from \autoref{fig:wind_panel}. 81 | 82 | 83 | ![Time-height plots of the vertical wind speed (a), horizontal wind speed (b), and horizontal wind 84 | direction (c) derived from the 6-beam observations.\label{fig:wind_panel}](wind_panel.png) 85 | 86 | 87 | Panel a, in the lowest 2 km, reveals the daily evolution of the vertical wind. 
Before 9:00 UTC and after 17:00 UTC, 88 | the measured vertical wind is mainly 0 m/s, indicating the stable period of the atmospheric boundary layer. 89 | Between 9:00 UTC and 17:00 UTC, the range of the vertical velocities increases, spaning values between -1 and 1 m/s, 90 | which indicates the formation of the turbulent layer, also known as the mixing layer [@Stull2003]. 91 | Panel a also shows a measurement gap between 3 UTC and 8 UTC. This gap is due to the absence of a 92 | backscattered signal caused by the extremely low amount of lidar scatterers in the atmosphere [aerosol]. 93 | Panel b, in the lowest 2 km, shows that for the whole day, the magnitude of the horizontal wind is mainly 94 | distributed between 0 and 10 m/s. This panel also reveals variabilities in the magnitude of the horizontal 95 | wind related to the presence of different temporal scales. During the stable period, the scales of the 96 | horizontal wind variability seem to be in the order of hours, while during the turbulent period, the temporal 97 | scales are in the order of minutes. In the lowest 2 km from panel c, the wind direction indicates a wind 98 | rotation along the day. Before 2 UTC, the wind is from the southeast; around 12:00 UTC, the wind is from 99 | the southwest; later, after 20:00 UTC, the wind is from the northwest. Similar to that noticed for the two 100 | previous variables, the wind direction indicates the presence of small-scale variabilities during the 101 | turbulent period. In contrast, during the stable period, the change in direction is only apparent between 102 | different heights (e.g. near the surface and around 1000 m). 103 | 104 | 105 | Above 2000 m, all three variables suggest that the lidar was able to obtain observations from within the clouds. 106 | Even though the observations are from clouds, their heights are wrong. This mispositioning of those clouds is 107 | related to the WindCube operating settings used during the measurements. 
"""Utilities module

Helper routines for downloading the sample data used in the examples
and for building cloud masks from ceilometer and radar observations.
"""

import glob
import os
import shutil

import numpy as np
import pandas as pd
import xarray as xr

# Registry of downloadable sample datasets: key -> (DOI url, md5 hash).
_SAMPLE_DATA_REGISTRY = {
    "wc_6beam": (
        "doi:10.5281/zenodo.7312960/wc_6beam.zip",
        "md5:a7ea3c10a6d2f4a97ff955dc4398f930",
    ),
    "wc_long_dbs": (
        "doi:10.5281/zenodo.7312960/wc_long_dbs.zip",
        "md5:53b4eb6e5dad6dfdaddfbb718dcf8910",
    ),
    "wc_short_dbs": (
        "doi:10.5281/zenodo.7312960/wc_short_dbs.zip",
        "md5:9cbd93f89052d6c6f4407bcce415e277",
    ),
}


def sample_data(key: str):
    """Download (and cache) one of the sample datasets.

    Parameters
    ----------
    key : str
        One of "wc_6beam", "wc_long_dbs" or "wc_short_dbs".

    Returns
    -------
    list
        The list of files extracted from the downloaded archive.

    Raises
    ------
    ValueError
        If ``key`` does not name a known sample dataset.

    """

    # Deferred import: pooch is only needed when downloading data.
    import pooch

    try:
        url, known_hash = _SAMPLE_DATA_REGISTRY[key]
    except KeyError:
        raise ValueError(f"unknown sample data key: {key!r}") from None

    file_list = pooch.retrieve(
        url=url,
        known_hash=known_hash,
        path="~/.cache/lidarwind",
        processor=pooch.Unzip(),
    )

    return file_list


class Util:

    """
    This class contains useful tools
    """

    @staticmethod
    def get_time_bins(sel_day, freq="10min"):
        """Bins estimation

        Creating time bins for a given day and time resolution.

        Parameters
        ----------
        sel_day : pandas.Timestamp
            the day to be binned

        freq : str
            bin width as a pandas frequency string

        Returns
        -------
        pandas.DatetimeIndex
            bin edges from 00:00 of sel_day to 00:00 of the next day

        """

        start = sel_day.strftime("%Y%m%d")
        start_time = pd.to_datetime(f"{start} 00:00:00")

        end = (sel_day + pd.to_timedelta(1, "D")).strftime("%Y%m%d")
        end_time = pd.to_datetime(f"{end} 00:00:00")

        time_bins = pd.date_range(start_time, end_time, freq=freq)

        return time_bins

    @staticmethod
    def get_sample_data(sample_path, file_type):
        """Downloading data

        It downloads the sample data needed for the examples and
        extracts it into ``sample_path``.

        """

        # Deferred import: gdown is only needed for this download path.
        import gdown

        if file_type == "12-00":
            file_id = "1i6iX6KuZOkP_WLuPZHG5uCcvRjlWS-SU"
            url = f"https://drive.google.com/uc?export=download&id={file_id}"

        if file_type == "dbs":
            url = "path"

        output = f"{sample_path}{file_type}.zip"
        gdown.download(url, output, quiet=False)

        print(f"Extracting: {output}")
        shutil.unpack_archive(output, sample_path)
        os.remove(output)

    @staticmethod
    def data_filenames():
        """Sample file list

        It searches for the sample files.
        If the files do not exist, it downloads them.

        """

        home = os.path.expanduser("~")
        sample_path = f"{home}/.lidarwindrc/sample_data/"
        file_type = "12-00"  # change to 6 beam in the future

        # glob on a missing directory simply returns [], so a single
        # "download if nothing found" branch replaces the previous
        # nested directory checks.
        if not os.path.isdir(sample_path):
            os.makedirs(sample_path)

        file_list = sorted(glob.glob(f"{sample_path}{file_type}/*.nc"))

        if not file_list:
            Util.get_sample_data(sample_path, file_type)
            file_list = sorted(glob.glob(f"{sample_path}{file_type}/*.nc"))

        return file_list


class CloudMask:

    """
    This class generates the time-height cloud mask
    and the temporal cloud mask using observations
    from lidar and ceilometer.
    """

    def __init__(self, wc_data=None, ceilo_data=None, radar_data=None):

        # wc_data supplies the target time grid; ceilometer and radar
        # are optional — without both, only a trivial mask is produced.
        self.ceilo_data = ceilo_data
        self.radar_data = radar_data
        self.wc_data = wc_data

        self.call_methods()

    def call_methods(self):
        """Dispatch to the trivial or the observation-based mask."""

        if self.ceilo_data is None or self.radar_data is None:

            self.get_time_mask(mask_type="aux")

        else:

            self.clean_ceilo()
            self.clean_radar()
            self.get_cloud_mask_2d()
            self.get_time_mask(mask_type="real")

    def clean_ceilo(self):
        """Smooth the positive ceilometer backscatter and put it on the
        lidar time grid."""

        positive_beta = self.ceilo_data.beta_raw.where(
            self.ceilo_data.beta_raw > 0
        )
        positive_beta = positive_beta.rolling(
            time=20, center=True, min_periods=13
        ).mean()
        positive_beta = positive_beta.rolling(
            range=10, center=True, min_periods=8
        ).mean()

        # grid interpolation: to lidar time
        self.clean_ceilo_data = positive_beta.interp(
            {"time": self.wc_data.time}
        )

    def clean_radar(self):
        """Keep positive radar reflectivity and put it on the lidar
        time grid and the ceilometer range grid."""

        positive_ze = self.radar_data.radar_equivalent_reflectivity.where(
            self.radar_data.radar_equivalent_reflectivity > 0
        )

        # grid interpolation: to lidar time, to ceilo range
        self.clean_radar_data = positive_ze.interp(
            {"time": self.wc_data.time, "range": self.clean_ceilo_data.range}
        )

    def get_cloud_mask_2d(self):
        """Combine both instruments into one mask: 2 = ceilometer only,
        1 = radar only, 3 = both, 0 = clear."""

        # CEILOMETER mask
        self.clean_ceilo_data.values[
            np.isfinite(self.clean_ceilo_data.values)
        ] = 2
        self.clean_ceilo_data.values[
            ~np.isfinite(self.clean_ceilo_data.values)
        ] = 0

        # RADAR mask
        self.clean_radar_data.values[
            np.isfinite(self.clean_radar_data.values)
        ] = 1
        self.clean_radar_data.values[
            ~np.isfinite(self.clean_radar_data.values)
        ] = 0

        # final mask
        self.cloud_mask = self.clean_ceilo_data + self.clean_radar_data

    def get_time_mask(self, mask_type=None):
        """Reduce the 2D mask to a per-time flag (1 = cloud above the
        maximum usable range)."""

        if mask_type == "aux":
            print("aux mask")

            # No ceilometer/radar: assume a cloud flag everywhere.
            aux_cloud_mask = xr.DataArray(
                np.ones(len(self.wc_data.time)),
                dims="time",
                coords={"time": self.wc_data.time.values},
            )

            self.time_cloud_mask = aux_cloud_mask

        elif mask_type == "real":
            print("real mask")

            # 6500 is the value I defined as maximum range
            high_cloud_layer = self.cloud_mask.where(
                self.cloud_mask.range > 6500
            )

            time_cloud_mask = high_cloud_layer.sum(dim="range")

            # 1 indicates that there is a cloud above
            # the maximum range
            time_cloud_mask.values[time_cloud_mask.values > 0] = 1

            self.time_cloud_mask = time_cloud_mask

        else:
            print("mask_type not defined")
mask_type=None): 202 | 203 | if mask_type == "aux": 204 | print("aux mask") 205 | 206 | aux_cloud_mask = xr.DataArray( 207 | np.ones(len(self.wc_data.time)), 208 | dims="time", 209 | coords={"time": self.wc_data.time.values}, 210 | ) 211 | 212 | self.time_cloud_mask = aux_cloud_mask 213 | 214 | elif mask_type == "real": 215 | print("real mask") 216 | 217 | # 6500 is the value I defined as maximum range 218 | high_cloud_layer = self.cloud_mask.where( 219 | self.cloud_mask.range > 6500 220 | ) 221 | 222 | time_cloud_mask = high_cloud_layer.sum(dim="range") 223 | 224 | # 1 indicates that there is a cloud above 225 | # the maximum range 226 | time_cloud_mask.values[time_cloud_mask.values > 0] = 1 227 | 228 | self.time_cloud_mask = time_cloud_mask 229 | 230 | else: 231 | print("mask_type not defined") 232 | -------------------------------------------------------------------------------- /lidarwind/preprocessing/wind_cube.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import xarray as xr 4 | 5 | from lidarwind.io import open_sweep 6 | 7 | 8 | def wc_azimuth_elevation_correction( 9 | ds: xr.Dataset, azimuth_resolution: int = 1, elevation_resolution: int = 1 10 | ): 11 | 12 | """Azimuth and elevation correction 13 | 14 | This function corrects the azimuth ambiguity issue by 15 | replacing the 360 azimuth value with 0. 
It also rounds 16 | the elevation and azimuth according to a specified 17 | resolution 18 | 19 | Parameters 20 | ---------- 21 | ds : xr.Dataset 22 | A dataset containing the WindCube's observations 23 | 24 | azimuth_resolution : int 25 | Azimuth resolution in degrees 26 | 27 | elevation_resolution : int 28 | Elevation resolution in degrees 29 | 30 | Returns 31 | ------- 32 | xr.Dataset 33 | The same dataset, but the azimuth is corrected for the ambiguity 34 | 35 | """ 36 | 37 | assert "azimuth" in ds 38 | assert ds["azimuth"].dims == ("time",) 39 | 40 | ds["azimuth"] = ds["azimuth"].round(azimuth_resolution) 41 | # Avoid ambiguity on 360 degrees 42 | ds["azimuth"] = ds["azimuth"].where(ds.azimuth != 360, 0) 43 | 44 | ds["elevation"] = ds["elevation"].round(elevation_resolution) 45 | 46 | return ds 47 | 48 | 49 | def wc_fixed_files_restruc_dataset(ds: xr.Dataset): 50 | 51 | """Restructuring fixed type files 52 | 53 | This function restructures the WindCube fixed 54 | file type; it is needed to allow the merging 55 | of several fixed-type files. The function starts 56 | by swapping the range dimension with the gate_index. 57 | Then, the time dimension is added to the variables 58 | that do not have it. After that, elevation, azimuth 59 | and range are defined as new coordinates. 60 | 61 | Parameters 62 | ---------- 63 | ds : xr.Dataset 64 | A dataset from the WindCube's fixed file type 65 | 66 | Returns 67 | ------- 68 | xr.Dataset 69 | The same dataset, but restructured for merging. 
70 | 71 | """ 72 | 73 | if not isinstance(ds, xr.Dataset): 74 | raise TypeError 75 | 76 | assert "time" in ds 77 | assert "range" in ds 78 | assert "azimuth" in ds 79 | assert "elevation" in ds 80 | assert "gate_index" in ds 81 | 82 | ds["gate_index"] = ds["gate_index"].astype("i") 83 | ds = ds.swap_dims({"range": "gate_index"}).reset_coords() 84 | 85 | tmp_no_time = ds[ 86 | [v for v in ds.variables if "time" not in ds[v].dims] 87 | ].expand_dims("time") 88 | tmp_time = ds[[v for v in ds.variables if "time" in ds[v].dims]] 89 | 90 | ds = xr.merge([tmp_no_time, tmp_time]) 91 | ds = ds.set_coords(["elevation", "azimuth", "range"]) 92 | 93 | return ds 94 | 95 | 96 | def wc_fixed_merge_files(file_names: list): 97 | 98 | """Merging fixed type files 99 | 100 | This function merges multiple fixed files into a single dataset. 101 | 102 | Parameters 103 | ---------- 104 | file_names : list 105 | A list of fixed files to be merged 106 | 107 | Returns 108 | ------- 109 | xr.Dataset 110 | A dataset containing data from all files specified 111 | in the file_names list 112 | 113 | """ 114 | 115 | zenith_list = [] 116 | slanted_list = [] 117 | 118 | if bool(file_names) is False: 119 | raise FileNotFoundError 120 | 121 | for file in file_names: 122 | 123 | tmp_ds = wc_fixed_files_restruc_dataset(open_sweep(file)) 124 | 125 | if tmp_ds["elevation"] == 90: 126 | zenith_list.extend([tmp_ds]) 127 | 128 | if tmp_ds["elevation"] != 90: 129 | slanted_list.extend([tmp_ds]) 130 | 131 | tmp_ds.close() 132 | 133 | ds_zenith = xr.merge(zenith_list) 134 | ds_slanted = xr.merge(slanted_list) 135 | 136 | ds = xr.merge([ds_zenith, ds_slanted]) 137 | 138 | return ds 139 | 140 | 141 | def wc_slanted_radial_velocity_4_fft(ds: xr.Dataset): 142 | 143 | """Extraction of slanted radial velocities 144 | 145 | This function extracts the non-zenith pointing 146 | data from the dataset. 
def wc_slanted_radial_velocity_4_fft(ds: xr.Dataset):

    """Extraction of slanted radial velocities

    This function extracts the non-zenith pointing
    data from the dataset. Only the non-zenith pointing
    data is used to retrieve the horizontal wind speed and
    direction using the FFT method. Additionally, it sets
    azimuth as a new dimension and reindexes the data by
    proximity in time. The function identifies the duration
    of a complete scan cycle, and for a given time step,
    the time index is used as a central reference and
    assigned as the time index of all azimuthal observations
    within the duration of one cycle.

    Parameters
    ----------
    ds : xr.Dataset
        A dataset of fixed-type files merged,
        corrected for azimuth and elevation ambiguity
        and from a single elevation.

    Returns
    -------
    xr.Dataset
        A dataset of reindexed slanted observations with
        an extra dimension: azimuth

    Raises
    ------
    TypeError
        If the dataset contains multiple elevations
    ValueError
        If the elevation is 90 degrees, or if there is not
        enough data to estimate the scan cycle duration

    """

    if len(np.unique(ds.elevation)) > 1:
        raise TypeError("This dataset contains multiple elevations")

    if 90 in np.unique(ds.elevation):
        raise ValueError(
            "90 degrees elevation: not valid for retrieving horizontal wind"
        )

    # at least two visits to the first azimuth are required to measure
    # the time between consecutive scan cycles
    if (
        len(ds["azimuth"].where(ds["azimuth"] == ds["azimuth"][0], drop=True))
        < 2
    ):
        raise ValueError(
            "Not enough data to estimate the one scan cycle duration"
        )

    # initializing storage ds
    radial_velocities = xr.Dataset()

    # identify the mean duration of a complete scan cycle;
    # half of it is used as the reindexing tolerance below
    half_cycle = (
        ds.where(ds.azimuth == ds.azimuth[0], drop=True)
        .time.diff(dim="time")
        .mean()
        .values
    )
    half_cycle = pd.to_timedelta(half_cycle).seconds / 2

    # unique azimuths
    azimuth = np.unique(ds.azimuth)

    for azm in azimuth:

        # selecting the reference azimuthal slice
        azimuth_left_over = azimuth[azimuth != azm]
        tmp_reference_slice = ds["radial_wind_speed"].where(
            ds["azimuth"] == azm, drop=True
        )

        # promote azimuth from a per-time variable to a dimension
        tmp_reference_slice = (
            tmp_reference_slice.drop(["azimuth"])
            .assign_coords({"azimuth": azm})
            .expand_dims(["azimuth"])
        )
        radial_velocities = xr.merge([radial_velocities, tmp_reference_slice])

        for azm_left in azimuth_left_over:

            tmp_azimuth_slice = ds["radial_wind_speed"].where(
                ds["azimuth"] == azm_left, drop=True
            )
            # snap the remaining azimuths onto the reference time axis,
            # accepting only matches within half a scan cycle
            interp_azimuth_slice = tmp_azimuth_slice.reindex(
                time=tmp_reference_slice.time,
                method="nearest",
                tolerance=f"{half_cycle}s",
            )
            tmp_slice = (
                interp_azimuth_slice.drop(["azimuth"])
                .assign_coords({"azimuth": azm_left})
                .expand_dims(["azimuth"])
            )
            radial_velocities = xr.merge([radial_velocities, tmp_slice])

    radial_velocities["azimuth"].attrs = ds["azimuth"].attrs

    return radial_velocities
10 | 11 | There are different scanning strategies for estimating wind profiles using Wind Doppler lidars, but two are used more often: the 6-beam and the DBS. 12 | 13 | 14 | 15 | ============================ 16 | Wind lidar scanning strategy 17 | ============================ 18 | 19 | ------- 20 | 6-beam: 21 | ------- 22 | 23 | .. _six-beam: 24 | 25 | Reynolds stress tensor calculation 26 | ---------------------------------- 27 | 28 | The 6-beam is a scanning strategy that is idealised for retrieving turbulence. This method measures the radial velocity at five azimuths (0, 72, 144, 216, 288º) with a certain elevation angle and one additional observation at 90º elevation. The cartoon below illustrates the geometry of this strategy. 29 | 30 | .. figure:: /figures/six_beam.png 31 | :scale: 15% 32 | :align: center 33 | 34 | `Sathe et al.,2015 `_ suggested that the variance of each radial observation can be expressed in terms of variances and co-variances of u (zonal wind), v (meridional wind), and w (vertical wind), as indicated below. :math:`\theta` and :math:`\phi` are the azimuth and elevation angles, respectively. 35 | 36 | .. math:: 37 | 38 | var(v_{\theta}) = var(u)cos^2(\phi)sin^2(\theta) + var(v)cos^2(\phi)cos^2(\theta) + var(w)sin^2(\phi) \\ 39 | + 2cov(u,v)cos^2(\phi)cos(\theta)sin(\theta) + 2cov(u,w)cos(\phi)sin(\phi)sin(\theta) \\ 40 | + 2 cov(v,w)cos(\phi)sin(\phi)cos(\theta) 41 | 42 | 43 | Combining all the equations for each one of the six observations makes it possible to solve this equation system for the variances and co-variances of u, v, and W, which are the Reynolds stress tensor components. 44 | 45 | .. 
math:: 46 | 47 | \stackrel{\Sigma}{\mathrm{M} 48 | \begin{bmatrix} var(u)\\ var(v)\\ var(w)\\ 49 | cov(u,v)\\ cov(u,w)\\ cov(v,w)\end{bmatrix}} = 50 | \stackrel{S}{\begin{bmatrix} var(v_{0})\\ var(v_{72})\\ var(v_{144})\\ 51 | var(v_{216})\\ var(v_{288})\\ var(v_{zenith}) 52 | \end{bmatrix}} 53 | 54 | 55 | 56 | 57 | The calculation is made by solving the following equation: :math:`\Sigma = M^{-1}S`, and it is implemented in SixBeamMethod class. 58 | 59 | 60 | 61 | .. _wind-fft: 62 | Wind speed and direction estimation 63 | ----------------------------------- 64 | 65 | Even though the 6-beam strategy makes it possible to calculate the Reynolds stress tensor directly, the horizontal wind speed and direction estimation is not straightforward. One of the possible ways to estimate the wind speed and direction is using the Fast Fourier Wind Vector Algorithm (FFWVA) developed by `Ishwardat, 2017 `_. 66 | 67 | The FFWVA uses Fast Fourier Transform (FFT) digital signal-processing algorithms to decompose the radial Doppler observations in terms of amplitude and phase of their harmonic frequencies. The wind speed :math:`V_{s}` and direction :math:`V_{d}` can be calculated using the amplitude and phase from the first harmonic, as indicated below. 68 | 69 | .. math:: 70 | a + bi = \mathrm{FFT}(v_{\theta})|_{1st} 71 | 72 | .. math:: 73 | V_{s} = \frac{2|a+bi|}{Ncos(\phi)} 74 | 75 | .. math:: 76 | V_{d} = -arctan2(a/b)+180\\ 77 | 78 | 79 | 80 | a and b are the real and imaginary parts from the first harmonic. :math:`v_{\theta}` is the radial velocity from each azimuth. N is the number of data points used to calculate the FFT decomposition. Note: the `arctan2 `_ function returns the correct quadrant. This method is implemented in the class RetriveWindFFT. 81 | 82 | 83 | -------- 84 | The DBS: 85 | -------- 86 | 87 | .. _DBS: 88 | 89 | DBS stands for Doppler beam swing. 
This scanning strategy consists in measuring the radial velocity at four particular azimuths using a slanted beam swinging from North to South and from East to West. The carton below illustrates how those observations are collected. 90 | 91 | .. figure:: /figures/dbs.png 92 | :scale: 15% 93 | :align: center 94 | 95 | As the measurements are taken using beams with a specific elevation angle :math:`\phi`, all four observed radial velocities contain information from the vertical and horizontal wind, as indicated by the equations listed below. The indexes N, S, E, and W, stand for North, South, East and West :math:`u` and :math:`v` are the zonal and meridional wind components. 96 | 97 | .. math:: 98 | 99 | V_{rN} = v_{N}cos(\phi) + w_{N}sin(\phi) \\ 100 | V_{rS} = -v_{S}cos(\phi) + w_{S}sin(\phi) \\ 101 | V_{rE} = u_{E}cos(\phi) + w_{E}sin(\phi) \\ 102 | V_{rW} = -u_{W}cos(\phi) + w_{W}sin(\phi) \\ 103 | 104 | For estimating the meridional (south-north) and zonal (west-est) wind components, it is assumed that the meridional and zonal wind components remain constant during the entire scanning cycle, which implies that :math:`v_{N} = v_{S} = v` and :math:`u_{E} = u_{W} = u`. Applying this assumption to the set of equations listed above and then subtracting the South from the North component and the West from the East component, it is possible to estimate the mean zonal and meridional wind. 105 | 106 | .. math:: 107 | v = \frac{v_{N} - v_{S}}{2 cos(\phi)} \\ 108 | u = \frac{u_{E} - u_{W}}{2 cos(\phi)} \\ 109 | 110 | From those components, it is also possible to estimate the magnitude of the horizontal :math:`V_{h}` wind and its direction :math:`V_{d}`. Note: the `arctan2 `_ function returns the correct quadrant. 111 | 112 | 113 | .. math:: 114 | V_{s} = \sqrt(u^{2} + v^{2}) \\ 115 | V_{d} = 180 + arctan2(-u,-v)) 116 | 117 | This method is also implemented in this package and is available in class GetWindProperties5Beam. 
def first_harmonic_amplitude(
    radial_velocity: xr.DataArray, dim="azimuth"
) -> xr.DataArray:
    """First harmonic amplitude

    Computes the complex spectral amplitudes along the given
    dimension and selects the amplitude of the first harmonic.

    Parameters
    ----------
    radial_velocity : xr.DataArray
        A data array of slanted Doppler velocities

    dim : string
        Name of the azimuthal dimension

    Returns
    -------
    A data array of the amplitudes of the first harmonic

    """

    sample_size = len(radial_velocity[dim])

    spectrum = xrft.fft(radial_velocity, dim=dim, true_amplitude=False)
    spectrum = spectrum.assign_coords({f"{dim}_length": sample_size})

    spectrum[f"{dim}_length"].attrs = {
        "comment": "size of the azimuth coordinate"
    }

    # the first harmonic sits at the smallest positive frequency
    freq_coord = spectrum[f"freq_{dim}"]
    first_harmonic_index = freq_coord.where(freq_coord > 0).argmin()

    return spectrum.isel({f"freq_{dim}": first_harmonic_index})
def harmonic_phase(amplitude: xr.DataArray) -> xr.DataArray:
    """First harmonic phase

    It calculates the phase of a given complex harmonic amplitude.

    Parameters
    ----------
    amplitude : xr.DataArray
        A data array of complex amplitudes from a given harmonic

    Returns
    -------
    harmonic phase

    """

    # phase angle of the complex amplitude, converted to degrees
    imaginary_part = amplitude.imag
    real_part = amplitude.real

    return np.degrees(np.arctan2(imaginary_part, real_part))
def wind_speed(
    amplitude: xr.DataArray, coord="elevation", azimuth_name="azimuth"
) -> xr.DataArray:
    """Wind speed

    It calculates the wind speed using the first harmonic.

    Parameters
    ----------
    amplitude : xr.DataArray
        A data array of complex amplitudes from the first harmonic

    coord : string
        Name of the elevation coordinate

    azimuth_name : string
        Name of the azimuthal dimension

    Returns
    -------
    The wind speed

    """

    sample_size = amplitude[f"{azimuth_name}_length"]
    elevation_correction = np.cos(np.deg2rad(amplitude[coord]))

    # radial speed from the first-harmonic amplitude, then projected
    # from the slanted beam onto the horizontal plane
    radial_wind_speed = 2 * np.abs(amplitude) / sample_size
    horizontal_wind_speed = radial_wind_speed / elevation_correction

    horizontal_wind_speed.attrs = {
        "name": "wind speed",
        "units": "m s-1",
        "comments": "horizontal wind speed retrived using the FFT method",
    }

    return horizontal_wind_speed
def zonal_wind(
    amplitude: xr.DataArray, elevation_name="elevation", azimuth_name="azimuth"
) -> xr.DataArray:
    """zonal wind

    It calculates the zonal wind component.

    Parameters
    ----------
    amplitude : xr.DataArray
        A data array of complex amplitudes from the first harmonic

    elevation_name : string
        Name of the elevation coordinate

    azimuth_name : string
        Name of the azimuthal dimension

    Returns
    -------
    The zonal wind component

    """

    # project onto the 90-degree azimuth and flip the sign
    projected_wind = wind_projected_to_azimuth(
        amplitude, 90, coord=elevation_name, azimuth_name=azimuth_name
    )
    zonal_wind = -projected_wind

    zonal_wind.attrs = {
        "name": "zonal wind",
        "units": "m s-1",
        "comments": "zonal wind retrieved using the FFT method",
    }

    return zonal_wind
def get_wind_properties(
    radial_velocity: xr.DataArray,
    elevation_name="elevation",
    azimuth_name="azimuth",
) -> xr.Dataset:
    """Wind dataset

    It retrieves the wind properties from the slanted observations.

    Parameters
    ----------
    radial_velocity : xr.DataArray
        A data array of the slanted Doppler velocities observations.

    elevation_name : str
        Name of the elevation coordinate

    azimuth_name : str
        Name of the azimuthal dimension

    Returns
    -------
    xr.Dataset
        A dataset containing the wind speed, direction, meridional
        and zonal components

    """

    # the complex first-harmonic amplitudes are the common input
    # for all four retrieved quantities below
    amplitude = first_harmonic_amplitude(radial_velocity, dim=azimuth_name)

    wind_properties = xr.Dataset()
    wind_properties["horizontal_wind_direction"] = wind_direction(amplitude)
    wind_properties["horizontal_wind_speed"] = wind_speed(
        amplitude, coord=elevation_name, azimuth_name=azimuth_name
    )
    wind_properties["meridional_wind"] = meridional_wind(
        amplitude, elevation_name=elevation_name, azimuth_name=azimuth_name
    )
    wind_properties["zonal_wind"] = zonal_wind(
        amplitude, elevation_name=elevation_name, azimuth_name=azimuth_name
    )

    return wind_properties
class PlotSettings:
    """Helper that applies the lidarwind plotting defaults to matplotlib."""

    def __init__(self, mpl, style="dark_background"):
        # mpl: the matplotlib module; style: any style sheet name known to it
        self.mpl = mpl
        self.style = style

    def update_settings(self):
        """Apply the figure, font and legend defaults to rcParams.

        Returns
        -------
        PlotSettings
            self, to allow chaining
        """

        font_size = 16

        self.mpl.style.use(self.style)
        self.mpl.rcParams["figure.figsize"] = [6, 6]
        self.mpl.rcParams["figure.dpi"] = 80
        self.mpl.rcParams["savefig.dpi"] = 100

        self.mpl.rcParams["font.size"] = font_size
        self.mpl.rcParams["legend.fontsize"] = font_size
        self.mpl.rcParams["figure.titlesize"] = font_size

        self.mpl.rcParams["ytick.labelsize"] = font_size
        self.mpl.rcParams["xtick.labelsize"] = font_size
        self.mpl.rcParams["axes.titlesize"] = font_size
        self.mpl.rcParams["axes.labelsize"] = font_size

        self.mpl.rcParams["legend.fancybox"] = True
        self.mpl.rcParams["legend.framealpha"] = 0.7
        self.mpl.rcParams["legend.facecolor"] = "silver"
        self.mpl.rcParams["legend.frameon"] = True

        self.mpl.rcParams["lines.linewidth"] = 5

        return self

    @staticmethod
    def plot_setup(plot):
        """Format the time axis and enable the grid of a given plot."""

        plt.setp(plot.axes.xaxis.get_majorticklabels(), rotation=0)
        locator = mdates.AutoDateLocator()
        formatter = mdates.ConciseDateFormatter(locator)
        plot.axes.xaxis.set_major_formatter(formatter)

        # positional form instead of grid(b=True): the "b" keyword was
        # removed in matplotlib 3.5, while grid(True) works everywhere
        plt.grid(True)

        return plot
83 | cmap=cmap, 84 | vmin=vmin, 85 | vmax=vmax, 86 | plot_id=plot_id, 87 | fig_path=fig_path, 88 | save=save, 89 | show=show, 90 | min_time=min_time, 91 | max_time=max_time, 92 | ) 93 | 94 | else: 95 | tmp_data = Filtering(self.data).get_vertical_obs_comp(var_name) 96 | 97 | if name_prefix: 98 | std_name = tmp_data.attrs["standard_name"] 99 | string_name = f"{name_prefix}_{std_name}" 100 | 101 | else: 102 | string_name = tmp_data.attrs["standard_name"] 103 | 104 | self.plot_data( 105 | tmp_data=tmp_data, 106 | cmap=cmap, 107 | vmin=vmin, 108 | vmax=vmax, 109 | elv=elv, 110 | azm=azm, 111 | save=save, 112 | plot_id=plot_id, 113 | string_name=string_name, 114 | fig_path=fig_path, 115 | min_time=min_time, 116 | max_time=max_time, 117 | ) 118 | 119 | def view_ret_var( 120 | self, 121 | var_name, 122 | cmap="Spectral", 123 | vmin=-1, 124 | vmax=1, 125 | elv="90", 126 | azm="-", 127 | save=False, 128 | plot_id=None, 129 | fig_path=None, 130 | name_prefix=None, 131 | show=False, 132 | min_time=None, 133 | max_time=None, 134 | ): 135 | 136 | tmp_data = self.data[var_name] 137 | 138 | string_name = tmp_data.attrs["standard_name"] 139 | 140 | self.plot_data( 141 | tmp_data=tmp_data, 142 | cmap=cmap, 143 | vmin=vmin, 144 | vmax=vmax, 145 | elv=elv, 146 | azm=azm, 147 | save=save, 148 | plot_id=plot_id, 149 | string_name=string_name, 150 | fig_path=fig_path, 151 | show=show, 152 | min_time=min_time, 153 | max_time=max_time, 154 | ) 155 | 156 | def plot_data( 157 | self, 158 | tmp_data, 159 | cmap="Spectral", 160 | vmin=-1, 161 | vmax=1, 162 | elv="90", 163 | azm="-", 164 | save=False, 165 | plot_id=None, 166 | fig_path=None, 167 | string_name=None, 168 | show=False, 169 | min_time=None, 170 | max_time=None, 171 | ): 172 | 173 | sel_day = pd.to_datetime(tmp_data.time[0].values) 174 | 175 | if max_time is not None: 176 | max_time = pd.to_datetime(max_time) 177 | 178 | else: 179 | max_time = pd.to_datetime(sel_day.strftime("%Y%m%d 23:59:59")) 180 | 181 | if min_time is not None: 
182 | min_time = pd.to_datetime(min_time) 183 | 184 | else: 185 | min_time = pd.to_datetime(sel_day.strftime("%Y%m%d 00:00:00")) 186 | 187 | tmp_data = tmp_data.sel(time=slice(min_time, max_time)) 188 | 189 | if string_name: 190 | tmp_data.attrs["standard_name"] = string_name 191 | 192 | plt.figure(figsize=(18, 8)) 193 | plot = tmp_data.plot(x="time", cmap=cmap, vmin=vmin, vmax=vmax) 194 | plot = PlotSettings.plot_setup(plot) 195 | 196 | plt.grid(b=True) 197 | plt.ylim(0, 12e3) 198 | plt.xlim(min_time, max_time) 199 | plt.title(f"elv: {elv}, azm: {azm}") 200 | 201 | if plot_id == "hor_wind_dir": 202 | plot.colorbar.set_ticks(np.linspace(0, 360, 9)) 203 | 204 | if save: 205 | sel_day_str = sel_day.strftime("%Y%m%d") 206 | file_name = f"{sel_day_str}_{plot_id}.png" 207 | output_file_name = os.path.join(fig_path, file_name) 208 | print(output_file_name) 209 | plt.savefig(output_file_name, bbox_inches="tight") 210 | 211 | if show: 212 | plt.show() 213 | 214 | plt.close() 215 | 216 | def plot_data_azm( 217 | self, 218 | data_non_90, 219 | cmap="Spectral", 220 | vmin=-1, 221 | vmax=1, 222 | fig_path=None, 223 | save=False, 224 | plot_id=None, 225 | show=False, 226 | min_time=None, 227 | max_time=None, 228 | ): 229 | 230 | elv = data_non_90.elv.values[0] 231 | fig, axes = plt.subplots(5, 1, sharex=True, figsize=(18, 25)) 232 | 233 | sel_day = pd.to_datetime(data_non_90.time[0].values) 234 | 235 | if max_time is not None: 236 | max_time = pd.to_datetime(max_time) 237 | 238 | else: 239 | max_time = pd.to_datetime(sel_day.strftime("%Y%m%d 23:59:59")) 240 | 241 | if min_time is not None: 242 | min_time = pd.to_datetime(min_time) 243 | 244 | else: 245 | min_time = pd.to_datetime(sel_day.strftime("%Y%m%d 00:00:00")) 246 | 247 | for ax_number, i in enumerate(data_non_90.azm.values): 248 | 249 | tmp_data = data_non_90.sel(azm=i) 250 | 251 | tmp_data = tmp_data.sel(time=slice(min_time, max_time)) 252 | 253 | plot = tmp_data.plot( 254 | x="time", cmap=cmap, vmin=vmin, vmax=vmax, 
ax=axes[ax_number] 255 | ) 256 | 257 | plot = PlotSettings.plot_setup(plot) 258 | 259 | axes[ax_number].grid(b=True) 260 | axes[ax_number].set_ylim(0, 12e3) 261 | axes[ax_number].set_xlim(min_time, max_time) 262 | axes[ax_number].set_title(f"elv: {elv}, azm: {i}") 263 | 264 | if save: 265 | sel_day_str = sel_day.strftime("%Y%m%d") 266 | file_name = f"{sel_day_str}_{plot_id}.png" 267 | output_file_name = os.path.join(fig_path, file_name) 268 | print(output_file_name) 269 | plt.savefig(output_file_name, bbox_inches="tight") 270 | 271 | if show: 272 | plt.show() 273 | 274 | plt.close() 275 | -------------------------------------------------------------------------------- /docs/usage.rst: -------------------------------------------------------------------------------- 1 | =========== 2 | Lidar usage 3 | =========== 4 | 5 | To use lidarwind in a project:: 6 | 7 | import lidarwind as lst 8 | 9 | 10 | 11 | -------------------------- 12 | Generating the config file 13 | -------------------------- 14 | 15 | 16 | After installing lidarwind, it is recommended to generate the configuration file (config.json). The global attributes from the NetCDF files generated by lidarwind are defined in this file. To create the config.json, the user needs to use the lst.configuration class. Below you can find an example of how this file is generated. 17 | 18 | .. code-block:: python 19 | 20 | >>> config = lst.Configurations(lst=lst) 21 | >>> config = config.load_institution('institution name') 22 | >>> config = config.load_instrument('instrument name') 23 | >>> config = config.load_site('site name') 24 | >>> config = config.load_contact('contact person') 25 | >>> config = config.load_email('contact email') 26 | >>> config = config.load_comments('additional information') 27 | >>> config.generate_conf() 28 | 29 | The configuration file can also be created using the default values. 30 | 31 | .. 
code-block:: python 32 | 33 | >>> lst.configurations(lst=lst).generate_conf() 34 | 35 | Whenever the class lst.RetriveWindFFT() is used to retrieve wind, lidarwind will try to load the global attributes from the config.json. If this file does not exist, lidarwind will create it using the default values and displays the following message. 36 | 37 | .. code-block:: python 38 | 39 | 'You do not have a config file yet' 40 | 'a temporary config file was generated' 41 | 'See the documentation for generating it' 42 | 43 | 44 | 45 | ------------------- 46 | Data pre-processing 47 | ------------------- 48 | 49 | 50 | The NetCDF files produced by the WindCube operating system while following the 6-beam or the DBS can differ from each other. The methods for retrieving wind also differ from each other. For those reasons, the lidarwind has different pre-processing classes to handle those files and create the requirements for deriving wind. 51 | 52 | 53 | 54 | 6-beam data pre-processing 55 | -------------------------- 56 | 57 | 58 | The 6-beam data is pre-processed by the DataOperations class. It merges all data, grouping the observations into zenith pointing and slanted. To use this class, the user only needs to pass a list of file paths. The merged data set can be accessed using the merged_data attribute. After pre-processing the data, one can also save the pre-processed data as NetCDF and continue with the wind retrieval or turbulence estimation afterwards. 59 | 60 | .. code-block:: python 61 | 62 | >>> merged_ds = lidarwind.DataOperations(file_list).merged_data 63 | >>> merged_ds.to_netcdf(output_file_path) 64 | 65 | 66 | DBS data pre-processing 67 | -------------------------- 68 | 69 | The pre-processing of the DBS data is handled by the DbsOperations class. This class needs a list of file paths and a list of variables that will be merged. While executing, it adds a mean time variable to identify all observations from a complete DBS scan. 
After using this class, the user can access the merged data by calling the merged_ds attribute, which can also be saved as a NetCDF file to continue with the wind retrieval afterwards. 70 | 71 | 72 | .. code-block:: python 73 | 74 | >>> var_list = ['azimuth', 'elevation', 'radial_wind_speed', 75 | >>> 'radial_wind_speed_status', 'measurement_height', 'cnr'] 76 | >>> merged_ds = lst.DbsOperations(file_list, var_list).merged_ds 77 | >>> merged_ds.to_netcdf(output_file_path) 78 | 79 | 80 | --------------- 81 | Retrieving wind 82 | --------------- 83 | 84 | lidarwind is able to retrieve wind from the 6-beam data and also from the DBS data. 85 | 86 | 6-beam data wind retrieval 87 | -------------------------- 88 | 89 | To retrieve the wind speed and direction from the 6-beam data, the user needs to prepare the dataset for it. It is made by using the GetRestructuredData class. This class groups the slanted data by their azimuth and creates an azimuth dimension to store it. Once it is done, the output object from GetRestructuredData class contains all the information needed for retrieving wind profiles using the RetriveWindFFT class. 90 | 91 | The RetriveWindFFT class applies the fast Fourier transform (FFT) along the azimuthal dimension of the dataset for each complete scanning cycle and derives the wind speed and direction from that. A short description of the FFT-based wind retrieval can be found in the :any:`6-beam ` section. Below you can see how to retrieve winds from the 6-data. The wind_obj has an attribute wind_prop (a xarray dataset) where the wind profiles are stored. 92 | 93 | 94 | .. code-block:: python 95 | 96 | >>> restruct_data = lst.GetRestructuredData(merged_ds) 97 | >>> wind_obj = lst.RetriveWindFFT(restruct_data) 98 | 99 | 100 | A notebook example combining all steps for retrieving wind can be found in the list of `notebooks examples `_. You can run the same example online by clicking on the binder badge listed in the package :any:`introduction `. 
101 | 102 | 103 | DBS data wind retrieval 104 | ----------------------- 105 | 106 | The retrieval of the wind from the DBS observations is made by the GetWindProperties5Beam class, and it uses the merged dataset generated by the DbsOperations class to do it. The GetWindProperties5Beam class applies the methodology described in the :ref:`DBS overview section ` to retrieve wind speed and direction profiles. The wind speed and direction are available as attributes of the returned object. 107 | 108 | 109 | .. code-block:: python 110 | 111 | >>> wind_obj = lst.GetWindProperties5Beam(merged_ds) 112 | >>> hor_wind_speed = wind_obj.hor_wind_speed 113 | >>> ver_wind_speed = wind_obj.ver_wind_speed 114 | >>> hor_wind_dir = wnd_obj.hor_wind_dir 115 | 116 | A notebook example combining all steps for retrieving wind can be found in the list of `notebooks examples `_. You can run the same example online by clicking on the binder badge listed in the package :any:`introduction `. 117 | 118 | 119 | --------------------- 120 | Turbulence estimation 121 | --------------------- 122 | 123 | The lidarwind package also contains an implementation of the :any:`6-beam ` method for estimating the Reynolds stress tensor components. This estimation is made by the SixBeamMethod class using the returned object from GetRestructuredData class. As introduced in the overview, the 6-beam method requires variances as input. The user needs to indicate the window size to calculate the variance in time. However, SixBeamMethod class requires a window defined in terms of the number of profiles that fit within the desired time window. Below you can find an example of how to relate a time window to its equivalent number of profiles. 124 | 125 | 126 | .. 
code-block:: python 127 | 128 | >>> # desired time windown in minutes 129 | >>> time_window = 5 130 | 131 | >>> # duration of one minute in seconds 132 | >>> minute_lenght = 60 133 | 134 | >>> # vertical observations time resolution in seconds 135 | >>> time_resolution = restruct_data.data_transf_90.time.diff(dim='time').values * 1e-9 136 | >>> time_resolution = int(time_resolution[0]) 137 | 138 | >>> # frequency convertion from minutes to profile number 139 | >>> freq = (minute_lenght/time_resolution)*time_window 140 | >>> freq = int(freq) 141 | 142 | 143 | As soon as the time window is converted to its number of profiles equivalent, the SixBeamMethod class can be applied to the returned object from GetRestructuredData class. The variance and co-variance profiles are available in the attribute var_comp_ds (a xarray dataset) 144 | 145 | 146 | .. code-block:: python 147 | 148 | >>> restruct_data = lst.GetRestructuredData(merged_ds) 149 | >>> turb_data = lst.SixBeamMethod(restruct_data, freq=freq, freq90=freq) 150 | >>> turb_data.var_comp_ds 151 | 152 | 153 | =========== 154 | Radar usage 155 | =========== 156 | 157 | .. image:: https://colab.research.google.com/assets/colab-badge.svg 158 | :target: https://colab.research.google.com/github/jdiasn/lidarwind/blob/main/docs/examples/lidarwind_4_rpg_radar_serial.ipynb 159 | 160 | .. _rpg_usage: 161 | 162 | .. note:: 163 | Currently, lidarwind only supports the original RPG PPI data format. 164 | 165 | Here, you will find a basic example of using lidarwind to retrieve wind profiles from the RPG radar data. The cell below starts by importing the required modules, and then the wind retrieval is applied to the dataset. 166 | 167 | If you want to try it in a live virtual environment using real data, click on the Colab badge above. Be aware that you will need a Google account to use the Colab environment. In case you prefer not to use the Colab environment, you can see the same example in a static form at `RPG example `_. 
168 | 169 | .. code-block:: python 170 | 171 | >>> import xarray as xr 172 | >>> 173 | >>> from lidarwind.preprocessing import rpg_radar 174 | >>> from lidarwind.postprocessing import post_rpg_radar 175 | >>> 176 | >>> ds = xr.open_dataset(file_name) 177 | >>> ds = rpg_radar.rpg_slanted_radial_velocity_4_fft(ds) 178 | >>> tmp_wind = post_rpg_radar.get_horizontal_wind(ds) 179 | -------------------------------------------------------------------------------- /lidarwind/filters.py: -------------------------------------------------------------------------------- 1 | """Module for keep all filtering functionalities 2 | 3 | """ 4 | import logging 5 | 6 | import numpy as np 7 | import pandas as pd 8 | import xarray as xr 9 | 10 | module_logger = logging.getLogger("lidarwind.filters") 11 | 12 | 13 | def filter_status(ds: xr.Dataset): 14 | """Filter dataset based on WindCube's software 15 | 16 | Parameters 17 | ---------- 18 | ds: xr.Dataset 19 | Dataset with LIDAR records 20 | 21 | Returns 22 | ------- 23 | 24 | xr.Dataset: 25 | A Dataset with records where status was true (1) 26 | """ 27 | if "radial_wind_speed_status90" not in ds: 28 | module_logger.error( 29 | "filter_status() requires radial_wind_speed_status90" 30 | ) 31 | raise ValueError 32 | 33 | return ds.where(ds.radial_wind_speed_status90 == 1) 34 | 35 | 36 | def filter_snr(ds: xr.Dataset, snr: float): 37 | if "cnr90" not in ds: 38 | module_logger.error("filter_snr() requires cnr90") 39 | raise ValueError 40 | 41 | return ds.where(ds.cnr90 > snr) 42 | 43 | 44 | class Filtering: 45 | """SNR and Status filter 46 | 47 | It uses the carrier to noise ratio (SNR) and status 48 | variables available in the WindCube's data. 
It is 49 | similar to the filter described in the manual 50 | 51 | Parameters 52 | ---------- 53 | data : xrarray.Dataset 54 | Dataset containing the original WindCube's data 55 | 56 | """ 57 | 58 | def __init__(self, data): 59 | 60 | self.data = data 61 | 62 | def get_vertical_obs_comp(self, variable, snr=False, status=True): 63 | """Vertical data filter 64 | 65 | It uses the SNR and status variables to filter out 66 | the data from vertical observations 67 | 68 | Parameters 69 | ---------- 70 | variable : str 71 | name of the variable that will be filtered 72 | 73 | snr : bool, int, optional 74 | if an interger is given it is used to 75 | as threshold to filter the data based on 76 | the signal to noise ratio 77 | 78 | status : bool, optional 79 | if true it filters the data using the status 80 | variable generated by the WindCube's software 81 | 82 | Returns 83 | ------- 84 | tmp_data : xarray.DataSet 85 | an instance of the variable filtered using 86 | SNR or status variable 87 | 88 | """ 89 | 90 | tmp_data = self.data[variable] 91 | 92 | if status: 93 | tmp_data = tmp_data.where( 94 | self.data.radial_wind_speed_status90 == 1 95 | ) 96 | 97 | if snr is not False: 98 | tmp_data = tmp_data.where(self.data.cnr90 > snr) 99 | 100 | tmp_data = tmp_data.where(self.data.elevation == 90, drop=True) 101 | 102 | return tmp_data 103 | 104 | def get_radial_obs_comp(self, variable, azm, snr=False, status=True): 105 | """Slanted data filter 106 | 107 | It uses the SNR and status variables to filter out 108 | the data from slanted observations 109 | 110 | Parameters 111 | ---------- 112 | variable : str 113 | name of the variable that will be filtered 114 | 115 | snr : bool, int, optional 116 | if an interger is given it is used to 117 | as threshold to filter the data based on 118 | the signal to noise ratio 119 | 120 | status : bool, optional 121 | if true it filters the data using the status 122 | variable generated by the WindCube's software 123 | 124 | Returns 125 | 
------- 126 | tmpaData : xarray.DataSet 127 | 128 | an instance of the variable filtered using 129 | SNR or status variable 130 | 131 | """ 132 | 133 | tmp_data = self.data[variable] 134 | 135 | if status: 136 | tmp_data = tmp_data.where(self.data.radial_wind_speed_status == 1) 137 | 138 | if snr is not False: 139 | tmp_data = tmp_data.where(self.data.cnr > snr) 140 | 141 | tmp_data = tmp_data.where( 142 | (self.data.elevation != 90) & (self.data.azimuth == azm), drop=True 143 | ) 144 | 145 | return tmp_data 146 | 147 | 148 | # it removes the STE below cloud layer 149 | class SecondTripEchoFilter: 150 | """Boundary layer second trip echoes filter 151 | 152 | This filter minimises the presence of second 153 | trip echoes (STE). This filter is based on the 154 | standard deviation of the anomaly of the observaions. 155 | It is applicable in regions where there is a contrast 156 | between the real data and the STE. 157 | 158 | Parameters 159 | ---------- 160 | data : object 161 | the object returned from the GetRestructuredData 162 | 163 | timeCloudMaks : xarray.DataArray 164 | it is a time series for indicating the presence 165 | of clouds above the maximum WinCube range. 166 | 1 indicates cloud and 0 indicates no cloud. 167 | (THIS MAKS IS NOT NEEDED NOW) 168 | 169 | n_prof : int 170 | number of profiles used to calculating the anomaly 171 | 172 | center : bool, optional 173 | it defines how the mean value for the anomaly will 174 | be calculated 175 | 176 | min_periods : int 177 | minimum number of profiles used for calculating the 178 | mean value 179 | 180 | n_std : int 181 | Multiplication factor for defining the size of the 182 | window to keep the data. 
The filter removes any 183 | anomaly larger than n_std * std 184 | 185 | str_h : str 186 | starting hour for calculating the anomaly 187 | 188 | end_h : str 189 | end hour for calculating the anomaly 190 | 191 | Returns 192 | ------- 193 | object : object 194 | an object containing data filtered for STE 195 | 196 | """ 197 | 198 | def __init__( 199 | self, 200 | data, 201 | n_prof=500, 202 | center=True, 203 | min_periods=30, 204 | n_std=2, 205 | str_h="09", 206 | end_h="16", 207 | ): 208 | 209 | self.lidar = data 210 | # self.time_cloud_mask = time_cloud_mask 211 | self.n_prof = n_prof 212 | self.center = center 213 | self.min_periods = min_periods 214 | self.n_std = n_std 215 | 216 | self.get_time_edges(str_h=str_h, end_h=end_h) 217 | self.cal_mean_and_anom_slant() 218 | self.cal_mean_and_anom_90() 219 | self.cleaning() 220 | self.cleaning90() 221 | 222 | def get_time_edges(self, str_h="09", end_h="16"): 223 | """ 224 | It creates the time boundaries for the STD anomaly calculation 225 | 226 | Parameters 227 | ---------- 228 | str_h : str 229 | starting hour for calculating the anomaly 230 | 231 | end_h : str 232 | end hour for calculating the anomaly 233 | 234 | """ 235 | sel_time = pd.to_datetime(self.lidar.data_transf.time.values[0]) 236 | sel_time = sel_time.strftime("%Y%m%d") 237 | self.start_time = pd.to_datetime(f"{sel_time} {str_h}") 238 | self.end_time = pd.to_datetime(f"{sel_time} {end_h}") 239 | 240 | def cal_mean_and_anom_slant(self): 241 | """ 242 | It calculates the anomaly from the slanted observations 243 | """ 244 | 245 | # slanted beam 246 | tmp_sel_data = self.lidar.data_transf 247 | 248 | self.data_mean = tmp_sel_data.rolling( 249 | time=self.n_prof, center=self.center, min_periods=self.min_periods 250 | ).mean() 251 | 252 | self.data_anom = self.lidar.data_transf - self.data_mean 253 | 254 | def cal_mean_and_anom_90(self): 255 | """ 256 | It calculates the anomaly from the vertical observations 257 | """ 258 | 259 | # vertical beam 260 | 
tmp_sel_data_90 = self.lidar.data_transf_90 261 | 262 | self.data_mean90 = tmp_sel_data_90.rolling( 263 | time=self.n_prof, center=self.center, min_periods=self.min_periods 264 | ).mean() 265 | 266 | self.data_anom_90 = self.lidar.data_transf_90 - self.data_mean90 267 | 268 | def cleaning(self): 269 | """ 270 | It removes the data that is larger than the n_std * anomaly 271 | from the slanted observations 272 | """ 273 | 274 | tmp_anom = self.data_anom.where( 275 | (self.lidar.data_transf.time > self.start_time) 276 | & (self.lidar.data_transf.time < self.end_time) 277 | ) 278 | 279 | anom_std = tmp_anom.std(dim=["time", "range", "elv"]) 280 | 281 | tmp_clean_data = self.lidar.data_transf.copy() 282 | tmp_clean_data = tmp_clean_data.where( 283 | np.abs(self.data_anom) < self.n_std * anom_std 284 | ) 285 | self.lidar.data_transf.values = tmp_clean_data.values 286 | 287 | def cleaning90(self): 288 | """ 289 | It removes the data that is larger than the n_std * anomaly 290 | from the vertical observations 291 | """ 292 | 293 | tmp_anom = self.data_anom_90.where( 294 | (self.lidar.data_transf_90.time > self.start_time) 295 | & (self.lidar.data_transf_90.time < self.end_time) 296 | ) 297 | 298 | anom_std = tmp_anom.std(dim=["time", "range90"]) 299 | 300 | tmp_clean_data = self.lidar.data_transf_90.copy() 301 | tmp_clean_data = tmp_clean_data.where( 302 | np.abs(self.data_anom_90) < self.n_std * anom_std 303 | ) 304 | 305 | self.lidar.data_transf_90.values = tmp_clean_data.values 306 | 307 | 308 | # it removes STE and clouds contamination 309 | # from above the aerosol loaded region 310 | class WindCubeCloudRemoval: 311 | """Above boundary layer second trip echoes filter 312 | 313 | This filter reduces the second trip echoes contamination 314 | and clouds from regions above the boundary layer. It 315 | requires information from the ceilometer. 
316 | IT IS STILL EXPERIMENTAL 317 | 318 | Parameters 319 | ---------- 320 | ceilo : xarray.Dataset 321 | A dataset of the CHM15k Nimbus ceilometer observations. 322 | 323 | lidar : xarray.Dataset 324 | An instance of the re-structured WindCube dataset 325 | 326 | Returns 327 | ------- 328 | object : object 329 | an object containing an instance of the noise 330 | height interface and the re-structured dataset 331 | filtered for STE and clouds. 332 | 333 | """ 334 | 335 | def __init__(self, ceilo, lidar=None): 336 | 337 | self.lidar = lidar 338 | self.ceilo = ceilo 339 | 340 | self.get_noise_free_beta() 341 | self.get_height_interface() 342 | 343 | if lidar is not None: 344 | self.get_interp_interf_height() 345 | self.remove_cloud() 346 | 347 | def get_noise_free_beta(self): 348 | """ 349 | It removes the noise from the backscattered signal 350 | """ 351 | positive_beta = self.ceilo.beta_raw.where(self.ceilo.beta_raw > 0) 352 | 353 | positive_beta = positive_beta.rolling( 354 | range=10, center=True, min_periods=10 355 | ).mean() 356 | positive_beta = positive_beta.rolling( 357 | time=15, center=True, min_periods=15 358 | ).mean() 359 | 360 | self.noise_free_beta = positive_beta 361 | 362 | return self 363 | 364 | def get_height_interface(self): 365 | """ 366 | It identifies the height of the separation between 367 | the noise and the non-noise data from the ceilometer 368 | backscattered signal 369 | """ 370 | positive_beta = self.ceilo.beta_raw.where(self.ceilo.beta_raw > 0) 371 | 372 | tmp_ceilo_hgt = positive_beta.copy() 373 | tmp_values = tmp_ceilo_hgt.values 374 | tmp_values[np.isfinite(tmp_values)] = 1 375 | tmp_ceilo_hgt.values = tmp_values 376 | tmp_ceilo_hgt = tmp_ceilo_hgt * self.ceilo.range 377 | 378 | lowest_beta = self.noise_free_beta.where(tmp_ceilo_hgt < 4e3) 379 | 380 | tmp_ceilo_hgt = tmp_ceilo_hgt.where(np.isfinite(lowest_beta)) 381 | 382 | self.interf_height = tmp_ceilo_hgt.max(dim="range") 383 | self.interf_height = self.interf_height.rolling( 
384 | time=7, center=True, min_periods=5 385 | ).mean() 386 | 387 | return self 388 | 389 | def get_interp_interf_height(self): 390 | """ 391 | It interpolates the noise height interface to the same 392 | temporal resolution from the windcube data 393 | """ 394 | self.interp_interf_height = self.interf_height.interp( 395 | time=self.lidar.data_transf.time 396 | ) 397 | self.interp_interf_height_90 = self.interf_height.interp( 398 | time=self.lidar.data_transf_90.time 399 | ) 400 | 401 | return self 402 | 403 | def remove_cloud(self): 404 | """ 405 | It removes from the windcube's observation all 406 | data above the noise height interface 407 | """ 408 | 409 | tmp_height = self.lidar.data_transf.copy() 410 | tmp_values = tmp_height.values 411 | tmp_values[np.isfinite(tmp_values)] = 1 412 | tmp_height.values = tmp_values 413 | tmp_height = tmp_height * self.lidar.data_transf.range 414 | self.lidar.data_transf = self.lidar.data_transf.where( 415 | tmp_height < self.interp_interf_height 416 | ) 417 | 418 | tmp_height = self.lidar.data_transf_90.copy() 419 | tmp_values = tmp_height.values 420 | tmp_values[np.isfinite(tmp_values)] = 1 421 | tmp_height.values = tmp_values 422 | tmp_height = tmp_height * self.lidar.data_transf_90.range90 423 | self.lidar.data_transf_90 = self.lidar.data_transf_90.where( 424 | tmp_height < self.interp_interf_height_90 425 | ) 426 | self.lidar.relative_beta90 = self.lidar.relative_beta90.where( 427 | tmp_height < self.interp_interf_height_90 428 | ) 429 | 430 | return self 431 | --------------------------------------------------------------------------------