├── .github └── ISSUE_TEMPLATE.md ├── .gitignore ├── .readthedocs.yaml ├── AUTHORS.rst ├── CONTRIBUTING.rst ├── HISTORY.rst ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.rst ├── docs ├── Makefile ├── authors.rst ├── conf.py ├── contributing.rst ├── figures │ ├── plot_climatology.png │ ├── plot_step.png │ ├── plot_tracks.png │ └── readme.gif ├── history.rst ├── index.rst ├── installation.rst ├── make.bat ├── modules.rst └── tutorial.rst ├── environment.yml ├── pyproject.toml ├── requirements_dev.txt ├── setup.cfg ├── setup.py ├── tests ├── __init__.py ├── data │ └── demo_data.nc └── test_wavebreaking.py ├── tox.ini └── wavebreaking ├── __init__.py ├── indices ├── __init__.py ├── contour_index.py ├── cutoff_index.py ├── overturning_index.py └── streamer_index.py ├── processing ├── __init__.py ├── events.py ├── plots.py └── spatial.py └── utils ├── __init__.py ├── data_utils.py ├── index_utils.py └── plot_utils.py /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | * WaveBreaking version: 2 | * Python version: 3 | * Operating System: 4 | 5 | ### Description 6 | 7 | Describe what you were trying to get done. 8 | Tell us what happened, what went wrong, and what you expected to happen. 9 | 10 | ### What I Did 11 | 12 | ``` 13 | Paste the command(s) you ran and the output. 14 | If there was a crash, please include the traceback here. 15 | ``` 16 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 |
58 | # Flask stuff:
59 | instance/
60 | .webassets-cache
61 |
62 | # Scrapy stuff:
63 | .scrapy
64 |
65 | # Sphinx documentation
66 | docs/_build/
67 |
68 | # PyBuilder
69 | target/
70 |
71 | # Jupyter Notebook
72 | .ipynb_checkpoints
73 |
74 | # pyenv
75 | .python-version
76 |
77 | # celery beat schedule file
78 | celerybeat-schedule
79 |
80 | # SageMath parsed files
81 | *.sage.py
82 |
83 | # dotenv
84 | .env
85 |
86 | # virtualenv
87 | .venv
88 | venv/
89 | ENV/
90 |
91 | # Spyder project settings
92 | .spyderproject
93 | .spyproject
94 |
95 | # Rope project settings
96 | .ropeproject
97 |
98 | # mkdocs documentation
99 | /site
100 |
101 | # mypy
102 | .mypy_cache/
103 |
104 | # IDE settings
105 | .vscode/
106 | .idea/
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yaml
2 | # Read the Docs configuration file
3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 |
5 | # Required
6 | version: 2
7 |
8 | build:
9 |   os: "ubuntu-20.04"
10 |   tools:
11 |     python: "mambaforge-4.10"
12 |
13 | conda:
14 |   environment: environment.yml
15 |
16 | # Build documentation in the docs/ directory with Sphinx
17 | sphinx:
18 |   configuration: docs/conf.py
19 |
--------------------------------------------------------------------------------
/AUTHORS.rst:
--------------------------------------------------------------------------------
1 | Credits
2 | -------
3 |
4 | * The installation guide is to some extent based on the `ConTrack - Contour Tracking `_ tool developed by `Daniel Steinfeld `_.
5 |
6 | * This package was created with Cookiecutter_ and the `audreyr/cookiecutter-pypackage`_ project template.
7 |
8 | .. _Cookiecutter: https://github.com/audreyr/cookiecutter
9 | .. _`audreyr/cookiecutter-pypackage`: https://github.com/audreyr/cookiecutter-pypackage
--------------------------------------------------------------------------------
/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
1 | .. highlight:: shell
2 |
3 | ============
4 | Contributing
5 | ============
6 |
7 | Contributions are welcome, and they are greatly appreciated! Every little bit
8 | helps, and credit will always be given.
9 |
10 | You can contribute in many ways:
11 |
12 | Types of Contributions
13 | ----------------------
14 |
15 | Report Bugs
16 | ~~~~~~~~~~~
17 |
18 | Report bugs at https://github.com/skaderli/wavebreaking/issues.
19 |
20 | If you are reporting a bug, please include:
21 |
22 | * Your operating system name and version.
23 | * Any details about your local setup that might be helpful in troubleshooting.
24 | * Detailed steps to reproduce the bug.
25 |
26 | Fix Bugs
27 | ~~~~~~~~
28 |
29 | Look through the GitHub issues for bugs. Anything tagged with "bug" and "help
30 | wanted" is open to whoever wants to implement it.
31 |
32 | Implement Features
33 | ~~~~~~~~~~~~~~~~~~
34 |
35 | Look through the GitHub issues for features. Anything tagged with "enhancement"
36 | and "help wanted" is open to whoever wants to implement it.
37 |
38 | Write Documentation
39 | ~~~~~~~~~~~~~~~~~~~
40 |
41 | WaveBreaking could always use more documentation, whether as part of the
42 | official WaveBreaking docs, in docstrings, or even on the web in blog posts,
43 | articles, and such.
44 |
45 | Submit Feedback
46 | ~~~~~~~~~~~~~~~
47 |
48 | The best way to send feedback is to file an issue at https://github.com/skaderli/wavebreaking/issues.
49 |
50 | If you are proposing a feature:
51 |
52 | * Explain in detail how it would work.
53 | * Keep the scope as narrow as possible, to make it easier to implement.
54 |
55 | Get Started!
56 | ------------
57 |
58 | Ready to contribute? Here's how to set up `wavebreaking` for local development.
59 |
60 | 1. Fork the `wavebreaking` repo on GitHub.
61 | 2. Clone your fork locally::
62 |
63 |     $ git clone git@github.com:your_name_here/wavebreaking.git
64 |
65 | 3. Install your local copy by following the `installation guide `_.
66 |
67 | 4. Create a branch for local development::
68 |
69 |     $ git checkout -b name-of-your-bugfix-or-feature
70 |
71 |    Now you can make your changes locally.
72 |
73 | 5. When you're done making changes, check that your changes pass flake8 and the
74 |    tests::
75 |
76 |     $ flake8 wavebreaking tests
77 |     $ python -m unittest tests.test_wavebreaking
78 |
79 |    To get flake8 and tox, just pip install them into your virtual environment.
80 |
81 | 6. Commit your changes and push your branch to GitHub::
82 |
83 |     $ git add .
84 |     $ git commit -m "Your detailed description of your changes."
85 |     $ git push origin name-of-your-bugfix-or-feature
86 |
87 | 7. Submit a pull request through the GitHub website.
88 |
89 | Pull Request Guidelines
90 | -----------------------
91 |
92 | Before you submit a pull request, check that it meets these guidelines:
93 |
94 | 1. The pull request should include tests.
95 | 2. If the pull request adds functionality, the docs should be updated. Put
96 |    your new functionality into a function with a docstring, and add the
97 |    feature to the list in README.rst.
--------------------------------------------------------------------------------
/HISTORY.rst:
--------------------------------------------------------------------------------
1 | =======
2 | History
3 | =======
4 |
5 | 0.3.0 (2023-05-08)
6 | ------------------
7 |
8 | * Update coordinate transformation to geometry intersections
9 | * Update date handling from string to numpy.datetime64
10 | * Fix problem related to parameters in degrees east/north
11 | * Add new tracking method (by_radius)
12 | * Add check for too many dimensions
13 | * Update smoothing from wrf-python to ndimage.convolve
14 | * Minor fixes
15 |
16 |
17 | 0.2.0 (2023-04-27)
18 | ------------------
19 |
20 | * 2nd pre-release
21 | * add support of several contour levels
22 | * add support for both hemispheres
23 | * minor fixes
24 |
25 |
26 | 0.1.0 (2023-04-24)
27 | ------------------
28 |
29 | * Pre-release on GitHub.
30 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023, Severin Kaderli 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include AUTHORS.rst 2 | include CONTRIBUTING.rst 3 | include HISTORY.rst 4 | include LICENSE 5 | include README.rst 6 | 7 | recursive-include tests * 8 | recursive-exclude * __pycache__ 9 | recursive-exclude * *.py[co] 10 | 11 | recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif 12 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean clean-build clean-pyc clean-test coverage dist docs help install lint lint/flake8 2 | .DEFAULT_GOAL := help 3 | 4 | define BROWSER_PYSCRIPT 5 | import os, webbrowser, sys 6 | 7 | from urllib.request import pathname2url 8 | 9 | webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1]))) 10 | endef 11 | export BROWSER_PYSCRIPT 12 | 13 | define PRINT_HELP_PYSCRIPT 14 | import re, sys 15 | 16 | for line in sys.stdin: 17 | match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line) 18 | if match: 19 | target, help = match.groups() 20 | print("%-20s %s" % (target, help)) 21 | endef 22 | export PRINT_HELP_PYSCRIPT 23 | 24 | BROWSER := python -c "$$BROWSER_PYSCRIPT" 25 | 26 | help: 27 | @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) 28 | 29 | clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts 30 | 31 | clean-build: ## remove build artifacts 32 | rm -fr build/ 33 | rm -fr dist/ 34 | rm -fr .eggs/ 35 | find . -name '*.egg-info' -exec rm -fr {} + 36 | find . -name '*.egg' -exec rm -f {} + 37 | 38 | clean-pyc: ## remove Python file artifacts 39 | find . -name '*.pyc' -exec rm -f {} + 40 | find . -name '*.pyo' -exec rm -f {} + 41 | find . -name '*~' -exec rm -f {} + 42 | find . 
-name '__pycache__' -exec rm -fr {} +
43 |
44 | clean-test: ## remove test and coverage artifacts
45 | 	rm -fr .tox/
46 | 	rm -f .coverage
47 | 	rm -fr htmlcov/
48 | 	rm -fr .pytest_cache
49 |
50 | lint/flake8: ## check style with flake8
51 | 	flake8 wavebreaking tests
52 |
53 | lint: lint/flake8 ## check style
54 |
55 | test: ## run tests quickly with the default Python
56 | 	pytest
57 |
58 | test-all: ## run tests on every Python version with tox
59 | 	tox
60 |
61 | coverage: ## check code coverage quickly with the default Python
62 | 	coverage run --source wavebreaking -m pytest
63 | 	coverage report -m
64 | 	coverage html
65 | 	$(BROWSER) htmlcov/index.html
66 |
67 | docs: ## generate Sphinx HTML documentation, including API docs
68 | 	rm -f docs/wavebreaking.rst
69 | 	rm -f docs/modules.rst
70 | 	sphinx-apidoc -o docs/ wavebreaking
71 | 	$(MAKE) -C docs clean
72 | 	$(MAKE) -C docs html
73 | 	$(BROWSER) docs/_build/html/index.html
74 |
75 | servedocs: docs ## compile the docs watching for changes
76 | 	watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D .
77 |
78 | release: dist ## package and upload a release
79 | 	twine upload dist/*
80 |
81 | dist: clean ## builds source and wheel package
82 | 	python setup.py sdist
83 | 	python setup.py bdist_wheel
84 | 	ls -l dist
85 |
86 | install: clean ## install the package to the active Python's site-packages
87 | 	python setup.py install
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | .. image:: https://img.shields.io/pypi/v/wavebreaking.svg
2 |     :target: https://pypi.python.org/pypi/wavebreaking
3 |
4 | .. image:: https://img.shields.io/github/license/skaderli/wavebreaking
5 |     :target: https://github.com/skaderli/wavebreaking/blob/master/LICENSE
6 |     :alt: License
7 |
8 | .. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.14214463.svg
9 |     :target: https://doi.org/10.5281/zenodo.14214463
10 |
11 | .. image:: https://readthedocs.org/projects/wavebreaking/badge/?version=latest
12 |     :target: https://wavebreaking.readthedocs.io/en/latest/?version=latest
13 |     :alt: Documentation Status
14 |
15 | .. image:: https://www.codefactor.io/repository/github/skaderli/wavebreaking/badge
16 |     :target: https://www.codefactor.io/repository/github/skaderli/wavebreaking
17 |     :alt: CodeFactor
18 |
19 | ====================================================================================
20 | WaveBreaking - Detection, Classification and Tracking of Rossby Wave Breaking
21 | ====================================================================================
22 |
23 | .. image:: https://raw.githubusercontent.com/skaderli/WaveBreaking/main/docs/figures/readme.gif
24 |     :alt: readme gif
25 |
26 | .. start_intro
27 |
28 | WaveBreaking is a Python package that provides detection, classification and tracking of Rossby Wave Breaking (RWB) in weather and climate data. The detection of RWB is based on analyzing the dynamical tropopause, represented by a closed contour line encircling the pole, for example the 2 Potential Vorticity Units (PVU) contour line in Potential Vorticity (PV) fields. By applying three different breaking indices, regions of RWB are identified and different characteristics of the events, such as area and intensity, are calculated. The event tracking provides information about the temporal evolution of the RWB events. Finally, the implemented plotting methods allow for a first visualization. This tool was developed during my master studies at the University of Bern.
29 |
30 | The detection of RWB is based on applying an RWB index to the dynamical tropopause. The WaveBreaking package provides three different RWB indices:
31 |
32 | * **Streamer Index:** The streamer index is based on work by `Wernli and Sprenger (2007)`_ (and `Sprenger et al. 2017`_). Streamers are elongated structures present on the contour line that represents the dynamical tropopause. They can be described by a pair of contour points that are close together in terms of their geographical distance but far apart in terms of the distance connecting them along the contour. Further description can be found in my `master thesis `_.
33 |
34 | * **Overturning Index:** The overturning index is based on work by `Barnes and Hartmann (2012)`_. This index identifies overturning structures of the contour line. An overturning of the contour line is present if the contour intersects the same longitude at least three times. Further description can be found in my `master thesis `_.
35 |
36 | * **Cutoff Index:** The cutoff index provides information about the decay of a wave breaking event. From a Potential Vorticity perspective, a wave breaking event is formed by an elongation of the 2 PVU contour line. These so-called streamers can elongate further until they separate from the main stratospheric or tropospheric body. The separated structure is referred to as a cutoff (`Wernli and Sprenger (2007)`_).
37 |
38 | .. _`Wernli and Sprenger (2007)`: https://journals.ametsoc.org/view/journals/atsc/64/5/jas3912.1.xml
39 | .. _`Sprenger et al. 2017`: https://journals.ametsoc.org/view/journals/bams/98/8/bams-d-15-00299.1.xml
40 | .. _`Barnes and Hartmann (2012)`: https://agupubs.onlinelibrary.wiley.com/doi/full/10.1029/2012JD017469
41 |
42 | The tool is designed to analyze gridded data provided as an `xarray.DataArray `_. Output is provided either in a `geopandas.GeoDataFrame `_ or in an `xarray.DataArray `_.
43 |
44 | Parts of the data setup functions and of the tracking function are based on the `ConTrack - Contour Tracking `_ tool developed by `Daniel Steinfeld `_.
45 |
46 | **Important information:**
47 |
48 | * Free software: MIT license
49 | * Further documentation about the implemented methods can be found in my `master thesis `_.
50 |
51 | **Referencing:**
52 |
53 | * Please cite WaveBreaking in your publication: Kaderli, S., 2023. WaveBreaking - Detection, Classification and Tracking of Rossby Wave Breaking. https://doi.org/10.5281/zenodo.14214463
54 | * If you are using the Streamer Index, please cite `Wernli and Sprenger (2007)`_ (and `Sprenger et al. 2017`_).
55 | * If you are using the Overturning Index, please cite `Barnes and Hartmann (2012)`_.
56 |
57 | .. end_intro
58 |
59 | .. start_installation
60 |
61 | Installation
62 | -------------
63 |
64 | Stable release
65 | ~~~~~~~~~~~~~~~
66 | To install WaveBreaking, run this command in your terminal:
67 |
68 | .. code-block::
69 |
70 |     pip install wavebreaking
71 |
72 | This is the preferred method to install WaveBreaking, as it will always install the most recent stable release.
73 | Your virtual environment is automatically checked for the necessary dependencies.
74 | After the installation, you can start calculating RWB events by following the tutorial below.
75 |
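To quickly verify the installation, you can, for example, print the installed version of the package (the ``__version__`` attribute is defined in the top-level ``wavebreaking/__init__.py``):

.. code-block::

    python -c "import wavebreaking; print(wavebreaking.__version__)"
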
76 | From sources
77 | ~~~~~~~~~~~~~
78 |
79 | The sources for WaveBreaking can be downloaded in two different ways. You can either install WaveBreaking directly from the GitHub repository:
80 |
81 | .. code-block::
82 |
83 |     pip install git+https://github.com/skaderli/WaveBreaking
84 |
85 | Or you can clone the GitHub repository first and then install WaveBreaking locally. For that, start with setting the working directory and cloning the repository:
86 |
87 | .. code-block::
88 |
89 |     git clone https://github.com/skaderli/WaveBreaking.git
90 |     cd /path/to/local/WaveBreaking
91 |
92 | Second, set up the conda environment and install the necessary dependencies (this may take some time):
93 |
94 | .. code-block::
95 |
96 |     conda create -y -n wb_env
97 |     conda env update -f environment.yml -n wb_env
98 |
99 | Now activate the environment and install the WaveBreaking package locally by using the developer mode "-e":
100 |
101 | .. code-block::
102 |
103 |     conda activate wb_env
104 |     pip install -e .
105 |
106 | To check if the installation was successful, perform some tests:
107 |
108 | .. code-block::
109 |
110 |     python -m unittest tests.test_wavebreaking
111 |
112 | .. end_installation
113 |
114 | .. start_tutorial_part1
115 |
116 | Tutorial
117 | ---------
118 |
119 | This tutorial shows how to calculate RWB events step by step. After successfully installing WaveBreaking, the module needs to be imported. Make sure that the Python kernel with the correct virtual environment (where WaveBreaking is installed) is running.
120 |
121 | .. code-block:: python
122 |
123 |     import wavebreaking as wb
124 |
125 | More information about the functions presented below can be found in the `documentation `_.
126 |
127 | Please note that the algorithm depends on the order of the spatial dimensions. Both the longitude and latitude dimensions should be in ascending order. Although the WaveBreaking tool identifies and adjusts descending coordinates, the dataset should be checked and adapted before starting the calculation to get the best performance.
128 |
129 | Data pre-processing:
130 | ~~~~~~~~~~~~~~~~~~~~~
131 |
132 | Optionally, the variable intended for the RWB calculations can be smoothed. The smoothing routine applies by default a 5-point smoothing (not diagonally) with a double-weighted center and an adjustable number of smoothing passes. Since the smoothing is based on the scipy.ndimage.convolve function, array-like weights and the mode for handling boundary values can be passed as arguments. This routine returns an xarray.DataArray with the variable "smooth_".
133 |
134 | .. code-block:: python
135 |
136 |     # read data
137 |     import xarray as xr
138 |     demo_data = xr.open_dataset("tests/data/demo_data.nc")
139 |
140 |     # smooth variable with 5 passes
141 |     import numpy as np
142 |     smoothed = wb.calculate_smoothed_field(data=demo_data.PV,
143 |                                            passes=5,
144 |                                            weights=np.array([[0, 1, 0], [1, 2, 1], [0, 1, 0]]),  # optional
145 |                                            mode="wrap")  # optional
146 |
147 | The wavebreaking module calculates the intensity for each identified event if an intensity field is provided. In my master thesis, the intensity is represented by the momentum flux derived from the product of the (daily) zonal deviations of both wind components. The routine creates an xarray.DataArray with the variable "mflux". More information can be found in my `master thesis `_.
148 |
149 | .. code-block:: python
150 |
151 |     # calculate momentum flux
152 |     mflux = wb.calculate_momentum_flux(u=demo_data.U,
153 |                                        v=demo_data.V)
154 |
155 |
156 | Contour calculation:
157 | ~~~~~~~~~~~~~~~~~~~~
158 |
159 | All RWB indices are based on a contour line representing the dynamical tropopause. The "calculate_contours()" function calculates the dynamical tropopause at the desired contour levels (commonly the 2 PVU level for Potential Vorticity). The function supports several contour levels at a time, which allows for processing data of both hemispheres at the same time (e.g., contour levels -2 and 2). The contour calculation is also included in the RWB index functions and doesn't need to be performed beforehand. However, you can also pass the contours directly to the index functions. This is especially useful if you want to perform the calculation of several indices at once.
160 |
161 | If the input field is periodic, the parameter "periodic_add" can be used to extend the field in the longitudinal direction (default 120 degrees) to correctly extract the contour at the date border. With "original_coordinates = False", array indices are returned (used for the index calculations) instead of original coordinates. The routine returns a geopandas.GeoDataFrame with a geometry column and some properties for each contour.
162 |
163 | .. code-block:: python
164 |
165 |     # calculate contours
166 |     contours = wb.calculate_contours(data=smoothed,
167 |                                      contour_levels=[-2, 2],
168 |                                      periodic_add=120,  # optional
169 |                                      original_coordinates=True)  # optional
170 |
171 |
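Since the contours are returned as a regular geopandas.GeoDataFrame, they can be inspected and filtered with standard pandas operations before being passed on to the index functions. A small sketch (the column names follow the package's contour output: date, level, closed, exp_lon, mean_lat and geometry):

.. code-block:: python

    # show the contour properties
    print(contours.head())

    # select, for example, only closed contours that span all longitudes
    closed_contours = contours[contours.closed & (contours.exp_lon == 360)]
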
172 | Index calculation:
173 | ~~~~~~~~~~~~~~~~~~~
174 |
175 | All three RWB indices perform the contour calculation before identifying the RWB events. If you pass the separately calculated contours, the contour calculation is skipped. For the streamer index, the default parameters are taken from `Wernli and Sprenger (2007)`_ (and `Sprenger et al. 2017`_) and for the overturning index from `Barnes and Hartmann (2012)`_. If the intensity is provided (momentum flux, see data pre-processing), it is calculated for each event. All index functions create a geopandas.GeoDataFrame with a geometry column and some properties for each event.
176 |
177 | .. code-block:: python
178 |
179 |     # calculate streamers
180 |     streamers = wb.calculate_streamers(data=smoothed,
181 |                                        contour_levels=[-2, 2],
182 |                                        contours=contours,  # optional
183 |                                        geo_dis=800,  # optional
184 |                                        cont_dis=1200,  # optional
185 |                                        intensity=mflux,  # optional
186 |                                        periodic_add=120)  # optional
187 |
188 | .. code-block:: python
189 |
190 |     # calculate overturnings
191 |     overturnings = wb.calculate_overturnings(data=smoothed,
192 |                                              contour_levels=[-2, 2],
193 |                                              contours=contours,  # optional
194 |                                              range_group=5,  # optional
195 |                                              min_exp=5,  # optional
196 |                                              intensity=mflux,  # optional
197 |                                              periodic_add=120)  # optional
198 |
199 | .. code-block:: python
200 |
201 |     # calculate cutoffs
202 |     cutoffs = wb.calculate_cutoffs(data=smoothed,
203 |                                    contour_levels=[-2, 2],
204 |                                    contours=contours,  # optional
205 |                                    min_exp=5,  # optional
206 |                                    intensity=mflux,  # optional
207 |                                    periodic_add=120)  # optional
208 |
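The detected events are likewise returned as a regular geopandas.GeoDataFrame, so they can be inspected and exported with standard geopandas functionality. A minimal sketch (converting the date column to string keeps the export independent of driver support for datetime columns):

.. code-block:: python

    # inspect the first events and their properties
    # (date, level, com, mean_var, intensity, event_area, geometry)
    print(streamers.head())

    # save the events, for example, as a GeoJSON file
    streamers.assign(date=streamers.date.astype(str)).to_file(
        "streamers.geojson", driver="GeoJSON"
    )
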
209 | Event classification:
210 | ~~~~~~~~~~~~~~~~~~~~~~
211 |
212 | The event classification is based on selecting the events of interest from the geopandas.GeoDataFrame provided by the index calculation functions.
213 |
214 | Some suggested classifications:
215 |
216 | .. code-block:: python
217 |
218 |     # stratospheric and tropospheric (only for streamers and cutoffs)
219 |     stratospheric = events[events.mean_var >= contour_level]
220 |     tropospheric = events[events.mean_var < contour_level]
221 |
222 |     # anticyclonic and cyclonic by intensity for the Northern Hemisphere
223 |     anticyclonic = events[events.intensity >= 0]
224 |     cyclonic = events[events.intensity < 0]
225 |
226 |     # anticyclonic and cyclonic by intensity for the Southern Hemisphere
227 |     anticyclonic = events[events.intensity <= 0]
228 |     cyclonic = events[events.intensity > 0]
229 |
230 |     # anticyclonic and cyclonic by orientation (only for overturning events)
231 |     anticyclonic = events[events.orientation == "anticyclonic"]
232 |     cyclonic = events[events.orientation == "cyclonic"]
233 |
234 |
235 | In addition, a subset of events with certain characteristics can be selected, e.g. the 10% largest events:
236 |
237 | .. code-block:: python
238 |
239 |     # 10 percent largest events
240 |     large = events[events.event_area >= events.event_area.quantile(0.9)]
241 |
242 |
243 | Transform to DataArray:
244 | ~~~~~~~~~~~~~~~~~~~~~~~
245 |
246 | To calculate and visualize the occurrence of RWB events, it comes in handy to transform the coordinates of the events into an xarray.DataArray. The "to_xarray" function flags every grid cell where an event is present with the value 1. Before the transformation, it is suggested to first classify the events and use, for example, only stratospheric events.
247 |
248 | .. code-block:: python
249 |
250 |     # classify events
251 |     stratospheric = streamers[streamers.mean_var.abs() >= 2]
252 |
253 |     # transform to xarray.DataArray
254 |     flag_array = wb.to_xarray(data=smoothed,
255 |                               events=stratospheric)
256 |
257 |
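With the flagged xarray.DataArray, occurrence statistics can be derived directly with standard xarray operations. A small sketch (since the grid cells are flagged with 1, a temporal mean yields the occurrence frequency):

.. code-block:: python

    # occurrence frequency per grid cell in percent of all time steps
    frequency = flag_array.mean(dim="time") * 100

    # the same restricted to one season, e.g. winter (DJF)
    winter = flag_array.sel(time=flag_array.time.dt.season == "DJF")
    winter_frequency = winter.mean(dim="time") * 100
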
258 | Visualization:
259 | ~~~~~~~~~~~~~~~
260 |
261 | WaveBreaking provides two options to do a first visual analysis of the output. Both options are based on the xarray.DataArray with the flagged grid cells from the "to_xarray" function.
262 |
263 | To analyze a specific large-scale situation, the RWB events at a single time step can be plotted:
264 |
265 | .. code-block:: python
266 |
267 |     # import cartopy for projection
268 |     import cartopy.crs as ccrs
269 |
270 |     wb.plot_step(flag_data=flag_array,
271 |                  step="1959-06-05T06",  # index or date
272 |                  data=smoothed,  # optional
273 |                  contour_level=[-2, 2],  # optional
274 |                  proj=ccrs.PlateCarree(),  # optional
275 |                  size=(12,8),  # optional
276 |                  periodic=True,  # optional
277 |                  labels=True,  # optional
278 |                  levels=None,  # optional
279 |                  cmap="Blues",  # optional
280 |                  color_events="gold",  # optional
281 |                  title="")  # optional
282 |
283 | .. end_tutorial_part1
284 |
285 | .. image:: https://raw.githubusercontent.com/skaderli/WaveBreaking/main/docs/figures/plot_step.png
286 |     :alt: plot step
287 |
288 | .. start_tutorial_part2
289 |
290 | To analyze Rossby wave breaking from a climatological perspective, the occurrence (for specific seasons) can be plotted:
291 |
292 | .. code-block:: python
293 |
294 |     wb.plot_clim(flag_data=flag_array,
295 |                  seasons=None,  # optional
296 |                  proj=ccrs.PlateCarree(),  # optional
297 |                  size=(12,8),  # optional
298 |                  smooth_passes=0,  # optional
299 |                  periodic=True,  # optional
300 |                  labels=True,  # optional
301 |                  levels=None,  # optional
302 |                  cmap=None,  # optional
303 |                  title="")  # optional
304 |
305 | .. end_tutorial_part2
306 |
307 | .. image:: https://raw.githubusercontent.com/skaderli/WaveBreaking/main/docs/figures/plot_climatology.png
308 |     :alt: plot climatology
309 |
310 | .. start_tutorial_part3
311 |
312 | Event tracking:
313 | ~~~~~~~~~~~~~~~~
314 |
315 | Last but not least, WaveBreaking provides a routine to track events over time. Besides the time range of the temporal tracking, two methods for defining the spatial coherence are available. Events receive the same label if they either spatially overlap (method "by_overlap") or if their centres of mass lie within a certain radius (method "by_radius"). Again, it is suggested to first classify the events and use, for example, only stratospheric events. This routine adds a column "label" to the events geopandas.GeoDataFrame.
316 |
317 | .. code-block:: python
318 |
319 |     # classify events
320 |     anticyclonic = overturnings[overturnings.orientation == "anticyclonic"]
321 |
322 |     # track events
323 |     tracked = wb.track_events(events=anticyclonic,
324 |                               time_range=6,  # time range for temporal tracking in hours
325 |                               method="by_overlap",  # method for tracking ["by_overlap", "by_radius"], optional
326 |                               buffer=0,  # buffer in degrees for polygons overlapping, optional
327 |                               overlap=0,  # minimum overlap percentage, optional
328 |                               distance=1000)  # radius in km for method "by_radius"
329 |
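Since the tracking only adds a "label" column, the resulting tracks can be analyzed with ordinary pandas operations. For example, a sketch for selecting long-lived events (the threshold of 4 time steps is an arbitrary choice for illustration):

.. code-block:: python

    # number of time steps per tracked event
    track_length = tracked.groupby("label").size()

    # keep only events that persist for at least 4 time steps
    long_lived = tracked[tracked.label.isin(track_length[track_length >= 4].index)]
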
330 | The result can be visualized by plotting the paths of the tracked events:
331 |
332 | .. code-block:: python
333 |
334 |     wb.plot_tracks(data=smoothed,
335 |                    events=tracked,
336 |                    proj=ccrs.PlateCarree(),  # optional
337 |                    size=(12,8),  # optional
338 |                    min_path=0,  # optional
339 |                    plot_events=True,  # optional
340 |                    labels=True,  # optional
341 |                    title="")  # optional
342 |
343 |
344 | .. end_tutorial_part3
345 |
346 | .. image:: https://raw.githubusercontent.com/skaderli/WaveBreaking/main/docs/figures/plot_tracks.png
347 |     :alt: plot tracks
348 |
349 | Credits
350 | ---------
351 |
352 | * The installation guide is to some extent based on the `ConTrack - Contour Tracking `_ tool developed by `Daniel Steinfeld `_.
353 |
354 | * This package was created with Cookiecutter_ and the `audreyr/cookiecutter-pypackage`_ project template.
355 |
356 | .. _Cookiecutter: https://github.com/audreyr/cookiecutter
357 | .. _`audreyr/cookiecutter-pypackage`: https://github.com/audreyr/cookiecutter-pypackage
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS    =
6 | SPHINXBUILD   = python -msphinx
7 | SPHINXPROJ    = wavebreaking
8 | SOURCEDIR     = .
9 | BUILDDIR      = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | 	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | 	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/docs/authors.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../AUTHORS.rst
2 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | #
3 | # wavebreaking documentation build configuration file, created by
4 | # sphinx-quickstart on Fri Jun 9 13:47:02 2017.
5 | #
6 | # This file is execfile()d with the current directory set to its
7 | # containing dir.
8 | #
9 | # Note that not all possible configuration values are present in this
10 | # autogenerated file.
11 | #
12 | # All configuration values have a default; values that are commented out
13 | # serve to show the default.
14 |
15 | # If extensions (or modules to document with autodoc) are in another
16 | # directory, add these directories to sys.path here. If the directory is
17 | # relative to the documentation root, use os.path.abspath to make it
18 | # absolute, like shown here.
19 | #
20 | import os
21 | import sys
22 | sys.path.insert(0, os.path.abspath('..'))
23 |
24 | import sphinx_rtd_theme
25 | import wavebreaking
26 |
27 | # -- General configuration ---------------------------------------------
28 |
29 | # If your documentation needs a minimal Sphinx version, state it here.
30 | #
31 | # needs_sphinx = '1.0'
32 |
33 | # Add any Sphinx extension module names here, as strings. They can be
34 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
35 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
36 |
37 | # Add any paths that contain templates here, relative to this directory.
38 | templates_path = ['_templates']
39 |
40 | # The suffix(es) of source filenames.
41 | # You can specify multiple suffixes as a list of strings:
42 | #
43 | # source_suffix = ['.rst', '.md']
44 | source_suffix = '.rst'
45 |
46 | # The master toctree document.
47 | master_doc = 'index'
48 |
49 | # General information about the project.
50 | project = 'WaveBreaking'
51 | copyright = "2023, Severin Kaderli"
52 | author = "Severin Kaderli"
53 |
54 | # The version info for the project you're documenting, acts as replacement
55 | # for |version| and |release|, also used in various other places throughout
56 | # the built documents.
57 | #
58 | # The short X.Y version.
59 | version = wavebreaking.__version__
60 | # The full version, including alpha/beta/rc tags.
61 | release = wavebreaking.__version__
62 |
63 | # The language for content autogenerated by Sphinx. Refer to documentation
64 | # for a list of supported languages.
65 | #
66 | # This is also used if you do content translation via gettext catalogs.
67 | # Usually you set "language" from the command line for these cases.
68 | language = "en"
69 |
70 | # List of patterns, relative to source directory, that match files and
71 | # directories to ignore when looking for source files.
72 | # These patterns also affect html_static_path and html_extra_path.
73 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
74 |
75 | # The name of the Pygments (syntax highlighting) style to use.
76 | pygments_style = 'sphinx'
77 |
78 | # If true, `todo` and `todoList` produce output, else they produce nothing.
79 | todo_include_todos = False
80 |
81 |
82 | # -- Options for HTML output -------------------------------------------
83 |
84 | # The theme to use for HTML and HTML Help pages.  See the documentation for
85 | # a list of builtin themes.
86 | # 87 | html_theme = "sphinx_rtd_theme" 88 | 89 | # Theme options are theme-specific and customize the look and feel of a 90 | # theme further. For a list of options available for each theme, see the 91 | # documentation. 92 | # 93 | # html_theme_options = {} 94 | 95 | # Add any paths that contain custom static files (such as style sheets) here, 96 | # relative to this directory. They are copied after the builtin static files, 97 | # so a file named "default.css" will overwrite the builtin "default.css". 98 | html_static_path = ['_static'] 99 | 100 | 101 | # -- Options for HTMLHelp output --------------------------------------- 102 | 103 | # Output file base name for HTML help builder. 104 | htmlhelp_basename = 'wavebreakingdoc' 105 | 106 | # autodoc options (manually added) 107 | autodoc_member_order = "bysource" 108 | 109 | 110 | # -- Options for LaTeX output ------------------------------------------ 111 | 112 | latex_elements = { 113 | # The paper size ('letterpaper' or 'a4paper'). 114 | # 115 | # 'papersize': 'letterpaper', 116 | 117 | # The font size ('10pt', '11pt' or '12pt'). 118 | # 119 | # 'pointsize': '10pt', 120 | 121 | # Additional stuff for the LaTeX preamble. 122 | # 123 | # 'preamble': '', 124 | 125 | # Latex figure (float) alignment 126 | # 127 | # 'figure_align': 'htbp', 128 | } 129 | 130 | # Grouping the document tree into LaTeX files. List of tuples 131 | # (source start file, target name, title, author, documentclass 132 | # [howto, manual, or own class]). 133 | latex_documents = [ 134 | (master_doc, 'wavebreaking.tex', 135 | 'WaveBreaking Documentation', 136 | 'Severin Kaderli', 'manual'), 137 | ] 138 | 139 | 140 | # -- Options for manual page output ------------------------------------ 141 | 142 | # One entry per manual page. List of tuples 143 | # (source start file, name, description, authors, manual section). 144 | man_pages = [ 145 | (master_doc, 'wavebreaking', 146 | 'WaveBreaking Documentation', 147 | [author], 1) 148 | ] 149 | 150 | 151 | # -- Options for Texinfo output ---------------------------------------- 152 | 153 | # Grouping the document tree into Texinfo files. List of tuples 154 | # (source start file, target name, title, author, 155 | # dir menu entry, description, category) 156 | texinfo_documents = [ 157 | (master_doc, 'wavebreaking', 158 | 'WaveBreaking Documentation', 159 | author, 160 | 'wavebreaking', 161 | 'One line description of project.', 162 | 'Miscellaneous'), 163 | ] 164 | 165 | 166 | 167 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | .. 
include:: ../CONTRIBUTING.rst
2 |
--------------------------------------------------------------------------------
/docs/figures/plot_climatology.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/skaderli/WaveBreaking/7c7370fdc2d429ab8b8c1682c2caf57b1420db5a/docs/figures/plot_climatology.png
--------------------------------------------------------------------------------
/docs/figures/plot_step.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/skaderli/WaveBreaking/7c7370fdc2d429ab8b8c1682c2caf57b1420db5a/docs/figures/plot_step.png
--------------------------------------------------------------------------------
/docs/figures/plot_tracks.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/skaderli/WaveBreaking/7c7370fdc2d429ab8b8c1682c2caf57b1420db5a/docs/figures/plot_tracks.png
--------------------------------------------------------------------------------
/docs/figures/readme.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/skaderli/WaveBreaking/7c7370fdc2d429ab8b8c1682c2caf57b1420db5a/docs/figures/readme.gif
--------------------------------------------------------------------------------
/docs/history.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../HISTORY.rst
2 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | =============================================
2 | Welcome to the Documentation of WaveBreaking!
3 | =============================================
4 |
5 | .. image:: figures/readme.gif
6 |     :alt: readme gif
7 |
8 | .. include:: ../README.rst
9 |     :start-after: start_intro
10 |     :end-before: end_intro
11 |
12 | .. toctree::
13 |     :maxdepth: 2
14 |     :caption: Contents:
15 |
16 |     installation
17 |     tutorial
18 |     modules
19 |     contributing
20 |     authors
21 |     history
22 |
--------------------------------------------------------------------------------
/docs/installation.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../README.rst
2 |     :start-after: start_installation
3 |     :end-before: end_installation
4 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | 	set SPHINXBUILD=python -msphinx
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 | set SPHINXPROJ=wavebreaking
13 |
14 | if "%1" == "" goto help
15 |
16 | %SPHINXBUILD% >NUL 2>NUL
17 | if errorlevel 9009 (
18 | 	echo.
19 | 	echo.The Sphinx module was not found. Make sure you have Sphinx installed,
20 | 	echo.then set the SPHINXBUILD environment variable to point to the full
21 | 	echo.path of the 'sphinx-build' executable. Alternatively you may add the
22 | 	echo.Sphinx directory to PATH.
23 | 	echo.
24 | 	echo.If you don't have Sphinx installed, grab it from
25 | 	echo.http://sphinx-doc.org/
26 | 	exit /b 1
27 | )
28 |
29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
30 | goto end
31 |
32 | :help
33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
34 |
35 | :end
36 | popd
--------------------------------------------------------------------------------
/docs/modules.rst:
--------------------------------------------------------------------------------
1 | ==============
2 | Documentation
3 | ==============
4 |
5 | Spatial pre-processing functions
6 | --------------------------------
7 |
8 | .. automodule:: wavebreaking.processing.spatial
9 |     :members:
10 |     :undoc-members:
11 |     :show-inheritance:
12 |
13 | Indices
14 | -------
15 |
16 | .. automodule:: wavebreaking.indices.contour_index
17 |     :members:
18 |     :undoc-members:
19 |     :show-inheritance:
20 |     :exclude-members: decorator_contour_calculation
21 |
22 | .. automodule:: wavebreaking.indices.streamer_index
23 |     :members:
24 |     :undoc-members:
25 |     :show-inheritance:
26 |
27 | .. automodule:: wavebreaking.indices.overturning_index
28 |     :members:
29 |     :undoc-members:
30 |     :show-inheritance:
31 |
32 | .. automodule:: wavebreaking.indices.cutoff_index
33 |     :members:
34 |     :undoc-members:
35 |     :show-inheritance:
36 |
37 | Events post-processing functions
38 | --------------------------------
39 |
40 | .. automodule:: wavebreaking.processing.events
41 |     :members:
42 |     :undoc-members:
43 |     :show-inheritance:
44 |
45 | Plotting functions
46 | ------------------
47 |
48 | .. automodule:: wavebreaking.processing.plots
49 |     :members:
50 |     :undoc-members:
51 |     :show-inheritance:
--------------------------------------------------------------------------------
/docs/tutorial.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../README.rst
2 |     :start-after: start_tutorial_part1
3 |     :end-before: end_tutorial_part1
4 |
5 | .. image:: figures/plot_step.png
6 |     :alt: plot step
7 |
8 | .. include:: ../README.rst
9 |     :start-after: start_tutorial_part2
10 |     :end-before: end_tutorial_part2
11 |
12 | .. image:: figures/plot_climatology.png
13 |     :alt: plot climatology
14 |
15 | .. include:: ../README.rst
16 |     :start-after: start_tutorial_part3
17 |     :end-before: end_tutorial_part3
18 |
19 | .. 
image:: figures/plot_tracks.png 20 | :alt: plot tracks 21 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: wb_env 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python 6 | - numpy 7 | - xarray 8 | - netCDF4 9 | - pandas 10 | - geopandas 11 | - scikit-image 12 | - scipy 13 | - tqdm 14 | - shapely >= 2.0.1 15 | - scikit-learn 16 | # for plotting 17 | - matplotlib 18 | - cartopy >= 0.21.1 19 | # for testing 20 | - pip 21 | - flake8 22 | - tox 23 | # for readthedocs 24 | - sphinx_rtd_theme 25 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | 'setuptools', 4 | 'wheel' 5 | ] 6 | build-backend = 'setuptools.build_meta' 7 | -------------------------------------------------------------------------------- /requirements_dev.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | xarray 3 | netCDF4 4 | pandas 5 | geopandas 6 | scikit-image 7 | scipy 8 | tqdm 9 | shapely >= 2.0.1 10 | scikit-learn 11 | matplotlib 12 | cartopy >= 0.21.1 13 | pip 14 | flake8 15 | tox 16 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 0.3.8 3 | commit = False 4 | tag = False 5 | 6 | [bumpversion:file:setup.py] 7 | search = version="{current_version}" 8 | replace = version="{new_version}" 9 | 10 | [bumpversion:file:wavebreaking/__init__.py] 11 | search = __version__ = "{current_version}" 12 | replace = __version__ = "{new_version}" 13 | 14 | [bdist_wheel] 15 | universal = 1 16 | 17 | [flake8] 18 | exclude = docs,__init__.py,.ipynb_checkpoints 19 | extend-ignore = E402,E203 20 | max-line-length = 100 21 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """The setup script.""" 4 | 5 | from setuptools import setup, find_packages 6 | 7 | with open('README.rst') as readme_file: 8 | readme = readme_file.read() 9 | 10 | with open('HISTORY.rst') as history_file: 11 | history = history_file.read() 12 | 13 | requirements = ['numpy', 14 | 'xarray', 15 | 'netcdf4', 16 | 'pandas', 17 | 'geopandas', 18 | 'scikit-image', 19 | 'scipy', 20 | 'tqdm', 21 | 'shapely >= 2.0.1', 22 | 'scikit-learn', 23 | 'matplotlib', 24 | 'cartopy >= 0.21.1'] 25 | 26 | test_requirements = ['pytest>=3', ] 27 | 28 | setup( 29 | author="Severin Kaderli", 30 | author_email='severin.kaderli@unibe.ch', 31 | python_requires='>=3.8', 32 | classifiers=[ 33 | 'Intended Audience :: Developers', 34 | 'License :: OSI Approved :: MIT License', 35 | 'Natural Language :: English', 36 | 'Programming Language :: Python :: 3', 37 | 'Operating System :: OS Independent', 38 | 'Intended Audience :: Science/Research', 39 | 'Topic :: Scientific/Engineering :: Atmospheric Science', 40 | ], 41 | description="Detect, classify, and track Rossby Wave Breaking (RWB).", 42 | install_requires=requirements, 43 | license="MIT license", 44 | long_description=readme + '\n\n' + history, 45 | long_description_content_type='text/x-rst', 46 | include_package_data=True, 47 | keywords='wavebreaking', 48 | 
name='wavebreaking', 49 | packages=find_packages(include=['wavebreaking', 'wavebreaking.*']), 50 | test_suite='tests', 51 | tests_require=test_requirements, 52 | url='https://github.com/skaderli/wavebreaking', 53 | version="0.3.8", 54 | zip_safe=False, 55 | ) 56 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file is part of WaveBreaking. 3 | 4 | WaveBreaking provides indices to detect, classify 5 | and track Rossby Wave Breaking (RWB) in climate and weather data. 6 | The tool was developed during my master thesis at the University of Bern. 7 | Link to thesis: https://occrdata.unibe.ch/students/theses/msc/406.pdf 8 | 9 | --- 10 | Initiate tests for WaveBreaking 11 | """ 12 | 13 | __author__ = "Severin Kaderli" 14 | __license__ = "MIT" 15 | __email__ = "severin.kaderli@unibe.ch" 16 | -------------------------------------------------------------------------------- /tests/data/demo_data.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skaderli/WaveBreaking/7c7370fdc2d429ab8b8c1682c2caf57b1420db5a/tests/data/demo_data.nc -------------------------------------------------------------------------------- /tests/test_wavebreaking.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file is part of WaveBreaking. 3 | 4 | WaveBreaking provides indices to detect, classify 5 | and track Rossby Wave Breaking (RWB) in climate and weather data. 6 | The tool was developed during my master thesis at the University of Bern. 7 | Link to thesis: https://occrdata.unibe.ch/students/theses/msc/406.pdf 8 | 9 | --- 10 | Test classes for WaveBreaking 11 | """ 12 | 13 | __author__ = "Severin Kaderli" 14 | __license__ = "MIT" 15 | __email__ = "severin.kaderli@unibe.ch" 16 | 17 | # import modules 18 | import unittest 19 | import xarray as xr 20 | import numpy as np 21 | import pandas as pd 22 | import geopandas as gpd 23 | from shapely.geometry import Polygon 24 | import tqdm 25 | 26 | 27 | # import and silent tqdm progress bar 28 | def silent_tqdm(it, *args, **kwargs): 29 | return it 30 | 31 | 32 | tqdm.tqdm = silent_tqdm 33 | 34 | 35 | from wavebreaking.utils.data_utils import check_argument_types, get_dimension_attributes 36 | from wavebreaking.utils.index_utils import ( 37 | iterate_time_dimension, 38 | iterate_contour_levels, 39 | combine_shared, 40 | ) 41 | from wavebreaking.processing.spatial import ( 42 | calculate_momentum_flux, 43 | calculate_smoothed_field, 44 | ) 45 | from wavebreaking.processing.events import to_xarray, track_events 46 | from wavebreaking.indices.contour_index import ( 47 | calculate_contours, 48 | decorator_contour_calculation, 49 | ) 50 | from wavebreaking.indices.streamer_index import calculate_streamers 51 | from wavebreaking.indices.overturning_index import calculate_overturnings 52 | from wavebreaking.indices.cutoff_index import calculate_cutoffs 53 | 54 | data = xr.open_dataset("tests/data/demo_data.nc").isel(time=slice(0, 3)) 55 | 56 | 57 | class test_data_utils(unittest.TestCase): 58 | def test_check_argument_types(self): 59 | @check_argument_types(["data"], [xr.DataArray]) 60 | def to_be_decorated(data, *args, **kwargs): 61 | return None 62 | 63 | self.assertRaises(TypeError, lambda: to_be_decorated("")) 64 | 65 | def test_get_dimension_attributes(self): 66 | @get_dimension_attributes("data") 67 | def 
to_be_decorated(data, *args, **kwargs): 68 | self.assertEqual(kwargs["time_name"], "time") 69 | self.assertEqual(kwargs["lon_name"], "lon") 70 | self.assertEqual(kwargs["lat_name"], "lat") 71 | 72 | self.assertEqual(kwargs["ntime"], 3) 73 | self.assertEqual(kwargs["nlon"], 360) 74 | self.assertEqual(kwargs["nlat"], 179) 75 | 76 | self.assertEqual(kwargs["dlon"], 1) 77 | self.assertEqual(kwargs["dlat"], 1) 78 | 79 | to_be_decorated(data.PV) 80 | 81 | 82 | class test_index_utils(unittest.TestCase): 83 | def test_iterate_time_dimension(self): 84 | @get_dimension_attributes("data") 85 | @iterate_time_dimension 86 | def to_be_decorated(data, contour_levels, *args, **kwargs): 87 | return pd.DataFrame([kwargs["step"].values], columns=["time"]) 88 | 89 | df_check = data.drop_vars("lev").time.to_dataframe().reset_index(drop=True) 90 | self.assertEqual(True, to_be_decorated(data.PV, 2).equals(df_check)) 91 | 92 | def test_iterate_contour_levels(self): 93 | @get_dimension_attributes("data") 94 | @iterate_contour_levels 95 | def to_be_decorated(data, contour_levels, *args, **kwargs): 96 | return pd.DataFrame([kwargs["level"]], columns=["levels"]) 97 | 98 | df_check = pd.DataFrame([-2, 2], columns=["levels"]) 99 | self.assertEqual(True, to_be_decorated(data.PV, [-2, 2]).equals(df_check)) 100 | 101 | def test_combine_shared(self): 102 | list_in = [[1, 2, 3], [2, 3, 4], [5, 6]] 103 | list_out = [[1, 2, 3, 4], [5, 6]] 104 | self.assertEqual(combine_shared(list_in), list_out) 105 | 106 | 107 | class test_spatial(unittest.TestCase): 108 | def test_calculate_momentum_flux(self): 109 | self.assertIs(type(calculate_momentum_flux(data.U, data.V)), xr.DataArray) 110 | 111 | def test_calculate_smoothed_filed(self): 112 | self.assertIs(type(calculate_smoothed_field(data.PV, 10)), xr.DataArray) 113 | 114 | 115 | class test_events(unittest.TestCase): 116 | def test_to_xarray(self): 117 | date = np.datetime64("1959-06-03T12") 118 | name = "test_flag" 119 | events = gpd.GeoDataFrame( 120 | pd.DataFrame([{"date": date}]), 121 | geometry=[Polygon([(0, 0), (10, 0), (10, 10), (0, 10)])], 122 | ) 123 | 124 | flag_data = to_xarray(data=data.PV, events=events, name=name) 125 | 126 | self.assertIs(type(flag_data), xr.DataArray) 127 | self.assertEqual(flag_data.sel(lon=5, lat=5, time=date).values, 1) 128 | self.assertEqual(flag_data.name, name) 129 | 130 | def test_track_events(self): 131 | date1 = np.datetime64("1959-06-03T12") 132 | date2 = np.datetime64("1959-06-03T18") 133 | events = gpd.GeoDataFrame( 134 | pd.DataFrame({"date": [date1, date2]}), 135 | geometry=[ 136 | Polygon([(0, 0), (10, 0), (10, 10), (0, 10)]), 137 | Polygon([(0, 0), (10, 0), (10, 10), (0, 10)]), 138 | ], 139 | ) 140 | 141 | tracked = track_events(events=events, method="by_overlap") 142 | 143 | self.assertIs(type(tracked), gpd.GeoDataFrame) 144 | self.assertEqual(tracked.iloc[0].label, 0) 145 | 146 | 147 | class test_indices(unittest.TestCase): 148 | def test_contour_index(self): 149 | contours_coords = calculate_contours( 150 | data=data.PV, 151 | contour_levels=2, 152 | periodic_add=120, 153 | original_coordinates=True, 154 | ) 155 | contours_coords = contours_coords[contours_coords.exp_lon == 360] 156 | 157 | contours_index = calculate_contours( 158 | data=data.PV, 159 | contour_levels=2, 160 | periodic_add=120, 161 | original_coordinates=False, 162 | ) 163 | contours_index = contours_index[contours_index.exp_lon == 480] 164 | 165 | self.assertIs(type(contours_coords), gpd.GeoDataFrame) 166 | self.assertIs(type(contours_index), gpd.GeoDataFrame) 167 
| 168 | self.assertEqual( 169 | min(np.asarray(contours_coords.iloc[0].geometry.coords.xy).T[:, 0]), -180 170 | ) 171 | self.assertEqual( 172 | min(np.asarray(contours_index.iloc[0].geometry.coords.xy).T[:, 0]), 0 173 | ) 174 | 175 | cols = ["date", "level", "closed", "exp_lon", "mean_lat", "geometry"] 176 | self.assertEqual(contours_coords.columns.to_list(), cols) 177 | self.assertEqual(contours_index.columns.to_list(), cols) 178 | 179 | def test_decorator_contour_calculation(self): 180 | @decorator_contour_calculation 181 | def to_be_decorated(*args, **kwargs): 182 | contours = kwargs["contours"] 183 | contours = contours[contours.exp_lon == 480] 184 | 185 | self.assertIs(type(contours), gpd.GeoDataFrame) 186 | self.assertEqual( 187 | min(np.asarray(contours.iloc[0].geometry.coords.xy).T[:, 0]), 0 188 | ) 189 | self.assertEqual( 190 | contours.columns.to_list(), 191 | ["date", "level", "closed", "exp_lon", "mean_lat", "geometry"], 192 | ) 193 | 194 | to_be_decorated(data.PV, contour_levels=2) 195 | 196 | def test_streamer_index(self): 197 | streamers = calculate_streamers( 198 | data=data.PV, 199 | contour_levels=2, 200 | geo_dis=800, 201 | cont_dis=1500, 202 | intensity=data.PV, 203 | periodic_add=120, 204 | ) 205 | 206 | self.assertIs(type(streamers), gpd.GeoDataFrame) 207 | self.assertEqual(len(streamers), 39) 208 | self.assertEqual( 209 | streamers.columns.to_list(), 210 | ["date", "level", "com", "mean_var", "intensity", "event_area", "geometry"], 211 | ) 212 | 213 | def test_overturning_index(self): 214 | overturnings = calculate_overturnings( 215 | data=data.PV, 216 | contour_levels=2, 217 | range_group=5, 218 | min_exp=5, 219 | intensity=data.PV, 220 | periodic_add=120, 221 | ) 222 | 223 | self.assertIs(type(overturnings), gpd.GeoDataFrame) 224 | self.assertEqual(len(overturnings), 9) 225 | self.assertEqual( 226 | overturnings.columns.to_list(), 227 | [ 228 | "date", 229 | "level", 230 | "com", 231 | "mean_var", 232 | "intensity", 233 | "event_area", 234 | "orientation", 235 | "geometry", 236 | ], 237 | ) 238 | 239 | def test_cutoff_index(self): 240 | cutoffs = calculate_cutoffs( 241 | data=data.PV, 242 | contour_levels=2, 243 | min_exp=5, 244 | intensity=data.PV, 245 | periodic_add=120, 246 | ) 247 | 248 | self.assertIs(type(cutoffs), gpd.GeoDataFrame) 249 | self.assertEqual(len(cutoffs), 20) 250 | self.assertEqual( 251 | cutoffs.columns.to_list(), 252 | ["date", "level", "com", "mean_var", "intensity", "event_area", "geometry"], 253 | ) 254 | 255 | 256 | # execute Test 257 | if __name__ == "__main__": 258 | unittest.main() 259 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | requires = 3 | tox>=4 4 | env_list = lint, python3.8, python3.9, py{310,311} 5 | 6 | [testenv] 7 | setenv = 8 | PYTHONPATH = {toxinidir} 9 | deps = 10 | -r{toxinidir}/requirements_dev.txt 11 | 12 | [testenv:lint] 13 | description = run linters 14 | skip_install = true 15 | deps = flake8 16 | commands = flake8 wavebreaking tests 17 | ignore = E402,E203 18 | exclude = 19 | __init__, 20 | .ipynb_checkpoints 21 | -------------------------------------------------------------------------------- /wavebreaking/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file is part of WaveBreaking. 3 | 4 | WaveBreaking provides indices to detect, classify 5 | and track Rossby Wave Breaking (RWB) in climate and weather data. 
6 | The tool was developed during my master thesis at the University of Bern. 7 | Link to thesis: https://occrdata.unibe.ch/students/theses/msc/406.pdf 8 | 9 | --- 10 | Top-level package for WaveBreaking. 11 | """ 12 | 13 | __author__ = "Severin Kaderli" 14 | __license__ = "MIT" 15 | __email__ = "severin.kaderli@unibe.ch" 16 | __version__ = "0.3.8" 17 | 18 | # import spatial pre-processing functions 19 | from wavebreaking.processing.spatial import ( 20 | calculate_momentum_flux, 21 | calculate_smoothed_field, 22 | ) 23 | 24 | # import events post-processing functions 25 | from wavebreaking.processing.events import to_xarray, track_events 26 | 27 | # import plotting functions 28 | from wavebreaking.processing.plots import plot_clim, plot_step, plot_tracks 29 | 30 | # import wavebreaking indices 31 | from wavebreaking.indices.contour_index import calculate_contours 32 | from wavebreaking.indices.streamer_index import calculate_streamers 33 | from wavebreaking.indices.overturning_index import calculate_overturnings 34 | from wavebreaking.indices.cutoff_index import calculate_cutoffs 35 | -------------------------------------------------------------------------------- /wavebreaking/indices/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file is part of WaveBreaking. 3 | 4 | WaveBreaking provides indices to detect, classify 5 | and track Rossby Wave Breaking (RWB) in climate and weather data. 6 | The tool was developed during my master thesis at the University of Bern. 7 | Link to thesis: https://occrdata.unibe.ch/students/theses/msc/406.pdf 8 | 9 | --- 10 | Init indices 11 | """ 12 | 13 | __author__ = "Severin Kaderli" 14 | __license__ = "MIT" 15 | __email__ = "severin.kaderli@unibe.ch" 16 | -------------------------------------------------------------------------------- /wavebreaking/indices/contour_index.py: -------------------------------------------------------------------------------- 1 | """""" 2 | """ 3 | This file is part of WaveBreaking. 4 | 5 | WaveBreaking provides indices to detect, classify 6 | and track Rossby Wave Breaking (RWB) in climate and weather data. 7 | The tool was developed during my master thesis at the University of Bern. 8 | Link to thesis: https://occrdata.unibe.ch/students/theses/msc/406.pdf 9 | 10 | --- 11 | 12 | Contour calculation 13 | """ 14 | 15 | __author__ = "Severin Kaderli" 16 | __license__ = "MIT" 17 | __email__ = "severin.kaderli@unibe.ch" 18 | 19 | # import modules 20 | import numpy as np 21 | import xarray as xr 22 | import pandas as pd 23 | import geopandas as gpd 24 | import itertools as itertools 25 | from shapely.geometry import LineString 26 | from skimage import measure 27 | import functools 28 | 29 | import logging 30 | 31 | logger = logging.getLogger(__name__) 32 | logging.basicConfig(format="%(levelname)s: %(message)s", level=logging.INFO) 33 | 34 | from wavebreaking.utils.data_utils import ( 35 | get_dimension_attributes, 36 | check_argument_types, 37 | correct_dimension_orientation, 38 | ) 39 | from wavebreaking.utils.index_utils import ( 40 | iterate_time_dimension, 41 | iterate_contour_levels, 42 | ) 43 | 44 | 45 | @check_argument_types(["data"], [xr.DataArray]) 46 | @get_dimension_attributes("data") 47 | @iterate_time_dimension 48 | @iterate_contour_levels 49 | def calculate_contours( 50 | data, contour_levels, periodic_add=120, original_coordinates=True, *args, **kwargs 51 | ): 52 | """ 53 | Calculate contour lines for a set of contour levels. 
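For example (a minimal sketch, assuming data holds a potential vorticity field), calculate_contours(data, contour_levels=[-2, 2]) returns one GeoDataFrame row per contour line and time step.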
54 | The calculations are based on the measure.find_contours module. 55 | If periodic_add is provided, the data array is expanded in the longitudinal direction 56 | and undulations at the date border are captured correctly. 57 | Dimension names ("time_name", "lon_name", "lat_name"), size ("ntime", "nlon", "nlat") 58 | and resolution ("dlon", "dlat") can be passed as key=value argument. 59 | 60 | Parameters 61 | ---------- 62 | data : xarray.DataArray 63 | data for the contour calculation 64 | contour_levels : array_like 65 | levels for contour calculation 66 | periodic_add: int or float, optional 67 | number of longitudes in degrees to expand the dataset to 68 | correctly capture undulations at the date border 69 | if the input field is not periodic, use periodic_add = 0 70 | original_coordinates: bool, optional 71 | if False, the array indices of the contour lines are returned 72 | instead of the original coordinates 73 | 74 | Returns 75 | ------- 76 | contours: geopandas.GeoDataFrame 77 | GeoDataFrame containing different characteristics of the contours: 78 | * "date": date of the contour line 79 | * "level": level of the contour line 80 | * "closed": True if contour line is closed 81 | * "exp_lon": expansion in degrees of the contours in the longitudinal direction 82 | * "mean_lat": mean latitude of the contours 83 | * "geometry": LineString object with the contour coordinates in the format (x,y) 84 | """ 85 | 86 | # correct dimension orientation if needed 87 | data = correct_dimension_orientation(data, *args, **kwargs) 88 | 89 | # select variable and time step for the contour calculation 90 | ds = data.sel({kwargs["time_name"]: kwargs["step"]}) 91 | date = ds[kwargs["time_name"]].values 92 | 93 | # expand field for periodicity 94 | ds = xr.concat( 95 | [ 96 | ds, 97 | ds.isel({kwargs["lon_name"]: slice(0, int(periodic_add / kwargs["dlon"]))}), 98 | ], 99 | dim=kwargs["lon_name"], 100 | ) 101 | 102 | # get contours (indices in array coordinates) 103 | contours_from_measure = measure.find_contours(ds.values, kwargs["level"]) 104 | 105 | # get indices and check closed and length 106 | contours_index_expanded = [] 107 | closed = [] 108 | for item in contours_from_measure: 109 | check_closed = all(item[0] == item[-1]) 110 | indices = np.asarray( 111 | list(dict.fromkeys(map(tuple, np.round(item).astype("int")))) 112 | )[:, ::-1] 113 | check_len = len(indices) >= 4 114 | 115 | if check_len is True: 116 | contours_index_expanded.append(indices) 117 | closed.append(check_closed) 118 | 119 | def check_duplicates(list_of_arrays): 120 | """ 121 | Check if there are cutoff duplicates due to the periodic expansion 122 | """ 123 | temp = [ 124 | np.c_[item[:, 0] % kwargs["nlon"], item[:, 1]] for item in list_of_arrays 125 | ] 126 | 127 | check = [ 128 | (i1, i2) 129 | for (i1, e1), (i2, e2) in itertools.permutations(enumerate(temp), r=2) 130 | if set(map(tuple, e1)).issubset(set(map(tuple, e2))) 131 | ] 132 | drop = [] 133 | lens = np.array([len(item) for item in temp]) 134 | for indices in check: 135 | if lens[indices[0]] == lens[indices[1]]: 136 | drop.append(max(indices)) 137 | else: 138 | drop.append(indices[np.argmin(lens[[indices[0], indices[1]]])]) 139 | 140 | return list(set(drop)) 141 | 142 | # check for duplicates 143 | drop = check_duplicates(contours_index_expanded) 144 | contours_index_expanded = [ 145 | item for index, item in enumerate(contours_index_expanded) if index not in drop 146 | ] 147 | closed = [item for index, item in enumerate(closed) if index not in drop] 148 | 149 | def 
contour_to_dataframe(list_of_arrays): 150 | """ 151 | Calculate different characteristics of the contour line. 152 | """ 153 | 154 | exp_lon = [len(set(item[:, 0])) * kwargs["dlon"] for item in list_of_arrays] 155 | mean_lat = [np.round(item[:, 1].mean(), 2) for item in list_of_arrays] 156 | geo_mp = [LineString(coords) for coords in list_of_arrays] 157 | 158 | gdf = gpd.GeoDataFrame( 159 | pd.DataFrame( 160 | { 161 | "date": date, 162 | "level": kwargs["level"], 163 | "closed": closed, 164 | "exp_lon": exp_lon, 165 | "mean_lat": mean_lat, 166 | } 167 | ), 168 | geometry=geo_mp, 169 | ) 170 | 171 | return gdf 172 | 173 | if original_coordinates is False: 174 | # return contours in index coordinates as a geopandas.GeoDataFrame 175 | return contour_to_dataframe(contours_index_expanded) 176 | 177 | else: 178 | # select the original coordinates from the indices 179 | contours_coordinates_original = [ 180 | np.c_[ 181 | data[kwargs["lon_name"]].values[item[:, 0] % kwargs["nlon"]], 182 | data[kwargs["lat_name"]].values[item[:, 1]], 183 | ] 184 | for item in contours_index_expanded 185 | ] 186 | 187 | # drop duplicates 188 | contours_coordinates_original = [ 189 | np.asarray(list(dict.fromkeys(map(tuple, item)))) 190 | for item in contours_coordinates_original 191 | ] 192 | 193 | # return contours in original coordinates as a geopandas.GeoDataFrame 194 | return contour_to_dataframe(contours_coordinates_original) 195 | 196 | 197 | def decorator_contour_calculation(func): 198 | """ 199 | decorator to wrap the contour calculation around the index functions 200 | """ 201 | 202 | @functools.wraps(func) 203 | def wrapper(data, contour_levels, periodic_add=120, *args, **kwargs): 204 | # pass contours to the decorated function as a key=value argument 205 | if "contours" not in kwargs: 206 | kwargs["contours"] = calculate_contours( 207 | data, contour_levels, periodic_add, original_coordinates=False 208 | ) 209 | else: 210 | # check type 211 | if not isinstance(kwargs["contours"], gpd.GeoDataFrame): 212 | errmsg = "contours has to be a geopandas.GeoDataFrame!" 213 | raise TypeError(errmsg) 214 | 215 | # check empty 216 | if kwargs["contours"].empty: 217 | errmsg = "contours geopandas.GeoDataFrame is empty!" 218 | raise ValueError(errmsg) 219 | 220 | # check contour levels 221 | try: 222 | iter(contour_levels) 223 | except Exception: 224 | contour_levels = [contour_levels] 225 | 226 | check_levels = [ 227 | i for i in contour_levels if i not in set(kwargs["contours"].level) 228 | ] 229 | if len(check_levels) > 0: 230 | logger.warning( 231 | "\n The contour levels {} are not present in 'contours'".format( 232 | check_levels 233 | ) 234 | ) 235 | 236 | # check original coordinates 237 | coords = np.asarray(kwargs["contours"].iloc[0].geometry.coords.xy).T 238 | check_int = (coords.astype("int") == coords).all() 239 | check_zero = (coords[:, 0] >= 0).all() 240 | if not (check_int and check_zero): 241 | errmsg = ( 242 | "Original coordinates not supported for the index calculation. " 243 | ) 244 | hint = "Use original_coordinates=False in the contour calculation." 245 | raise ValueError(errmsg + hint) 246 | 247 | return func(data, contour_levels, periodic_add=periodic_add, *args, **kwargs) 248 | 249 | return wrapper 250 | -------------------------------------------------------------------------------- /wavebreaking/indices/cutoff_index.py: -------------------------------------------------------------------------------- 1 | """""" 2 | """ 3 | This file is part of WaveBreaking. 
 4 | 
 5 | WaveBreaking provides indices to detect, classify
 6 | and track Rossby Wave Breaking (RWB) in climate and weather data.
 7 | The tool was developed during my master thesis at the University of Bern.
 8 | Link to thesis: https://occrdata.unibe.ch/students/theses/msc/406.pdf
 9 | 
10 | ---
11 | 
12 | Cutoff calculation
13 | """
14 | 
15 | __author__ = "Severin Kaderli"
16 | __license__ = "MIT"
17 | __email__ = "severin.kaderli@unibe.ch"
18 | 
19 | # import modules
20 | import xarray as xr
21 | import geopandas as gpd
22 | from shapely.geometry import Polygon
23 | 
24 | from wavebreaking.utils.index_utils import calculate_properties, transform_polygons
25 | from wavebreaking.utils.data_utils import (
26 |     get_dimension_attributes,
27 |     check_argument_types,
28 |     correct_dimension_orientation,
29 | )
30 | from wavebreaking.indices.contour_index import decorator_contour_calculation
31 | 
32 | 
33 | @check_argument_types(["data"], [xr.DataArray])
34 | @get_dimension_attributes("data")
35 | @decorator_contour_calculation
36 | def calculate_cutoffs(
37 |     data,
38 |     contour_levels,
39 |     contours=None,
40 |     min_exp=5,
41 |     intensity=None,
42 |     periodic_add=120,
43 |     *args,
44 |     **kwargs
45 | ):
46 |     """
47 |     Identify cutoff structures.
48 |     Dimension names ("time_name", "lon_name", "lat_name"), size ("ntime", "nlon", "nlat")
49 |     and resolution ("dlon", "dlat") can be passed as key=value argument.
50 |     Before the index calculation, the contour lines are calculated if not provided.
51 | 
52 |     Parameters
53 |     ----------
54 |     data : xarray.DataArray
55 |         data for the contour and cutoff calculation
56 |     contour_levels : array_like
57 |         levels for contour calculation
58 |     contours : geopandas.GeoDataFrame, optional
59 |         contours calculated with wavebreaking.calculate_contours(...,
60 |         original_coordinates=False)
61 |     min_exp : int or float, optional
62 |         Minimal longitudinal expansion of a cutoff event
63 |     intensity : xarray.DataArray, optional
64 |         data for the intensity calculation (hint: use wb_spatial.calculate_momentum_flux)
65 |     periodic_add: int or float, optional
66 |         number of longitudes in degrees to expand the dataset
67 |         to correctly capture undulations at the date border
68 |         if the input field is not periodic, use periodic_add = 0
69 | 
70 |     Returns
71 |     -------
72 |     cutoffs: geopandas.GeoDataFrame
73 |         GeoDataFrame containing different characteristics of the cutoff events:
74 |         * "date": date of the cutoffs
75 |         * "level": level of the contour line
76 |         * "com": center of mass in the format (x,y)
77 |         * "mean_var": mean of the variable used for the contour calculation
78 |         * "event_area": area of a cutoff event
79 |         * "intensity": sum of the intensity (momentum flux)
80 |         * "geometry": (Multi)Polygon with the coordinates in the format (x,y)
81 |     """
82 | 
83 |     # correct dimension orientation if needed
84 |     data = correct_dimension_orientation(data, *args, **kwargs)
85 | 
86 |     # filter contours from contour iteration
87 |     if contours is None:
88 |         contours = kwargs["contours"]
89 |     contours = contours[
90 |         (contours.exp_lon < contours.exp_lon.max())
91 |         & (contours.exp_lon >= min_exp)
92 |         & contours.closed
93 |     ].reset_index(drop=True)
94 | 
95 |     # define Polygons
96 |     polys = [Polygon(row.geometry) for index, row in contours.iterrows()]
97 |     gdf = gpd.GeoDataFrame(contours[["date", "level"]], geometry=polys)
98 |     gdf = gdf.reset_index().rename(columns={"index": "id"})
99 | 
100 |     # calculate properties and transform polygons
101 |     return gpd.GeoDataFrame(
102 |         calculate_properties(gdf,
data, intensity, periodic_add, **kwargs), 103 | geometry=transform_polygons(gdf, data, **kwargs).geometry, 104 | ) 105 | -------------------------------------------------------------------------------- /wavebreaking/indices/overturning_index.py: -------------------------------------------------------------------------------- 1 | """""" 2 | """ 3 | This file is part of WaveBreaking. 4 | 5 | WaveBreaking provides indices to detect, classify 6 | and track Rossby Wave Breaking (RWB) in climate and weather data. 7 | The tool was developed during my master thesis at the University of Bern. 8 | Link to thesis: https://occrdata.unibe.ch/students/theses/msc/406.pdf 9 | 10 | --- 11 | 12 | Overturning calculation 13 | """ 14 | 15 | __author__ = "Severin Kaderli" 16 | __license__ = "MIT" 17 | __email__ = "severin.kaderli@unibe.ch" 18 | 19 | # import modules 20 | import numpy as np 21 | import xarray as xr 22 | import pandas as pd 23 | import geopandas as gpd 24 | from shapely.geometry import box 25 | from tqdm import tqdm 26 | import itertools as itertools 27 | from sklearn.metrics import DistanceMetric 28 | 29 | dist = DistanceMetric.get_metric("haversine") 30 | 31 | from wavebreaking.utils.index_utils import calculate_properties, transform_polygons 32 | from wavebreaking.utils.data_utils import ( 33 | get_dimension_attributes, 34 | check_argument_types, 35 | correct_dimension_orientation, 36 | ) 37 | from wavebreaking.indices.contour_index import decorator_contour_calculation 38 | 39 | 40 | @check_argument_types(["data"], [xr.DataArray]) 41 | @get_dimension_attributes("data") 42 | @decorator_contour_calculation 43 | def calculate_overturnings( 44 | data, 45 | contour_levels, 46 | contours=None, 47 | range_group=5, 48 | min_exp=5, 49 | intensity=None, 50 | periodic_add=120, 51 | *args, 52 | **kwargs 53 | ): 54 | """ 55 | Identify overturning structures based on the Overturning Index 56 | developed by Barnes and Hartmann (2012). 57 | The default parameters for the overturning identification 58 | are based on the study by Barnes and Hartmann (2012). 59 | The overturning region is represented by a rectangle 60 | enclosing all overturning contour points. 61 | Dimension names ("time_name", "lon_name", "lat_name"), size ("ntime", "nlon", "nlat") 62 | and resolution ("dlon", "dlat") can be passed as key=value argument. 63 | Before the index calculation, the contour lines are calculated if not provided. 
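    A minimal usage sketch (assuming ds is an xarray.Dataset with a PV variable):

        >>> ot = calculate_overturnings(ds.PV, contour_levels=[-2, 2])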
 64 | 
 65 |     Parameters
 66 |     ----------
 67 |     data : xarray.DataArray
 68 |         data for the contour and overturning calculation
 69 |     contour_levels : array_like
 70 |         levels for contour calculation
 71 |     contours : geopandas.GeoDataFrame, optional
 72 |         contours calculated with wavebreaking.calculate_contours(...,
 73 |         original_coordinates=False)
 74 |     range_group : int or float, optional
 75 |         Maximal degrees in the longitudinal direction in which two overturnings are grouped
 76 |     min_exp : int or float, optional
 77 |         Minimal longitudinal expansion of an overturning event
 78 |     intensity : xarray.DataArray, optional
 79 |         data for the intensity calculation (hint: use wb_spatial.calculate_momentum_flux)
 80 |     periodic_add: int or float, optional
 81 |         number of longitudes in degrees to expand the dataset
 82 |         to correctly capture undulations at the date border
 83 |         if the input field is not periodic, use periodic_add = 0
 84 | 
 85 |     Returns
 86 |     -------
 87 |     overturnings: geopandas.GeoDataFrame
 88 |         GeoDataFrame containing different characteristics of the overturning events:
 89 |         * "date": date of the overturning
 90 |         * "level": level of the contour line
 91 |         * "com": center of mass in the format (x,y)
 92 |         * "mean_var": mean of the variable used for the overturning calculations
 93 |         * "event_area": area of an overturning event
 94 |         * "intensity": sum of the intensity (momentum flux)
 95 |         * "orientation": orientation of the most west- and eastward point
 96 |         * "geometry": (Multi)Polygon with coordinates in the format (x,y)
 97 |     """
 98 | 
 99 |     # correct dimension orientation if needed
100 |     data = correct_dimension_orientation(data, *args, **kwargs)
101 | 
102 |     # filter contours from contour iteration
103 |     if contours is None:
104 |         contours = kwargs["contours"]
105 |     contours = contours[contours.exp_lon == contours.exp_lon.max()].reset_index(
106 |         drop=True
107 |     )
108 | 
109 |     # loop over contours
110 |     overturnings = []
111 |     for index, series in tqdm(
112 |         contours.iterrows(),
113 |         total=contours.shape[0],
114 |         desc="Calculating overturnings",
115 |         leave=True,
116 |         position=0,
117 |     ):
118 |         # calculate all overturning longitudes
119 |         # (1) select the contour points and count the longitudes
120 |         contour_index = pd.DataFrame(
121 |             np.asarray(series.geometry.coords.xy).T, columns=["x", "y"]
122 |         ).astype("int")
123 |         lons, counts = np.unique(contour_index.x, return_counts=True)
124 | 
125 |         # (2) only keep longitudes that appear at least 3 times
126 |         #     and assign the same label to longitudes that are
127 |         #     closer than the parameter range_group
128 |         ot_lons = pd.DataFrame({"lon": lons[counts >= 3]})
129 |         ot_lons["label"] = (ot_lons.diff() > range_group / kwargs["dlon"]).cumsum()
130 | 
131 |         # (3) extract min and max longitude of each group
132 |         groups = ot_lons.groupby("label")
133 |         df_ot = groups.agg(["min", "max"]).astype("int").reset_index(drop=True)
134 |         df_ot.columns = ["min_lon", "max_lon"]
135 | 
136 |         def check_duplicates(df):
137 |             """
138 |             Check if there are overturning duplicates due to the periodic expansion
139 |             """
140 | 
141 |             temp = [
142 |                 np.array(range(row.min_lon, row.max_lon + 1)) % kwargs["nlon"]
143 |                 for index, row in df.iterrows()
144 |             ]
145 | 
146 |             index_combinations = list(itertools.permutations(df.index, r=2))
147 |             check = [
148 |                 item
149 |                 for item in index_combinations
150 |                 if set(temp[item[0]]).issubset(set(temp[item[1]]))
151 |             ]
152 | 
153 |             drop = []
154 |             for item in check:
155 |                 lens = [len(temp[i]) for i in item]
156 | 
157 |                 if lens[0] == lens[1]:
158 |
drop.append(max(item)) 159 | else: 160 | drop.append(item[np.argmin(lens)]) 161 | 162 | return df[~df.reset_index(drop=True).index.isin(drop)] 163 | 164 | def check_expansion(df): 165 | exp_lon = df.max_lon - df.min_lon 166 | return df[exp_lon >= min_exp / kwargs["dlon"]] 167 | 168 | def find_lat_expansion(df): 169 | lats = [ 170 | contour_index[ 171 | contour_index.x.isin(range(row.min_lon, row.max_lon + 1)) 172 | ].y 173 | for index, row in df.iterrows() 174 | ] 175 | ot_lats = pd.DataFrame( 176 | [(item.min(), item.max()) for item in lats], 177 | columns=["min_lat", "max_lat"], 178 | ).astype("int") 179 | 180 | return pd.concat([df, ot_lats], axis=1) 181 | 182 | # apply all three routines and stop if no overturning events are left after a routine 183 | routines = [check_duplicates, check_expansion, find_lat_expansion] 184 | 185 | i = 0 186 | while len(df_ot.index) > 0 and i <= 2: 187 | df_ot = routines[i](df_ot).reset_index(drop=True) 188 | i += 1 189 | 190 | # check if event is cyclonic by orientation 191 | def check_orientation(df): 192 | lat_west = data[kwargs["lat_name"]][ 193 | contour_index[contour_index.x.eq(df.min_lon)].y.values[0] 194 | ].values 195 | lat_east = data[kwargs["lat_name"]][ 196 | contour_index[contour_index.x.eq(df.max_lon)].y.values[-1] 197 | ].values 198 | 199 | if abs(lat_west) <= abs(lat_east): 200 | return "cyclonic" 201 | else: 202 | return "anticyclonic" 203 | 204 | orientation = pd.DataFrame( 205 | {"orientation": [check_orientation(row) for index, row in df_ot.iterrows()]} 206 | ) 207 | # define Polygons 208 | dates = pd.DataFrame({"date": [series.date for i in range(0, len(df_ot))]}) 209 | levels = pd.DataFrame({"level": [series.level for i in range(0, len(df_ot))]}) 210 | polys = [ 211 | box(row.min_lon, row.min_lat, row.max_lon, row.max_lat) 212 | for index, row in df_ot.iterrows() 213 | ] 214 | overturnings.append( 215 | gpd.GeoDataFrame( 216 | pd.concat([dates, levels, orientation], axis=1), geometry=polys 217 | ) 218 | ) 219 | 220 | # concat GeoDataFrames 221 | if len(overturnings) == 0: 222 | return gpd.GeoDataFrame() 223 | else: 224 | gdf = pd.concat(overturnings).reset_index(drop=True) 225 | 226 | gdf = gdf.reset_index().rename(columns={"index": "id"}) 227 | 228 | # calculate properties and transform polygons 229 | return gpd.GeoDataFrame( 230 | calculate_properties(gdf, data, intensity, periodic_add, **kwargs), 231 | geometry=transform_polygons(gdf, data, **kwargs).geometry, 232 | ) 233 | -------------------------------------------------------------------------------- /wavebreaking/indices/streamer_index.py: -------------------------------------------------------------------------------- 1 | """""" 2 | """ 3 | This file is part of WaveBreaking. 4 | 5 | WaveBreaking provides indices to detect, classify 6 | and track Rossby Wave Breaking (RWB) in climate and weather data. 7 | The tool was developed during my master thesis at the University of Bern. 
 8 | Link to thesis: https://occrdata.unibe.ch/students/theses/msc/406.pdf
 9 | 
10 | ---
11 | 
12 | Streamer calculation
13 | """
14 | 
15 | __author__ = "Severin Kaderli"
16 | __license__ = "MIT"
17 | __email__ = "severin.kaderli@unibe.ch"
18 | 
19 | # import modules
20 | import numpy as np
21 | import xarray as xr
22 | import pandas as pd
23 | import geopandas as gpd
24 | from tqdm import tqdm
25 | import itertools as itertools
26 | from shapely.geometry import LineString, Polygon
27 | from sklearn.metrics import DistanceMetric
28 | 
29 | dist = DistanceMetric.get_metric("haversine")
30 | 
31 | from wavebreaking.utils.index_utils import (
32 |     calculate_properties,
33 |     transform_polygons,
34 |     combine_shared,
35 | )
36 | from wavebreaking.utils.data_utils import (
37 |     get_dimension_attributes,
38 |     check_argument_types,
39 |     correct_dimension_orientation,
40 | )
41 | from wavebreaking.indices.contour_index import decorator_contour_calculation
42 | 
43 | 
44 | @check_argument_types(["data"], [xr.DataArray])
45 | @get_dimension_attributes("data")
46 | @decorator_contour_calculation
47 | def calculate_streamers(
48 |     data,
49 |     contour_levels,
50 |     contours=None,
51 |     geo_dis=800,
52 |     cont_dis=1500,
53 |     intensity=None,
54 |     periodic_add=120,
55 |     *args,
56 |     **kwargs
57 | ):
58 |     """
59 |     Identify streamer structures based on the Streamer Index
60 |     developed by Wernli and Sprenger (2007).
61 |     The default parameters for the streamer identification
62 |     are based on the study by Wernli and Sprenger (2007).
63 |     Dimension names ("time_name", "lon_name", "lat_name"), size ("ntime", "nlon", "nlat")
64 |     and resolution ("dlon", "dlat") can be passed as key=value argument.
65 |     Before the index calculation, the contour lines are calculated if not provided.
66 | 
67 |     Parameters
68 |     ----------
69 |     data : xarray.DataArray
70 |         data for the contour and streamer calculation
71 |     contour_levels : array_like
72 |         levels for contour calculation
73 |     contours : geopandas.GeoDataFrame, optional
74 |         contours calculated with wavebreaking.calculate_contours(...,
75 |         original_coordinates=False)
76 |     geo_dis : int or float, optional
77 |         Maximal geographic distance between two contour points that describe a streamer
78 |     cont_dis : int or float, optional
79 |         Minimal distance along the contour line between two points that describe a streamer
80 |     intensity : xarray.DataArray, optional
81 |         data for the intensity calculation (hint: use wb_spatial.calculate_momentum_flux)
82 |     periodic_add: int or float, optional
83 |         number of longitudes in degrees to expand the dataset
84 |         to correctly capture undulations at the date border
85 |         if the input field is not periodic, use periodic_add = 0
86 | 
87 |     Returns
88 |     -------
89 |     streamers: geopandas.GeoDataFrame
90 |         GeoDataFrame containing different characteristics of the streamers:
91 |         * "date": date of the streamers
92 |         * "level": level of the contour line
93 |         * "com": center of mass in the format (x,y)
94 |         * "mean_var": mean of the variable used for the streamer calculations
95 |         * "event_area": area of a streamer
96 |         * "intensity": sum of the intensity (momentum flux)
97 |         * "geometry": (Multi)Polygon with coordinates in the format (x,y)
98 |     """
99 | 
100 |     # correct dimension orientation if needed
101 |     data = correct_dimension_orientation(data, *args, **kwargs)
102 | 
103 |     # filter contours from contour iteration
104 |     if contours is None:
105 |         contours = kwargs["contours"]
106 |     contours = contours[contours.exp_lon == contours.exp_lon.max()].reset_index(
107 | drop=True 108 | ) 109 | 110 | # loop over contours 111 | streamers = [] 112 | for index, series in tqdm( 113 | contours.iterrows(), 114 | total=contours.shape[0], 115 | desc="Calculating streamers ", 116 | leave=True, 117 | position=0, 118 | ): 119 | # calculate all possible basepoints combinations 120 | # (1) get coordinates of the contour points 121 | contour_index = pd.DataFrame( 122 | np.asarray(series.geometry.coords.xy).T, columns=["x", "y"] 123 | ).astype("int") 124 | contour_coords = np.c_[ 125 | data[kwargs["lat_name"]].values[contour_index.y], 126 | data[kwargs["lon_name"]].values[contour_index.x % kwargs["nlon"]], 127 | ] 128 | 129 | # (2) calculate geographic distance between all contour coordinates 130 | geo_matrix = dist.pairwise((np.radians(contour_coords))) * 6371 131 | 132 | # (3) calculate the distance connecting all combinations of contour points 133 | on = np.insert(np.diagonal(geo_matrix, 1), 0, 0) 134 | on_mat = np.triu(np.tile(on, (len(on), 1)), k=1) 135 | cont_matrix = np.cumsum(on_mat, axis=1) 136 | 137 | # (4) get indices of all contour coordinates that fulfill both conditions 138 | check = (geo_matrix < geo_dis) * (cont_matrix > cont_dis) 139 | check = np.transpose(check.nonzero()) 140 | 141 | # (5) store the coordinates of the point combinations in a DataFrame 142 | df1 = ( 143 | contour_index[["x", "y"]] 144 | .iloc[check[:, 0]] 145 | .reset_index() 146 | .rename(columns={"x": "x1", "y": "y1", "index": "ind1"}) 147 | ) 148 | df2 = ( 149 | contour_index[["x", "y"]] 150 | .iloc[check[:, 1]] 151 | .reset_index() 152 | .rename(columns={"x": "x2", "y": "y2", "index": "ind2"}) 153 | ) 154 | df_bp = pd.concat([df1, df2], axis=1) 155 | 156 | # (6) drop combinations that are invalid 157 | df_bp = df_bp.drop(df_bp[np.abs(df_bp.x1 - df_bp.x2) > 120].index) 158 | 159 | # apply several routines that neglect invalid basepoints 160 | def check_duplicates(df): 161 | """ 162 | Check if there are basepoint duplicates 163 | due to the periodic expansion in the longitudinal direction 164 | """ 165 | 166 | # drop duplicates after mapping the coordinates to the original grid 167 | temp = pd.concat( 168 | [df.x1 % kwargs["nlon"], df.y1, df.x2 % kwargs["nlon"], df.y2], axis=1 169 | ) 170 | temp = temp[temp.duplicated(keep=False)] 171 | 172 | if len(temp) == 0: 173 | check = [] 174 | else: 175 | check = list( 176 | np.asarray( 177 | temp.groupby(list(temp)) 178 | .apply(lambda x: tuple(x.index)) 179 | .tolist() 180 | )[:, 1] 181 | ) 182 | 183 | return df.drop(check) 184 | 185 | def check_intersections(df): 186 | """ 187 | Check for intersections of the basepoints with the contour line 188 | """ 189 | 190 | # calculate line connecting each basepoint pair 191 | # and drop the ones that intersect with the contour line 192 | df["dline"] = [ 193 | LineString([(row.x1, row.y1), (row.x2, row.y2)]) 194 | for index, row in df.iterrows() 195 | ] 196 | check_intersections = [ 197 | row.dline.touches(series.geometry) for index, row in df.iterrows() 198 | ] 199 | 200 | return df[check_intersections] 201 | 202 | def check_overlapping(df): 203 | """ 204 | Check for overlapping of the contour segments each described by basepoints 205 | """ 206 | 207 | # calculate contour segment for each basepoint pair 208 | # and drop the pairs that are fully covered by another pair 209 | ranges = [range(int(r2.ind1), int(r2.ind2 + 1)) for i2, r2 in df.iterrows()] 210 | index_combinations = list(itertools.permutations(df.index, r=2)) 211 | check_overlapping = set( 212 | [ 213 | item[0] 214 | for item in 
index_combinations 215 | if ( 216 | ranges[item[0]][0] in ranges[item[1]] 217 | and ranges[item[0]][-1] in ranges[item[1]] 218 | ) 219 | ] 220 | ) 221 | 222 | return df.drop(check_overlapping) 223 | 224 | def check_groups(df): 225 | """ 226 | Check if there are still several basepoints describing the same streamer 227 | """ 228 | 229 | # group basepoints that describe the same streamer 230 | index_combinations = np.asarray( 231 | list(itertools.combinations_with_replacement(df.index, r=2)) 232 | ) 233 | check_crossing = [ 234 | df.iloc[item[0]].dline.intersects(df.iloc[item[1]].dline) 235 | for item in index_combinations 236 | ] 237 | 238 | groups = combine_shared(index_combinations[check_crossing]) 239 | keep_index = [ 240 | item[ 241 | np.argmax( 242 | [ 243 | on[int(row.ind1) : int(row.ind2) + 1].sum() 244 | for index, row in df.iloc[item].iterrows() 245 | ] 246 | ) 247 | ] 248 | for item in groups 249 | ] 250 | 251 | return df.iloc[keep_index] 252 | 253 | # apply all four routines and stop if no basepoints are left after a routine 254 | routines = [ 255 | check_duplicates, 256 | check_intersections, 257 | check_overlapping, 258 | check_groups, 259 | ] 260 | 261 | i = 0 262 | while len(df_bp.index) > 1 and i <= 3: 263 | df_bp = routines[i](df_bp).reset_index(drop=True) 264 | i += 1 265 | 266 | # define Polygons 267 | dates = pd.DataFrame({"date": [series.date for i in range(0, len(df_bp))]}) 268 | levels = pd.DataFrame({"level": [series.level for i in range(0, len(df_bp))]}) 269 | polys = [ 270 | Polygon(contour_index[int(row.ind1) : int(row.ind2) + 1]) 271 | for index, row in df_bp.iterrows() 272 | ] 273 | streamers.append( 274 | gpd.GeoDataFrame(pd.concat([dates, levels], axis=1), geometry=polys) 275 | ) 276 | 277 | # concat GeoDataFrames 278 | if len(streamers) == 0: 279 | return gpd.GeoDataFrame() 280 | else: 281 | gdf = pd.concat(streamers).reset_index(drop=True) 282 | 283 | gdf = gdf.reset_index().rename(columns={"index": "id"}) 284 | 285 | # calculate properties and transform polygons 286 | return gpd.GeoDataFrame( 287 | calculate_properties(gdf, data, intensity, periodic_add, **kwargs), 288 | geometry=transform_polygons(gdf, data, **kwargs).geometry, 289 | ) 290 | -------------------------------------------------------------------------------- /wavebreaking/processing/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file is part of WaveBreaking. 3 | 4 | WaveBreaking provides indices to detect, classify 5 | and track Rossby Wave Breaking (RWB) in climate and weather data. 6 | The tool was developed during my master thesis at the University of Bern. 7 | Link to thesis: https://occrdata.unibe.ch/students/theses/msc/406.pdf 8 | 9 | --- 10 | Initiate processing 11 | """ 12 | 13 | __author__ = "Severin Kaderli" 14 | __license__ = "MIT" 15 | __email__ = "severin.kaderli@unibe.ch" 16 | -------------------------------------------------------------------------------- /wavebreaking/processing/events.py: -------------------------------------------------------------------------------- 1 | """""" 2 | """ 3 | This file is part of WaveBreaking. 4 | 5 | WaveBreaking provides indices to detect, classify 6 | and track Rossby Wave Breaking (RWB) in climate and weather data. 7 | The tool was developed during my master thesis at the University of Bern. 
8 | Link to thesis: https://occrdata.unibe.ch/students/theses/msc/406.pdf 9 | 10 | --- 11 | 12 | Events post-processing functions 13 | """ 14 | 15 | __author__ = "Severin Kaderli" 16 | __license__ = "MIT" 17 | __email__ = "severin.kaderli@unibe.ch" 18 | 19 | # import modules 20 | import xarray as xr 21 | import pandas as pd 22 | import geopandas as gpd 23 | import numpy as np 24 | import itertools as itertools 25 | from sklearn.metrics import DistanceMetric 26 | 27 | dist = DistanceMetric.get_metric("haversine") 28 | 29 | from wavebreaking.utils.data_utils import ( 30 | check_argument_types, 31 | check_empty_dataframes, 32 | get_dimension_attributes, 33 | ) 34 | from wavebreaking.utils import index_utils 35 | 36 | 37 | @check_argument_types(["data", "events"], [xr.DataArray, gpd.GeoDataFrame]) 38 | @check_empty_dataframes 39 | @get_dimension_attributes("data") 40 | def to_xarray(data, events, flag="ones", name="flag", *args, **kwargs): 41 | """ 42 | Create xarray.DataArray from events stored in a geopandas.GeoDataFrame. 43 | Grid cells where an event is present are flagged with the value 1. 44 | Dimension names ("time_name", "lon_name", "lat_name"), size ("ntime", "nlon", "nlat") 45 | and resolution ("dlon", "dlat") can be passed as key=value argument. 46 | 47 | Parameters 48 | ---------- 49 | data : xarray.DataArray 50 | data used for the index calculation 51 | events : geopandas.GeoDataFrame 52 | GeoDataFrame with the date and geometry for each event 53 | flag : string, optional 54 | column name of the events geopandas.GeoDataFrame 55 | flag is set where an event is present 56 | default value is "ones" 57 | name : string, optional 58 | name of the xarray variable that is created 59 | 60 | Returns 61 | ------- 62 | flag: xarray.DataArray 63 | Data with events flagged with the value 1 64 | """ 65 | 66 | # get grid points 67 | lon, lat = np.meshgrid(data[kwargs["lon_name"]], data[kwargs["lat_name"]]) 68 | lonf, latf = lon.flatten(), lat.flatten() 69 | points = gpd.GeoDataFrame( 70 | pd.DataFrame({"lon": lonf, "lat": latf}), 71 | geometry=gpd.points_from_xy(lonf, latf), 72 | ) 73 | 74 | # get coordinates of all events at the same time step 75 | buffer = events.copy() 76 | buffer.geometry = buffer.geometry.buffer( 77 | ((kwargs["dlon"] + kwargs["dlat"]) / 2) / 2 78 | ) 79 | merged = gpd.sjoin(buffer, points, how="inner", predicate="contains").sort_index() 80 | 81 | # create empty xarray.Dataset with the same dimension as the original Dataset 82 | data_flagged = xr.zeros_like(data) 83 | 84 | # flag coordinates in DataSet 85 | if flag == "ones": 86 | set_val = np.ones(len(merged)) 87 | else: 88 | try: 89 | set_val = merged[flag].values 90 | except KeyError: 91 | errmsg = "{} is not a column of the events geopandas.GeoDataFrame.".format( 92 | flag 93 | ) 94 | raise KeyError(errmsg) 95 | 96 | data_flagged.loc[ 97 | { 98 | kwargs["time_name"]: merged.date.to_xarray(), 99 | kwargs["lat_name"]: merged.lat.to_xarray(), 100 | kwargs["lon_name"]: merged.lon.to_xarray(), 101 | } 102 | ] = set_val 103 | 104 | # change type and name 105 | if flag == 'ones': 106 | data_flagged = data_flagged.astype("int8") 107 | data_flagged.name = name 108 | data_flagged.attrs["long_name"] = "flag wave breaking" 109 | 110 | return data_flagged 111 | 112 | 113 | @check_argument_types(["events"], [pd.DataFrame]) 114 | @check_empty_dataframes 115 | def track_events( 116 | events, time_range=None, method="by_overlap", buffer=0, overlap=0, distance=1000 117 | ): 118 | """ 119 | Temporal tracking of events. 
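    For example (a sketch): track_events(events, method="by_overlap", buffer=1, overlap=0.5) links events that overlap by at least 50 % after a 1-degree buffer is applied.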
120 | Events receive the same label if they spatially overlap at step t 121 | and t + time_range. 122 | 123 | Parameters 124 | ---------- 125 | events : geopandas.GeoDataFrame 126 | GeoDataFrame with the date and coordinates of each identified event 127 | time_range: int or float, optional 128 | Time range for temporally tracking the events. The units of 129 | time_range is hours if the type of the time dimension is np.datetime64. 130 | If not specified, the smallest time difference larger than zero is used. 131 | method : {"by_overlap", "by_distance"}, optional 132 | Method for temporally tracking the events: 133 | * "by_overlap": Events receive the same label if they spatially 134 | overlap at step t and t + time_range. 135 | * "by_distance": Events receive the same label if their centre of mass 136 | is closer than "distance" 137 | buffer : float, optional 138 | buffer around event polygon in degrees for the 'by_overlap' method 139 | overlap : float, optional 140 | minimum percentage of overlapping for the 'by_overlap' method 141 | distance : int or float, optional 142 | maximum distance in km between two events for the 'by_distance' method 143 | 144 | 145 | Returns 146 | ------- 147 | events: geopandas.GeoDataFrame 148 | GeoDataFrame with label column showing the temporal coherence 149 | """ 150 | 151 | # reset index of events 152 | events = events.reset_index(drop=True) 153 | 154 | # detect time range 155 | if time_range is None: 156 | date_dif = events.date.diff() 157 | time_range = date_dif[date_dif > pd.Timedelta(0)].min().total_seconds() / 3600 158 | 159 | # select events that are in range of time_range 160 | def get_range_combinations(events, index): 161 | """ 162 | find events within the next steps that are in time range 163 | """ 164 | if events.date.dtype == np.dtype("datetime64[ns]"): 165 | diffs = (events.date - events.date.iloc[index]).dt.total_seconds() / 3600 166 | else: 167 | diffs = abs(events.date - events.date.iloc[index]) 168 | 169 | check = (diffs > 0) & (diffs <= time_range) 170 | 171 | return [(index, close) for close in events[check].index] 172 | 173 | range_comb = np.asarray( 174 | list( 175 | set( 176 | itertools.chain.from_iterable( 177 | [get_range_combinations(events, index) for index in events.index] 178 | ) 179 | ) 180 | ) 181 | ) 182 | 183 | if len(range_comb) == 0: 184 | errmsg = "No events detected in the time range: {}".format(time_range) 185 | raise ValueError(errmsg) 186 | 187 | if method == "by_distance": 188 | # get centre of mass 189 | com1 = np.asarray(list(events.iloc[range_comb[:, 0]].com)) 190 | com2 = np.asarray(list(events.iloc[range_comb[:, 1]].com)) 191 | 192 | # calculate distance between coms 193 | dist_com = np.asarray( 194 | [dist.pairwise(np.radians([p1, p2]))[0, 1] for p1, p2 in zip(com1, com2)] 195 | ) 196 | 197 | # check which coms are in range of 'distance' 198 | check_com = dist_com * 6371 < distance 199 | 200 | # select combinations 201 | combine = range_comb[check_com] 202 | 203 | elif method == "by_overlap": 204 | # select geometries that are in time range and add buffer 205 | geom1 = events.iloc[range_comb[:, 0]].geometry.buffer(buffer).make_valid() 206 | geom2 = events.iloc[range_comb[:, 1]].geometry.buffer(buffer).make_valid() 207 | 208 | # calculate and check the percentage of overlap 209 | inter = geom1.intersection(geom2, align=False) 210 | check_overlap = ( 211 | inter.area.values 212 | / (geom2.area.values + geom1.area.values - inter.area.values) 213 | > overlap 214 | ) 215 | 216 | # select combinations 217 | combine = 
range_comb[check_overlap] 218 | 219 | else: 220 | errmsg = "'{}' not supported as method!".format(method) 221 | hint = " Supported methods are 'by_overlap' and 'by_distance'" 222 | raise ValueError(errmsg + hint) 223 | 224 | # combine tracked indices to groups 225 | combine = index_utils.combine_shared(combine) 226 | 227 | # initiate label column 228 | events["label"] = events.index 229 | 230 | # assign labels to the events 231 | for item in combine: 232 | events.loc[item, "label"] = min(item) 233 | 234 | # select smallest possible index number for all events 235 | label = events.label.copy() 236 | for i in np.arange(len(set(events.label))): 237 | label[events.label == sorted(set(events.label))[i]] = i 238 | events.label = label 239 | 240 | # sort list by label and date and return geopandas.GeoDataFrame 241 | return events.sort_values(by=["label", "date"]) 242 | -------------------------------------------------------------------------------- /wavebreaking/processing/plots.py: -------------------------------------------------------------------------------- 1 | """""" 2 | """ 3 | This file is part of WaveBreaking. 4 | 5 | WaveBreaking provides indices to detect, classify 6 | and track Rossby Wave Breaking (RWB) in climate and weather data. 7 | The tool was developed during my master thesis at the University of Bern. 8 | Link to thesis: https://occrdata.unibe.ch/students/theses/msc/406.pdf 9 | 10 | --- 11 | 12 | Plotting functions 13 | """ 14 | 15 | __author__ = "Severin Kaderli" 16 | __license__ = "MIT" 17 | __email__ = "severin.kaderli@unibe.ch" 18 | 19 | # import modules 20 | import xarray as xr 21 | import numpy as np 22 | import pandas as pd 23 | import geopandas as gpd 24 | import matplotlib.pyplot as plt 25 | import matplotlib 26 | import cartopy.crs as ccrs 27 | import cartopy.feature as cfeature 28 | 29 | from wavebreaking.utils.data_utils import ( 30 | check_argument_types, 31 | get_dimension_attributes, 32 | check_empty_dataframes, 33 | ) 34 | from wavebreaking.utils import plot_utils 35 | from wavebreaking.processing import spatial 36 | 37 | 38 | @check_argument_types(["flag_data"], [xr.DataArray]) 39 | @get_dimension_attributes("flag_data") 40 | def plot_clim( 41 | flag_data, 42 | seasons=None, 43 | proj=None, 44 | size=None, 45 | smooth_passes=5, 46 | periodic=True, 47 | labels=True, 48 | levels=None, 49 | cmap=None, 50 | title="", 51 | *args, 52 | **kwargs 53 | ): 54 | """ 55 | Creates a simple climatological plot showing the occurrence frequency of the detected events. 56 | Dimension names ("time_name", "lon_name", "lat_name"), size ("ntime", "nlon", "nlat") 57 | and resolution ("dlon", "dlat") can be passed as key=value argument. 58 | 59 | Parameters 60 | ---------- 61 | flag_data : xarray.DataArray 62 | data containing the locations of the events flagged with the value 1 63 | seasons : list or array, optional 64 | months of the seasons for occurrence frequency calculation (e.g. [12, 1, 2]) 65 | proj : cartopy.crs, optional 66 | cartopy projection object 67 | size : tuple of integers, optional 68 | size of the figure in the format (width, height) 69 | smooth_passes : int or float, optional 70 | number of smoothing passes of the 5-point smoothing of the occurrence frequencies 71 | periodic : bool, optional 72 | If True, the first longitude is added at the end to close the gap in a polar projection 73 | labels : bool, optional 74 | If False, no labels are added to the plot 75 | levels : list or array, optional 76 | Colorbar levels. If not provided, default levels are used. 
77 | cmap : string, optional 78 | Name of a valid cmap. If not provided, a default cmap is used. 79 | title : string, optional 80 | Title of the plot 81 | 82 | Returns 83 | ------- 84 | plot : matplotlib.pyplot 85 | Climatological plot of the occurrence frequencies. 86 | """ 87 | 88 | # define data crs 89 | data_crs = ccrs.PlateCarree() 90 | 91 | # initialize figure 92 | proj = proj if proj is not None else data_crs 93 | size = size if size is not None else (12, 8) 94 | fig, ax = plt.subplots(1, 1, subplot_kw=dict(projection=proj), figsize=size) 95 | 96 | # calculate occurrence frequencies, if provided for seasons 97 | if seasons is None: 98 | freq = ( 99 | xr.where(flag_data > 0, 1, 0).sum(dim=kwargs["time_name"]) 100 | / kwargs["ntime"] 101 | * 100 102 | ) 103 | else: 104 | ds_season = flag_data.sel( 105 | {kwargs["time_name"]: flag_data[kwargs["time_name"]].dt.month.isin(seasons)} 106 | ) 107 | freq = ( 108 | xr.where(ds_season > 0, 1, 0).sum(dim=kwargs["time_name"]) 109 | / len(ds_season[kwargs["time_name"]]) 110 | * 100 111 | ) 112 | 113 | # perform smoothing 114 | freq = spatial.calculate_smoothed_field( 115 | freq.expand_dims("time"), smooth_passes 116 | ).isel(time=0) 117 | 118 | # add longitude to ensure that there is no gap in a periodic field 119 | if periodic is True: 120 | freq = plot_utils.calculate_periodic_field(freq, **kwargs) 121 | 122 | # define levels 123 | if levels is None: 124 | levels = plot_utils.get_levels(freq.min(), freq.max()) 125 | 126 | # define cmap 127 | if cmap is None: 128 | cmap = plot_utils.get_new_cmap("RdYlBu_r") 129 | 130 | # plot frequencies 131 | p = freq.where(freq > 0, -999).plot.contourf( 132 | ax=ax, 133 | cmap=cmap, 134 | levels=levels, 135 | transform=data_crs, 136 | add_colorbar=False, 137 | extend="max", 138 | ) 139 | 140 | # define colorbar 141 | cax = fig.add_axes( 142 | [ 143 | ax.get_position().x1 + 0.05, 144 | ax.get_position().y0, 145 | 0.015, 146 | ax.get_position().height, 147 | ] 148 | ) 149 | plot_utils.add_colorbar(p, cax, levels, label="Occurrence frequency in %") 150 | 151 | # add coast lines and grid lines 152 | ax.add_feature(cfeature.COASTLINE, color="dimgrey") 153 | ax.gridlines(draw_labels=False, color="black", linestyle="dotted", linewidth=1.1) 154 | 155 | # plot labels 156 | if labels is True: 157 | plot_utils.add_grid_lines(ax) 158 | 159 | # make a circular cut out for the NorthPolarStereo projection 160 | if proj == ccrs.NorthPolarStereo(): 161 | plot_utils.add_circular_boundary(ax) 162 | plot_utils.add_circular_patch(ax) 163 | 164 | # set title 165 | ax.set_title(title, fontweight="bold", fontsize=20) 166 | 167 | 168 | @check_argument_types(["flag_data"], [xr.DataArray]) 169 | @get_dimension_attributes("flag_data") 170 | def plot_step( 171 | flag_data, 172 | step, 173 | data=None, 174 | contour_levels=None, 175 | proj=None, 176 | size=None, 177 | periodic=True, 178 | labels=True, 179 | levels=None, 180 | cmap="RdBu_r", 181 | color_events="gold", 182 | title="", 183 | *args, 184 | **kwargs 185 | ): 186 | """ 187 | Creates a plot showing the events at one time step. 188 | Dimension names ("time_name", "lon_name", "lat_name"), size ("ntime", "nlon", "nlat") 189 | and resolution ("dlon", "dlat") can be passed as key=value argument. 
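    A minimal sketch (assuming flag_data comes from to_xarray and data is the field used for the index calculation): plot_step(flag_data, step=0, data=data, contour_levels=[2]).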
190 | 
191 |     Parameters
192 |     ----------
193 |     flag_data : xarray.DataArray
194 |         Data containing the locations of the events flagged with the value 1
195 |     step : int or string
196 |         index or name of a time step in the xarray.Dataset
197 |     data : xarray.DataArray, optional
198 |         Data that has been used to calculate the contours and the indices
199 |     contour_levels : array_like, optional
200 |         contour levels that are shown in the plot
201 |     proj : cartopy.crs, optional
202 |         cartopy projection object
203 |     size : tuple of integers, optional
204 |         size of the figure in the format (width, height)
205 |     periodic : bool, optional
206 |         If True, the first longitude is added at the end to close the gap in a polar projection
207 |     labels : bool, optional
208 |         If False, no labels are added to the plot
209 |     levels : list or array, optional
210 |         Colorbar levels. If not provided, default levels are used.
211 |     cmap : string, optional
212 |         Name of a valid cmap. If not provided, a default cmap is used.
213 |     color_events : string, optional
214 |         Color of the events
215 |     title : string, optional
216 |         Title of the plot
217 | 
218 |     Returns
219 |     -------
220 |     plot : matplotlib.pyplot
221 |         Plot of one time step.
222 |     """
223 | 
224 |     # define data crs
225 |     data_crs = ccrs.PlateCarree()
226 | 
227 |     # initialize figure
228 |     proj = proj if proj is not None else data_crs
229 |     size = size if size is not None else (12, 8)
230 |     fig, ax = plt.subplots(1, 1, subplot_kw=dict(projection=proj), figsize=size)
231 | 
232 |     # select data
233 |     if isinstance(step, (str, np.datetime64)):
234 |         try:
235 |             flag = flag_data.sel({kwargs["time_name"]: step})
236 |         except KeyError:
237 |             errmsg = "step {} not supported or out of range!".format(step)
238 |             raise KeyError(errmsg)
239 |     else:
240 |         try:
241 |             flag = flag_data.isel({kwargs["time_name"]: step})
242 |         except KeyError:
243 |             errmsg = "step {} not supported or out of range!".format(step)
244 |             raise KeyError(errmsg)
245 | 
246 |     # get date
247 |     date = flag[kwargs["time_name"]].values
248 |     if date.dtype == np.dtype("datetime64[ns]"):
249 |         date = pd.Timestamp(date).strftime("%Y-%m-%dT%H")
250 | 
251 |     # plot field data if provided
252 |     if data is not None:
253 |         field = data.sel({kwargs["time_name"]: date})
254 |         if periodic is True:
255 |             field = plot_utils.calculate_periodic_field(field, **kwargs)
256 | 
257 |         if levels is None:
258 |             levels = plot_utils.get_levels(field.min(), field.max())
259 | 
260 |         p = field.plot.contourf(
261 |             ax=ax,
262 |             cmap=cmap,
263 |             levels=levels,
264 |             transform=data_crs,
265 |             add_colorbar=False,
266 |             alpha=0.8,
267 |         )
268 | 
269 |         if contour_levels is not None:
270 |             # check contour levels
271 |             try:
272 |                 iter(contour_levels)
273 |             except Exception:
274 |                 contour_levels = [contour_levels]
275 | 
276 |             field.plot.contour(
277 |                 ax=ax,
278 |                 transform=data_crs,
279 |                 levels=contour_levels,
280 |                 linestyles="-",
281 |                 linewidths=2,
282 |                 colors="#000000",
283 |             )
284 | 
285 |         # define colorbar
286 |         if all(x in field.attrs for x in ["units", "long_name"]):
287 |             cbar_label = field.long_name + " [" + field.units + "]"
288 |         else:
289 |             cbar_label = None
290 | 
291 |         cax = fig.add_axes(
292 |             [
293 |                 ax.get_position().x1 + 0.05,
294 |                 ax.get_position().y0,
295 |                 0.015,
296 |                 ax.get_position().height,
297 |             ]
298 |         )
299 |         plot_utils.add_colorbar(p, cax, levels, label=cbar_label)
300 | 
301 |     # plot flag data
302 |     if periodic is True:
303 |         flag = plot_utils.calculate_periodic_field(flag, **kwargs)
304 |
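    # note: only grid cells with an event (flag > 0) are shaded in color_events;
    # all other cells are masked out by the where() selection below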
305 |     flag.where(flag > 0).plot.contourf(
306 |         ax=ax,
307 |         colors=["white", color_events],
308 |         levels=[0, 0.5],
309 |         transform=data_crs,
310 |         add_colorbar=False,
311 |     )
312 | 
313 |     # add the date to the figure
314 |     plt.text(
315 |         0.99,
316 |         0.98,
317 |         "Date: " + str(date),
318 |         fontsize=10,
319 |         fontweight="bold",
320 |         ha="right",
321 |         va="top",
322 |         transform=ax.transAxes,
323 |     )
324 | 
325 |     # add coast lines and grid lines
326 |     ax.add_feature(cfeature.COASTLINE, color="dimgrey")
327 |     ax.gridlines(draw_labels=False, color="black", linestyle="dotted", linewidth=1.1)
328 | 
329 |     # plot labels
330 |     if labels is True:
331 |         plot_utils.add_grid_lines(ax)
332 | 
333 |     # make a circular cut out for the NorthPolarStereo projection
334 |     if proj == ccrs.NorthPolarStereo():
335 |         plot_utils.add_circular_boundary(ax)
336 |         plot_utils.add_circular_patch(ax)
337 | 
338 |     # set title
339 |     ax.set_title(title, fontweight="bold", fontsize=20)
340 | 
341 | 
342 | @check_argument_types(["data", "events"], [xr.DataArray, gpd.GeoDataFrame])
343 | @check_empty_dataframes
344 | @get_dimension_attributes("data")
345 | def plot_tracks(
346 |     data,
347 |     events,
348 |     proj=None,
349 |     size=None,
350 |     min_path=0,
351 |     plot_events=False,
352 |     labels=True,
353 |     title="",
354 |     *args,
355 |     **kwargs
356 | ):
357 |     """
358 |     Creates a plot showing the tracks of the temporally coherent events.
359 |     Dimension names ("time_name", "lon_name", "lat_name"), size ("ntime", "nlon", "nlat")
360 |     and resolution ("dlon", "dlat") can be passed as key=value argument.
361 | 
362 |     Parameters
363 |     ----------
364 |     data : xarray.DataArray
365 |         Data that has been used to calculate the contours and the indices
366 |     events : geopandas.GeoDataFrame
367 |         GeoDataFrame with the date, coordinates and label of the identified events
368 |     proj : cartopy.crs, optional
369 |         cartopy projection object
370 |     size : tuple of integers, optional
371 |         size of the figure in the format (width, height)
372 |     min_path : int, optional
373 |         Minimal number of time steps an event has to be tracked
374 |     plot_events : bool, optional
375 |         If True, the events are also plotted by a shaded area
376 |     labels : bool, optional
377 |         If False, no labels are added to the plot
378 |     title : string, optional
379 |         Title of the plot
380 | 
381 |     Returns
382 |     -------
383 |     plot : matplotlib.pyplot
384 |         Plot of the tracks
385 |     """
386 | 
387 |     # define data crs
388 |     data_crs = ccrs.PlateCarree()
389 | 
390 |     # initialize figure
391 |     proj = proj if proj is not None else data_crs
392 |     size = size if size is not None else (12, 8)
393 |     fig, ax = plt.subplots(1, 1, subplot_kw=dict(projection=proj), figsize=size)
394 | 
395 |     # set background color
396 |     ax.set_facecolor((0.1, 0.1, 0.1, 0.05))
397 | 
398 |     # get colors for event plotting
399 |     lab, count = np.unique(events.label, return_counts=True)
400 |     lab_sel = lab[count > min_path]
401 |     color_range = {}
402 |     for r, name in enumerate(lab_sel):
403 |         color_range[name] = matplotlib.cm.get_cmap("rainbow")(r / len(lab_sel))
404 | 
405 |     # group event data by label and plot each path
406 |     for name, group in events.groupby("label"):
407 |         if len(group) > min_path:
408 |             lons = np.asarray(group.com.tolist())[:, 0]
409 |             lats = np.asarray(group.com.tolist())[:, 1]
410 | 
411 |             # plot start point of each path
412 |             ax.scatter(
413 |                 lons[0],
414 |                 lats[0],
415 |                 s=14,
416 |                 zorder=10,
417 |                 facecolors="none",
418 |                 edgecolor="black",
419 |                 transform=data_crs,
420 |             )
421 |             ax.plot(lons[0], lats[0], ".", color="red", transform=data_crs, alpha=0.7)
422 | 
423 |             # plot the coordinates of the events
424 |             if plot_events is True:
425 |                 group.plot(
426 |                     ax=ax, color=color_range[name], transform=data_crs, alpha=0.5
427 |                 )
428 | 
429 |             max_lon = max(data[kwargs["lon_name"]].values) + kwargs["dlon"]
430 |             min_lon = min(data[kwargs["lon_name"]].values)
431 | 
432 |             # check if the path needs to be split due to a crossing of the date border
433 |             diffs = abs(np.diff(lons)) > (max_lon - min_lon) / 2
434 |             split = [[i - 1, i] for i in np.where(diffs)[0] + 1]
435 |             no_split = [[i - 1, i] for i in np.where(~diffs)[0] + 1]
436 | 
437 |             # plot paths that do not need to be split
438 |             for item in no_split:
439 |                 ev_seg = np.asarray(group[item[0] : item[1] + 1].com.tolist())
440 | 
441 |                 ax.plot(
442 |                     ev_seg[:, 0], ev_seg[:, 1], "-", transform=data_crs, color="black"
443 |                 )
444 | 
445 |             # plot paths that have to be split
446 |             for item in split:
447 |                 ev_seg = np.asarray(group[item[0] : item[1] + 1].com.tolist())
448 | 
449 |                 # upstream split
450 |                 if np.diff(ev_seg[:, 0]) < 0:
451 |                     lons_plot = [(ev_seg[0, 0], max_lon), (min_lon, ev_seg[1, 0])]
452 |                     lon_diffs = np.diff(lons_plot)
453 | 
454 |                     m = np.diff(ev_seg[:, 1])[0] / np.sum(lon_diffs)
455 |                     lats_plot = [
456 |                         (ev_seg[0, 1], ev_seg[0, 1] + lon_diffs[0][0] * m),
457 |                         (ev_seg[1, 1] - lon_diffs[1][0] * m, ev_seg[1, 1]),
458 |                     ]
459 | 
460 |                 # downstream split
461 |                 else:
462 |                     lons_plot = [(ev_seg[0, 0], min_lon), (max_lon, ev_seg[1, 0])]
463 |                     lon_diffs = np.diff(lons_plot)
464 | 
465 |                     m = np.diff(ev_seg[:, 1])[0] / np.sum(lon_diffs)
466 |                     lats_plot = [
467 |                         (ev_seg[0, 1], ev_seg[0, 1] + lon_diffs[0][0] * m),
468 |                         (ev_seg[1, 1] - lon_diffs[1][0] * m, ev_seg[1, 1]),
469 |                     ]
470 | 
471 |                 # plot split segments
472 |                 for lon, lat in zip(lons_plot, lats_plot):
473 |                     ax.plot(lon, lat, "-", transform=data_crs, color="black")
474 | 
475 |     # plot invisible data to get a plot of the full grid
476 |     data.isel({kwargs["time_name"]: 0}).plot.contourf(
477 |         ax=ax, transform=data_crs, add_colorbar=False, alpha=0
478 |     )
479 | 
480 |     # add coast lines and grid lines
481 |     ax.add_feature(cfeature.COASTLINE, color="dimgrey")
482 |     ax.gridlines(draw_labels=False, color="black", linestyle="dotted", linewidth=1.1)
483 | 
484 |     # plot labels
485 |     if labels is True:
486 |         plot_utils.add_grid_lines(ax)
487 | 
488 |     # make a circular cut out for the NorthPolarStereo projection
489 |     if proj == ccrs.NorthPolarStereo():
490 |         plot_utils.add_circular_boundary(ax)
491 |         plot_utils.add_circular_patch(ax)
492 | 
493 |     # set title
494 |     ax.set_title(title, fontweight="bold", fontsize=20)
495 | 
-------------------------------------------------------------------------------- /wavebreaking/processing/spatial.py: --------------------------------------------------------------------------------
 1 | """"""
 2 | """
 3 | This file is part of WaveBreaking.
 4 | 
 5 | WaveBreaking provides indices to detect, classify
 6 | and track Rossby Wave Breaking (RWB) in climate and weather data.
 7 | The tool was developed during my master thesis at the University of Bern.
--------------------------------------------------------------------------------
/wavebreaking/processing/spatial.py:
--------------------------------------------------------------------------------
1 | """
2 | 
3 | This file is part of WaveBreaking.
4 | 
5 | WaveBreaking provides indices to detect, classify
6 | and track Rossby Wave Breaking (RWB) in climate and weather data.
7 | The tool was developed during my master thesis at the University of Bern.
8 | Link to thesis: https://occrdata.unibe.ch/students/theses/msc/406.pdf
9 | 
10 | ---
11 | 
12 | Spatial pre-processing functions
13 | """
14 | 
15 | __author__ = "Severin Kaderli"
16 | __license__ = "MIT"
17 | __email__ = "severin.kaderli@unibe.ch"
18 | 
19 | # import modules
20 | import xarray as xr
21 | import numpy as np
22 | from scipy import ndimage
23 | 
24 | from wavebreaking.utils.data_utils import check_argument_types, get_dimension_attributes
25 | 
26 | 
27 | @check_argument_types(["u", "v"], [xr.DataArray, xr.DataArray])
28 | @get_dimension_attributes("u")
29 | def calculate_momentum_flux(u, v, *args, **kwargs):
30 |     """
31 |     Calculate the momentum flux derived from the product of the deviations
32 |     of both wind components from the zonal mean.
33 |     Dimension names ("time_name", "lon_name", "lat_name"), size ("ntime", "nlon", "nlat")
34 |     and resolution ("dlon", "dlat") can be passed as key=value arguments.
35 | 
36 |     Parameters
37 |     ----------
38 |     u : xarray.DataArray
39 |         zonal (x) component of the wind
40 |     v : xarray.DataArray
41 |         meridional (y) component of the wind
42 | 
43 |     Returns
44 |     -------
45 |     momentum flux : xarray.DataArray
46 |         Data containing the momentum flux
47 |     """
48 | 
49 |     # calculate deviations from the zonal mean
50 |     u_prime = u - u.mean(kwargs["lon_name"])
51 |     v_prime = v - v.mean(kwargs["lon_name"])
52 | 
53 |     # mflux is given by the product of the deviations of both wind components
54 |     mflux = u_prime * v_prime
55 |     mflux.name = "mflux"
56 | 
57 |     return mflux
58 | 
59 | 
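# Usage sketch (the file and variable names are assumptions, not part of the package):
#
#     import xarray as xr
#     ds = xr.open_dataset("your_data.nc")
#     mflux = calculate_momentum_flux(ds["u"], ds["v"])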
60 | @check_argument_types(["data"], [xr.DataArray])
61 | @get_dimension_attributes("data")
62 | def calculate_smoothed_field(
63 |     data,
64 |     passes,
65 |     weights=np.array([[0, 1, 0], [1, 2, 1], [0, 1, 0]]),
66 |     mode="wrap",
67 |     *args,
68 |     **kwargs
69 | ):
70 |     """
71 |     Calculate smoothed field based on a two-dimensional weight kernel
72 |     and multiple smoothing passes. Default weight kernel is a 3x3
73 |     5-point smoothing with double-weighted centre. The arguments
74 |     "weights" and "mode" must be accepted by scipy.ndimage.convolve.
75 |     Values at the latitude border are always set to NaN.
76 |     Dimension names ("time_name", "lon_name", "lat_name"), size ("ntime", "nlon", "nlat")
77 |     and resolution ("dlon", "dlat") can be passed as key=value arguments.
78 | 
79 |     Parameters
80 |     ----------
81 |     data : xarray.DataArray
82 |         data to smooth
83 |     passes : int
84 |         number of smoothing passes of the 5-point smoothing
85 |     weights : array_like, optional
86 |         two-dimensional array of weights
87 |         (see scipy.ndimage.convolve function)
88 |     mode : string, optional
89 |         defines how the array is extended at boundaries
90 |         (see scipy.ndimage.convolve function)
91 | 
92 |     Returns
93 |     -------
94 |     smoothed data : xarray.DataArray
95 |         Data containing the smoothed field
96 |     """
97 | 
98 |     # perform smoothing
99 |     smoothed = []
100 |     for step in data[kwargs["time_name"]]:
101 |         temp = data.sel({kwargs["time_name"]: step})
102 |         for p in range(passes):
103 |             temp = ndimage.convolve(temp, weights=weights, mode=mode) / np.sum(weights)
104 | 
105 |         # set latitude border values to nan
106 |         border_size = int(weights.shape[0] / 2 + 0.5)
107 |         temp[np.arange(-border_size, border_size), :] = np.nan
108 | 
109 |         smoothed.append(temp)
110 | 
111 |     # define DataArray
112 |     da = xr.DataArray(
113 |         smoothed,
114 |         coords=[
115 |             data[kwargs["time_name"]],
116 |             data[kwargs["lat_name"]],
117 |             data[kwargs["lon_name"]],
118 |         ],
119 |     )
120 | 
121 |     # set name
122 |     da.name = "smooth_" + data.name
123 | 
124 |     # assign attributes
125 |     da = da.assign_attrs(data.attrs)
126 |     da.attrs["smooth_passes"] = passes
127 | 
128 |     return da
129 | 
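# Usage sketch (assumes `da` is a (time, lat, lon) xarray.DataArray; five passes
# of the default 5-point kernel is an illustrative choice, not a recommendation):
#
#     smoothed = calculate_smoothed_field(da, passes=5)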
--------------------------------------------------------------------------------
/wavebreaking/utils/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | This file is part of WaveBreaking.
3 | 
4 | WaveBreaking provides indices to detect, classify
5 | and track Rossby Wave Breaking (RWB) in climate and weather data.
6 | The tool was developed during my master thesis at the University of Bern.
7 | Link to thesis: https://occrdata.unibe.ch/students/theses/msc/406.pdf
8 | 
9 | ---
10 | Initiate utils
11 | """
12 | 
13 | __author__ = "Severin Kaderli"
14 | __license__ = "MIT"
15 | __email__ = "severin.kaderli@unibe.ch"
16 | 
--------------------------------------------------------------------------------
/wavebreaking/utils/data_utils.py:
--------------------------------------------------------------------------------
1 | """
2 | This file is part of WaveBreaking.
3 | 
4 | WaveBreaking provides indices to detect, classify
5 | and track Rossby Wave Breaking (RWB) in climate and weather data.
6 | The tool was developed during my master thesis at the University of Bern.
7 | Link to thesis: https://occrdata.unibe.ch/students/theses/msc/406.pdf
8 | 
9 | ---
10 | 
11 | Check input data and arguments
12 | """
13 | 
14 | __author__ = "Severin Kaderli"
15 | __license__ = "MIT"
16 | __email__ = "severin.kaderli@unibe.ch"
17 | 
18 | # import modules
19 | import numpy as np
20 | import geopandas as gpd
21 | import functools
22 | 
23 | import warnings
24 | 
25 | warnings.filterwarnings("ignore")
26 | 
27 | 
28 | def check_argument_types(arguments, types):
29 |     """
30 |     decorator to check the type of function arguments
31 |     """
32 | 
33 |     def decorator(func):
34 |         @functools.wraps(func)
35 |         def wrapper(*args, **kwargs):
36 |             for (arg_index, arg_name), arg_type in zip(enumerate(arguments), types):
37 |                 if arg_name in kwargs:
38 |                     if not isinstance(kwargs[arg_name], arg_type):
39 |                         errmsg = arg_name + " has to be a " + str(arg_type)[8:-2] + "!"
40 |                         raise TypeError(errmsg)
41 |                 else:
42 |                     if not isinstance(args[arg_index], arg_type):
43 |                         errmsg = arg_name + " has to be a " + str(arg_type)[8:-2] + "!"
44 |                         raise TypeError(errmsg)
45 | 
46 |             return func(*args, **kwargs)
47 | 
48 |         return wrapper
49 | 
50 |     return decorator
51 | 
52 | 
53 | def check_empty_dataframes(func):
54 |     """
55 |     decorator to check if there is an empty DataFrame
56 |     """
57 | 
58 |     @functools.wraps(func)
59 |     def wrapper(*args, **kwargs):
60 |         for item in args:
61 |             if isinstance(item, gpd.GeoDataFrame):
62 |                 if item.empty:
63 |                     errmsg = "geopandas.GeoDataFrame is empty!"
64 |                     raise ValueError(errmsg)
65 |         for key, item in kwargs.items():
66 |             if isinstance(item, gpd.GeoDataFrame):
67 |                 if item.empty:
68 |                     errmsg = key + " geopandas.GeoDataFrame is empty!"
69 |                     raise ValueError(errmsg)
70 | 
71 |         return func(*args, **kwargs)
72 | 
73 |     return wrapper
74 | 
75 | 
76 | def get_time_name(data):
77 |     """
78 |     check for 'time' dimension and return name
79 |     """
80 | 
81 |     for dim in data.dims:
82 |         if (
83 |             ("units" in data[dim].attrs and "since" in data[dim].attrs["units"])
84 |             or (
85 |                 "units" in data[dim].encoding and "since" in data[dim].encoding["units"]
86 |             )
87 |             or (data[dim].dtype == np.dtype("datetime64[ns]"))
88 |             or (dim in ["time"])
89 |         ):
90 |             return dim
91 | 
92 |     errmsg = "'time' dimension (dtype='datetime64[ns]') not found."
93 |     hint = " Add time dimension with xarray.DataArray.expand_dims('time')."
94 |     raise ValueError(errmsg + hint)
95 | 
96 | 
97 | def get_lon_name(data):
98 |     """
99 |     check for 'longitude' dimension and return name
100 |     """
101 | 
102 |     for dim in data.dims:
103 |         if (
104 |             "units" in data[dim].attrs
105 |             and data[dim].attrs["units"] in ["degree_east", "degrees_east"]
106 |         ) or dim in ["lon", "longitude", "x"]:
107 |             return dim
108 | 
109 |     errmsg = "'longitude' dimension (units='degrees_east') not found."
110 |     raise ValueError(errmsg)
111 | 
112 | 
113 | def get_lat_name(data):
114 |     """
115 |     check for 'latitude' dimension and return name
116 |     """
117 | 
118 |     for dim in data.dims:
119 |         if (
120 |             "units" in data[dim].attrs
121 |             and data[dim].attrs["units"] in ["degree_north", "degrees_north"]
122 |         ) or dim in ["lat", "latitude", "y"]:
123 |             return dim
124 | 
125 |     errmsg = "'latitude' dimension (units='degrees_north') not found."
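    # no latitude-like dimension was found, neither by units nor by a common name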
126 | raise ValueError(errmsg) 127 | 128 | 129 | def get_spatial_resolution(data, dim): 130 | """ 131 | check resolution of the longitude and latitude coordinate 132 | """ 133 | 134 | delta = abs(np.unique((data[dim].data[1:] - data[dim].data[:-1]))) 135 | 136 | if len(delta) > 1: 137 | errmsg = "No regular grid found for dimension {}.".format(dim) 138 | raise ValueError(errmsg) 139 | 140 | elif delta[0] == 0: 141 | errmsg = "Two equivalent coordinates found for dimension {}.".format(dim) 142 | raise ValueError(errmsg) 143 | 144 | return delta[0] 145 | 146 | 147 | def get_dimension_attributes(arg_name): 148 | """ 149 | decorator to get the dimension, size and resolution of the input data 150 | """ 151 | 152 | def decorator(func): 153 | @functools.wraps(func) 154 | def wrapper(*args, **kwargs): 155 | if arg_name in kwargs: 156 | data = kwargs[arg_name] 157 | else: 158 | data = args[0] 159 | 160 | names = ["time_name", "lon_name", "lat_name"] 161 | sizes = ["ntime", "nlon", "nlat"] 162 | resolutions = ["dlon", "dlat"] 163 | 164 | get_dims = [get_time_name, get_lon_name, get_lat_name] 165 | 166 | for name, get_dim in zip(names, get_dims): 167 | if name not in kwargs: 168 | kwargs[name] = get_dim(data) 169 | 170 | for size, name in zip(sizes, names): 171 | if size not in kwargs: 172 | kwargs[size] = len(data[kwargs[name]]) 173 | 174 | for res, name in zip(resolutions, names[1:]): 175 | if res not in kwargs: 176 | kwargs[res] = get_spatial_resolution(data, kwargs[name]) 177 | 178 | if len(data.dims) > 3: 179 | err_dims = [ 180 | dim 181 | for dim in data.dims 182 | if dim not in [kwargs[name] for name in names] 183 | ] 184 | errmsg = "Unexpected dimension(s): {}. Select dimensions first.".format( 185 | err_dims 186 | ) 187 | raise ValueError(errmsg) 188 | 189 | return func(*args, **kwargs) 190 | 191 | return wrapper 192 | 193 | return decorator 194 | 195 | 196 | def correct_dimension_orientation(data, *args, **kwargs): 197 | """ 198 | check the orientation of the latitude and longitude dimension 199 | """ 200 | 201 | # check orientation of latitude dimensions (affects the orientation condition) 202 | wrong_lat_orientation = np.average(np.diff(data[kwargs["lat_name"]])) < 0 203 | wrong_lon_orientation = np.average(np.diff(data[kwargs["lon_name"]])) < 0 204 | 205 | # correct latitude orientation 206 | if wrong_lat_orientation: 207 | data = data.sortby(kwargs["lat_name"], ascending=True) 208 | 209 | # correct longitude orientation 210 | if wrong_lon_orientation: 211 | data = data.sortby(kwargs["lon_name"], ascending=True) 212 | 213 | return data 214 | -------------------------------------------------------------------------------- /wavebreaking/utils/index_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file is part of WaveBreaking. 3 | 4 | WaveBreaking provides indices to detect, classify 5 | and track Rossby Wave Breaking (RWB) in climate and weather data. 6 | The tool was developed during my master thesis at the University of Bern. 
7 | Link to thesis: https://occrdata.unibe.ch/students/theses/msc/406.pdf 8 | 9 | --- 10 | 11 | Utility functions for the index calculation 12 | """ 13 | 14 | __author__ = "Severin Kaderli" 15 | __license__ = "MIT" 16 | __email__ = "severin.kaderli@unibe.ch" 17 | 18 | # import modules 19 | import numpy as np 20 | import pandas as pd 21 | import geopandas as gpd 22 | from shapely.geometry import Polygon, MultiPolygon 23 | from shapely.ops import linemerge, unary_union, polygonize 24 | from shapely.validation import make_valid 25 | from tqdm import tqdm 26 | import functools 27 | 28 | # import logger 29 | import logging 30 | 31 | logger = logging.getLogger(__name__) 32 | logging.basicConfig(format="%(levelname)s: %(message)s", level=logging.INFO) 33 | 34 | 35 | def calculate_properties(events, data, intensity, periodic_add, **kwargs): 36 | # get all grid points 37 | x, y = np.meshgrid( 38 | np.arange(0, kwargs["nlon"] + int(periodic_add / kwargs["dlon"])), 39 | np.arange(0, kwargs["nlat"]), 40 | ) 41 | x, y = x.flatten(), y.flatten() 42 | points = gpd.GeoDataFrame( 43 | pd.DataFrame({"x": x, "y": y}), geometry=gpd.points_from_xy(x, y) 44 | ) 45 | 46 | # get coordinates of all events 47 | buffer = events.copy() 48 | buffer.geometry = buffer.geometry.buffer( 49 | ((kwargs["dlon"] + kwargs["dlat"]) / 2) / 2 50 | ) 51 | merged = gpd.sjoin(buffer, points, how="inner", predicate="contains").sort_index() 52 | 53 | # get original coordinates 54 | merged["lon"] = data[kwargs["lon_name"]].values[merged.x % kwargs["nlon"]] 55 | merged["lat"] = data[kwargs["lat_name"]].values[merged.y] 56 | 57 | # calculate area equator in km^2 58 | area_cell = ( 59 | np.round(6371 * 2 * np.pi / (360 / ((kwargs["dlon"] + kwargs["dlat"]) / 2))) 60 | ** 2 61 | ) 62 | weight_lat = np.cos(np.radians(data[kwargs["lat_name"]].values)) * area_cell 63 | merged["areas"] = weight_lat[merged.y] 64 | 65 | # calculate mean_var, intensity and centre of mass 66 | merged["mean_var"] = ( 67 | merged.areas 68 | * data.loc[ 69 | { 70 | kwargs["time_name"]: merged.date.to_xarray(), 71 | kwargs["lat_name"]: merged.lat.to_xarray(), 72 | kwargs["lon_name"]: merged.lon.to_xarray(), 73 | } 74 | ] 75 | ) 76 | 77 | if intensity is not None: 78 | merged["intensity"] = ( 79 | merged.areas 80 | * intensity.loc[ 81 | { 82 | kwargs["time_name"]: merged.date.to_xarray(), 83 | kwargs["lat_name"]: merged.lat.to_xarray(), 84 | kwargs["lon_name"]: merged.lon.to_xarray(), 85 | } 86 | ] 87 | ) 88 | else: 89 | merged["intensity"] = 0 90 | 91 | merged["x_com"] = merged.x * merged.areas 92 | merged["y_com"] = merged.y * merged.areas 93 | 94 | agg_merged = merged.groupby("id").agg( 95 | { 96 | "areas": "sum", 97 | "mean_var": "sum", 98 | "intensity": "sum", 99 | "x_com": "sum", 100 | "y_com": "sum", 101 | } 102 | ) 103 | 104 | # calculate centre of mass 105 | com_x = data[kwargs["lon_name"]].values[ 106 | (agg_merged.x_com / agg_merged.areas).astype("int") % kwargs["nlon"] 107 | ] 108 | com_y = data[kwargs["lat_name"]].values[ 109 | (agg_merged.y_com / agg_merged.areas).astype("int") 110 | ] 111 | com = list(map(tuple, np.c_[com_x, com_y])) 112 | 113 | prop_dict = { 114 | "date": events.date, 115 | "level": events.level, 116 | "com": com, 117 | "mean_var": (agg_merged.mean_var / agg_merged.areas).round(2), 118 | "intensity": (agg_merged.intensity / agg_merged.areas).round(2), 119 | "event_area": agg_merged.areas.round(2), 120 | } 121 | 122 | # add orientation if available 123 | if "orientation" in events.columns: 124 | prop_dict["orientation"] = 
events.orientation 125 | 126 | return pd.DataFrame(prop_dict) 127 | 128 | 129 | def transform_polygons(events, data, **kwargs): 130 | def transform_coords(polygon): 131 | """ 132 | Transform coordinates to original grid 133 | """ 134 | # get coordinates and check split and last meridian 135 | coords = np.asarray(polygon.exterior.coords.xy).T.astype("int") 136 | split = (coords[:, 0] >= kwargs["nlon"]).any() 137 | 138 | # transform coordinates 139 | coords = np.c_[coords[:, 0] % kwargs["nlon"], coords[:, 1]] 140 | 141 | return split, Polygon( 142 | np.c_[ 143 | data[kwargs["lon_name"]][coords[:, 0]], 144 | data[kwargs["lat_name"]][coords[:, 1]], 145 | ] 146 | ) 147 | 148 | def split_polys(polygon): 149 | """ 150 | Split polygons at the last meridian 151 | """ 152 | # define last meridian 153 | p00, p01 = [kwargs["nlon"] - 1, kwargs["nlat"]], [kwargs["nlon"] - 1, 0] 154 | p10, p11 = [kwargs["nlon"], 0], [kwargs["nlon"], kwargs["nlat"]] 155 | meridian = Polygon([p00, p01, p10, p11]) 156 | 157 | # split polygons 158 | merged = linemerge([polygon.boundary, meridian.boundary]) 159 | borders = unary_union(merged) 160 | polygons = [ 161 | p 162 | for p in polygonize(borders) 163 | if not meridian.contains(p) and make_valid(polygon).contains(p) 164 | ] 165 | 166 | # transform if possible 167 | if len(polygons) == 0: 168 | return Polygon() 169 | elif len(polygons) == 1: 170 | return transform_coords(polygons[0])[1] 171 | else: 172 | polys = [transform_coords(p)[1] for p in polygons] 173 | return MultiPolygon(polys) 174 | 175 | # return GeoDataFrame 176 | gdf = gpd.GeoDataFrame( 177 | [transform_coords(row.geometry) for index, row in events.iterrows()], 178 | columns=["split", "geometry"], 179 | ) 180 | gdf.loc[gdf.split, "geometry"] = [ 181 | split_polys(row.geometry) for index, row in events[gdf.split].iterrows() 182 | ] 183 | 184 | return gdf 185 | 186 | 187 | def combine_shared(lst): 188 | """ 189 | This is an internal function that combines all elements of a list 190 | that have at least one element in common. 
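    For example (illustrative): [[1, 2], [2, 3], [4]] -> [[1, 2, 3], [4]]
    (the order of elements within each combined list may vary).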
191 | """ 192 | 193 | elements = lst.copy() 194 | output = [] 195 | while len(elements) > 0: 196 | first, *rest = elements 197 | first = set(first) 198 | 199 | lf = -1 200 | while len(first) > lf: 201 | lf = len(first) 202 | 203 | rest2 = [] 204 | for r in rest: 205 | if len(first.intersection(set(r))) > 0: 206 | first |= set(r) 207 | else: 208 | rest2.append(r) 209 | rest = rest2 210 | 211 | output.append(list(first)) 212 | elements = rest 213 | 214 | return output 215 | 216 | 217 | def iterate_time_dimension(func): 218 | """ 219 | decorator to iterate function over time dimension 220 | """ 221 | 222 | @functools.wraps(func) 223 | def wrapper(data, contour_levels, *args, **kwargs): 224 | steps = data[kwargs["time_name"]] 225 | repeat_func = [] 226 | 227 | for step in tqdm( 228 | steps, desc="Calculating contours ", leave=True, position=0 229 | ): 230 | kwargs["step"] = step 231 | repeat_func.append(func(data, contour_levels, *args, **kwargs)) 232 | 233 | return pd.concat(repeat_func).reset_index(drop=True) 234 | 235 | return wrapper 236 | 237 | 238 | def iterate_contour_levels(func): 239 | """ 240 | decorator to iterate function over contour levels 241 | """ 242 | 243 | @functools.wraps(func) 244 | def wrapper(data, contour_levels, *args, **kwargs): 245 | repeat_func = [] 246 | 247 | try: 248 | iter(contour_levels) 249 | except Exception: 250 | contour_levels = [contour_levels] 251 | 252 | for level in contour_levels: 253 | kwargs["level"] = level 254 | repeat_func.append(func(data, contour_levels, *args, **kwargs)) 255 | 256 | return pd.concat(repeat_func).reset_index(drop=True) 257 | 258 | return wrapper 259 | -------------------------------------------------------------------------------- /wavebreaking/utils/plot_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file is part of WaveBreaking. 3 | 4 | WaveBreaking provides indices to detect, classify 5 | and track Rossby Wave Breaking (RWB) in climate and weather data. 6 | The tool was developed during my master thesis at the University of Bern. 
7 | Link to thesis: https://occrdata.unibe.ch/students/theses/msc/406.pdf 8 | 9 | --- 10 | 11 | Utility functions for the plotting routines 12 | """ 13 | 14 | __author__ = "Severin Kaderli" 15 | __license__ = "MIT" 16 | __email__ = "severin.kaderli@unibe.ch" 17 | 18 | # import modules 19 | import numpy as np 20 | import xarray as xr 21 | import matplotlib.pyplot as plt 22 | import matplotlib.colors as colors 23 | import matplotlib.patches as mpatches 24 | import matplotlib.path as mpath 25 | 26 | 27 | def calculate_periodic_field(da, **kwargs): 28 | """ 29 | Add first longitude at the end to ensure 30 | that there is no gap in a periodic field 31 | """ 32 | return xr.concat( 33 | [ 34 | da, 35 | da.isel({kwargs["lon_name"]: 0}).assign_coords( 36 | {kwargs["lon_name"]: da[kwargs["lon_name"]].max() + 1} 37 | ), 38 | ], 39 | dim=kwargs["lon_name"], 40 | ) 41 | 42 | 43 | def get_levels(min_freq, max_freq): 44 | """ 45 | Define default levels 46 | """ 47 | 48 | max_level = np.round(max_freq) 49 | min_level = np.round(min_freq) 50 | level_range = max_level - min_level 51 | 52 | if min_level < 0: 53 | max_both = np.abs([min_level, max_level]).max() 54 | if level_range / 16 > 0.5: 55 | return np.round(np.linspace(-max_both, max_both, num=18), 1)[1:-1] 56 | else: 57 | return np.round(np.linspace(-max_both, max_both, num=10), 1)[1:-1] 58 | else: 59 | return np.round(np.linspace(min_level, max_level, num=8), 1) 60 | 61 | 62 | def get_new_cmap(color_palette): 63 | """ 64 | Define default cmaps for climatological plot 65 | """ 66 | 67 | cmap = plt.get_cmap(color_palette) 68 | 69 | return colors.LinearSegmentedColormap.from_list( 70 | "trunc({n},{a:.2f},{b:.2f})".format(n=cmap.name, a=0.3, b=1), 71 | cmap(np.linspace(0.3, 1, 100)), 72 | ) 73 | 74 | 75 | def add_colorbar(plot, caxes, levels, label): 76 | """ 77 | Define colorbar 78 | """ 79 | 80 | cbar = plt.colorbar(plot, cax=caxes, drawedges=True) 81 | cbar.ax.set_yticklabels(levels, fontsize=12, weight="bold") 82 | cbar.set_label(label=label, size=12, fontweight="bold", labelpad=15) 83 | cbar.outline.set_color("black") 84 | cbar.outline.set_linewidth(2) 85 | cbar.dividers.set_color("black") 86 | cbar.dividers.set_linewidth(2) 87 | return cbar 88 | 89 | 90 | def add_grid_lines(axes): 91 | """ 92 | Define grid lines 93 | """ 94 | 95 | gr = axes.gridlines( 96 | draw_labels=True, color="black", linestyle="dotted", linewidth=1.1 97 | ) 98 | gr.xlabel_style = {"size": 12, "color": "black", "rotation": 0} 99 | gr.ylabel_style = {"size": 12, "color": "black"} 100 | return gr 101 | 102 | 103 | def add_circular_boundary(axes): 104 | """ 105 | Define circular boundary for NorthPolarStereo projection 106 | """ 107 | 108 | return axes.add_patch( 109 | mpatches.Circle( 110 | (0.5, 0.5), 111 | radius=0.5, 112 | color="k", 113 | linewidth=5, 114 | fill=False, 115 | transform=axes.transAxes, 116 | ) 117 | ) 118 | 119 | 120 | def add_circular_patch(axes): 121 | """ 122 | Define circular patch for NorthPolarStereo projection 123 | """ 124 | 125 | theta = np.linspace(0, 2 * np.pi, 100) 126 | center, radius = [0.5, 0.5], 0.5 127 | verts = np.vstack([np.sin(theta), np.cos(theta)]).T 128 | circle = mpath.Path(verts * radius + center) 129 | return axes.set_boundary(circle, transform=axes.transAxes) 130 | --------------------------------------------------------------------------------
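A minimal end-to-end sketch of how the pre-processing functions above fit together
(the file name, variable names and the number of passes are assumptions for
illustration, not part of the package):

    import xarray as xr
    from wavebreaking.processing.spatial import (
        calculate_momentum_flux,
        calculate_smoothed_field,
    )

    ds = xr.open_dataset("your_data.nc")                     # hypothetical input file
    mflux = calculate_momentum_flux(ds["u"], ds["v"])        # intensity field
    smoothed = calculate_smoothed_field(ds["pv"], passes=5)  # smoothed field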