├── .gitignore ├── .travis.yml ├── Data ├── PV.nc ├── barotropic_vorticity.nc ├── globalTracers.nc └── internalwave.nc ├── LICENSE ├── README.md ├── notebooks ├── 1.Keff_atmos.ipynb ├── 2.LWA_atmos.ipynb ├── 3.LAPE_ocean.ipynb └── 4.ContourLength_ocean.ipynb ├── pics ├── Keff.jpg ├── LAPE.jpg ├── LWA.jpg ├── contourLength.jpg └── sorting.jpg ├── setup.py ├── tests ├── LWA.py ├── test_Keff_atmos.py ├── test_Keff_ocean.py ├── test_LAPE.py ├── test_LWA.py ├── test_breaking.py ├── test_clength.py ├── test_dask.py ├── test_fractal.py ├── test_hist.py └── test_localLength.py └── xcontour ├── __init__.py ├── core.py └── utils.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | ### Python template 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | *.py[cod] 6 | *$py.class 7 | 8 | # C extensions 9 | *.so 10 | 11 | # Distribution / packaging 12 | .Python 13 | build/ 14 | develop-eggs/ 15 | dist/ 16 | downloads/ 17 | eggs/ 18 | .eggs/ 19 | lib/ 20 | lib64/ 21 | parts/ 22 | sdist/ 23 | var/ 24 | wheels/ 25 | pip-wheel-metadata/ 26 | share/python-wheels/ 27 | *.egg-info/ 28 | .installed.cfg 29 | *.egg 30 | MANIFEST 31 | 32 | # PyInstaller 33 | # Usually these files are written by a python script from a template 34 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
35 | *.manifest 36 | *.spec 37 | 38 | # Installer logs 39 | pip-log.txt 40 | pip-delete-this-directory.txt 41 | 42 | # Unit test / coverage reports 43 | htmlcov/ 44 | .tox/ 45 | .nox/ 46 | .coverage 47 | .coverage.* 48 | .cache 49 | nosetests.xml 50 | coverage.xml 51 | *.cover 52 | *.py,cover 53 | .hypothesis/ 54 | .pytest_cache/ 55 | 56 | # Translations 57 | *.mo 58 | *.pot 59 | 60 | # Django stuff: 61 | *.log 62 | local_settings.py 63 | db.sqlite3 64 | db.sqlite3-journal 65 | 66 | # Flask stuff: 67 | instance/ 68 | .webassets-cache 69 | 70 | # Scrapy stuff: 71 | .scrapy 72 | 73 | # Sphinx documentation 74 | docs/_build/ 75 | 76 | # PyBuilder 77 | target/ 78 | 79 | # Jupyter Notebook 80 | .ipynb_checkpoints 81 | 82 | # IPython 83 | profile_default/ 84 | ipython_config.py 85 | 86 | # pyenv 87 | .python-version 88 | 89 | # pipenv 90 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 91 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 92 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 93 | # install all needed dependencies. 94 | #Pipfile.lock 95 | 96 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 97 | __pypackages__/ 98 | 99 | # Celery stuff 100 | celerybeat-schedule 101 | celerybeat.pid 102 | 103 | # SageMath parsed files 104 | *.sage.py 105 | 106 | # Environments 107 | .env 108 | .venv 109 | env/ 110 | venv/ 111 | ENV/ 112 | env.bak/ 113 | venv.bak/ 114 | 115 | # Spyder project settings 116 | .spyderproject 117 | .spyproject 118 | 119 | # Rope project settings 120 | .ropeproject 121 | 122 | # mkdocs documentation 123 | /site 124 | 125 | # mypy 126 | .mypy_cache/ 127 | .dmypy.json 128 | dmypy.json 129 | 130 | # Pyre type checker 131 | .pyre/ 132 | 133 | .idea 134 | 135 | # private things 136 | private/ 137 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | script: true -------------------------------------------------------------------------------- /Data/PV.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miniufo/xcontour/938ca764a6adefd6ff6ecd64e600f378234e439b/Data/PV.nc -------------------------------------------------------------------------------- /Data/barotropic_vorticity.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miniufo/xcontour/938ca764a6adefd6ff6ecd64e600f378234e439b/Data/barotropic_vorticity.nc -------------------------------------------------------------------------------- /Data/globalTracers.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miniufo/xcontour/938ca764a6adefd6ff6ecd64e600f378234e439b/Data/globalTracers.nc -------------------------------------------------------------------------------- /Data/internalwave.nc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/miniufo/xcontour/938ca764a6adefd6ff6ecd64e600f378234e439b/Data/internalwave.nc -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Yu-Kun Qian 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # xcontour 2 | 3 | [![DOI](https://zenodo.org/badge/473022002.svg)](https://zenodo.org/badge/latestdoi/473022002) 4 | ![GitHub](https://img.shields.io/github/license/miniufo/xcontour) 5 | 6 | ![diagnostic analyses in contour-based coordinate](./pics/sorting.jpg) 7 | 8 | ## 1. 
Introduction 9 | `xcontour` is a python package built on [`xarray`](http://xarray.pydata.org/en/stable/) (starts with an `x`), targeting at performing diagnostic analyses and calculations in contour-based coordinates. The new coordinates are built based on iso-surfaces of any quasi-conservative tracers (passive or active), mostly through a conservative rearrangement (or adiabatic sorting in a particular direction). Such rearrangement allows one to **isolate the adiabatic advective process of a fluid and focus on non-conservative processes**. Therefore, it simplifies the relavent dynamical view and interpretation of many phenomena. 10 | 11 | The most well-known contour-based coordinates are 1) isentropic coordinate used in the atmosphere and 2) isopycnal coordinate used in the ocean, both of which are based on quasi-conservative variables of potential temperature and potential density. However, traditional coordinate transform requires a one-to-one relation between z-coordinate and isentropic/isopycnal surfaces. This can not be ensured in nonhydrostatic regions (heavier fluid is laid over lighter fluid). The adiabatic sorting process used here eliminates such constraint. For example, after the sorting in the vertical plane, the fluid will be completely stably stratified. 12 | 13 | Basically, this package is usually used to accomplish the following tasks: 14 | - calculate an adiabatically-sorted state that is generally a steady-state when diabatic forcings are absent. The sorted state is also termed as the background state or reference state (e.g., [Nakamura and Solomon 2010, JAS Part I](https://journals.ametsoc.org/view/journals/atsc/67/12/2010jas3503.1.xml), [2011, JAS Part II](https://journals.ametsoc.org/view/journals/atsc/68/11/2011jas3685.1.xml); [Methven and Berrisford 2015, QJRMS]()) . 
15 | - calculate effective diffusivity ([Nakamura 1996, JAS](https://journals.ametsoc.org/view/journals/atsc/53/11/1520-0469_1996_053_1524_tdmefa_2_0_co_2.xml)) or dia-scalar diffusivity ([Winters and D'Asaro 1996, JFM](https://www.cambridge.org/core/journals/journal-of-fluid-mechanics/article/diascalar-flux-and-the-rate-of-fluid-mixing/56C6519F711ACAED7FEEAAF9B97BC8ED)) in the contour space; 16 | - calculate (local) wave activity density ([Nakamura 2010, JAS](https://journals.ametsoc.org/view/journals/atsc/67/9/2010jas3432.1.xml); [Huang and Nakamura 2016](https://journals.ametsoc.org/view/journals/atsc/73/1/jas-d-15-0194.1.xml)) or (local) available potential energy density ([Winters and Barkan 2013, JFM](https://www.cambridge.org/core/journals/journal-of-fluid-mechanics/article/available-potential-energy-density-for-boussinesq-fluid-flow/563B240B07CEBFDCD68F77A78FBFED16); [Scotti and White 2014, JFM](https://www.cambridge.org/core/journals/journal-of-fluid-mechanics/article/diagnosing-mixing-in-stratified-turbulent-flows-with-a-locally-defined-available-potential-energy/17223D6D05A72759B2CC64CA85D1175A)) that defined in contour space; 17 | - calculate actual (perimeter) contour length using marching squares algorithm in `scikit-image`; 18 | - calculate any other quantities if relavent... 19 | 20 | The calculations mostly take advantages of [`xarray`](http://xarray.pydata.org/en/stable/)'s functionalities rather than writing explicit loops or algorithms, including contour-coordinate expansion, conditional accumulation or integration, coordinate transform and interpolation... One would see that [`xarray`](http://xarray.pydata.org/en/stable/) **1) supports** multi-dimensional calculations, **2) simplifies** a great deal of work in programming the very details, and **3) applies** to different cases in a unified fashion. 21 | 22 | --- 23 | 24 | ## 2. 
How to install 25 | **Requirements** 26 | `xcontour` is developed under the environment with `numpy` (=version 1.15.4), `xarray` (=version 0.15.1), `xgcm`(=version 0.5.2), and `xhistogram`(=version 0.3.0). Older versions of these packages are not well tested. 27 | 28 | 29 | **Install from github** 30 | ``` 31 | git clone https://github.com/miniufo/xcontour.git 32 | ``` 33 | 34 | --- 35 | 36 | ## 3. Examples 37 | ### 3.1 calculate effective diffusivity in the atmosphere 38 | ![effective diffusivity](./pics/Keff.jpg) 39 | 40 | Here is a complete [notebook example on Nakamura's effective diffusivity](./notebooks/1.Keff_atmos.ipynb). 41 | 42 | ### 3.2 calculate local wave activiy in the atmosphere 43 | ![local wave activity density](./pics/LWA.jpg) 44 | 45 | Here is a complete [notebook example on local wave activity](./notebooks/2.LWA_atmos.ipynb). 46 | 47 | ### 3.3 calculate local APE density in the ocean 48 | ![local APE density](./pics/LAPE.jpg) 49 | 50 | Here is a complete [notebook example on local APE](./notebooks/3.LAPE_ocean.ipynb). 51 | 52 | ### 3.4 calculate tracer contour length in the ocean 53 | ![contour length](./pics/contourLength.jpg) 54 | 55 | Here is a complete [notebook example on contour length](./notebooks/4.ContourLength_ocean.ipynb). 
56 | -------------------------------------------------------------------------------- /pics/Keff.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miniufo/xcontour/938ca764a6adefd6ff6ecd64e600f378234e439b/pics/Keff.jpg -------------------------------------------------------------------------------- /pics/LAPE.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miniufo/xcontour/938ca764a6adefd6ff6ecd64e600f378234e439b/pics/LAPE.jpg -------------------------------------------------------------------------------- /pics/LWA.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miniufo/xcontour/938ca764a6adefd6ff6ecd64e600f378234e439b/pics/LWA.jpg -------------------------------------------------------------------------------- /pics/contourLength.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miniufo/xcontour/938ca764a6adefd6ff6ecd64e600f378234e439b/pics/contourLength.jpg -------------------------------------------------------------------------------- /pics/sorting.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/miniufo/xcontour/938ca764a6adefd6ff6ecd64e600f378234e439b/pics/sorting.jpg -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | import codecs 3 | from os import path 4 | import io 5 | import re 6 | 7 | with io.open("xcontour/__init__.py", "rt", encoding="utf8") as f: 8 | version = re.search(r'__version__ = "(.*?)"', f.read()).group(1) 9 | 10 | here = path.abspath(path.dirname(__file__)) 11 | 12 | with codecs.open(path.join(here, 'README.md'), 
encoding='utf-8') as f: 13 | long_description = f.read() 14 | 15 | setup( 16 | name='xcontour', 17 | 18 | version=version, 19 | 20 | description='diagnoses in contour-based coordinate', 21 | long_description=long_description, 22 | long_description_content_type='text/markdown', 23 | 24 | url='https://github.com/miniufo/xcontour', 25 | 26 | author='miniufo', 27 | author_email='miniufo@163.com', 28 | 29 | license='MIT', 30 | 31 | classifiers=[ 32 | 'Programming Language :: Python :: 3.6', 33 | 'Programming Language :: Python :: 3.7' 34 | ], 35 | 36 | keywords='contour xarray dask numpy', 37 | 38 | packages=find_packages(exclude=['docs', 'tests', "notebooks", "pics"]), 39 | 40 | install_requires=[ 41 | "numpy", 42 | "xarray", 43 | "dask", 44 | "xhistogram", 45 | ], 46 | ) 47 | -------------------------------------------------------------------------------- /tests/LWA.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import os 4 | from xcontour.xcontour import Contour2D, add_latlon_metrics 5 | 6 | 7 | place="D:/" 8 | LWA_Type="Areal" #Isentropic" 9 | vertical_differentiation_method='central' #'backeard' 10 | resolution=0.75 11 | 12 | 13 | for file in ["testERA.nc"]: 14 | data=xr.open_dataset(os.path.join(place,file)) 15 | earth_radius=6371200 16 | lat=data.latitude 17 | Area=(np.radians(resolution)*earth_radius)**2*(np.cos(np.radians(lat)))+0*data.longitude 18 | data=data.reindex(latitude=lat.values[::-1]) 19 | data=data.interp(level=[319.9,320,320.1], method='linear') 20 | data=data.isel(time=[0]) 21 | g=9.81 22 | earth_circle_perimeter=2*np.pi*earth_radius*np.cos(np.radians(lat)) 23 | var='pv' 24 | out=[] 25 | 26 | if LWA_Type=="Isentropic": 27 | if not( "sigma" in list(data._variables.keys())): 28 | if vertical_differentiation_method!='central': 29 | diffy=-data.pres.diff("level")/data.level.diff("level")/g 30 | complement=diffy.sel(level=diffy.level.values[0:2])\ 31 | 
.interp(level=data.level.values[0],method='linear', 32 | kwargs={"fill_value": "extrapolate"}) 33 | diffy=xr.concat([complement,diffy],'level') 34 | data['sigma']=diffy.copy().transpose('time','level', 'latitude', 35 | 'longitude') 36 | else: 37 | data['sigma']=(-data['pres'].differentiate("level")/g 38 | ).transpose('time','level','latitude','longitude') 39 | 40 | for t in np.arange(len(data.time)): 41 | lvl=[] 42 | for l in np.arange(len(data.level)): 43 | dset=data.isel(time=t,level=l)[[var]] 44 | dset, grid = add_latlon_metrics(dset, {'lat':'latitude', 45 | 'lon':'longitude'}) 46 | 47 | tracer = dset[var] 48 | if LWA_Type=="Isentropic": 49 | sigma=data.isel(time=t,level=l)[['sigma']].sigma 50 | else: 51 | sigma=tracer*0+1 52 | print('date= '+str(data.time.values[t])[0:10]+' , Level= '+str(data.level.values[l])+' hPa') 53 | 54 | N = len(data.latitude) # increase the contour number may get non-monotonic A(q) relation 55 | increase = True # Y-index increases with latitude 56 | lt = True # northward of PV contours (larger than) is inside the contour 57 | dtype = np.float32 # use float32 to save memory 58 | undef = -9.99e8 # for maskout topography if present 59 | analysis = Contour2D(grid, sigma*tracer, 60 | dims={'X':'longitude','Y':'latitude'}, 61 | dimEq={'Y':'latitude'}, 62 | increase=increase, 63 | lt=lt) 64 | 65 | ctr = analysis.cal_contours(N).rename(var) 66 | mask = xr.where(tracer!=undef, 1, 0).astype(dtype) 67 | table = analysis.cal_area_eqCoord_table(mask) # A(Yeq) table 68 | area = analysis.cal_integral_within_contours(ctr,tracer=sigma*tracer,integrand=sigma*0+1).rename('Area') 69 | latEq = table.lookup_coordinates(area).rename('latEq') 70 | ds_contour = xr.merge([ctr, latEq, area]) 71 | 72 | preLats = tracer.latitude.astype(dtype) 73 | ds_latEq = analysis.interp_to_dataset(preLats, latEq, ds_contour) 74 | 75 | lwa, ctrs, masks = analysis.cal_local_wave_activity(tracer*sigma, 76 | ds_latEq[var], 77 | mask_idx=np.arange(N), 78 | part="all") 79 | 
lwa1=lwa/earth_circle_perimeter 80 | ctrs=xr.concat(ctrs,dim="latitude")*1e6 81 | ctrs.attrs=dset[var].attrs 82 | ctrs.attrs['units']='PVU' 83 | lwa.attrs=dset[var].attrs 84 | lwa.attrs['units']='m*s-1' 85 | lwa.attrs['long_name']='Local Finite Amplitude Wave Activity' 86 | lwa['Equivalent Latitude']=ctrs.copy() 87 | lvl.append(lwa) 88 | 89 | for item_index in np.arange(len(masks)): 90 | item=masks[item_index].copy() 91 | for field in ['dxC','dyC','rAc']: 92 | item=item.drop(field) 93 | item=item.rename({"latitude":"Latitude","longitude":"Longitude"}) 94 | item=item.assign_coords(latitude=lwa.latitude.values[item_index]).expand_dims("latitude") 95 | masks[item_index]=item.copy() 96 | 97 | masks=xr.concat(masks,dim="latitude") 98 | out+=[xr.concat(lvl,dim="level")] 99 | outname=os.path.join(place,file.replace(".nc","_LWA.nc")) 100 | out=xr.concat(out,dim="time") 101 | out.to_netcdf(outname) 102 | 103 | ss=masks.sel(latitude=75) 104 | ss=ss * Area.rename({"latitude":"Latitude","longitude":"Longitude"}) 105 | positive_intrusion_area=xr.where(ss<0.,-ss,0).sum() 106 | negative_intrusion_area=xr.where(ss>0.,ss,0).sum() 107 | 108 | -------------------------------------------------------------------------------- /tests/test_Keff_atmos.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on 2020.08.01 4 | 5 | @author: MiniUFO 6 | Copyright 2018. All rights reserved. Use is subject to license terms. 
7 | """ 8 | 9 | #%% 10 | import xarray as xr 11 | import numpy as np 12 | from xcontour.xcontour import Contour2D, latitude_lengths_at, add_latlon_metrics 13 | from xinvert.xinvert import FiniteDiff 14 | 15 | dset = xr.open_dataset('./xcontour/Data/PV.nc') 16 | 17 | # add metrics for xgcm 18 | dset, grid = add_latlon_metrics(dset) 19 | 20 | # get PV as a tracer and its squared gradient 21 | tracer = dset.pv 22 | grdS = dset.grdSpv 23 | 24 | #%% initialize contours 25 | # Initialize equally-spaced contours from minimum value to maximum value 26 | # (within lat/lon dims). Here will implicitly loop over each isentropic level 27 | 28 | N = 121 # increase the contour number may get non-monotonic A(q) relation 29 | increase = True # Y-index increases with latitude 30 | lt = True # northward of PV contours (larger than) is inside the contour 31 | # change this should not change the result of Keff, but may alter 32 | # the values at boundaries 33 | dtype = np.float32 # use float32 to save memory 34 | undef = -9.99e8 # for maskout topography if present 35 | 36 | # initialize a Contour2D analysis class using PV as the tracer 37 | analysis = Contour2D(grid, tracer, 38 | dims={'X':'longitude','Y':'latitude'}, 39 | dimEq={'Y':'latitude'}, 40 | increase=increase, 41 | lt=lt) 42 | ctr = analysis.cal_contours(N) 43 | 44 | # Mask for A(q) relation table. 45 | # This can be done analytically in simple case, but we choose to do it 46 | # numerically in case there are undefined values inside the domain. 
47 | mask = xr.where(tracer!=undef, 1, 0).astype(dtype) 48 | 49 | 50 | #%% calculate Laplacian 51 | fd = FiniteDiff(dim_mapping={'Y':'latitude', 'X':'longitude'}, 52 | BCs={'Y':'reflect', 'X':'periodic'}, 53 | coords='lat-lon') 54 | 55 | lap_tr = fd.Laplacian(tracer) 56 | 57 | 58 | #%% calculate related quantities for Keff 59 | # xarray's conditional integration, memory consuming and not preferred, for test only 60 | table = analysis.cal_area_eqCoord_table(mask) # A(Yeq) table 61 | area = analysis.cal_integral_within_contours(ctr).rename('intArea') 62 | intgrdS = analysis.cal_integral_within_contours(ctr, integrand=grdS).rename('intgrdS') 63 | latEq = table.lookup_coordinates(area).rename('latEq') 64 | Lmin = latitude_lengths_at(latEq).rename('Lmin') 65 | dintSdA = analysis.cal_gradient_wrt_area(intgrdS, area).rename('dintSdA') 66 | dqdA = analysis.cal_gradient_wrt_area(ctr, area).rename('dqdA') 67 | Leq2 = analysis.cal_sqared_equivalent_length(dintSdA, dqdA).rename('Leq2') 68 | nkeff = analysis.cal_normalized_Keff(Leq2, Lmin).rename('nkeff') 69 | 70 | #%% calculate related quantities for Keff 71 | # Alternative using _hist APIs, memory friendly and is preferred. 
72 | # Note that since xhistogram does not support time- or level-varying bins, 73 | # this way does not support multi-dimensional calculation well as xarray's 74 | # conditional integration 75 | table = analysis.cal_area_eqCoord_table_hist(mask) # A(Yeq) table 76 | area = analysis.cal_integral_within_contours_hist(ctr).rename('intArea') 77 | intgrdS = analysis.cal_integral_within_contours_hist(ctr, integrand=grdS).rename('intgrdS') 78 | latEq = table.lookup_coordinates(area).rename('latEq') 79 | Lmin = latitude_lengths_at(latEq).rename('Lmin') 80 | dintSdA = analysis.cal_gradient_wrt_area(intgrdS, area).rename('dintSdA') 81 | dqdA = analysis.cal_gradient_wrt_area(ctr, area).rename('dqdA') 82 | Leq2 = analysis.cal_sqared_equivalent_length(dintSdA, dqdA).rename('Leq2') 83 | nkeff = analysis.cal_normalized_Keff(Leq2, Lmin).rename('nkeff') 84 | 85 | #%% combined the results 86 | # results in contour space 87 | ds_contour = xr.merge([ctr, area, intgrdS, latEq, dintSdA, dqdA, Leq2, Lmin, nkeff]) 88 | 89 | # interpolate from contour space to equivalent-latitude space 90 | preLats = np.linspace(-90, 90, 181).astype(dtype) 91 | # results in latEq space 92 | ds_latEq = analysis.interp_to_dataset(preLats, latEq, ds_contour) 93 | 94 | 95 | 96 | 97 | -------------------------------------------------------------------------------- /tests/test_Keff_ocean.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on 2024.08.16 4 | 5 | @author: MiniUFO 6 | Copyright 2018. All rights reserved. Use is subject to license terms. 
7 | """ 8 | 9 | #%% 10 | import xarray as xr 11 | import numpy as np 12 | from xcontour.xcontour import Contour2D, add_latlon_metrics 13 | from xinvert.xinvert import FiniteDiff 14 | 15 | dset = xr.open_dataset('d:/tracerLat.nc') 16 | dset = dset.rename({'XC':'longitude', 'YC':'latitude'}) 17 | 18 | # add metrics for xgcm 19 | dset, grid = add_latlon_metrics(dset) 20 | 21 | # get PV as a tracer and its squared gradient 22 | tracer = dset.PTRACER04.where(dset.PTRACER04!=0) 23 | 24 | 25 | #%% calculate Laplacian 26 | fd = FiniteDiff(dim_mapping={'Y':'latitude', 'X':'longitude'}, 27 | BCs={'Y':'reflect', 'X':'periodic'}, 28 | coords='lat-lon') 29 | 30 | lapl = fd.Laplacian(tracer) 31 | grdx, grdy = fd.grad(tracer, dims=['X','Y']) 32 | grdS = grdx**2 + grdy**2 33 | 34 | 35 | #%% calculate related quantities for Keff 36 | increase = True 37 | lt = True 38 | check_mono = False 39 | 40 | def computeKeff(tracer, grdS): 41 | # Construct an analysis class using the tracer 42 | cm = Contour2D(grid, tracer, 43 | dims={'X':'longitude','Y':'latitude'}, 44 | dimEq={'Y':'latitude'}, 45 | increase=increase, 46 | lt=lt, check_mono=check_mono) 47 | 48 | N = 401 49 | mask = dset.maskC.rename('mask') 50 | preY = np.linspace(-70, 75, N) 51 | 52 | # This should be called first to initialize contours from minimum value 53 | # to maximum value (within lat/lon dims) using `N` contours. 
54 | table = cm.cal_area_eqCoord_table_hist(mask) 55 | ctr = cm.cal_contours(N).load() 56 | area = cm.cal_integral_within_contours_hist(ctr).load().rename('intArea') 57 | intgrdS = cm.cal_integral_within_contours_hist(ctr, integrand=grdS).rename('intgrdS') 58 | 59 | def calLmin(mask, Yeq): 60 | preLmin = (mask*dset.dxF).sum('longitude').reset_coords(drop=True) 61 | 62 | re = preLmin.interp(latitude=Yeq.values).rename({'latitude':'contour'}) \ 63 | .assign_coords({'contour': Yeq['contour'].values}) 64 | 65 | return re 66 | 67 | Yeq = table.lookup_coordinates(area).rename('Yeq') 68 | Lmin = calLmin(mask, Yeq).rename('Lmin') 69 | dgrdSdA = cm.cal_gradient_wrt_area(intgrdS, area) 70 | dqdA = cm.cal_gradient_wrt_area(ctr, area) 71 | Leq2 = cm.cal_sqared_equivalent_length(dgrdSdA, dqdA) 72 | nkeff = cm.cal_normalized_Keff(Leq2, Lmin, mask=2e7) 73 | 74 | # Collect all these as a xarray.Dataset defined on N contours and interp to equivalent latitudes 75 | origin = xr.merge([ctr, area, Yeq, intgrdS, dgrdSdA, dqdA, Leq2, Lmin, nkeff]) 76 | interp = cm.interp_to_dataset(preY, Yeq, origin).rename({'new':'latitude'}) 77 | 78 | return interp, origin 79 | 80 | re1, o1 = computeKeff(tracer, grdS) 81 | 82 | #%% plots 83 | import proplot as pplt 84 | 85 | fontsize = 13 86 | 87 | fig, axes = pplt.subplots(figsize=(9,6), proj='spstere') 88 | 89 | ax = axes[0] 90 | m = ax.contourf(tracer, levels=21) 91 | ax.colorbar(m, loc='r', label='') 92 | # ax.set_xlim([0, 360]) 93 | ax.set_ylim([-90, -40]) 94 | ax.set_title('tracer distribution', fontsize=fontsize) 95 | 96 | axes.format(abc='(a)', land=True, coast=True, reso='hi', landcolor='gray') 97 | 98 | 99 | 100 | 101 | -------------------------------------------------------------------------------- /tests/test_LAPE.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on 2020.08.01 4 | 5 | @author: MiniUFO 6 | Copyright 2018. All rights reserved. 
Use is subject to license terms. 7 | """ 8 | 9 | #%% 10 | import xarray as xr 11 | import numpy as np 12 | from xgcm import Grid 13 | from xcontour.xcontour import Contour2D 14 | 15 | # tcount = 1440 16 | # path = 'I:/breakingIW/nonHydro/' 17 | 18 | # ds = xmitgcm.open_mdsdataset(data_dir=path+'output/', grid_dir=path, delta_t=2, 19 | # prefix=['Stat']) 20 | # dset, grid = add_MITgcm_missing_metrics(ds, periodic=['X'], 21 | # boundary={'Y':'extend','Z':'extend'}) 22 | 23 | # dset['time'] = (dset.time.astype(np.float32)/1e9).astype(np.int32) 24 | # dset = dset.drop_vars(['UVEL', 'WVEL', 'TRAC01']) 25 | # dset.isel({'time':[30, 70, 110]}).squeeze().to_netcdf('d:/internalwave.nc') 26 | 27 | ds = xr.open_dataset('E:/OneDrive/Python/MyPack/xcontour/Data/internalwave.nc') 28 | 29 | grid = Grid(ds, metrics = { 30 | ('X',) : ['dxG', 'dxF', 'dxC', 'dxV'], # X distances 31 | ('Z',) : ['drW', 'drS', 'drC', 'drF', 'drG'], # Z distances 32 | ('X', 'Z'): ['yA']}) # Areas in X-Z plane 33 | 34 | # get potential temperature and maskout due to topography 35 | T = ds.THETA.where(ds.maskC) 36 | 37 | # calculate buoyancy using linear EOS 38 | alpha = 2E-4 39 | T0 = 20 40 | g = 9.81 41 | b = (alpha * (T - T0) * g).rename('buoyancy') 42 | 43 | #%% initialize contours 44 | # Initialize equally-spaced contours from minimum value to maximum value 45 | # (within lat/lon dims). 
Here will implicitly loop over each isentropic level 46 | 47 | N = 121 # increase the contour number may get non-monotonic A(q) relation 48 | increase = False # Y-index increases with depth 49 | lt = False # northward of PV contours (larger than) is inside the contour 50 | # change this should not change the result of Keff, but may alter 51 | # the values at boundaries 52 | dtype = np.float32 # use float32 to save memory 53 | undef = -9.99e8 # for maskout topography if present 54 | 55 | # initialize a Contour2D analysis class using PV as the tracer 56 | analysis = Contour2D(grid, b, 57 | dims={'X':'XC','Z':'Z'}, 58 | dimEq={'Z':'Z'}, 59 | increase=increase, 60 | lt=lt) 61 | ctr = analysis.cal_contours(N) 62 | 63 | # Mask for A(q) relation table. 64 | # This can be done analytically in simple case, but we choose to do it 65 | # numerically in case there are undefined values inside the domain. 66 | mask = ds.maskC 67 | 68 | 69 | #%% calculate related quantities for Keff 70 | # xarray's conditional integration, memory consuming and not preferred, for test only 71 | table = analysis.cal_area_eqCoord_table(mask) # A(Yeq) table 72 | area = analysis.cal_integral_within_contours(ctr).rename('intArea') 73 | ZEq = table.lookup_coordinates(area).rename('ZEq') 74 | 75 | 76 | #%% calculate related quantities for Keff 77 | # Alternative using _hist APIs, memory friendly and is preferred. 
78 | # Note that since xhistogram does not support time- or level-varying bins, 79 | # this way does not support multi-dimensional calculation well as xarray's 80 | # conditional integration 81 | table = analysis.cal_area_eqCoord_table_hist(mask) # A(Yeq) table 82 | area = analysis.cal_integral_within_contours_hist(ctr).rename('intArea') 83 | ZEq = table.lookup_coordinates(area).rename('ZEq') 84 | 85 | 86 | #%% combined the results 87 | # results in contour space 88 | ds_contour = xr.merge([ctr, area, ZEq]) 89 | 90 | # interpolate from contour space to equivalent-latitude space 91 | preZs = b.Z.astype(dtype) 92 | # results in latEq space 93 | ds_ZEq = analysis.interp_to_dataset(preZs, ZEq, ds_contour) 94 | 95 | 96 | #%% calculate local finite-amplitude wave activity 97 | lape, ctrs, masks = analysis.cal_local_APE(b, ds_ZEq.buoyancy, 98 | mask_idx=[8,28,51,81]) 99 | lape2, ctrs2, masks2 = analysis.cal_local_wave_activity2(b, ds_ZEq.buoyancy, 100 | mask_idx=[8,28,51,81]) 101 | 102 | 103 | #%% LWA 104 | import proplot as pplt 105 | 106 | fig, axes = pplt.subplots(nrows=3, ncols=2, figsize=(10, 8.5), sharex=3, sharey=3) 107 | 108 | fontsize = 12 109 | 110 | def plot_time(tidx, add_colorbar=False): 111 | ax = axes[tidx, 0] 112 | m1=ax.contourf(-lape.where(mask)[tidx]*1e4, levels=np.linspace(0,50,26), cmap='reds') # minus sign to ensure positive definite 113 | ax.contour(b.where(mask)[tidx], levels=11, cmap='viridis', lw=0.8) 114 | if add_colorbar: 115 | ax.colorbar(m1, loc='b', ticks=5, label='') 116 | ax.set_title('buoyancy and local APE density (t={})'.format(tidx), fontsize=fontsize) 117 | ax.set_xlabel('x-coordinate (m)', fontsize=fontsize-1) 118 | ax.set_ylabel('z-coordinate (m)', fontsize=fontsize-1) 119 | ax.set_xticks([0, 2000, 4000, 6000, 8000]) 120 | ax.set_yticks([-200, -150, -100, -50, 0]) 121 | ax.set_ylim([-200, 0]) 122 | 123 | ax = axes[tidx, 1] 124 | msk = sum(masks) 125 | lev = xr.concat(ctrs, 'Z').isel(time=tidx).values 126 | 
m1=ax.contourf(msk.where(msk!=0)[tidx], cmap='bwr') 127 | ax.contour(b.where(mask)[tidx], levels=lev[::-1], lw=0.8, color='k') 128 | if add_colorbar: 129 | ax.colorbar(m1, loc='b', ticks=1, label='') 130 | ax.set_title('masks for local APE calculation', fontsize=fontsize) 131 | ax.set_xlabel('x-coordinate (m)', fontsize=fontsize-1) 132 | ax.set_ylabel('z-coordinate (m)', fontsize=fontsize-1) 133 | ax.set_xticks([0, 2000, 4000, 6000, 8000]) 134 | ax.set_yticks([-200, -150, -100, -50, 0]) 135 | ax.set_ylim([-200, 0]) 136 | 137 | plot_time(0) 138 | plot_time(1) 139 | plot_time(2, True) 140 | 141 | axes.format(abc='(a)') 142 | 143 | #%% IC 144 | import proplot as pplt 145 | 146 | fig, axes = pplt.subplots(nrows=3, ncols=2, figsize=(10, 8.5), sharex=3, sharey=3) 147 | 148 | fontsize = 12 149 | 150 | def plot_time(tidx, add_colorbar=False): 151 | ax = axes[tidx, 0] 152 | m1=ax.contourf(lape2.where(mask)[tidx]*1e4, levels=np.linspace(0,50,26), cmap='reds') # minus sign to ensure positive definite 153 | ax.contour(b.where(mask)[tidx], levels=11, cmap='viridis', lw=0.8) 154 | if add_colorbar: 155 | ax.colorbar(m1, loc='b', ticks=5, label='') 156 | ax.set_title('buoyancy and local APE density (t={})'.format(tidx), fontsize=fontsize) 157 | ax.set_xlabel('x-coordinate (m)', fontsize=fontsize-1) 158 | ax.set_ylabel('z-coordinate (m)', fontsize=fontsize-1) 159 | ax.set_xticks([0, 2000, 4000, 6000, 8000]) 160 | ax.set_yticks([-200, -150, -100, -50, 0]) 161 | ax.set_ylim([-200, 0]) 162 | 163 | ax = axes[tidx, 1] 164 | msk = sum(masks2) 165 | lev = xr.concat(ctrs, 'Z').isel(time=tidx).values 166 | m1=ax.contourf(msk.where(msk!=0)[tidx], cmap='bwr') 167 | ax.contour(b.where(mask)[tidx], levels=lev[::-1], lw=0.8, color='k') 168 | if add_colorbar: 169 | ax.colorbar(m1, loc='b', ticks=1, label='') 170 | ax.set_title('masks for local APE calculation', fontsize=fontsize) 171 | ax.set_xlabel('x-coordinate (m)', fontsize=fontsize-1) 172 | ax.set_ylabel('z-coordinate (m)', 
fontsize=fontsize-1) 173 | ax.set_xlim([0, 8960]) 174 | ax.set_xticks([0, 2000, 4000, 6000, 8000]) 175 | ax.set_yticks([-200, -150, -100, -50, 0]) 176 | ax.set_ylim([-200, 0]) 177 | 178 | plot_time(0) 179 | plot_time(1) 180 | plot_time(2, True) 181 | 182 | axes.format(abc='(a)') -------------------------------------------------------------------------------- /tests/test_LWA.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on 2020.08.01 4 | 5 | @author: MiniUFO 6 | Copyright 2018. All rights reserved. Use is subject to license terms. 7 | """ 8 | 9 | #%% 10 | import xarray as xr 11 | import numpy as np 12 | from xcontour.xcontour import Contour2D, add_latlon_metrics 13 | 14 | dset = xr.open_dataset('./xcontour/Data/barotropic_vorticity.nc') 15 | 16 | # # add metrics for xgcm 17 | dset, grid = add_latlon_metrics(dset) 18 | 19 | # # get PV as a tracer and its squared gradient 20 | tracer = dset.absolute_vorticity 21 | 22 | #%% initialize contours 23 | # Initialize equally-spaced contours from minimum value to maximum value 24 | # (within lat/lon dims). Here will implicitly loop over each isentropic level 25 | 26 | N = 121 # increase the contour number may get non-monotonic A(q) relation 27 | increase = True # Y-index increases with latitude 28 | lt = True # northward of PV contours (larger than) is inside the contour 29 | # change this should not change the result of Keff, but may alter 30 | # the values at boundaries 31 | dtype = np.float32 # use float32 to save memory 32 | undef = -9.99e8 # for maskout topography if present 33 | 34 | # initialize a Contour2D analysis class using PV as the tracer 35 | analysis = Contour2D(grid, tracer, 36 | dims={'X':'longitude','Y':'latitude'}, 37 | dimEq={'Y':'latitude'}, 38 | increase=increase, 39 | lt=lt) 40 | ctr = analysis.cal_contours(N) 41 | 42 | # Mask for A(q) relation table. 
43 | # This can be done analytically in simple case, but we choose to do it 44 | # numerically in case there are undefined values inside the domain. 45 | mask = xr.where(tracer!=undef, 1, 0).astype(dtype) 46 | 47 | 48 | #%% calculate related quantities for Keff 49 | # xarray's conditional integration, memory consuming and not preferred, for test only 50 | table = analysis.cal_area_eqCoord_table(mask) # A(Yeq) table 51 | area = analysis.cal_integral_within_contours(ctr).rename('intArea') 52 | latEq = table.lookup_coordinates(area).rename('latEq') 53 | 54 | 55 | #%% calculate related quantities for Keff 56 | # Alternative using _hist APIs, memory friendly and is preferred. 57 | # Note that since xhistogram does not support time- or level-varying bins, 58 | # this way does not support multi-dimensional calculation well as xarray's 59 | # conditional integration 60 | table = analysis.cal_area_eqCoord_table_hist(mask) # A(Yeq) table 61 | area = analysis.cal_integral_within_contours_hist(ctr).rename('intArea') 62 | latEq = table.lookup_coordinates(area).rename('latEq') 63 | 64 | 65 | #%% combined the results 66 | # results in contour space 67 | ds_contour = xr.merge([ctr, area, latEq]) 68 | 69 | # interpolate from contour space to equivalent-latitude space 70 | preLats = tracer.latitude.astype(dtype) 71 | # results in latEq space 72 | ds_latEq = analysis.interp_to_dataset(preLats, latEq, ds_contour) 73 | 74 | 75 | #%% calculate local finite-amplitude wave activity 76 | lwaA, ctrs, masks = analysis.cal_local_wave_activity(tracer, ds_latEq.absolute_vorticity, 77 | mask_idx=[37,125,170,213], 78 | part='all') 79 | lwaA2, ctrs2, masks2 = analysis.cal_local_wave_activity2(tracer, ds_latEq.absolute_vorticity, 80 | mask_idx=[37,125,170,213], 81 | part='all') 82 | # lwaU, ctrs, masks = analysis.cal_local_wave_activity(tracer, ds_latEq.absolute_vorticity, 83 | # mask_idx=[37,125,170,213], 84 | # part='upper') 85 | # lwaL, ctrs, masks = analysis.cal_local_wave_activity(tracer, 
ds_latEq.absolute_vorticity, 86 | # mask_idx=[37,125,170,213], 87 | # part='lower') 88 | 89 | #%% check masks 90 | 91 | m = masks[0] 92 | print(-m.where(m<0).sum().values) 93 | print( m.where(m>0).sum().values) 94 | print(grid.integrate(-m.where(m<0), ['X','Y']).values) 95 | print(grid.integrate( m.where(m>0), ['X','Y']).values) 96 | print(grid.get_metric(m, ['X','Y']).sum().values) 97 | 98 | #%% 99 | import proplot as pplt 100 | 101 | fontsize = 15 102 | 103 | fig, axes = pplt.subplots(nrows=2, ncols=2, figsize=(11,9), 104 | sharex=3, sharey=3) 105 | 106 | ax = axes[0] 107 | m=ax.contourf(tracer*1e5, levels=21, cmap='RdBu_r') 108 | ax.set_title('vorticity * 1E5', fontsize=fontsize) 109 | ax.colorbar(m, loc='b', label='') 110 | 111 | ax = axes[2] 112 | m=ax.contourf(lwaA, levels=np.linspace(0, 30, 31), cmap='jet', extend='both') 113 | ax.set_title('local FAWA (LWA)', fontsize=fontsize) 114 | ax.colorbar(m, loc='b', label='') 115 | 116 | ax = axes[3] 117 | m=ax.contourf(-lwaA2, levels=np.linspace(0, 30, 31), cmap='jet', extend='both') 118 | ax.set_title('impulse-Casimir wave activity (local APE)', fontsize=fontsize) 119 | ax.colorbar(m, loc='b', label='') 120 | 121 | ax = axes[1] 122 | m=ax.contourf((lwaA+lwaA2), levels=21, cmap='RdBu_r') 123 | ax.set_title('difference (LWA - ICWA)', fontsize=fontsize) 124 | ax.colorbar(m, loc='b', label='') 125 | 126 | 127 | 128 | 129 | -------------------------------------------------------------------------------- /tests/test_breaking.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on 2022.08.31 4 | 5 | @author: MiniUFO 6 | Copyright 2018. All rights reserved. Use is subject to license terms. 
7 | """ 8 | 9 | #%% 10 | # for data 11 | import xarray as xr 12 | 13 | # for algorithm 14 | import numpy as np 15 | import itertools as itertools 16 | from skimage import measure 17 | from scipy import spatial 18 | import pandas as pd 19 | 20 | # for plotting 21 | import matplotlib.pyplot as plt 22 | import cartopy.crs as ccrs 23 | import cartopy.feature as cfeature 24 | import matplotlib.path as mpath 25 | import matplotlib.patches as mpatches 26 | 27 | 28 | # load data 29 | path = "D:/Data/ERAInterim/ElenaIsen/198508ERAInterim.nc" 30 | dataset = xr.open_dataset(path).isel({'time':0, 'level':-1}) 31 | 32 | # specify names of coordinates 33 | lat_name = "latitude" 34 | lon_name = "longitude" 35 | time_name = "time" 36 | var_name = "pv" 37 | 38 | # get lon border 39 | lon_border = [int(dataset[lon_name].values.min()), int(dataset[lon_name].values.max())] 40 | lon_border = [0, 360] 41 | 42 | 43 | # Subroutine 1 44 | def ex_contours(data, level): 45 | """ 46 | The extractions is based on the "find_contours" function of "measure" by "skimage". 47 | The output of the "find_contours" function is modified to obtain the original coordinates. 48 | The final output contains a list with entries each representing a single contour segment. 49 | 50 | Input: 51 | ----- 52 | dataset: xarray 53 | xarray with two spatial dimensions but no time dimension 54 | 55 | level: float 56 | contour level 57 | 58 | Returns: 59 | ------- 60 | list: arrays 61 | List with entries each representing a single contour segment 62 | 63 | """ 64 | return [np.c_[contour[:,1]+min(lon_border),contour[:,0]] 65 | for contour in measure.find_contours(data.values,level)] 66 | 67 | 68 | # Subroutine 2 69 | def rescale_contours(contours, dataset): 70 | """ 71 | Rescale the coordinates of the contour points. 72 | The coordinates are fitted on a grid with the same resolution as the input field. 
73 | 74 | Input: 75 | ----- 76 | contours: list 77 | List with entries each representing a single contour segment 78 | 79 | dataset: xarray 80 | xarray with two spatial dimensions but no time dimension 81 | 82 | Returns: 83 | ------- 84 | list: arrays 85 | List with entries each representing a single contour segment 86 | 87 | """ 88 | x, y = np.meshgrid(dataset[lon_name],dataset[lat_name]) 89 | x, y = x.flatten(), y.flatten() 90 | grid_points = np.vstack((x,y)).T 91 | tree = spatial.KDTree(grid_points) 92 | 93 | contours_scaled = [] 94 | for contour in contours: 95 | temp = [] 96 | for point in contour: 97 | temp.append(grid_points[tree.query([point])[1][0]]) 98 | unique = list(dict.fromkeys(map(tuple,temp))) 99 | contours_scaled.append(np.asarray(unique)) 100 | 101 | return contours_scaled 102 | 103 | 104 | # Subroutine 3 105 | def group_contours(contours, y_overlap): 106 | """ 107 | Group the contours according to their start and end point. 108 | Using the parameter "y_overlap" (in degrees), the interval representing the overlap region can be specified. 
109 | 110 | Input: 111 | ----- 112 | contours: list 113 | List with entries each representing a single contour segment 114 | 115 | y_overlap: float, optional 116 | Overlap in y direction in degrees in between unclosed contours at longitude border are grouped 117 | 118 | Returns: 119 | ------- 120 | list: arrays 121 | List with entries each representing a single contour segment 122 | 123 | """ 124 | contours_index = list(range(0,len(contours))) 125 | borders = [] 126 | for contour,index in zip(contours,contours_index): 127 | stp = contour[0].tolist() 128 | ndp = contour[-1].tolist() 129 | borders.append([index]+stp+ndp) 130 | 131 | start = [[border[1],border[2]] for border in borders] 132 | end = [[border[3],border[4]] for border in borders] 133 | both = start+end 134 | 135 | for point in both: 136 | ind = [i for i, x in enumerate(both) if (x[0]==point[0] or all(i in [x[0],point[0]] for i in lon_border)) and point[1]-y_overlap <= x[1] <= point[1]+y_overlap] 137 | ind = np.mod(ind,len(contours)) 138 | add = [borders[i][0] for i in ind] 139 | for i in ind: 140 | borders[i][0] = borders[min(add)][0] 141 | 142 | all_values = [border[0] for border in borders] 143 | unique_values = set(all_values) 144 | 145 | contours_grouped = [] 146 | for value in unique_values: 147 | this_group = [] 148 | for border,contour in zip(borders,contours): 149 | if border[0] == value: 150 | this_group.append(contour) 151 | contours_grouped.append(this_group) 152 | 153 | contours_grouped_sorted = [] 154 | for group in contours_grouped: 155 | if len(group) > 1: 156 | bigest = sorted(group, key=len, reverse=True)[0] 157 | rest = sorted(group, key=len, reverse=True)[1:] 158 | 159 | temp = [bigest] 160 | while len(rest) > 0: 161 | test = temp[-1][-1,1] 162 | for item,ind in zip(rest, range(0,len(rest))): 163 | if test-y_overlap <= item[0,1] <= test+y_overlap: 164 | temp.append(item) 165 | break 166 | del rest[ind] 167 | 
contours_grouped_sorted.append(np.asarray(list(itertools.chain.from_iterable(temp)))) 168 | else: 169 | contours_grouped_sorted.append(np.asarray(list(itertools.chain.from_iterable(group)))) 170 | 171 | 172 | return contours_grouped_sorted 173 | 174 | # Subroutine 4 175 | def filter_contours(contours, dataset, x_extent): 176 | """ 177 | The contours are filtered in respect to the previoulsy defined parameter "x_extent". 178 | 179 | Input: 180 | ----- 181 | contours: list 182 | List with entries each representing a single contour segment 183 | 184 | dataset: xarray 185 | xarray with two spatial dimensions but no time dimension 186 | 187 | x_extent: float, optional 188 | Set minimal extent of a contour in the x direction. A coverage of all longitudes means x_extent = 1. 189 | 190 | Returns: 191 | ------- 192 | list: arrays 193 | List with entries each representing a single contour segment 194 | 195 | """ 196 | lons = dataset[lon_name].values 197 | contour_expansion = [len(np.unique(np.round(contour[:,0]))) for contour in contours] 198 | test_expansion = [expansion/len(lons) >= x_extent for expansion in contour_expansion] 199 | 200 | return list(itertools.compress(contours, test_expansion)) 201 | 202 | # Subroutine 5 203 | def single_contours(contours, dataset, x_extent): 204 | """ 205 | Select largest contour fully encircling the pole. 206 | 207 | Input: 208 | ----- 209 | contours: list 210 | List with entries each representing a single contour segment 211 | 212 | dataset: xarray 213 | xarray with two spatial dimensions but no time dimension 214 | 215 | x_extent: float, optional 216 | Set minimal extent of a contour in the x direction. A coverage of all longitudes means x_extent = 1. 
217 | 218 | Returns: 219 | ------- 220 | list: arrays 221 | List with entries each representing a single contour segment 222 | 223 | """ 224 | lons = dataset[lon_name].values 225 | contour_expansion = [len(np.unique(np.round(contour[:,0]))) for contour in contours] 226 | test_expansion = [expansion/len(lons) >= x_extent for expansion in contour_expansion] 227 | 228 | if sum([i==1 for i in contour_expansion])>1: 229 | mean_lat = [np.mean(contour[:,1]) for contour in contours] 230 | contours_single = contours[mean_lat.index(min(mean_lat))] 231 | else: 232 | contours_single = contours[test_expansion.index(max(test_expansion))] 233 | 234 | return contours_single 235 | 236 | def df_contours(contours): 237 | """ 238 | Store final contour in a pandas data frame 239 | 240 | Input: 241 | ----- 242 | contours: list 243 | List with entries each representing a single contour segment 244 | 245 | Returns: 246 | ------- 247 | dataframe: float 248 | Dataframe with columns lon and lat 249 | 250 | """ 251 | if type(contours)=="list": 252 | temp = np.asarray(list(itertools.chain.from_iterable(contours))) 253 | else: 254 | temp = contours 255 | return pd.DataFrame({'lon': temp[:,0].tolist(), 'lat': temp[:,1].tolist()}) 256 | 257 | #%% calculation 258 | 259 | # enter parameters 260 | level = 5e-6 261 | x_extent = 1 # in longitude percentage 262 | y_overlap = 1 # in degrees 263 | single = True 264 | scale = True 265 | 266 | contours = ex_contours(dataset[var_name], level) 267 | contours_scaled = rescale_contours(contours, dataset) 268 | # contours_grouped = group_contours(contours_scaled, y_overlap) 269 | # contours_filtered = filter_contours(contours_grouped, dataset, x_extent) 270 | # contours_single = single_contours(contours_filtered, dataset, x_extent) 271 | 272 | 273 | #%% plotting 274 | import seaborn as sns 275 | 276 | data_crs = ccrs.PlateCarree() 277 | proj = ccrs.PlateCarree() 278 | 279 | fig, axes = plt.subplots(3,1, subplot_kw=dict(projection=proj), figsize=(17, 8)) 280 | 
281 | pal_contours = "magma" 282 | colors = sns.color_palette(pal_contours, n_colors=len(contours)) 283 | 284 | rgb_colors = ["#9CC7DF", "white"] 285 | 286 | levels = [0,4e-6,1e-5] 287 | 288 | theta = np.linspace(0, 2*np.pi, 100) 289 | center, radius = [0.5, 0.5], 0.5 290 | verts = np.vstack([np.sin(theta), np.cos(theta)]).T 291 | circle = mpath.Path(verts * radius + center) 292 | 293 | line_wd = 2 294 | 295 | plt.subplots_adjust(left = 0.125, # the left side of the subplots of the figure 296 | right = 0.9, # the right side of the subplots of the figure 297 | bottom = 0.1, # the bottom of the subplots of the figure 298 | top = 0.9, # the top of the subplots of the figure 299 | wspace = 0.3, # the amount of width reserved for blank space between subplots 300 | hspace = 0.25 # the amount of height reserved for white space between subplots) 301 | ) 302 | 303 | for ax in axes.flat: 304 | ax.add_feature(cfeature.COASTLINE) 305 | ax.set_extent([-180, 180, 40, 90], crs=data_crs) 306 | p0 = dataset[var_name].plot(ax=ax, cmap='jet' , add_colorbar=False, 307 | levels = np.linspace(2e-6, 10e-6, 9), transform=data_crs, robust=True) 308 | 309 | data = [contours, contours_scaled]#, contours_filtered, [contours_single]] 310 | for ax,item in zip(axes.flat[1:],data): 311 | for contour, color in zip(item,colors): 312 | ax.plot(contour[:, 0], contour[:, 1],".", markersize=6, 313 | linewidth=line_wd, color = color, transform=data_crs) 314 | 315 | # titles = ["Input field", 316 | # "Subroutine 1", 317 | # "Subroutine 2", 318 | # "Subroutine 3", 319 | # "Subroutine 4", 320 | # "Subroutine 5"] 321 | 322 | # number = ["a)", "b)", "c)", "d)", "e)", "f)"] 323 | 324 | # for ax,title,num in zip(axes.flat,titles, number): 325 | # ax.set_title("") 326 | # ax.set_title(title, fontweight='bold',fontsize=24, loc='center', y = 1.08) 327 | # ax.set_title(num, fontweight='bold',fontsize=24, loc='left', y = 1.08) 328 | # # ax.set_boundary(circle, transform=ax.transAxes) 329 | # 
ax.add_patch(mpatches.Circle((0.5, 0.5), radius=0.5, color='k', linewidth=5, fill=False, transform = ax.transAxes)) 330 | # gr = ax.gridlines(draw_labels=True, color="black", linestyle="dotted", linewidth = 1.1) 331 | # gr.xlabel_style = {'size': 16, 'color': 'black', "rotation":0, "fontweight":"bold"} 332 | # gr.ylabel_style = {'size': 12, 'color': 'black'} 333 | 334 | plt.show() 335 | 336 | 337 | 338 | #%% simple example 339 | import xarray as xr 340 | 341 | ds = xr.open_dataset('./xcontour/Data/barotropic_vorticity.nc') 342 | 343 | vor = ds.absolute_vorticity 344 | vor[60:190,200:400] = np.nan 345 | 346 | 347 | #%% 348 | import numpy as np 349 | from xcontour.xcontour import contour_length 350 | from skimage import measure 351 | 352 | def contour_lengths(data, contours, dims=[None,None], latlon=True, period=[None, None]): 353 | """Calculate contour length in a 2D numpy data. 354 | This is designed for xarray's ufunc. 355 | 356 | Parameters 357 | ---------- 358 | data: numpy.ndarray 359 | 2D numpy data. 360 | contours: numpy.ndarray 361 | a list of contour values. 362 | latlon: boolean, optional 363 | Whether dimension is latlon or cartesian. 364 | 365 | Returns 366 | ------- 367 | lengths: numpy.ndarray 368 | List of contour lengths. 
369 | """ 370 | coord1 = dims[0] 371 | coord2 = dims[1] 372 | 373 | idx1 = np.arange(len(coord1)) 374 | idx2 = np.arange(len(coord2)) 375 | 376 | lengths = [] 377 | 378 | for c in contours: 379 | # in unit of grid points 380 | segments = measure.find_contours(data, c) 381 | 382 | segs_coords = [] 383 | 384 | # change to unit of coordinates 385 | for segment in segments: 386 | d1pos = np.interp(segment[:,0], idx1, coord1, period=period[0]) 387 | d2pos = np.interp(segment[:,1], idx2, coord2, period=period[1]) 388 | 389 | segs_coords.append(np.c_[d2pos, d1pos]) 390 | 391 | lengths.append(sum([contour_length(seg) for seg in segs_coords])) 392 | 393 | return np.asarray(lengths) 394 | 395 | 396 | lengths = contour_lengths(vor.data, np.array([0.00007]), dims=[vor.latitude, vor.longitude], 397 | latlon=True, period=[360, None]) 398 | 399 | cs = xr.DataArray(np.array([0.00006, 0.00007]), dims='contour', 400 | coords={'contour':np.array([0, 1])}) 401 | 402 | lens = xr.apply_ufunc(contour_lengths, vor, cs, 403 | kwargs={'latlon':True, 'period':[None, 360], 'dims':[vor.latitude, vor.longitude],}, 404 | dask='allowed', 405 | input_core_dims=[['latitude','longitude'], ['contour']], 406 | vectorize=True, 407 | output_core_dims=[['contour']]) 408 | 409 | 410 | # %% 411 | import numpy as np 412 | from xcontour.xcontour import contour_length, find_contour 413 | 414 | cs = [-0.00006, -0.00007] 415 | 416 | def c_len(c): 417 | c_segs = find_contour(vor, ['latitude', 'longitude'], c) 418 | return sum([contour_length(seg, latlon=False) for seg in c_segs]) 419 | 420 | 421 | lengths = [c_len(c) for c in cs] 422 | 423 | 424 | #%% 425 | import proplot as pplt 426 | 427 | fontsize= 14 428 | 429 | fig, axes = pplt.subplots(nrows=1, ncols=2, figsize=(10, 5), sharex=0, sharey=0) 430 | 431 | ax = axes[0] 432 | ax.contourf(vor, levels=41) 433 | ax.contour(vor, levels=cs, color='k') 434 | ax.set_title('vorticity', fontsize=fontsize) 435 | 436 | ax = axes[1] 437 | ax.plot(lengths, cs) 438 | 
ax.set_title('contour length', fontsize=fontsize) 439 | 440 | 441 | 442 | #%% simple example 443 | import xarray as xr 444 | 445 | ds = xr.open_dataset('./xcontour/Data/internalwave.nc') 446 | 447 | vor = ds.THETA.where(ds.THETA!=0)[2] 448 | 449 | 450 | #%% 451 | import numpy as np 452 | from skimage import measure 453 | from xcontour.xcontour import contour_length 454 | 455 | def contour_lengths(data, contours, dims=[None,None], latlon=True, period=[None, None]): 456 | """Calculate contour length in a 2D numpy data. 457 | This is designed for xarray's ufunc. 458 | 459 | Parameters 460 | ---------- 461 | data: numpy.ndarray 462 | 2D numpy data. 463 | contours: numpy.ndarray 464 | a list of contour values. 465 | latlon: boolean, optional 466 | Whether dimension is latlon or cartesian. 467 | 468 | Returns 469 | ------- 470 | lengths: numpy.ndarray 471 | List of contour lengths. 472 | """ 473 | coord1 = dims[0] 474 | coord2 = dims[1] 475 | 476 | idx1 = np.arange(len(coord1)) 477 | idx2 = np.arange(len(coord2)) 478 | 479 | lengths = [] 480 | 481 | for c in contours: 482 | # in unit of grid points 483 | segments = measure.find_contours(data, c) 484 | 485 | segs_coords = [] 486 | 487 | # change to unit of coordinates 488 | for segment in segments: 489 | d1pos = np.interp(segment[:,0], idx1, coord1, period=period[0]) 490 | d2pos = np.interp(segment[:,1], idx2, coord2, period=period[1]) 491 | 492 | segs_coords.append(np.c_[d2pos, d1pos]) 493 | 494 | lengths.append(sum([contour_length(seg) for seg in segs_coords])) 495 | 496 | return np.asarray(lengths) 497 | 498 | 499 | lengths = contour_lengths(vor.data, np.array([26]), dims=[vor.XC, vor.Z], 500 | latlon=False, period=[8960, None]) 501 | 502 | cs = xr.DataArray(np.array([25.9, 26.1]), dims='contour', 503 | coords={'contour':np.array([0, 1])}) 504 | 505 | lens = xr.apply_ufunc(contour_lengths, vor, cs, 506 | kwargs={'latlon':False, 'period':[8960, None], 'dims':[vor.Z, vor.XC],}, 507 | dask='allowed', 508 | 
input_core_dims=[['Z','XC'], ['contour']], 509 | vectorize=True, 510 | output_core_dims=[['contour']]) 511 | 512 | 513 | #%% 514 | import numpy as np 515 | from xcontour.xcontour import contour_length, find_contour 516 | 517 | cs = np.linspace(25.8, 26.2, 5) 518 | 519 | def c_len(c): 520 | c_segs = find_contour(vor, ['Z', 'XC'], vor.max()) 521 | 522 | if len(c_segs) == 0: 523 | return np.nan 524 | else: 525 | return sum([contour_length(seg, False) for seg in c_segs]) 526 | 527 | 528 | lengths = [c_len(c) for c in cs] 529 | 530 | 531 | #%% 532 | import proplot as pplt 533 | 534 | segs = find_contour(vor, ['Z', 'XC'], vor.min()) 535 | 536 | 537 | fig, axes = pplt.subplots(nrows=1, ncols=1, figsize=(11, 6)) 538 | 539 | ax = axes[0] 540 | 541 | ax.contourf(vor, cmap='bwr') 542 | for seg in segs: 543 | ax.plot(seg[:, 0], seg[:, 1], linewidth=1, color='k') 544 | ax.set_ylim([-200, 0]) 545 | ax.set_xlim([0, 8960]) 546 | 547 | 548 | #%% 549 | import proplot as pplt 550 | 551 | fontsize= 14 552 | 553 | fig, axes = pplt.subplots(nrows=1, ncols=2, figsize=(10, 5), sharex=0, sharey=0) 554 | 555 | ax = axes[0] 556 | ax.contourf(vor, levels=41) 557 | ax.contour(vor, levels=cs, color='k') 558 | ax.set_title('vorticity', fontsize=fontsize) 559 | 560 | ax = axes[1] 561 | ax.plot(lengths, cs) 562 | ax.set_title('contour length', fontsize=fontsize) 563 | 564 | -------------------------------------------------------------------------------- /tests/test_clength.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on 2022.09.30 4 | 5 | @author: MiniUFO 6 | Copyright 2018. All rights reserved. Use is subject to license terms. 
7 | """ 8 | #%% 9 | import xarray as xr 10 | import numpy as np 11 | from GeoApps.DiagnosticMethods import Dynamics 12 | from xcontour.xcontour import Contour2D, add_latlon_metrics 13 | 14 | dset = xr.open_dataset('./xcontour/Data/barotropic_vorticity.nc') 15 | 16 | vor = dset.absolute_vorticity 17 | # vor = (vor - vor + vor.latitude).rename('absolute_vorticity') 18 | # vor[50:100, 256:385] = np.nan 19 | # vor[50:100, 256] = np.nan 20 | 21 | # vor[60:190,200:400] = np.nan 22 | 23 | 24 | # # add metrics for xgcm 25 | dset, grid = add_latlon_metrics(dset) 26 | 27 | dyn = Dynamics(dset, grid=grid, arakawa='A') 28 | 29 | # # get PV as a tracer and its squared gradient 30 | tracer = vor 31 | grdS = dyn.cal_squared_gradient(tracer, dims=['Y', 'X'], 32 | boundary={'Y':'extend', 'X':'periodic'}) 33 | 34 | grdS = grdS.where(np.isfinite(grdS)) 35 | 36 | #%% 37 | from xinvert.xinvert import FiniteDiff 38 | 39 | fd = FiniteDiff({'X':'longitude', 'Y':'latitude'}, 40 | BCs={'X':'periodic', 'Y':'extend'}) 41 | 42 | grdy, grdx = fd.grad(tracer, dims=['Y', 'X']) 43 | 44 | grdS = grdy**2 + grdx**2 45 | grdm = np.hypot(grdx, grdy) 46 | 47 | 48 | #%% initialize contours 49 | # Initialize equally-spaced contours from minimum value to maximum value 50 | # (within lat/lon dims). 
Here will implicitly loop over each isentropic level 51 | 52 | N = 121 # increase the contour number may get non-monotonic A(q) relation 53 | increase = True # Y-index increases with latitude 54 | lt = True # northward of PV contours (larger than) is inside the contour 55 | # change this should not change the result of Keff, but may alter 56 | # the values at boundaries 57 | dtype = np.float32 # use float32 to save memory 58 | undef = np.nan # for maskout topography if present 59 | 60 | # initialize a Contour2D analysis class using PV as the tracer 61 | analysis = Contour2D(grid, tracer, 62 | dims={'X':'longitude','Y':'latitude'}, 63 | dimEq={'Y':'latitude'}, 64 | increase=increase, 65 | lt=lt) 66 | ctr = analysis.cal_contours(N) 67 | 68 | # Mask for A(q) relation table. 69 | # This can be done analytically in simple case, but we choose to do it 70 | # numerically in case there are undefined values inside the domain. 71 | mask = xr.where(np.isnan(tracer), 0, 1).astype(dtype) 72 | 73 | 74 | #%% calculate related quantities for Keff 75 | # xarray's conditional integration, memory consuming and not preferred, for test only 76 | from xcontour.xcontour import latitude_lengths_at 77 | 78 | def calLmin(mask, latEq): 79 | latLen = latitude_lengths_at(mask.latitude) 80 | preLmin = (mask.sum('longitude') / len(mask.longitude) * latLen).reset_coords(drop=True) 81 | 82 | re = preLmin.interp(latitude=latEq.values).rename({'latitude':'contour'})\ 83 | .assign_coords({'contour': latEq['contour'].values}) 84 | 85 | return re 86 | 87 | table = analysis.cal_area_eqCoord_table_hist(mask) # A(Yeq) table 88 | area = analysis.cal_integral_within_contours_hist(ctr).rename('intArea') 89 | lengths = analysis.cal_contour_lengths(ctr, latlon=True).rename('lengths') 90 | latEq = table.lookup_coordinates(area).rename('latEq') 91 | intgrdS = analysis.cal_integral_within_contours_hist(ctr, integrand=grdS).rename('intgrdS') 92 | Lmin = calLmin(mask, latEq).rename('Lmin') 93 | cmInvGrd= 
analysis.cal_contour_mean_hist(ctr, 1.0/grdm, grdm, area).rename('cmInvGrd') 94 | cmGrd = analysis.cal_contour_mean_hist(ctr, grdm, grdm, area).rename('cmGrd') 95 | dintSdA = analysis.cal_gradient_wrt_area(intgrdS, area).rename('dintSdA') 96 | dqdA = analysis.cal_gradient_wrt_area(ctr, area).rename('dqdA') 97 | Leq2 = analysis.cal_sqared_equivalent_length(dintSdA, dqdA).rename('Leq2') 98 | nkeff = analysis.cal_normalized_Keff(Leq2, Lmin).rename('nkeff') 99 | 100 | #%% combined the results 101 | # results in contour space 102 | ds_contour = xr.merge([ctr, area, latEq, lengths, intgrdS, Lmin, cmInvGrd, 103 | cmGrd, dintSdA, dqdA, Leq2, nkeff]) 104 | 105 | # interpolate from contour space to equivalent-latitude space 106 | preLats = tracer.latitude.astype(dtype) 107 | # results in latEq space 108 | ds_latEq = analysis.interp_to_dataset(preLats, latEq, ds_contour) 109 | 110 | 111 | #%% 112 | import proplot as pplt 113 | 114 | fontsize= 14 115 | 116 | array = [ 117 | [1, 1, 1, 1, 2, 2] 118 | ] 119 | 120 | fig, axes = pplt.subplots(array, figsize=(10, 5), sharex=0, sharey=3) 121 | 122 | ax = axes[0] 123 | ax.contourf(tracer, levels=41) 124 | ax.contour(tracer, levels=21, color='k') 125 | ax.set_title('vorticity', fontsize=fontsize) 126 | 127 | ax = axes[1] 128 | m1=ax.plot((ds_latEq.lengths/ds_latEq.Lmin), ds_latEq.latEq, label='$L$') 129 | m2=ax.plot((np.sqrt(ds_latEq.Leq2)/ds_latEq.Lmin), ds_latEq.latEq, label='$L_{eq}$') 130 | ax.set_xlim([0, 8]) 131 | ax.set_title('contour length', fontsize=fontsize) 132 | 133 | ax.legend([m1,m2], loc='lr', ncols=1) 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | #%% 144 | import xarray as xr 145 | import numpy as np 146 | from GeoApps.DiagnosticMethods import Dynamics 147 | from xcontour.xcontour import Contour2D, add_MITgcm_missing_metrics 148 | 149 | dset = xr.open_dataset('./xcontour/Data/internalwave.nc') 150 | 151 | # # get PV as a tracer and its squared gradient 152 | tracer = dset.THETA.where(dset.THETA!=0) 153 | 
154 | dset, grid = add_MITgcm_missing_metrics(dset) 155 | 156 | #%% 157 | from xinvert.xinvert import FiniteDiff 158 | 159 | fd = FiniteDiff({'X':'XC', 'Z':'Z'}, 160 | BCs={'X':'periodic', 'Z':'extend'}) 161 | 162 | grdz, grdx = fd.grad(tracer, dims=['Z', 'X']) 163 | 164 | grdS = grdz**2 + grdx**2 165 | 166 | 167 | #%% initialize contours 168 | # Initialize equally-spaced contours from minimum value to maximum value 169 | # (within lat/lon dims). Here will implicitly loop over each isentropic level 170 | 171 | N = 101 # increase the contour number may get non-monotonic A(q) relation 172 | increase = False # Y-index increases with latitude 173 | lt = False # northward of PV contours (larger than) is inside the contour 174 | # change this should not change the result of Keff, but may alter 175 | # the values at boundaries 176 | dtype = np.float32 # use float32 to save memory 177 | undef = 0 # for maskout topography if present 178 | 179 | # initialize a Contour2D analysis class using PV as the tracer 180 | analysis = Contour2D(grid, tracer, 181 | dims={'X':'XC','Z':'Z'}, 182 | dimEq={'Z':'Z'}, 183 | increase=increase, 184 | lt=lt) 185 | ctr = analysis.cal_contours(N) 186 | 187 | # Mask for A(q) relation table. 188 | # This can be done analytically in simple case, but we choose to do it 189 | # numerically in case there are undefined values inside the domain. 
190 | mask = xr.where(np.isnan(tracer), 0, 1)[0].astype(dtype) 191 | 192 | 193 | #%% calculate related quantities for Keff 194 | # xarray's conditional integration, memory consuming and not preferred, for test only 195 | hgrid, xcount = 2, 4480 196 | 197 | def calLmin(mask, zEq): 198 | preLmin = (mask.sum('XC') * hgrid).reset_coords(drop=True) 199 | 200 | re = [] 201 | 202 | for tim in zEq: 203 | re.append(preLmin.interp(Z=tim.values).rename({'Z':'contour'}) 204 | .assign_coords({'contour': zEq['contour'].values})) 205 | 206 | return xr.concat(re, dim='time') 207 | 208 | table = analysis.cal_area_eqCoord_table_hist(mask) # A(Yeq) table 209 | area = analysis.cal_integral_within_contours_hist(ctr).rename('intArea') 210 | lengths = analysis.cal_contour_lengths(ctr, False).rename('lengths') 211 | ZEq = table.lookup_coordinates(area).rename('zEq') 212 | intgrdS = analysis.cal_integral_within_contours_hist(ctr, integrand=grdS).rename('intgrdS') 213 | Lmin = calLmin(mask, ZEq).rename('Lmin') 214 | dintSdA = analysis.cal_gradient_wrt_area(intgrdS, area).rename('dintSdA') 215 | dqdA = analysis.cal_gradient_wrt_area(ctr, area).rename('dqdA') 216 | Leq2 = analysis.cal_sqared_equivalent_length(dintSdA, dqdA).rename('Leq2') 217 | nkeff = analysis.cal_normalized_Keff(Leq2, Lmin).rename('nkeff') 218 | 219 | #%% combined the results 220 | # results in contour space 221 | ds_contour = xr.merge([ctr, area, ZEq, lengths, intgrdS, Lmin, dintSdA, dqdA, 222 | Leq2, nkeff]) 223 | 224 | # interpolate from contour space to equivalent-latitude space 225 | preZs = tracer.Z.astype(dtype) 226 | # results in latEq space 227 | ds_ZEq = analysis.interp_to_dataset(preZs, ZEq, ds_contour) 228 | 229 | 230 | #%% 231 | import proplot as pplt 232 | 233 | fontsize= 14 234 | 235 | array = [ 236 | [1, 1, 1, 1, 2, 2], 237 | [3, 3, 3, 3, 4, 4], 238 | [5, 5, 5, 5, 6, 6], 239 | ] 240 | 241 | fig, axes = pplt.subplots(array, figsize=(10, 10), sharex=0, sharey=3) 242 | 243 | tstep = 0 244 | ax = axes[0] 245 | 
ax.contourf(tracer[tstep], levels=41, cmap='bwr') 246 | ax.contour(tracer[tstep], levels=21, color='k') 247 | ax.set_title('vorticity', fontsize=fontsize) 248 | ax.set_ylim([-200, 0]) 249 | 250 | ax = axes[1] 251 | m1=ax.plot((ds_ZEq.lengths/ds_ZEq.Lmin)[tstep], ds_ZEq.zEq[tstep], label='L') 252 | m2=ax.plot(np.sqrt(ds_ZEq.nkeff)[tstep], ds_ZEq.zEq[tstep], label='$L_{eq}$') 253 | ax.set_xlim([0, 3]) 254 | ax.set_title('contour length', fontsize=fontsize) 255 | ax.legend([m1,m2], loc='ur') 256 | 257 | tstep = 1 258 | ax = axes[2] 259 | ax.contourf(tracer[tstep], levels=41, cmap='bwr') 260 | ax.contour(tracer[tstep], levels=21, color='k') 261 | ax.set_title('vorticity', fontsize=fontsize) 262 | ax.set_ylim([-200, 0]) 263 | 264 | ax = axes[3] 265 | m1=ax.plot((ds_ZEq.lengths/ds_ZEq.Lmin)[tstep], ds_ZEq.zEq[tstep], label='L') 266 | m2=ax.plot(np.sqrt(ds_ZEq.nkeff)[tstep], ds_ZEq.zEq[tstep], label='$L_{eq}$') 267 | ax.set_xlim([0, 3]) 268 | ax.set_title('contour length', fontsize=fontsize) 269 | ax.legend([m1,m2], loc='ur') 270 | 271 | tstep = 2 272 | ax = axes[4] 273 | ax.contourf(tracer[tstep], levels=41, cmap='bwr') 274 | ax.contour(tracer[tstep], levels=21, color='k') 275 | ax.set_title('vorticity', fontsize=fontsize) 276 | ax.set_ylim([-200, 0]) 277 | 278 | ax = axes[5] 279 | m1=ax.plot((ds_ZEq.lengths/ds_ZEq.Lmin)[tstep], ds_ZEq.zEq[tstep], label='L') 280 | m2=ax.plot(np.sqrt(ds_ZEq.nkeff)[tstep], ds_ZEq.zEq[tstep], label='$L_{eq}$') 281 | ax.set_xlim([0, 3]) 282 | ax.set_title('contour length', fontsize=fontsize) 283 | ax.legend([m1,m2], loc='ur') 284 | 285 | axes.format(xlabel='', ylabel='') 286 | 287 | 288 | 289 | 290 | #%% global tracer test 291 | import xmitgcm 292 | from matplotlib import pyplot as plt 293 | import proplot as pplt 294 | import numpy as np 295 | import xarray as xr 296 | from GeoApps.GridUtils import add_MITgcm_missing_metrics 297 | from GeoApps.DiagnosticMethods import Dynamics 298 | 299 | path = 'I:/AVISO/GlobalTracers/' 300 | 301 | ds = 
def computeKeff(tracer, grdS):
    """Diagnose effective-diffusivity (Keff) quantities for one tracer.

    Builds a :class:`Contour2D` analysis on the tracer, computes the
    contour-space diagnostics (integrated area, equivalent latitude,
    contour/minimum/equivalent lengths and normalized Keff), and returns
    both the contour-space dataset and its interpolation onto a regular
    equivalent-latitude axis.

    Returns
    -------
    interp, origin : xarray.Dataset
        Results in equivalent-latitude space and in contour space.
    """
    cm = Contour2D(grid, tracer,
                   dims={'X':'XC','Y':'YC'},
                   dimEq={'Y':'YC'},
                   increase=increase,
                   lt=lt, check_mono=check_mono)

    N    = 401
    mask = dset.maskC.rename('mask')
    preY = np.linspace(-70, 75, N)

    # Initialize contours spanning the tracer's min-to-max range, then
    # integrate area and squared gradient within each contour.
    table   = cm.cal_area_eqCoord_table_hist(mask)
    ctr     = cm.cal_contours(N).load()
    area    = cm.cal_integral_within_contours_hist(ctr).load().rename('intArea')
    intgrdS = cm.cal_integral_within_contours_hist(ctr, integrand=grdS).rename('intgrdS')

    def calLmin(mask, Yeq):
        # Zonal grid lengths summed over valid points give the minimum
        # possible contour length at each latitude row.
        base = (mask * dset.dxF).sum('XC').reset_coords(drop=True)

        per_time = [
            base.interp(YC=snapshot.values)
                .rename({'YC': 'contour'})
                .assign_coords({'contour': Yeq['contour'].values})
            for snapshot in Yeq
        ]

        return xr.concat(per_time, dim='time')

    Yeq     = table.lookup_coordinates(area).rename('Yeq')
    Lmin    = calLmin(mask, Yeq).rename('Lmin')
    lengths = cm.cal_contour_lengths(ctr, latlon=True).rename('lengths')
    dgrdSdA = cm.cal_gradient_wrt_area(intgrdS, area)
    dqdA    = cm.cal_gradient_wrt_area(ctr, area)
    Leq2    = cm.cal_sqared_equivalent_length(dgrdSdA, dqdA)
    nkeff   = cm.cal_normalized_Keff(Leq2, Lmin, mask=2e7)

    # Collect everything on the N contours, then map to equivalent latitudes.
    origin = xr.merge([ctr, area, Yeq, intgrdS, dgrdSdA, dqdA, Leq2, Lmin,
                       lengths, nkeff])
    interp = cm.interp_to_dataset(preY, Yeq, origin).rename({'new':'YC'})

    return interp, origin
color='k') 396 | ax.set_title('vorticity (t=1)', fontsize=fontsize) 397 | ax.colorbar(m1, loc='b', label='', ticks=0.2, length=0.7) 398 | 399 | ax = axes[1] 400 | m1=ax.plot((re1.lengths/re1.Lmin)[step], re1.Yeq[step], label='$L$') 401 | m2=ax.plot((np.sqrt(re1.Leq2)/re1.Lmin)[step], re1.Yeq[step], label='$L_{eq}$') 402 | ax.set_xlim([0, 3]) 403 | ax.set_title('contour length (t=1)', fontsize=fontsize) 404 | ax.set_ylabel('Equivalent Latitude', fontsize=fontsize-2) 405 | ax.set_ylim([-90, 90]) 406 | 407 | ax.legend([m1,m2], loc='lr', ncols=1) 408 | 409 | step = -1 410 | ax = axes[2] 411 | m1=ax.contourf(tr1[step], levels=np.linspace(1,2,21), cmap='rainbow') 412 | ax.contour(tr1[step], levels=11, color='k', linewidth=0.5) 413 | ax.set_title('vorticity (t=365)', fontsize=fontsize) 414 | 415 | ax = axes[3] 416 | m1=ax.plot((re1.lengths/re1.Lmin)[step], re1.Yeq[step], label='$L$') 417 | m2=ax.plot((np.sqrt(re1.Leq2)/re1.Lmin)[step], re1.Yeq[step], label='$L_{eq}$') 418 | ax.set_xlim([0, 40]) 419 | ax.set_title('contour length (t=365)', fontsize=fontsize) 420 | ax.set_ylabel('Equivalent Latitude', fontsize=fontsize-2) 421 | ax.set_ylim([-90, 90]) 422 | 423 | ax.legend([m1,m2], loc='lr', ncols=1) 424 | 425 | axes.format(abc='(a)', land=True, landcolor='gray') 426 | 427 | #%% compare different kappa 428 | import proplot as pplt 429 | 430 | fontsize= 14 431 | step = -1 432 | 433 | array = [ 434 | [1, 1, 1, 2], 435 | [3, 3, 3, 4], 436 | [5, 5, 5, 6], 437 | ] 438 | 439 | fig, axes = pplt.subplots(array, figsize=(10, 11), sharex=0, sharey=0, 440 | proj=['kav7', None, 'kav7', None, 'kav7', None], 441 | proj_kw={'central_longitude': 150}) 442 | 443 | ax = axes[0] 444 | ax.contourf(tr1[step], levels=np.linspace(1,2,21), cmap='rainbow') 445 | ax.contour(tr1[step], levels=21, color='k', linewidth=0.5) 446 | ax.set_title('vorticity ($\kappa=0$)', fontsize=fontsize) 447 | 448 | ax = axes[1] 449 | m1=ax.plot((re1.lengths/re1.Lmin)[step], re1.Yeq[step], label='$L$') 450 | 
# --- continuation of the kappa-comparison figure ---
m2 = ax.plot((np.sqrt(re1.Leq2)/re1.Lmin)[step], re1.Yeq[step], label='$L_{eq}$')
ax.set_xlim([0, 40])
ax.set_title('contour length ($\kappa=0$)', fontsize=fontsize)
ax.set_ylabel('Equivalent Latitude', fontsize=fontsize-2)
ax.set_ylim([-90, 90])

ax.legend([m1,m2], loc='lr', ncols=1)

ax = axes[2]
m1 = ax.contourf(tr4[step], levels=np.linspace(1,2,21), cmap='rainbow')
ax.contour(tr4[step], levels=21, color='k', linewidth=0.5)
ax.set_title('vorticity ($\kappa=20$)', fontsize=fontsize)
ax.colorbar(m1, loc='r', ticks=0.2, label='', length=1)

ax = axes[3]
m1 = ax.plot((re4.lengths/re4.Lmin)[step], re4.Yeq[step], label='$L$')
m2 = ax.plot((np.sqrt(re4.Leq2)/re4.Lmin)[step], re4.Yeq[step], label='$L_{eq}$')
ax.set_xlim([0, 40])
ax.set_title('contour length ($\kappa=20$)', fontsize=fontsize)
ax.set_ylabel('Equivalent Latitude', fontsize=fontsize-2)
ax.set_ylim([-90, 90])

ax.legend([m1,m2], loc='lr', ncols=1)

ax = axes[4]
ax.contourf(tr7[step], levels=np.linspace(1,2,21), cmap='rainbow')
ax.contour(tr7[step], levels=21, color='k', linewidth=0.5)
ax.set_title('vorticity ($\kappa=50$)', fontsize=fontsize)

ax = axes[5]
# BUG FIX: this panel plotted (re7.lengths/re7.Lmin - 1), shifting the curve
# by one unit; the kappa=0 and kappa=20 panels plot the plain ratio, so the
# spurious "-1" made the three panels incomparable.
m1 = ax.plot((re7.lengths/re7.Lmin)[step], re7.Yeq[step], label='$L$')
m2 = ax.plot((np.sqrt(re7.Leq2)/re7.Lmin)[step], re7.Yeq[step], label='$L_{eq}$')
ax.set_xlim([0, 40])
ax.set_title('contour length ($\kappa=50$)', fontsize=fontsize)
ax.set_ylabel('Equivalent Latitude', fontsize=fontsize-2)
ax.set_ylim([-90, 90])

ax.legend([m1,m2], loc='lr', ncols=1)

axes.format(abc='(a)', land=True, landcolor='gray')
def computeKeff(tracer, grdS):
    """Diagnose effective-diffusivity (Keff) quantities for one tracer.

    Same pipeline as the global-tracer test above, with the minimum length
    masked where it vanishes (land-only latitude rows) before interpolation.

    Returns
    -------
    interp, origin : xarray.Dataset
        Results in equivalent-latitude space and in contour space.
    """
    cm = Contour2D(grid, tracer,
                   dims={'Y':'YC','X':'XC'},
                   dimEq={'Y':'YC'},
                   increase=increase,
                   lt=lt, check_mono=check_mono)

    # FIX: removed the unused local `ydef = dset.YC` that was never referenced.
    N    = 401
    mask = dset.maskC.rename('mask')
    preY = np.linspace(-70, 75, N)

    # Initialize contours from the tracer's minimum to maximum value
    # (within lat/lon dims) using N contours.
    table   = cm.cal_area_eqCoord_table_hist(mask)
    ctr     = cm.cal_contours(N).load()
    area    = cm.cal_integral_within_contours_hist(ctr).load().rename('intArea')
    intgrdS = cm.cal_integral_within_contours_hist(ctr, integrand=grdS).rename('intgrdS')

    def calLmin(mask, Yeq):
        # Zonal grid lengths summed over valid points; zero rows (all land)
        # are masked to NaN so they do not pollute the interpolation.
        preLmin = (mask * dset.dxF).sum('XC').reset_coords(drop=True)
        preLmin = preLmin.where(preLmin != 0)

        re = []

        for tim in Yeq:
            re.append(preLmin.interp(YC=tim.values).rename({'YC':'contour'})
                      .assign_coords({'contour': Yeq['contour'].values}))

        return xr.concat(re, dim='time')

    Yeq     = table.lookup_coordinates(area).rename('Yeq')
    Lmin    = calLmin(mask, Yeq).rename('Lmin')
    lengths = cm.cal_contour_lengths(ctr, latlon=True).rename('lengths')
    dgrdSdA = cm.cal_gradient_wrt_area(intgrdS, area)
    dqdA    = cm.cal_gradient_wrt_area(ctr, area)
    Leq2    = cm.cal_sqared_equivalent_length(dgrdSdA, dqdA)
    nkeff   = cm.cal_normalized_Keff(Leq2, Lmin, mask=2e7)

    # Collect all these as a Dataset on N contours and interpolate to
    # equivalent latitudes.
    origin = xr.merge([ctr, area, Yeq, intgrdS, dgrdSdA, dqdA, Leq2, Lmin,
                       lengths, nkeff])
    interp = cm.interp_to_dataset(preY, Yeq, origin).rename({'new':'YC'})

    return interp, origin
re2.Yeq[step], label='$L$') 604 | tmp = (np.sqrt(re2.Leq2)/re2.Lmin)[step] 605 | m2=ax.plot(tmp, re2.Yeq[step], label='$L_{eq}$') 606 | ax.set_xlim([0, 3]) 607 | ax.set_title('contour length (t=1)', fontsize=fontsize) 608 | ax.set_ylabel('Equivalent Latitude', fontsize=fontsize-2) 609 | ax.set_ylim([-90, 90]) 610 | 611 | ax.legend([m1,m2], loc='lr', ncols=1) 612 | 613 | 614 | #%% 615 | from xcontour.xcontour import find_contour, contour_length 616 | 617 | 618 | segs = find_contour(tr1[0], ['YC', 'XC'], 1.15076609, period=[None, None]) 619 | 620 | fig, axes = pplt.subplots(nrows=1, ncols=1, figsize=(11, 6)) 621 | 622 | ax = axes[0] 623 | 624 | ax.contourf(tr1[0, 120:300], cmap='jet') 625 | for seg in segs: 626 | ax.plot(seg[:, 1], seg[:, 0], linewidth=1, color='k') 627 | ax.set_ylim([-68, -50]) 628 | ax.set_xlim([0, 360]) 629 | 630 | print(contour_length(segs[0][:,::-1], latlon=True)) 631 | 632 | 633 | 634 | -------------------------------------------------------------------------------- /tests/test_dask.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on 2022.09.30 4 | 5 | @author: MiniUFO 6 | Copyright 2018. All rights reserved. Use is subject to license terms. 
7 | """ 8 | #%% 9 | import xarray as xr 10 | import numpy as np 11 | from GeoApps.DiagnosticMethods import Dynamics 12 | from xcontour.xcontour import Contour2D, add_latlon_metrics 13 | 14 | dset = xr.tutorial.open_dataset('air_temperature') 15 | 16 | tracer = dset.air 17 | 18 | # # add metrics for xgcm 19 | dset, grid = add_latlon_metrics(dset) 20 | 21 | dyn = Dynamics(dset, grid=grid, arakawa='A') 22 | 23 | # # get PV as a tracer and its squared gradient 24 | grdS = dyn.cal_squared_gradient(tracer, dims=['Y', 'X'], 25 | boundary={'Y':'extend', 'X':'extend'}) 26 | 27 | grdS = grdS.where(np.isfinite(grdS)) 28 | 29 | #%% 30 | from xinvert.xinvert import FiniteDiff 31 | 32 | fd = FiniteDiff({'X':'lon', 'Y':'lat'}, 33 | BCs={'X':'extend', 'Y':'extend'}) 34 | 35 | grdy, grdx = fd.grad(tracer, dims=['Y', 'X']) 36 | 37 | grdS2 = grdy**2 + grdx**2 38 | 39 | 40 | #%% initialize contours 41 | # Initialize equally-spaced contours from minimum value to maximum value 42 | # (within lat/lon dims). Here will implicitly loop over each isentropic level 43 | 44 | N = 20 # increase the contour number may get non-monotonic A(q) relation 45 | increase = True # Y-index increases with latitude 46 | lt = True # northward of PV contours (larger than) is inside the contour 47 | # change this should not change the result of Keff, but may alter 48 | # the values at boundaries 49 | dtype = np.float32 # use float32 to save memory 50 | undef = np.nan # for maskout topography if present 51 | 52 | # initialize a Contour2D analysis class using PV as the tracer 53 | analysis = Contour2D(grid, tracer, 54 | dims={'X':'lon','Y':'lat'}, 55 | dimEq={'Y':'lat'}, 56 | increase=increase, 57 | lt=lt) 58 | ctr = analysis.cal_contours(N) 59 | 60 | # Mask for A(q) relation table. 61 | # This can be done analytically in simple case, but we choose to do it 62 | # numerically in case there are undefined values inside the domain. 
# Mask for the A(q) relation table: 1 where the tracer is defined, 0 elsewhere.
mask = xr.where(np.isnan(tracer), 0, 1).astype(dtype)


#%% calculate related quantities for Keff
# xarray's conditional integration, memory consuming and not preferred, for test only
from xcontour.xcontour import latitude_lengths_at

def calLmin(mask, latEq):
    """Minimum contour length at each equivalent latitude.

    The fraction of valid points per latitude row scales the full
    latitude-circle length (itself scaled by the longitudinal coverage
    of the domain); the profile is interpolated onto the equivalent
    latitudes for every time step.
    """
    frac = (mask.lon[-1] - mask.lon[0]) / 360.0
    latLen = latitude_lengths_at(mask['lat']) * frac
    preLmin = (mask.sum('lon') / len(mask['lon']) * latLen).reset_coords(drop=True)

    re = []

    for tim in latEq:
        re.append(preLmin.interp(lat=tim.values).rename({'lat':'contour'})
                  .assign_coords({'contour': latEq['contour'].values}))

    return xr.concat(re, dim='time')

table = analysis.cal_area_eqCoord_table_hist(mask)                  # A(Yeq) table
area = analysis.cal_integral_within_contours_hist(ctr).rename('intArea')
# BUG FIX: this line was commented out, but `lengths` is referenced in the
# xr.merge below and in the plotting section, which raised a NameError.
lengths = analysis.cal_contour_lengths(ctr, True).rename('lengths')
latEq = table.lookup_coordinates(area).rename('latEq')
intgrdS = analysis.cal_integral_within_contours_hist(ctr, integrand=grdS).rename('intgrdS')
Lmin = calLmin(mask, latEq).rename('Lmin')
dintSdA = analysis.cal_gradient_wrt_area(intgrdS, area).rename('dintSdA')
dqdA = analysis.cal_gradient_wrt_area(ctr, area).rename('dqdA')
Leq2 = analysis.cal_sqared_equivalent_length(dintSdA, dqdA).rename('Leq2')
nkeff = analysis.cal_normalized_Keff(Leq2, Lmin).rename('nkeff')

#%% combine the results
# results in contour space
ds_contour = xr.merge([ctr, area, latEq, lengths, intgrdS, Lmin, dintSdA, dqdA,
                       Leq2, nkeff])

# interpolate from contour space to equivalent-latitude space
# BUG FIX: the tutorial `air_temperature` dataset names its dimension `lat`
# (matching dims={'X':'lon','Y':'lat'} above); `tracer.latitude` raised
# AttributeError.
preLats = tracer.lat.astype(dtype)
# results in latEq space
ds_latEq = analysis.interp_to_dataset(preLats, latEq, ds_contour)
2] 115 | ] 116 | 117 | fig, axes = pplt.subplots(array, figsize=(10, 5), sharex=0, sharey=3) 118 | 119 | ax = axes[0] 120 | ax.contourf(tracer, levels=41) 121 | ax.contour(tracer, levels=21, color='k') 122 | ax.set_title('vorticity', fontsize=fontsize) 123 | 124 | ax = axes[1] 125 | m1=ax.plot((ds_latEq.lengths/ds_latEq.Lmin), ds_latEq.latEq, label='$L$') 126 | m2=ax.plot((np.sqrt(ds_latEq.Leq2)/ds_latEq.Lmin), ds_latEq.latEq, label='$L_{eq}$') 127 | ax.set_xlim([0, 8]) 128 | ax.set_title('contour length', fontsize=fontsize) 129 | 130 | ax.legend([m1,m2], loc='lr', ncols=1) 131 | 132 | 133 | 134 | 135 | 136 | -------------------------------------------------------------------------------- /tests/test_fractal.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on 2022.09.30 4 | 5 | @author: MiniUFO 6 | Copyright 2018. All rights reserved. Use is subject to license terms. 7 | """ 8 | #%% 9 | import xarray as xr 10 | import numpy as np 11 | from GeoApps.DiagnosticMethods import Dynamics 12 | from xcontour.xcontour import Contour2D, add_latlon_metrics, latitude_lengths_at 13 | 14 | dset = xr.open_dataset('./xcontour/Data/barotropic_vorticity.nc') 15 | 16 | vor = dset.absolute_vorticity 17 | 18 | # # add metrics for xgcm 19 | dset, grid = add_latlon_metrics(dset) 20 | 21 | dyn = Dynamics(dset, grid=grid, arakawa='A') 22 | 23 | tracer = vor 24 | 25 | 26 | #%% initialize contours 27 | # Initialize equally-spaced contours from minimum value to maximum value 28 | # (within lat/lon dims). 
Here will implicitly loop over each isentropic level 29 | 30 | N = 121 # increase the contour number may get non-monotonic A(q) relation 31 | increase = True # Y-index increases with latitude 32 | lt = True # northward of PV contours (larger than) is inside the contour 33 | # change this should not change the result of Keff, but may alter 34 | # the values at boundaries 35 | dtype = np.float32 # use float32 to save memory 36 | undef = np.nan # for maskout topography if present 37 | 38 | # initialize a Contour2D analysis class using PV as the tracer 39 | analysis = Contour2D(grid, tracer, 40 | dims={'X':'longitude','Y':'latitude'}, 41 | dimEq={'Y':'latitude'}, 42 | increase=increase, 43 | lt=lt) 44 | ctr = analysis.cal_contours(N) 45 | 46 | # Mask for A(q) relation table. 47 | # This can be done analytically in simple case, but we choose to do it 48 | # numerically in case there are undefined values inside the domain. 49 | mask = xr.where(np.isnan(tracer), 0, 1).astype(dtype) 50 | 51 | 52 | #%% calculate related quantities for Keff 53 | 54 | table = analysis.cal_area_eqCoord_table_hist(mask) # A(Yeq) table 55 | area = analysis.cal_integral_within_contours_hist(ctr).rename('intArea') 56 | latEq = table.lookup_coordinates(area).rename('latEq') 57 | Lmin = latitude_lengths_at(latEq).rename('Lmin') 58 | 59 | #%% 60 | from utils.XarrayUtils import coarsen 61 | 62 | strides = [1, 2, 4, 8, 16, 32] 63 | 64 | re = [] 65 | 66 | for ratio in strides: 67 | tracerS = coarsen(tracer, dims=['latitude','longitude'], 68 | periodic='longitude', ratio=ratio) 69 | lengths = analysis.cal_contour_lengths(ctr, tracer=tracerS, 70 | latlon=True).rename('lengths') 71 | re.append(lengths) 72 | 73 | bclens = analysis.cal_contour_crossing(ctr, stride=strides, mode='edge') 74 | 75 | 76 | #%% combined the results 77 | re = [r.rename('length'+str(s)) for r, s in zip(re , strides)] 78 | bc = [b.rename('bclens'+str(s)) for b, s in zip(bclens, strides)] 79 | 80 | # results in contour space 81 | 
def linear_fit(x, y):
    """Slope of the least-squares line through (x, y); NaN when the fit fails.

    Thin wrapper around ``np.polyfit`` so it can be vectorized with
    ``xr.apply_ufunc`` without aborting on degenerate inputs.
    """
    try:
        slope, _ = np.polyfit(x, y, 1)
    except Exception:
        # Best-effort: degenerate/mismatched inputs yield NaN instead of raising.
        return np.nan
    return slope
label='$L16$') 140 | m6=ax.plot((ds_latEq.length32/ds_latEq.Lmin), ds_latEq.latEq, label='$L32$') 141 | ax.set_xlim([0, 5]) 142 | ax.set_title('contour length', fontsize=fontsize) 143 | ax.legend([m1,m2,m3,m4,m5,m6], loc='lr', ncols=1) 144 | 145 | ax = axes[2] 146 | m1=ax.plot((ds_latEq.bclens1/ds_latEq.Lmin), ds_latEq.latEq, label='$L1$') 147 | m2=ax.plot((ds_latEq.bclens2/ds_latEq.Lmin), ds_latEq.latEq, label='$L2$') 148 | m3=ax.plot((ds_latEq.bclens4/ds_latEq.Lmin), ds_latEq.latEq, label='$L4$') 149 | m4=ax.plot((ds_latEq.bclens8/ds_latEq.Lmin), ds_latEq.latEq, label='$L8$') 150 | m5=ax.plot((ds_latEq.bclens16/ds_latEq.Lmin), ds_latEq.latEq, label='$L16$') 151 | m6=ax.plot((ds_latEq.bclens32/ds_latEq.Lmin), ds_latEq.latEq, label='$L32$') 152 | ax.set_xlim([0, 5]) 153 | ax.set_title('contour length (BC)', fontsize=fontsize) 154 | ax.legend([m1,m2,m3,m4,m5,m6], loc='lr', ncols=1) 155 | 156 | ax = axes[3] 157 | ax.plot(fd, ds_latEq.latEq) 158 | ax.set_title('fractal dimension', fontsize=fontsize) 159 | ax.set_xlim([1, 1.6]) 160 | 161 | 162 | 163 | 164 | -------------------------------------------------------------------------------- /tests/test_hist.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on 2022.08.31 4 | 5 | @author: MiniUFO 6 | Copyright 2018. All rights reserved. Use is subject to license terms. 
7 | """ 8 | 9 | #%% define two methods 10 | import numpy as np 11 | import xarray as xr 12 | from GeoApps.DiagnosticMethods import Dynamics 13 | from xcontour.xcontour import Contour2D 14 | 15 | 16 | def computeKeff_hist(tracer, grdS, increase=True, lt=True, check_mono=False): 17 | # Construct an analysis class using the tracer 18 | cm = Contour2D(grid, tracer, 19 | dims={'X':'longitude','Y':'latitude'}, 20 | dimEq={'Y':'latitude'}, 21 | increase=increase, 22 | lt=lt, check_mono=check_mono) 23 | 24 | N = 251 25 | mask = dset.absolute_vorticity.rename('mask') 26 | mask[:,:] = 1 27 | preY = np.linspace(-90, 90, N) 28 | 29 | # This should be called first to initialize contours from minimum value 30 | # to maximum value (within lat/lon dims) using `N` contours. 31 | table = cm.cal_area_eqCoord_table_hist(mask) 32 | ctr = cm.cal_contours(N).load() 33 | 34 | area = cm.cal_integral_within_contours_hist(ctr).load().rename('intArea') 35 | intgrdS = cm.cal_integral_within_contours_hist(ctr, integrand=grdS).rename('intgrdS') 36 | 37 | def calLmin(mask, Yeq): 38 | preLmin = (mask*dset.dxF).sum('longitude').reset_coords(drop=True) 39 | 40 | re = preLmin.interp(latitude=Yeq.values).rename({'latitude':'contour'})\ 41 | .assign_coords({'contour': Yeq['contour'].values}) 42 | 43 | return re 44 | 45 | Yeq = table.lookup_coordinates(area).rename('Yeq') 46 | Lmin = calLmin(mask, Yeq).rename('Lmin') 47 | # lengths = cm.cal_contour_lengths(ctr, latlon=True, period=[None, None]).rename('lengths') 48 | dgrdSdA = cm.cal_gradient_wrt_area(intgrdS, area) 49 | dqdA = cm.cal_gradient_wrt_area(ctr, area) 50 | Leq2 = cm.cal_sqared_equivalent_length(dgrdSdA, dqdA) 51 | nkeff = cm.cal_normalized_Keff(Leq2, Lmin, mask=2e7) 52 | 53 | # Collect all these as a xarray.Dataset defined on N contours and interp to equivalent latitudes 54 | origin = xr.merge([ctr, area, Yeq, intgrdS, dgrdSdA, dqdA, Leq2, Lmin, nkeff]) 55 | interp = cm.interp_to_dataset(preY, Yeq, origin).rename({'new':'latitude'}) 56 | 57 
def computeKeff(tracer, grdS, increase=True, lt=True, check_mono=False):
    """Keff diagnostics via xarray conditional integration (non-hist path).

    Mirrors ``computeKeff_hist`` but uses ``cal_area_eqCoord_table`` /
    ``cal_integral_within_contours`` so the two implementations can be
    compared against each other.

    Returns
    -------
    interp, origin, table
        Equivalent-latitude results, contour-space results, and the
        A(Yeq) lookup table.
    """
    cm = Contour2D(grid, tracer,
                   dims={'X':'longitude','Y':'latitude'},
                   dimEq={'Y':'latitude'},
                   increase=increase,
                   lt=lt, check_mono=check_mono)

    N    = 251
    mask = dset.absolute_vorticity.rename('mask')
    mask[:,:] = 1
    preY = np.linspace(-90, 90, N)

    # Contours from min to max tracer value, then conditional integrals.
    table   = cm.cal_area_eqCoord_table(mask)
    ctr     = cm.cal_contours(N).load()
    area    = cm.cal_integral_within_contours(ctr).load().rename('intArea')
    intgrdS = cm.cal_integral_within_contours(ctr, integrand=grdS).rename('intgrdS')

    def calLmin(mask, Yeq):
        # Minimum length per latitude row, mapped onto equivalent latitudes.
        base = (mask * dset.dxF).sum('longitude').reset_coords(drop=True)
        return (base.interp(latitude=Yeq.values)
                    .rename({'latitude': 'contour'})
                    .assign_coords({'contour': Yeq['contour'].values}))

    Yeq     = table.lookup_coordinates(area).rename('Yeq')
    Lmin    = calLmin(mask, Yeq).rename('Lmin')
    dgrdSdA = cm.cal_gradient_wrt_area(intgrdS, area)
    dqdA    = cm.cal_gradient_wrt_area(ctr, area)
    Leq2    = cm.cal_sqared_equivalent_length(dgrdSdA, dqdA)
    nkeff   = cm.cal_normalized_Keff(Leq2, Lmin, mask=2e7)

    # Contour-space dataset, then interpolation onto regular latitudes.
    origin = xr.merge([ctr, area, Yeq, intgrdS, dgrdSdA, dqdA, Leq2, Lmin, nkeff])
    interp = cm.interp_to_dataset(preY, Yeq, origin).rename({'new':'latitude'})

    return interp, origin, table
path = 'E:/OneDrive/Python/MyPack/xcontour/Data/barotropic_vorticity.nc'
ds = xr.open_dataset(path)

dset, grid = add_latlon_metrics(ds, boundary={'Y':'extend', 'X':'periodic'})
dyn = Dynamics(dset, grid=grid, arakawa='A')

# Vorticity as tracer; tr2 is tr1 flipped in latitude for the reversed test.
tr1 = dset.absolute_vorticity.where(dset.absolute_vorticity!=0)
tr2 = tr1.copy()
tr2[:] = tr1[::-1].values
grdS1 = dyn.cal_squared_gradient(tr1, dims=['Y', 'X'], boundary={'Y':'extend', 'X':'periodic'})
grdS2 = dyn.cal_squared_gradient(tr2, dims=['Y', 'X'], boundary={'Y':'extend', 'X':'periodic'})

grdS1 = xr.where(np.isfinite(grdS1), grdS1, np.nan)
grdS2 = xr.where(np.isfinite(grdS2), grdS2, np.nan)

# Zero the two rows at each latitude boundary.
# BUG FIX: '[-1:-2,:]' is an empty slice (start -1, stop -2, positive step)
# and assigned nothing; '[-2:,:]' mirrors '[0:2,:]' at the other boundary.
grdS1[0:2,:] = 0
grdS1[-2:,:] = 0
grdS2[0:2,:] = 0
grdS2[-2:,:] = 0
def c_len(data, contour, coordy=None, coordx=None):
    """Total length of all segments of one contour level in `data`.

    ``measure.find_contours`` returns segments in grid-point units; the
    segments are copied into a numba typed list (the signature
    ``contour_length`` expects) before the length is evaluated with
    lat/lon scaling.
    """
    segments = measure.find_contours(data, contour)

    # numba.typed.List of float64 (n, 2) arrays, as required by contour_length.
    typed_segments = nb.typed.List.empty_list(nb.typeof(np.zeros((9, 2))))
    for segment in segments:
        typed_segments.append(segment)

    return contour_length(typed_segments, coordx.data, coordy.data, latlon=True)
min_periods=10).construct({'YC':'yw','XC':'xw'}, 47 | stride=10) 48 | mrollm = (tr1-tr1+tr1.YC)[0].rolling(YC=101, XC=101, center=True, 49 | min_periods=10).construct({'YC':'yw','XC':'xw'}, 50 | stride=10).mean(['yw','xw']) 51 | 52 | # print(c_len(tr1, 1.35, tr1.YC, tr1.XC)) 53 | 54 | #%% 55 | clens = xr.apply_ufunc(c_len, rolled, rollmean, 56 | dask='parallelized', 57 | kwargs={'coordy':np.deg2rad(rolled.YC), 58 | 'coordx':np.deg2rad(rolled.XC)}, 59 | input_core_dims=[['yw','xw'], []], 60 | vectorize=True, 61 | output_dtypes=[np.float32]) 62 | 63 | mlens = xr.apply_ufunc(c_len, mrolled, mrollm, 64 | dask='parallelized', 65 | kwargs={'coordy':np.deg2rad(mrolled.YC), 66 | 'coordx':np.deg2rad(mrolled.XC)}, 67 | input_core_dims=[['yw','xw'], []], 68 | vectorize=True, 69 | output_dtypes=[np.float32]) 70 | 71 | #%% 72 | tmp1 = (clens/mlens)[0].load() 73 | tmp2 = (clens/mlens)[1].load() 74 | 75 | #%% 76 | import proplot as pplt 77 | 78 | fontsize = 15 79 | 80 | fig, axes = pplt.subplots(nrows=2, ncols=2, figsize=(11, 7.7), sharex=3, sharey=3, 81 | proj='cyl', proj_kw={'central_longitude':180}) 82 | 83 | ax = axes[0,0] 84 | m1 = ax.contourf(tr1[0], levels=np.linspace(1, 2, 41), cmap='prism') 85 | ax.set_title('tracer distribution (t=0)', fontsize=fontsize) 86 | ax.colorbar(m1, loc='b', label='', ticks=0.2) 87 | 88 | ax = axes[0,1] 89 | m1 = ax.contourf(tmp1, levels=np.linspace(0, 2, 21), cmap='jet') 90 | ax.set_title('local contour length relative to Lmin (t=0)', fontsize=fontsize) 91 | ax.colorbar(m1, loc='b', label='', ticks=0.5) 92 | 93 | ax = axes[1,0] 94 | m1 = ax.contourf(tr1[1], levels=np.linspace(1, 2, 41), cmap='prism') 95 | ax.set_title('tracer distribution (t=1)', fontsize=fontsize) 96 | ax.colorbar(m1, loc='b', label='', ticks=0.2) 97 | 98 | ax = axes[1,1] 99 | m1 = ax.contourf(tmp2, levels=np.linspace(0, 30, 31), cmap='jet') 100 | ax.set_title('local contour length relative to Lmin (t=1)', fontsize=fontsize) 101 | ax.colorbar(m1, loc='b', label='', ticks=5) 
# common cosmetics for all four panels
axes.format(abc='(a)', land=True, coast=True, lonlabels=60, latlabels=30,
            landcolor='gray')




--------------------------------------------------------------------------------
/xcontour/__init__.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# Public API of the xcontour package: the two core classes plus the
# grid/contour helper functions from utils.
from .core import Contour2D, Table
from .utils import equivalent_latitudes, latitude_lengths_at,\
    add_latlon_metrics, add_MITgcm_missing_metrics, \
    contour_area, contour_length,\
    add_latlon_metrics_old


__version__ = "0.0.1"
--------------------------------------------------------------------------------
/xcontour/core.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
"""
Created on 2020.02.05

@author: MiniUFO
Copyright 2018. All rights reserved. Use is subject to license terms.
"""
import numpy as np
import numba as nb
import xarray as xr
from xhistogram.xarray import histogram
from skimage import measure
from .utils import contour_length


class Contour2D(object):
    """
    This class is designed for performing the 2D contour analysis.
    """
    def __init__(self, trcr, dA, dims, dimEq, arakawa='A',
                 increase=True, lt=False, check_mono=False, dtype=np.float32):
        """
        Construct a Dynamics instance using a Dataset and a tracer

        Parameters
        ----------
        trcr: xarray.DataArray
            A given tracer on the given grid
        dA: xarray.DataArray
            Area occupied by each tracer grid point
        dims: dict
            Dimensions along which the min/max values are defined and then
            mapped to the contour space. Example:
                dims = {'X': 'lon', 'Y': 'lat', 'Z': 'Z'}
            Note that only 2D (e.g., X-Y horizontal or X-Z, Y-Z vertical planes)
            is allowed for this class.
        dimEq: dict
            Equivalent dimension that should be mapped from contour space.
            Example: dimEq = {'Y': 'lat'} or dimEq = {'Z', 'depth'}
        arakawa: str
            The type of the grid in ['A', 'C']. Reference:
                https://db0nus869y26v.cloudfront.net/en/Arakawa_grids
            Others are not well tested.
        increase: bool
            Contour increase with the index of equivalent dimension or not
            after the sorting.
        lt: bool
            If true, less than a contour is defined as inside the contour.
        check_mono: bool
            Check the monotonicity of the result or not (default: False).
        dtype: numpy dtype
            Output dtype used when building contour coordinates (default
            np.float32).
        """

        if len(dimEq) != 1:
            raise Exception('dimEq should be one dimension e.g., {"Y","lat"}')

        if len(dims) != 2:
            raise Exception('dims should be a 2D plane')

        # names ('X', 'Y', ...) and coordinate labels ('lon', 'lat', ...)
        # of the plane dims are kept separately; same for the equivalent dim
        self.dA = dA
        self.arakawa = arakawa
        self.tracer = trcr
        self.dims = dims
        self.dimNs = list(dims.keys()) # dim names, ['X', 'Y', 'Z']
        self.dimVs = list(dims.values()) # dim values, ['lon', 'lat', 'Z']
        self.dimEqN = list(dimEq.keys())[0] # equiv. dim name
        self.dimEqV = list(dimEq.values())[0]# equiv. dim value
        self.lt = lt
        self.dtype = dtype
        self.check_mono = check_mono
        self.increase = increase


    def cal_area_eqCoord_table(self, mask):
        """
        Calculate the discretized relation table between area and equivalent
        coordinate. Sometimes, this can be done analytically but mostly it is
        done numerically when dealing with an arbitarily-shaped domain.

        Note: it is assumed that the mask does not change with time.

        Since here we use conditional integration of xarray, broadcasting the
        arrays could be memory consuming. So this is used for small dataset or
        validation only.

        Parameters
        ----------
        mask: xarray.DataArray
            A boolean mask, 1 if valid data and 0 if topography.
89 | 90 | Returns 91 | ---------- 92 | tbl: xarray.DataArray 93 | The relation table between area and equivalent coordinate. This 94 | table will be used to represent the relation of A(Yeq) or its 95 | inverse relation Yeq(A), if equivalent dimension is Yeq. 96 | """ 97 | ctr = mask[self.dimEqV].copy().rename({self.dimEqV:'contour'}) \ 98 | .rename('contour') 99 | ctrVar, _ = xr.broadcast(mask[self.dimEqV], mask) 100 | 101 | eqDimIncre = ctr[-1] > ctr[0] 102 | 103 | if self.lt: 104 | if eqDimIncre == self.increase: 105 | # if self.increase: 106 | # print('case 1: increase & lt') 107 | # else: 108 | # print('case 1: decrease & lt') 109 | mskVar = mask.where(ctrVar < ctr) 110 | else: 111 | # if self.increase: 112 | # print('case 2: increase & lt') 113 | # else: 114 | # print('case 2: decrease & lt') 115 | mskVar = mask.where(ctrVar > ctr) 116 | else: 117 | if eqDimIncre == self.increase: 118 | # if self.increase: 119 | # print('case 3: increase & gt') 120 | # else: 121 | # print('case 3: decrease & gt') 122 | mskVar = mask.where(ctrVar > ctr) 123 | else: 124 | # if self.increase: 125 | # print('case 4: increase & gt') 126 | # else: 127 | # print('case 4: decrease & gt') 128 | mskVar = mask.where(ctrVar < ctr) 129 | 130 | tbl = abs(_integrate(mskVar, self.dA, self.dimNs).rename('AeqCTbl')) \ 131 | .rename({'contour':self.dimEqV}).squeeze().load() 132 | 133 | maxArea = abs(_integrate(mask, self.dA, self.dimNs)).load().squeeze() 134 | 135 | # assign the maxArea to the endpoint 136 | tmp = tbl[{self.dimEqV:-1}] > tbl[{self.dimEqV:0}] 137 | if (tmp == True ).all(): 138 | tbl[{self.dimEqV:-1}] = maxArea 139 | elif (tmp == False).all(): 140 | tbl[{self.dimEqV: 0}] = maxArea 141 | else: 142 | raise Exception('not every time or level is increasing/decreasing') 143 | 144 | if self.check_mono: 145 | _check_monotonicity(tbl, 'contour') 146 | 147 | return Table(tbl, self.dimEqV) 148 | 149 | 150 | def cal_area_eqCoord_table_hist(self, mask): 151 | """ 152 | Calculate the 
discretized relation table between area and equivalent 153 | coordinate. Sometimes, this can be done analytically but mostly it is 154 | done numerically when dealing with an arbitarily-shaped domain. 155 | 156 | Note: it is assumed that the land/sea mask does not change with time. 157 | 158 | Since the implementation based on xarray could be memory consuming, 159 | we here implement this function using xhistogram, which fast and 160 | memory-friendly. This is why this function is end with '_hist' 161 | 162 | Parameters 163 | ---------- 164 | mask: xarray.DataArray 165 | A boolean mask, 1 if valid data and 0 if topography. 166 | 167 | Returns 168 | ---------- 169 | tbl: xarray.DataArray 170 | The relation table between area and equivalent coordinate. This 171 | table will be used to represent the relation of A(Yeq) or its 172 | inverse relation Yeq(A), if equivalent dimension is Yeq. 173 | """ 174 | ctr = mask[self.dimEqV].copy().rename({self.dimEqV:'contour'}) \ 175 | .rename('contour') 176 | ctrVar, _ = xr.broadcast(mask[self.dimEqV], mask) 177 | 178 | ctrVar = ctrVar.where(mask==1) 179 | 180 | yIncre = True # Yeq increases with index 181 | if ctr.values[-1] < ctr.values[0]: 182 | yIncre = False 183 | 184 | ylt = True 185 | if self.increase == yIncre: 186 | ylt = self.lt 187 | else: 188 | ylt = not self.lt 189 | 190 | tbl = _histogram(ctrVar, ctr, self.dimVs, self.dA, # weights 191 | ylt # less than or greater than 192 | ).rename('AeqCTbl').rename({'contour':self.dimEqV})\ 193 | .squeeze().load() 194 | 195 | if yIncre: 196 | tbl = tbl.assign_coords({self.dimEqV:ctr.values}).squeeze() 197 | else: 198 | tbl = tbl.assign_coords({self.dimEqV:ctr.values[::-1]}).squeeze() 199 | 200 | if self.check_mono: 201 | _check_monotonicity(tbl, 'contour') 202 | 203 | return Table(tbl, self.dimEqV) 204 | 205 | def cal_contours(self, levels=10): 206 | """ 207 | Establishing contour levels (space) of the tracer from its minimum 208 | to maximum values. 
If a integer is specified, equally-spaced contour 209 | interval is used. Otherwise, each level should be provided in an 210 | array. 211 | 212 | Parameters 213 | ---------- 214 | levels : int or numpy.array 215 | The number of contour levels or specified levels. 216 | 217 | Returns 218 | ---------- 219 | contour : xarray.DataArray 220 | A array of contour levels. 221 | """ 222 | if type(levels) is int: 223 | # specifying number of contours 224 | mmin = self.tracer.min(dim=self.dimVs) 225 | mmax = self.tracer.max(dim=self.dimVs) 226 | 227 | # if numpy.__version__ > 1.16, use numpy.linspace instead 228 | def mylinspace(start, stop, levels): 229 | divisor = levels - 1 230 | steps = (1.0/divisor) * (stop - start) 231 | 232 | return steps[..., None] * np.arange(levels) + start[..., None] 233 | 234 | if self.increase: 235 | start = mmin 236 | end = mmax 237 | else: 238 | start = mmax 239 | end = mmin 240 | 241 | ctr = xr.apply_ufunc(mylinspace, start, end, levels, 242 | dask='allowed', 243 | input_core_dims=[[], [], []], 244 | vectorize=True, 245 | output_core_dims=[['contour']], 246 | output_dtypes=[self.dtype]) 247 | 248 | ctr.coords['contour'] = np.linspace(0.0, levels-1.0, levels, 249 | dtype=self.dtype) 250 | 251 | else: 252 | # specifying levels of contours 253 | def mylinspace(tracer, levs): 254 | return tracer[..., None] - tracer[..., None] + levs 255 | 256 | ctr = xr.apply_ufunc(mylinspace, 257 | self.tracer.min(dim=self.dimVs), levels, 258 | dask='allowed', 259 | input_core_dims=[[], []], 260 | vectorize=True, 261 | output_core_dims=[['contour']], 262 | output_dtypes=[self.dtype]) 263 | 264 | ctr.coords['contour'] = levels 265 | 266 | return ctr 267 | 268 | 269 | def cal_contours_at(self, predef, table): 270 | """ 271 | Calculate contours for a tracer at prescribed Ys (equivalent Ys), 272 | so that the returned contours are defined almost at Ys. 
273 | 274 | This function will first rough estimate the contour-enclosed 275 | area and equivalent Ys, and then interpolate the Y(q) relation 276 | table to get the q(Y) and return q. 277 | 278 | Since here we use conditional integration of xarray, broadcasting the 279 | arrays could be memory consuming. So this is used for small dataset or 280 | validation only. 281 | 282 | Parameters 283 | ---------- 284 | predef : xarray.DataArray or numpy.ndarray or numpy.array 285 | An 1D array of prescribed coordinate values. 286 | table : Table 287 | Relation A(Yeq) table between area and equivalent dimension. 288 | 289 | Returns 290 | ---------- 291 | contour : xarray.DataArray 292 | A array of contour levels corresponding to preY. 293 | """ 294 | if len(predef.shape) != 1: 295 | raise Exception('predef should be a 1D array') 296 | 297 | if type(predef) in [np.ndarray, np.array]: 298 | # add coordinate as a DataArray 299 | predef = xr.DataArray(predef, dims='new', coords={'new': predef}) 300 | 301 | N = predef.size 302 | 303 | ctr = self.cal_contours(N) 304 | area = self.cal_integral_within_contours(ctr) 305 | dimEq = table.lookup_coordinates(area).rename('Z') 306 | # print(self.interp_to_coords(predef, dimEq, ctr)) 307 | qIntp = self.interp_to_coords(predef.squeeze(), dimEq, ctr.squeeze()) \ 308 | .rename({'new': 'contour'}) \ 309 | .rename(ctr.name) 310 | 311 | qIntp['contour'] = np.linspace(0, N-1, N, dtype=self.dtype) 312 | 313 | return qIntp 314 | 315 | 316 | def cal_contours_at_hist(self, predef, table): 317 | """ 318 | Calculate contours for a tracer at prescribed Ys, 319 | so that the returned contour and its enclosed area will give a 320 | monotonic increasing/decreasing results. 321 | 322 | This function will first rough estimate the contour-enclosed 323 | area and equivalent Ys, and then interpolate the Y(q) relation 324 | table to get the q(Y) and return q. 
325 | 326 | Since the implementation based on xarray could be memory consuming, 327 | we here implement this function using xhistogram, which fast and 328 | memory-friendly. This is why this function is end with '_hist' 329 | 330 | Parameters 331 | ---------- 332 | predef : xarray.DataArray or numpy.ndarray or numpy.array 333 | An 1D array of prescribed coordinate values. 334 | table : Table 335 | A(dimEq) table. 336 | 337 | Returns 338 | ---------- 339 | contour : xarray.DataArray 340 | A array of contour levels corresponding to preY. 341 | """ 342 | if len(predef.shape) != 1: 343 | raise Exception('predef should be a 1D array') 344 | 345 | if type(predef) in [np.ndarray, np.array]: 346 | # add coordinate as a DataArray 347 | predef = xr.DataArray(predef, dims='new', coords={'new': predef}) 348 | 349 | N = predef.size 350 | 351 | ctr = self.cal_contours(N) 352 | area = self.cal_integral_within_contours_hist(ctr) 353 | dimEq = table.lookup_coordinates(area).rename('Z') 354 | qIntp = self.interp_to_coords(predef.squeeze(), dimEq, ctr.squeeze()) \ 355 | .rename({'new': 'contour'}) \ 356 | .rename(ctr.name) 357 | 358 | qIntp['contour'] = np.linspace(0, N-1, N, dtype=self.dtype) 359 | 360 | return qIntp 361 | 362 | 363 | def cal_integral_within_contours(self, contour, tracer=None, integrand=None): 364 | """ 365 | Calculate conditional integral of a (masked) variable within each 366 | pre-calculated tracer contour. 367 | 368 | Since here we use conditional integration of xarray, broadcasting the 369 | arrays could be memory consuming. So this is used for small dataset or 370 | validation only. 371 | 372 | Parameters 373 | ---------- 374 | contour: xarray.DataArray 375 | A given contour levels. 376 | integrand: xarray.DataArray 377 | A given variable in dset. If None, area enclosed by contour 378 | will be calculated and returned 379 | 380 | Returns 381 | ---------- 382 | intVar : xarray.DataArray 383 | The integral of var inside contour. 
If None, area enclosed by 384 | contour will be calculated and returned 385 | """ 386 | if type(contour) in [np.ndarray, np.array]: 387 | # add coordinate as a DataArray 388 | contour = xr.DataArray(contour, dims='contour', 389 | coords={'contour': contour}) 390 | 391 | if tracer is None: 392 | tracer = self.tracer 393 | 394 | # this allocates large memory, xhistogram works better 395 | if integrand is None: 396 | integrand = tracer - tracer + 1 397 | 398 | if self.lt: # this allocates large memory, xhistogram works better 399 | mskVar = integrand.where(tracer < contour) 400 | else: 401 | mskVar = integrand.where(tracer > contour) 402 | 403 | # conditional integrate (not memory-friendly because of broadcasting) 404 | intVar = _integrate(mskVar, self.dA, self.dimNs) 405 | 406 | if self.check_mono: 407 | _check_monotonicity(intVar, 'contour') 408 | 409 | return intVar 410 | 411 | 412 | def cal_integral_within_contours_hist(self, contour, tracer=None, 413 | integrand=None): 414 | """ 415 | Calculate integral of a masked variable within 416 | pre-calculated tracer contours, using histogram method. 417 | 418 | Since the implementation based on xarray could be memory consuming, 419 | we here implement this function using xhistogram, which fast and 420 | memory-friendly. This is why this function is end with '_hist' 421 | 422 | Parameters 423 | ---------- 424 | contour : xarray.DataArray 425 | A given contour levels. 426 | tracer : xarray.DataArray 427 | A given tracer to replace self.tracer. This is somewhat 428 | redundant but useful in some cases if we need to change the tracer. 429 | integrand: xarray.DataArray 430 | A given variable. If None, area enclosed by contour 431 | will be calculated and returned 432 | 433 | Returns 434 | ---------- 435 | intVar : xarray.DataArray 436 | The integral of var inside contour. 
            If None, area enclosed by
            contour will be calculated and returned
        """
        if tracer is None:
            tracer = self.tracer

        # weights are the metrics multiplied by integrand
        if integrand is not None:
            wei = integrand * self.dA
        else:
            wei = self.dA

        # replacing nan with 0 in weights, as weights cannot have nan
        wei = wei.fillna(0.)

        # cumulative (conditional) integral over the bins given by `contour`
        CDF = _histogram(tracer, contour, self.dimVs, wei, self.lt)

        # ensure that the contour index is increasing
        if CDF['contour'][-1] < CDF['contour'][0]:
            CDF = CDF.isel({'contour':slice(None, None, -1)})

        if self.check_mono:
            _check_monotonicity(CDF, 'contour')

        return CDF


    def cal_gradient_wrt_area(self, var, area):
        """
        Calculate gradient with respect to area.

        Parameters
        ----------
        var : xarray.DataArray
            A variable that need to be differentiated.
        area : xarray.DataArray
            Area enclosed by contour levels.

        Returns
        ----------
        dVardA : xarray.DataArray
            The derivative of var w.r.t contour-enclosed area.
        """
        # centered difference rather than neighboring difference (diff)
        dfVar = var.differentiate('contour')
        dfArea = area.differentiate('contour')

        # chain rule: dvar/dA = (dvar/dctr) / (dA/dctr)
        dVardA = dfVar / dfArea

        if var.name is None:
            return dVardA.rename('dvardA')
        else:
            return dVardA.rename('d'+var.name+'dA')


    def cal_contour_weigh_mean(self, contour, integrand, area=None):
        """
        Calculate average between adjacent contours (i.e., thickness-weighted
        line-average).

        Parameters
        ----------
        contour: xarray.DataArray
            A given contour levels.
        integrand: xarray.DataArray
            A given integrand to be averaged.
        area: xarray.DataArray
            Area enclosed by tracer contour.

        Returns
        ----------
        lm : xarray.DataArray
            Along-contour (Lagrangian) mean of the integrand.
        """
        # d(int of integrand)/dA over d(1)/dA gives the thickness-weighted mean
        intA = self.cal_integral_within_contours(contour, integrand=integrand)

        if area is None:
            area = self.cal_integral_within_contours(contour)

        lmA = self.cal_gradient_wrt_area(intA, area)

        if integrand.name is None:
            return lmA.rename('lwm')
        else:
            return lmA.rename('lwm'+integrand.name)


    def cal_contour_weigh_mean_hist(self, contour, integrand, area=None):
        """
        Calculate average between adjacent contours (i.e., thickness-weighted
        line-average).

        Histogram-based counterpart of cal_contour_weigh_mean (memory
        friendly, see the '_hist' convention used throughout this class).

        Parameters
        ----------
        contour: xarray.DataArray
            A given contour levels.
        integrand: xarray.DataArray
            A given integrand to be averaged.
        area: xarray.DataArray
            Area enclosed by tracer contour.

        Returns
        ----------
        lm : xarray.DataArray
            Along-contour (Lagrangian) mean of the integrand.
        """
        intA = self.cal_integral_within_contours_hist(contour, integrand=integrand)

        if area is None:
            area = self.cal_integral_within_contours_hist(contour)

        lmA = self.cal_gradient_wrt_area(intA, area)

        if integrand.name is None:
            return lmA.rename('lwm')
        else:
            return lmA.rename('lwm'+integrand.name)


    def cal_contour_mean(self, contour, integrand, grdm, area=None):
        """
        Calculate along-contour average (simple line-integral).

        Parameters
        ----------
        contour: xarray.DataArray
            A given contour levels.
        integrand: xarray.DataArray
            A given integrand to be averaged.
        grdm: xarray.DataArray
            Magnitude of tracer gradient.
        area: xarray.DataArray
            Area enclosed by tracer contour.

        Returns
        ----------
        lm : xarray.DataArray
            Along-contour (Lagrangian) mean of the integrand.
574 | """ 575 | upper = self.cal_contour_weigh_mean(contour, integrand*grdm, area=area) 576 | lower = self.cal_contour_weigh_mean(contour, grdm, area=area) 577 | 578 | lmA = upper / lower 579 | 580 | if integrand.name is None: 581 | return lmA.rename('cm') 582 | else: 583 | return lmA.rename('cm'+integrand.name) 584 | 585 | 586 | def cal_contour_mean_hist(self, contour, integrand, grdm, area=None): 587 | """ 588 | Calculate along-contour average (simple line-integral). 589 | 590 | Parameters 591 | ---------- 592 | contour: xarray.DataArray 593 | A given contour levels. 594 | integrand: xarray.DataArray 595 | A given integrand to be averaged. 596 | grdm: xarray.DataArray 597 | Magnitude of tracer gradient. 598 | area: xarray.DataArray 599 | Area enclosed by tracer contour. 600 | 601 | Returns 602 | ---------- 603 | lm : xarray.DataArray 604 | Along-contour (Lagrangian) mean of the integrand. 605 | """ 606 | upper = self.cal_contour_weigh_mean_hist(contour, integrand*grdm, 607 | area=area) 608 | lower = self.cal_contour_weigh_mean_hist(contour, grdm, 609 | area=area) 610 | 611 | lmA = upper / lower 612 | 613 | if integrand.name is None: 614 | return lmA.rename('cm') 615 | else: 616 | return lmA.rename('cm'+integrand.name) 617 | 618 | 619 | def cal_sqared_equivalent_length(self, dgrdSdA, dqdA): 620 | """ 621 | Calculate squared equivalent length. 622 | 623 | Parameters 624 | ---------- 625 | dgrdSdA : xarray.DataArray 626 | d [Integrated |grd(q)|^2] / dA. 627 | dqdA : xarray.DataArray 628 | d [q] / dA. 629 | 630 | Returns 631 | ---------- 632 | Leq2 : xarray.DataArray 633 | The squared equivalent length. 634 | """ 635 | Leq2 = (dgrdSdA / dqdA ** 2).rename('Leq2') 636 | 637 | return Leq2 638 | 639 | 640 | def cal_contour_crossing(self, ctr, stride=1, mode='edge'): 641 | """ 642 | Calculate whether contour is crossing using 'box-counting' method. 643 | 644 | Parameters 645 | ---------- 646 | ctr: xarray.DataArray 647 | Contour levels. 
648 | stride: int or list of ints 649 | Sample crossing every stride grid points. 650 | 1 for original grid, 2 for half the resolution, ... 651 | mode: str 652 | Pad mode passing to xarray.DataArray.pad(). 653 | 654 | Returns 655 | ---------- 656 | re: xarray.DataArray or list of xarray.DataArray 657 | Boolean arrays indicating whether a contour is crossing. 658 | """ 659 | from collections.abc import Iterable 660 | 661 | if isinstance(stride, Iterable): 662 | maxStride = max(stride) 663 | isiterable = True 664 | else: 665 | maxStride = stride 666 | stride = [stride] 667 | isiterable = False 668 | 669 | data = self.tracer 670 | area = self.dA 671 | dims = [d for d in data.dims if d in self.dimVs] 672 | 673 | if 'X' in self.dims: 674 | dataPad = data.pad({self.dims['X']:(0, maxStride)}, mode=mode) 675 | areaPad = area.pad({self.dims['X']:(0, maxStride)}, mode=mode) 676 | else: 677 | dataPad = data 678 | areaPad = area 679 | 680 | re = [] 681 | for strd in stride: 682 | re.append(xr.apply_ufunc(_contour_crossing, 683 | dataPad, ctr, areaPad, 684 | kwargs={'stride':strd}, 685 | dask='parallelized', 686 | input_core_dims=[dims, [], dims], 687 | vectorize=True, 688 | output_dtypes=[self.dtype])) 689 | 690 | if isiterable: 691 | return re 692 | else: 693 | return re[0] 694 | 695 | 696 | def cal_local_wave_activity(self, q, Q, mask_idx=None, part='all'): 697 | """ 698 | Calculate local finite-amplitude wave activity density. 699 | Reference: Huang and Nakamura 2016, JAS 700 | 701 | Parameters 702 | ---------- 703 | q: xarray.DataArray 704 | A tracer field. 705 | Q: xarray.DataArray 706 | The sorted tracer field along the equivalent dimension. 707 | mask_idx: list of int 708 | Return masks at the indices of equivalent dimension. 709 | part: str 710 | The parts over which the integration is taken. 
Available options 711 | are ['all', 'upper', 'lower'], corresponding to all, W+, and W- 712 | regions defined in Huang and Nakamura (2016, JAS) 713 | 714 | Returns 715 | ---------- 716 | lwa : xarray.DataArray 717 | Local finite-amplitude wave activity, corresponding to part. 718 | contours : list 719 | A list of Q-contour corresponding to mask_idx. 720 | masks : list 721 | A list of mask corresponding to mask_idx. 722 | """ 723 | wei = self.dA.squeeze() 724 | wei = wei / wei.max() # normalize between [0-1], similar to cos(lat) 725 | part = part.lower() 726 | # q2 = q.squeeze() 727 | 728 | eqDim = q[self.dimEqV] 729 | eqDimLen = len(eqDim) 730 | tmp = [] 731 | 732 | if part.lower() not in ['all', 'upper', 'lower']: 733 | raise Exception('invalid part, should be in [\'all\', \'upper\', \'lower\']') 734 | 735 | # equivalent dimension is increasing or not 736 | coord_incre = True 737 | if eqDim.values[-1] < eqDim.values[0]: 738 | coord_incre = False 739 | 740 | # output contours and masks if mask_idx is provided 741 | masks = [] 742 | contours = [] 743 | returnmask = False 744 | if mask_idx is None: 745 | mask_idx = [] 746 | else: 747 | if max(mask_idx) >= len(eqDim): 748 | raise Exception('indices in mask_idx out of boundary') 749 | returnmask = True 750 | 751 | # loop for each contour (or each equivalent dimension surface) 752 | for j in range(eqDimLen): 753 | # deviation from the reference 754 | qe = q - Q.isel({self.dimEqV:j}) 755 | 756 | # above or below the reference coordinate surface 757 | m = eqDim>=eqDim.values[j] if coord_incre else eqDim<=eqDim.values[j] 758 | 759 | if self.increase: 760 | mask1 = xr.where(qe>0, -1, 0) 761 | mask2 = xr.where(m, 0, mask1).transpose(*(mask1.dims)) 762 | mask3 = xr.where(np.logical_and(qe<0, m), 1, mask2) 763 | else: 764 | mask1 = xr.where(qe<0, -1, 0) 765 | mask2 = xr.where(m, 0, mask1).transpose(*(mask1.dims)) 766 | mask3 = xr.where(np.logical_and(qe>0, m), 1, mask2) 767 | 768 | if j in mask_idx: 769 | 
contours.append(Q.isel({self.dimEqV:j})) 770 | masks.append(mask3) 771 | 772 | # select part over which integration is performed 773 | if part == 'all': 774 | maskFinal = mask3 775 | elif part == 'upper': 776 | if self.increase: 777 | maskFinal = mask3.where(mask3>0) 778 | else: 779 | maskFinal = mask3.where(mask3<0) 780 | else: 781 | if self.increase: 782 | maskFinal = mask3.where(mask3<0) 783 | else: 784 | maskFinal = mask3.where(mask3>0) 785 | 786 | # perform area-weighted conditional integration 787 | # lwa = (qe * maskFinal * wei * 788 | # self.grid.get_metric(qe, self.dimEqN)).sum(self.dimEqV) 789 | lwa = -_integrate(qe * maskFinal * wei, self.dA, self.dimEqN) 790 | 791 | tmp.append(lwa) 792 | 793 | LWA = xr.concat(tmp, self.dimEqV).transpose(*(q.dims)) 794 | LWA[self.dimEqV] = eqDim.values 795 | 796 | if returnmask: 797 | return LWA.rename('LWA'), contours, masks 798 | else: 799 | return LWA.rename('LWA') 800 | 801 | 802 | def cal_local_wave_activity2(self, q, Q, mask_idx=None, part='all'): 803 | """ 804 | Calculate local finite-amplitude wave activity density. 805 | Reference: Huang and Nakamura 2016, JAS 806 | 807 | Parameters 808 | ---------- 809 | q: xarray.DataArray 810 | A tracer field. 811 | Q: xarray.DataArray 812 | The sorted tracer field along the equivalent dimension. 813 | mask_idx: list of int 814 | Return masks at the indices of equivalent dimension. 815 | part: str 816 | The parts over which the integration is taken. Available options 817 | are ['all', 'upper', 'lower'], corresponding to all, W+, and W- 818 | regions defined in Huang and Nakamura (2016, JAS) 819 | 820 | Returns 821 | ---------- 822 | lwa : xarray.DataArray 823 | Local finite-amplitude wave activity, corresponding to part. 824 | contours : list 825 | A list of Q-contour corresponding to mask_idx. 826 | masks : list 827 | A list of mask corresponding to mask_idx. 
828 | """ 829 | wei = self.dA.squeeze() 830 | wei = wei / wei.max() # normalize between [0-1], similar to cos(lat) 831 | part = part.lower() 832 | # q2 = q.squeeze() 833 | 834 | eqDim = q[self.dimEqV] 835 | eqDimLen = len(eqDim) 836 | tmp = [] 837 | 838 | if part.lower() not in ['all', 'upper', 'lower']: 839 | raise Exception('invalid part, should be in [\'all\', \'upper\', \'lower\']') 840 | 841 | # equivalent dimension is increasing or not 842 | coord_incre = True 843 | if eqDim.values[-1] < eqDim.values[0]: 844 | coord_incre = False 845 | 846 | # output contours and masks if mask_idx is provided 847 | masks = [] 848 | contours = [] 849 | returnmask = False 850 | if mask_idx is None: 851 | mask_idx = [] 852 | else: 853 | if max(mask_idx) >= len(eqDim): 854 | raise Exception('indices in mask_idx out of boundary') 855 | returnmask = True 856 | 857 | # loop for each contour (or each equivalent dimension surface) 858 | for j in range(eqDimLen): 859 | # deviation from the reference 860 | qe = q.isel({self.dimEqV:j}) - Q 861 | 862 | # above or below the reference coordinate surface 863 | m = eqDim>=eqDim.values[j] if coord_incre else eqDim<=eqDim.values[j] 864 | 865 | if not self.increase: 866 | mask1 = xr.where(qe>0, -1, 0) 867 | mask2 = xr.where(m, 0, mask1).transpose(*(mask1.dims)) 868 | mask3 = xr.where(np.logical_and(qe<0, m), 1, mask2) 869 | else: 870 | mask1 = xr.where(qe<0, -1, 0) 871 | mask2 = xr.where(m, 0, mask1).transpose(*(mask1.dims)) 872 | mask3 = xr.where(np.logical_and(qe>0, m), 1, mask2) 873 | 874 | if j in mask_idx: 875 | contours.append(Q.isel({self.dimEqV:j})) 876 | masks.append(mask3) 877 | 878 | # select part over which integration is performed 879 | if part == 'all': 880 | maskFinal = mask3 881 | elif part == 'upper': 882 | if self.increase: 883 | maskFinal = mask3.where(mask3>0) 884 | else: 885 | maskFinal = mask3.where(mask3<0) 886 | else: 887 | if self.increase: 888 | maskFinal = mask3.where(mask3<0) 889 | else: 890 | maskFinal = 
mask3.where(mask3>0) 891 | 892 | # perform area-weighted conditional integration 893 | # lwa = (qe * maskFinal * wei * 894 | # self.grid.get_metric(qe, self.dimEqN)).sum(self.dimEqV) 895 | lwa = -_integrate(qe * maskFinal * wei, self.dA, self.dimEqN) 896 | 897 | tmp.append(lwa) 898 | 899 | LWA = xr.concat(tmp, self.dimEqV).transpose(*(q.dims)) 900 | LWA[self.dimEqV] = eqDim.values 901 | 902 | if returnmask: 903 | return LWA.rename('LWA'), contours, masks 904 | else: 905 | return LWA.rename('LWA') 906 | 907 | 908 | def cal_local_APE(self, q, Q, mask_idx=None, part='all'): 909 | """ 910 | Calculate local available potential energy (APE) density. This is 911 | mathematically identical to local wave activity density. 912 | Reference: Winters and Barkan 2013, JFM; Scotti and White 2014, JFM 913 | 914 | Parameters 915 | ---------- 916 | q: xarray.DataArray 917 | A tracer field. 918 | Q: xarray.DataArray 919 | The sorted tracer field. 920 | mask_idx: list of int 921 | Return masks at the indices of equivalent dimension. 922 | part: str 923 | The parts over which the integration is taken. Available options 924 | are ['all', 'upper', 'lower'], corresponding to all, W+, and W- 925 | regions defined in Huang and Nakamura (2016, JAS) 926 | 927 | Returns 928 | ---------- 929 | lape : xarray.DataArray 930 | Local APE density. 931 | contours : list 932 | A list of Q-contour corresponding to mask_idx. 933 | masks : list 934 | A list of mask corresponding to mask_idx. 935 | """ 936 | if mask_idx is not None: 937 | LWA, contours, masks = \ 938 | self.cal_local_wave_activity(q, Q, mask_idx, part=part) 939 | 940 | return LWA.rename('LAPE'), contours, masks 941 | else: 942 | return self.cal_local_wave_activity(q, Q, None, part).rename('LAPE') 943 | 944 | 945 | def cal_normalized_Keff(self, Leq2, Lmin, mask=1e5): 946 | """ 947 | Calculate normalized effective diffusivity. 948 | 949 | Parameters 950 | ---------- 951 | Leq2 : xarray.DataArray 952 | Squared equivalent length. 
class Table(object):
    """
    This class is designed as a one-to-one mapping table between two
    monotonically increasing/decreasing quantities.

    The table is represented as y = F(x), with y as the values and
    x the coordinates.
    """
    def __init__(self, table, dimEq):
        """
        Construct a table.

        Parameters
        ----------
        table : xarray.DataArray
            A table quantity as a function of a specific coordinate.
        dimEq : str
            Name of the equivalent dimension along which the mapping
            is defined.
        """
        # require the values to be monotonic along dimEq for every other
        # index (e.g., each time or level); otherwise y = F(x) is not
        # one-to-one and interpolation is ill-defined
        tmp = table[{dimEq:-1}] > table[{dimEq:0}]

        if (tmp == True ).all():
            areaInc = True
        elif (tmp == False).all():
            areaInc = False
        else:
            raise Exception('not every time or level is increasing/decreasing')

        self._table = table                 # values (y)
        self._coord = table[dimEq]          # coordinates (x)
        self._dimEq = dimEq                 # name of the mapping dimension
        self._incVl = areaInc               # y increasing along dimEq?
        self._incCd = table[dimEq][-1] > table[dimEq][0]  # x increasing?

    def lookup_coordinates(self, values):
        """
        For y = F(x), get coordinates (x) given values (y).

        Parameters
        ----------
        values: numpy.ndarray or xarray.DataArray
            Values as y.

        Returns
        -------
        coords : xarray.DataArray
            Coordinates as x.
        """
        dimEq = self._dimEq
        iDims = [[],[dimEq],[dimEq]]
        oDims = [[]]

        # when values carry a 'contour' dimension, interpolate along it
        if 'contour' in values.dims:
            iDims = [['contour'],[dimEq],[dimEq]]
            oDims = [['contour']]

        re = xr.apply_ufunc(_interp1d,
                            values, self._table, self._coord,
                            kwargs={'inc': self._incVl},
                            dask='allowed',
                            input_core_dims = iDims,
                            output_core_dims= oDims,
                            output_dtypes=[self._table.dtype],
                            vectorize=True)

        return re

    def lookup_values(self, coords):
        """
        For y = F(x), get values (y) given coordinates (x).

        Parameters
        ----------
        coords : list or numpy.array or xarray.DataArray
            Coordinates as x.

        Returns
        -------
        values : xarray.DataArray
            Values as y.
        """
        # fixed: previously read the non-existent attribute `self._vables`
        # (a typo), which raised AttributeError on every call; the y-values
        # are stored in `self._table`
        re = _interp1d(coords, self._coord, self._table, self._incCd)

        if isinstance(re, np.ndarray):
            re = xr.DataArray(re, dims=coords.dims, coords=coords.coords)

        return re
1229 | """ 1230 | if type(bins) in [np.ndarray, np.array]: 1231 | bvalues = bins 1232 | 1233 | if not np.diff(bvalues).all(): 1234 | raise Exception('non monotonic bins') 1235 | 1236 | elif type(bins) in [xr.DataArray]: 1237 | bvalues = bins.squeeze() # squeeze the dimensions 1238 | 1239 | if not bvalues.diff('contour').all(): 1240 | raise Exception('non monotonic bins') 1241 | 1242 | if not 'time' in bvalues.dims: 1243 | bvalues = bvalues.values 1244 | 1245 | elif type(bins) in [list]: 1246 | bvalues = np.array(bins) 1247 | 1248 | if not np.diff(bvalues).all(): 1249 | raise Exception('non monotonic bins') 1250 | else: 1251 | raise Exception('bins should be numpy.array or xarray.DataArray') 1252 | 1253 | # unified index of the contour coordinate 1254 | if type(bvalues) in [xr.DataArray]: 1255 | binNum = np.array(range(len(bvalues['contour']))).astype(np.float32) 1256 | else: 1257 | binNum = np.array(range(len(bvalues))).astype(np.float32) 1258 | 1259 | if type(bvalues) in [xr.DataArray]: 1260 | re = [] 1261 | 1262 | for l in range(len(bvalues.time)): 1263 | rng = {'time': l} 1264 | 1265 | trc = var.isel(rng) 1266 | ctr = bvalues.isel(rng).values 1267 | 1268 | if 'time' in weights.dims: 1269 | wt = weights.isel(rng) 1270 | else: 1271 | wt = weights 1272 | 1273 | bincrease = True if ctr[0] < ctr[-1] else False 1274 | 1275 | # add a bin so that the result has the same length of contour 1276 | if bincrease: 1277 | step = (ctr[-1] - ctr[0]) / (len(ctr) - 1) 1278 | bins = np.concatenate([[ctr[0]-step], ctr]) 1279 | else: 1280 | step = (ctr[0] - ctr[-1]) / (len(ctr) - 1) 1281 | bins = np.concatenate([[ctr[-1]-step], ctr[::-1]]) 1282 | # bins[1:] -= step / 1e3 1283 | 1284 | tmp = histogram(trc, bins=[bins], dim=dim, weights=wt) \ 1285 | .assign_coords({trc.name+'_bin':binNum}) 1286 | 1287 | re.append(tmp) 1288 | 1289 | pdf = xr.concat(re, 'time').rename({var.name+'_bin':'contour'}) 1290 | 1291 | if bincrease: 1292 | pdf = pdf.assign_coords(contour=binNum) 1293 | else: 1294 
| pdf = pdf.assign_coords(contour=binNum[::-1]) 1295 | else: 1296 | bincrease = True if bvalues[0] < bvalues[-1] else False 1297 | 1298 | # add a bin so that the result has the same length of contour 1299 | if bincrease: 1300 | step = (bvalues[-1] - bvalues[0]) / (len(bvalues) - 1) 1301 | bins = np.insert(bvalues, 0, bvalues[0]-step) 1302 | else: 1303 | step = (bvalues[0] - bvalues[-1]) / (len(bvalues) - 1) 1304 | bins = np.insert(bvalues[::-1], 0, bvalues[-1]-step) 1305 | # bins[1:] -= step / 1e3 1306 | 1307 | pdf = histogram(var, bins=[bins], dim=dim, weights=weights) \ 1308 | .rename({var.name+'_bin':'contour'}) 1309 | 1310 | if bincrease: 1311 | pdf = pdf.assign_coords(contour=binNum) 1312 | else: 1313 | pdf = pdf.assign_coords(contour=binNum[::-1]) 1314 | 1315 | # assign time coord. to pdf 1316 | if 'time' in var.dims: 1317 | pdf = pdf.assign_coords(time=var['time'].values) 1318 | 1319 | # get CDF from PDF 1320 | cdf = pdf.cumsum('contour') 1321 | 1322 | if not lt: # for the case of greater than 1323 | cdf = cdf.isel({'contour':-1}) - cdf 1324 | 1325 | return cdf 1326 | 1327 | 1328 | def _check_monotonicity(var, dim): 1329 | """ 1330 | Check monotonicity of a variable along a dimension. 1331 | 1332 | Parameters 1333 | ---------- 1334 | var : xarray.DataArray 1335 | A variable that need to be checked. 1336 | dim : str 1337 | A string indicate the dimension. 1338 | 1339 | Returns 1340 | ---------- 1341 | None. Raise exception if not monotonic 1342 | """ 1343 | dfvar = var.diff(dim) 1344 | 1345 | if not dfvar.all(): 1346 | pos = (dfvar == 0).argmax(dim=var.dims) 1347 | 1348 | for tmp in pos: 1349 | print(tmp) 1350 | print(pos[tmp].values) 1351 | 1352 | if tmp != dim: 1353 | v = var.isel({tmp:pos[tmp].values}).load() 1354 | 1355 | raise Exception('not monotonic var at\n' + str(v)) 1356 | 1357 | 1358 | def _integrate(var, dA, dims): 1359 | """ 1360 | Integration of var over the whole domain. 
def _integrate(var, dA, dims):
    """
    Area-weighted integration of a variable over the given dimensions.

    Parameters
    ----------
    var: xarray.DataArray
        A given variable as the integrand.
    dA: xarray.DataArray
        Area occupied by each grid point.
    dims: list of str
        Dimensions that are summed over.

    Returns
    ----------
    re: xarray.DataArray
        Integrated result.
    """
    return (var * dA).sum(dims)


def _get_extrema_extend(data, N):
    """
    Get the extrema of the data, extended outward by one step.

    Parameters
    ----------
    data: xarray.DataArray
        A variable whose range is needed.
    N: int
        A given length used to define the step size.

    Returns
    ----------
    vmin, vmax : float, float
        Extended extrema.
    """
    lo = data.min().values
    hi = data.max().values

    pad = (hi - lo) / N

    return lo - pad, hi + pad


def _interp1d(x, xf, yf, inc=True, outside=None):
    """
    Wrapper of np.interp that also handles decreasing x-coordinates.

    Parameters
    ----------
    x : array_like
        The x-coordinates at which to evaluate the interpolated values.
    xf : 1-D sequence of floats
        The x-coordinates of the data points.
    yf : 1-D sequence of float or complex
        The y-coordinates of the data points, same length as `xf`.
    inc : boolean
        Whether xf is increasing; if False both arrays are reversed
        before interpolation.
    outside : unused
        Kept for interface compatibility.

    Returns
    ----------
    y : float or complex (corresponding to yf) or ndarray
        The interpolated values, same shape as `x`.
    """
    if not inc: # reverse so np.interp sees increasing coordinates
        xf, yf = xf[::-1], yf[::-1]

    return np.interp(x, xf, yf)
1458 | """ 1459 | if latlon: 1460 | coord1 = np.deg2rad(dims[0].values) # assume to be y 1461 | coord2 = np.deg2rad(dims[1].values) # assume to be x 1462 | else: 1463 | coord1 = dims[0].values # assume to be y 1464 | coord2 = dims[1].values # assume to be x 1465 | 1466 | lengths = [] 1467 | 1468 | for i,c in enumerate(contours): 1469 | # in unit of grid points 1470 | segments = measure.find_contours(data, c) 1471 | 1472 | tlist = nb.typed.List.empty_list(nb.typeof(np.zeros((9,2)))) 1473 | 1474 | for segment in segments: 1475 | tlist.append(segment) 1476 | 1477 | lengths.append(contour_length(tlist, coord2, coord1, latlon=latlon)) 1478 | 1479 | # if i == 0: 1480 | # print(latlon) 1481 | # print(tlist) 1482 | # print(len(tlist[0])) 1483 | # print(coord2) 1484 | # print(coord1) 1485 | # print(contour_length(tlist, coord2, coord1, latlon=latlon, disp=True)) 1486 | 1487 | return np.asarray(lengths) 1488 | 1489 | 1490 | @nb.jit(nopython=True, cache=False) 1491 | def _contour_crossing(dataPad, contour, areaPad, stride=1): 1492 | """Whether a given contour is crossing the grid point using box-counting. 1493 | 1494 | Parameters 1495 | ---------- 1496 | dataPad: numpy.ndarray 1497 | Original data padded with stride in a 2D slice format. 1498 | contour: float 1499 | A given contour value. 1500 | areaPad: numpy.ndarray 1501 | Area of each grid point padded with stride. 1502 | stride: int 1503 | A given contour value. 1504 | 1505 | Returns 1506 | ------- 1507 | re: numpy.ndarray 1508 | Result of the data in bool: 0 for false and 1 for true. 
1509 | """ 1510 | # strange implementation for np.zeros to work in numba function 1511 | jj, nn = dataPad.shape 1512 | shape = (np.round(jj / stride), np.round(nn / stride)) 1513 | 1514 | J = np.zeros(shape=(), dtype=np.int64) 1515 | N = np.zeros(shape=(), dtype=np.int64) 1516 | J[()], N[()] = shape 1517 | 1518 | re = np.zeros((J[()], N[()])) 1519 | Jn, In = re.shape 1520 | Jo, Io = dataPad.shape 1521 | 1522 | for j in range(0, Jn-1): 1523 | jstr = j * stride 1524 | 1525 | for i in range(0, Jn-1): 1526 | istr = i * stride 1527 | le = False 1528 | gt = False 1529 | 1530 | for jj in range(jstr, jstr + stride): 1531 | for ii in range(istr, istr + stride): 1532 | ll = dataPad[jj , ii ] 1533 | lr = dataPad[jj , ii+1] 1534 | ul = dataPad[jj+1, ii ] 1535 | ur = dataPad[jj+1, ii+1] 1536 | 1537 | if not np.isnan(ll): # lower-left corner 1538 | if ll <= contour: 1539 | le = True 1540 | else: 1541 | gt = True 1542 | 1543 | if not np.isnan(lr): # lower-right corner 1544 | if lr <= contour: 1545 | le = True 1546 | else: 1547 | gt = True 1548 | 1549 | if not np.isnan(ul): # upper-left corner 1550 | if ul <= contour: 1551 | le = True 1552 | else: 1553 | gt = True 1554 | 1555 | if not np.isnan(ur): # upper-right corner 1556 | if ur <= contour: 1557 | le = True 1558 | else: 1559 | gt = True 1560 | 1561 | if le and gt: # contour is inside the current grid box 1562 | re[j, i] = np.sqrt(areaPad[j, i]) * stride 1563 | else: 1564 | re[j, i] = 0 1565 | 1566 | return np.nansum(re) 1567 | 1568 | 1569 | """ 1570 | Testing codes for each class 1571 | """ 1572 | if __name__ == '__main__': 1573 | print('start testing in ContourMethods') 1574 | 1575 | 1576 | 1577 | -------------------------------------------------------------------------------- /xcontour/utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | Created on 2020.02.04 4 | 5 | @author: MiniUFO 6 | Copyright 2018. All rights reserved. 
# -*- coding: utf-8 -*-
'''
Created on 2020.02.04

@author: MiniUFO
Copyright 2018. All rights reserved. Use is subject to license terms.
'''
import numpy as np
import numba as nb
import xarray as xr
from xgcm import Grid
from xgcm.autogenerate import generate_grid_ds


'''
Here defines all the constants that are commonly used in earth sciences
'''
# Radius of the Earth (m)
Rearth = 6371200.0

# distance of unit degree at the equator
def deg2m(Rearth=Rearth):
    """Return the length (m) of one degree of arc along a great circle."""
    degtom = 2.0 * np.pi * Rearth / 360.0
    return degtom

# Gravitational acceleration g (m s^-2)
g = 9.80665

# Rotating angular speed of the Earth (rad s^-1)
omega = 7.292e-5


# candidate dimension names used to auto-detect the X/Y/Z dimensions
# of a dataset in add_latlon_metrics()
dimXList = ['lon', 'longitude', 'LON', 'LONGITUDE', 'geolon', 'GEOLON',
            'xt_ocean']
dimYList = ['lat', 'latitude' , 'LAT', 'LATITUDE' , 'geolat', 'GEOLAT',
            'yt_ocean']
dimZList = ['lev', 'level', 'LEV', 'LEVEL', 'pressure', 'PRESSURE',
            'depth', 'DEPTH']
def add_latlon_metrics(dset, dims=None, boundary=None, Rearth=Rearth):
    """
    Infer 2D metrics (latitude/longitude) from gridded data file.

    Parameters
    ----------
    dset : xarray.Dataset
        A dataset open from a file
    dims : dict
        Dimension pair in a dict, e.g., {'Y':'latitude', 'X':'longitude'}
    boundary : dict
        Default boundary conditions applied to each coordinate
    Rearth : float
        Radius of the Earth in meters.

    Return
    -------
    dset : xarray.Dataset
        Input dataset with appropriated metrics added
    grid : xgcm.Grid
        The grid with appropriated metrics
    """
    lon, lat, lev = None, None, None

    # detect dimension names from the known candidate lists when
    # they are not given explicitly
    if dims is None:
        for dim in dimXList:
            if dim in dset.dims:
                lon = dim
                break

        for dim in dimYList:
            if dim in dset.dims:
                lat = dim
                break

        for dim in dimZList:
            if dim in dset.dims:
                lev = dim
                break

        if lon is None or lat is None:
            raise Exception('unknown dimension names in dset, should be in '
                            + str(dimXList + dimYList))
    else:
        lon = dims['X'] if 'X' in dims else None
        lat = dims['Y'] if 'Y' in dims else None
        lev = dims['Z'] if 'Z' in dims else None

    # generate the left-staggered grid points
    if lev is None:
        ds = generate_grid_ds(dset, {'X':lon, 'Y':lat})
    else:
        ds = generate_grid_ds(dset, {'X':lon, 'Y':lat, 'Z':lev})

    coords = ds.coords

    BCx, BCy, BCz = 'extend', 'extend', 'extend'

    if boundary is not None:
        BCx = boundary['X'] if 'X' in boundary else 'extend'
        BCy = boundary['Y'] if 'Y' in boundary else 'extend'
        BCz = boundary['Z'] if 'Z' in boundary else 'extend'

    # treat X as periodic only when longitudes span exactly 360 degrees
    if __is_periodic(coords[lon], 360.0):
        periodic = 'X'

        if lev is None:
            grid = Grid(ds, periodic=[periodic], boundary={'Y': BCy})
        else:
            grid = Grid(ds, periodic=[periodic], boundary={'Z':BCz, 'Y': BCy})
    else:
        periodic = []

        if lev is None:
            grid = Grid(ds, periodic=False, boundary={'Y': BCy, 'X': BCx})
        else:
            grid = Grid(ds, periodic=False, boundary={'Z': BCz, 'Y': BCy, 'X': BCx})


    lonC = ds[lon]            # original grid point as tracer point
    latC = ds[lat]            # original grid point as tracer point
    lonG = ds[lon + '_left']  # new grid point as u-point
    latG = ds[lat + '_left']  # new grid point as v-point

    if 'X' in periodic:
        dlonC = grid.diff(lonC, 'X')
        dlonG = grid.diff(lonG, 'X')
        # adjust the periodic terminal points: the wrap-around difference
        # can fall outside [0, 360]
        for i in [0,-1]:
            if dlonC[i ] < 0:
                dlonC[i ] += 360
            elif dlonC[i ] > 360:
                dlonC[i ] -= 360
            if dlonG[i ] < 0:
                dlonG[i ] += 360
            elif dlonG[i ] > 360:
                dlonG[i ] -= 360

    else:
        dlonC = grid.diff(lonC, 'X', boundary='extend')
        dlonG = grid.diff(lonG, 'X', boundary='extend')
        # 'extend' yields a zero difference at one terminal point;
        # replace it with the neighboring spacing
        if dlonC[0] == 0:
            dlonC[0] = dlonC[1].values
        elif dlonC[-1] == 0 :
            dlonC[-1] = dlonC[1].values

        if dlonG[0] == 0:
            dlonG[0] = dlonG[1].values
        elif dlonG[-1] == 0:
            dlonG[-1] = dlonG[1].values

    dlatC = grid.diff(latC, 'Y')
    dlatG = grid.diff(latG, 'Y')
    # same terminal-point fix for the latitude differentials
    if dlatC[0] == 0:
        dlatC[0] = dlatC[1].values
    elif dlatC[-1] == 0 :
        dlatC[-1] = dlatC[1].values

    if dlatG[0] == 0:
        dlatG[0] = dlatG[1].values
    elif dlatG[-1] == 0:
        dlatG[-1] = dlatG[1].values

    # horizontal distance metrics at the four staggered positions
    coords['dxG'], coords['dyG'] = __dll_dist(dlonG, dlatG, lonG, latG, Rearth)
    coords['dxC'], coords['dyC'] = __dll_dist(dlonC, dlatC, lonC, latC, Rearth)
    coords['dxF'] = grid.interp(coords['dxG'], 'Y')
    coords['dyF'] = grid.interp(coords['dyG'], 'X')
    coords['dxV'] = grid.interp(coords['dxG'], 'X')
    coords['dyU'] = grid.interp(coords['dyG'], 'Y')

    # cell areas as exact spherical patches (instead of dx*dy products
    # as in MITgcm's ini_cartesian_grid.F):
    #   S = r**2 * (sin(phi1) - sin(phi2)) * (lambda1 - lambda2)
    deg2rad = np.pi/180
    phi1 = (latC-latC+latG.values+dlatG) # upper boundary
    phi2 = (latC-latC+latG.values)       # lower boundary
    # clip the boundaries to the poles; only applied when the interior
    # points are themselves away from +/-90
    temp1 = phi1
    temp2 = phi2
    if abs(phi1[1])<90 and abs(phi1[-2])<90:
        temp1 = xr.where(phi1> 90, 90,phi1) # temporary for calculating poles
    if abs(phi2[1])<90 and abs(phi2[-2])<90:
        temp2 = xr.where(phi2<-90,-90,phi2) # temporary for calculating poles
    rAc = Rearth*Rearth*abs(np.sin(temp1*deg2rad)-np.sin(temp2*deg2rad))*(dlonG*deg2rad) # rA
    rAw = Rearth*Rearth*abs(np.sin(temp1*deg2rad)-np.sin(temp2*deg2rad))*(dlonC*deg2rad)

    phi1 = (latG-latG+latC.values)       # upper boundary
    phi2 = (latG-latG+latC.values-dlatC) # lower boundary
    # same pole treatment for the staggered-cell areas
    temp1 = phi1
    temp2 = phi2
    if abs(phi1[1])<90 and abs(phi1[-2])<90:
        temp1 = xr.where(phi1> 90, 90,phi1) # temporary for calculating poles
    if abs(phi2[1])<90 and abs(phi2[-2])<90:
        temp2 = xr.where(phi2<-90,-90,phi2) # temporary for calculating poles
    rAs = Rearth*Rearth*abs(np.sin(temp1*deg2rad)-np.sin(temp2*deg2rad))*(dlonG*deg2rad)
    rAz = Rearth*Rearth*abs(np.sin(temp1*deg2rad)-np.sin(temp2*deg2rad))*(dlonC*deg2rad)

    coords['rA' ] = rAc
    coords['rAw'] = rAw
    coords['rAs'] = rAs
    coords['rAz'] = rAz

    if lev is not None:
        # construct the left-staggered vertical coordinate and spacings
        levC = ds[lev].values
        tmp = np.diff(levC)
        tmp = np.concatenate([[(levC[0]-tmp[0])], levC])
        levG = tmp[:-1]
        delz = np.diff(tmp)

        ds[lev + '_left'] = levG
        coords['drF'] = xr.DataArray(delz, dims=lev, coords={lev: levC})
        coords['drG'] = xr.DataArray(np.concatenate([[delz[0]/2], delz[1:-1],
                                     [delz[-1]/2]]), dims=lev+'_left',
                                     coords={lev+'_left': levG})

        metrics={('X', ): ['dxG', 'dxF', 'dxC', 'dxV'], # X distances
                 ('Y' , ): ['dyG', 'dyF', 'dyC', 'dyU'], # Y distances
                 ('Z' , ): ['drG', 'drF'],               # Z distances
                 ('X', 'Y'): ['rAw', 'rAs', 'rA' , 'rAz']}
    else:
        metrics={('X', ): ['dxG', 'dxF', 'dxC', 'dxV'], # X distances
                 ('Y' , ): ['dyG', 'dyF', 'dyC', 'dyU'], # Y distances
                 ('X', 'Y'): ['rAw', 'rAs', 'rA' , 'rAz']}

    for key, value in metrics.items():
        grid.set_metrics(key, value)

    return ds, grid
def add_latlon_metrics_old(dset, dims=None, boundary=None):
    """
    Infer 2D metrics (latitude/longitude) from gridded data file.

    Legacy version kept for reference: cell areas are products of the
    distance metrics rather than exact spherical patches, and no
    terminal-point fixes are applied to the differentials.

    Parameters
    ----------
    dset : xarray.Dataset
        A dataset open from a file
    dims : dict
        Dimension pair in a dict, e.g., {'Y':'latitude', 'X':'longitude'}
    boundary : dict
        Default boundary conditions applied to each coordinate

    Return
    -------
    dset : xarray.Dataset
        Input dataset with appropriated metrics added
    grid : xgcm.Grid
        The grid with appropriated metrics
    """
    lon, lat, lev = None, None, None

    # detect dimension names when not given explicitly
    if dims is None:
        for dim in dimXList:
            if dim in dset.dims:
                lon = dim
                break

        for dim in dimYList:
            if dim in dset.dims:
                lat = dim
                break

        for dim in dimZList:
            if dim in dset.dims:
                lev = dim
                break

        if lon is None or lat is None:
            raise Exception('unknown dimension names in dset, should be in '
                            + str(dimXList + dimYList))
    else:
        lon = dims['X'] if 'X' in dims else None
        lat = dims['Y'] if 'Y' in dims else None
        lev = dims['Z'] if 'Z' in dims else None

    # generate the left-staggered grid points
    if lev is None:
        ds = generate_grid_ds(dset, {'X':lon, 'Y':lat})
    else:
        ds = generate_grid_ds(dset, {'X':lon, 'Y':lat, 'Z':lev})

    coords = ds.coords

    BCx, BCy, BCz = 'extend', 'extend', 'extend'

    if boundary is not None:
        BCx = boundary['X'] if 'X' in boundary else 'extend'
        BCy = boundary['Y'] if 'Y' in boundary else 'extend'
        BCz = boundary['Z'] if 'Z' in boundary else 'extend'

    if __is_periodic(coords[lon], 360.0):
        periodic = 'X'

        if lev is None:
            grid = Grid(ds, periodic=[periodic], boundary={'Y': BCy})
        else:
            grid = Grid(ds, periodic=[periodic], boundary={'Z':BCz, 'Y': BCy})
    else:
        periodic = []

        if lev is None:
            grid = Grid(ds, periodic=False, boundary={'Y': BCy, 'X': BCx})
        else:
            grid = Grid(ds, periodic=False, boundary={'Z': BCz, 'Y': BCy, 'X': BCx})


    lonC = ds[lon]            # tracer-point longitudes
    latC = ds[lat]            # tracer-point latitudes
    lonG = ds[lon + '_left']  # staggered (u-point) longitudes
    latG = ds[lat + '_left']  # staggered (v-point) latitudes

    if 'X' in periodic:
        dlonC = grid.diff(lonC, 'X')
        dlonG = grid.diff(lonG, 'X')
    else:
        dlonC = grid.diff(lonC, 'X', boundary='extend')
        dlonG = grid.diff(lonG, 'X', boundary='extend')

    dlatC = grid.diff(latC, 'Y')
    dlatG = grid.diff(latG, 'Y')

    coords['dxG'], coords['dyG'] = __dll_dist_old(dlonG, dlatG, lonG, latG)
    coords['dxC'], coords['dyC'] = __dll_dist_old(dlonC, dlatC, lonC, latC)
    coords['dxF'] = grid.interp(coords['dxG'], 'Y')
    coords['dyF'] = grid.interp(coords['dyG'], 'X')
    coords['dxV'] = grid.interp(coords['dxG'], 'X')
    coords['dyU'] = grid.interp(coords['dyG'], 'Y')

    # cell areas as products of side lengths
    # (following MITgcm's model/src/ini_cartesian_grid.F)
    coords['rA' ] = ds['dyF'] * ds['dxF']
    coords['rAw'] = ds['dyG'] * ds['dxC']
    coords['rAs'] = ds['dyC'] * ds['dxG']
    coords['rAz'] = ds['dyU'] * ds['dxV']

    if lev is not None:
        # construct the left-staggered vertical coordinate and spacings
        levC = ds[lev].values
        tmp = np.diff(levC)
        tmp = np.concatenate([[(levC[0]-tmp[0])], levC])
        levG = tmp[:-1]
        delz = np.diff(tmp)

        ds[lev + '_left'] = levG
        coords['drF'] = xr.DataArray(delz, dims=lev, coords={lev: levC})
        coords['drG'] = xr.DataArray(np.concatenate([[delz[0]/2], delz[1:-1],
                                     [delz[-1]/2]]), dims=lev+'_left',
                                     coords={lev+'_left': levG})

        metrics={('X', ): ['dxG', 'dxF', 'dxC', 'dxV'], # X distances
                 ('Y' , ): ['dyG', 'dyF', 'dyC', 'dyU'], # Y distances
                 ('Z' , ): ['drG', 'drF'],               # Z distances
                 ('X', 'Y'): ['rAw', 'rAs', 'rA' , 'rAz']}
    else:
        metrics={('X', ): ['dxG', 'dxF', 'dxC', 'dxV'], # X distances
                 ('Y' , ): ['dyG', 'dyF', 'dyC', 'dyU'], # Y distances
                 ('X', 'Y'): ['rAw', 'rAs', 'rA' , 'rAz']}

    for key, value in metrics.items():
        grid.set_metrics(key, value)

    return ds, grid
def add_MITgcm_missing_metrics(dset, periodic=None, boundary=None, partial_cell=True):
    """
    Infer missing metrics from MITgcm output files.

    Parameters
    ----------
    dset: xarray.Dataset
        A dataset open from a file
    periodic: str
        Which coordinate is periodic
    boundary: dict
        Default boundary conditions applied to each coordinate
    partial_cell: bool
        Turn on the partial-cell or not (default is on).

    Return
    -------
    dset : xarray.Dataset
        Input dataset with appropriated metrics added
    grid : xgcm.Grid
        The grid with appropriated metrics
    """
    coords = dset.coords
    grid = Grid(dset, periodic=periodic, boundary=boundary)

    # vertical cell sizes, scaled by the hFac fractions when
    # partial cells are enabled
    if 'drW' not in coords: # vertical cell size at u point
        coords['drW'] = dset.hFacW * dset.drF if partial_cell else dset.drF
    if 'drS' not in coords: # vertical cell size at v point
        coords['drS'] = dset.hFacS * dset.drF if partial_cell else dset.drF
    if 'drC' not in coords: # vertical cell size at tracer point
        coords['drC'] = dset.hFacC * dset.drF if partial_cell else dset.drF
    # a 'drG' metric at the cell interfaces used to be added here but is
    # currently disabled

    # horizontal distances interpolated to the missing positions
    if 'dxF' not in coords:
        coords['dxF'] = grid.interp(dset.dxC, 'X')
    if 'dyF' not in coords:
        coords['dyF'] = grid.interp(dset.dyC, 'Y')
    if 'dxV' not in coords:
        coords['dxV'] = grid.interp(dset.dxG, 'X')
    if 'dyU' not in coords:
        coords['dyU'] = grid.interp(dset.dyG, 'Y')

    # vorticity-point cell fraction and mask
    if 'hFacZ' not in coords:
        coords['hFacZ'] = grid.interp(dset.hFacS, 'X')
    if 'maskZ' not in coords:
        coords['maskZ'] = coords['hFacZ']

    # cell face area in the X-Z plane
    if 'yA' not in coords:
        coords['yA'] = dset.drF * dset.hFacC * dset.dxF if partial_cell \
                       else dset.drF * dset.dxF

    metrics = {
        ('X',)    : ['dxG', 'dxF', 'dxC', 'dxV'], # X distances
        ('Y',)    : ['dyG', 'dyF', 'dyC', 'dyU'], # Y distances
        ('Z',)    : ['drW', 'drS', 'drC', 'drF'], # Z distances
        ('X', 'Z'): ['yA']}                       # Areas in X-Z plane

    for key, value in metrics.items():
        grid.set_metrics(key, value)

    return dset, grid
# Y distances 480 | ('Z',) : ['drW', 'drS', 'drC', 'drF'], # Z distances # ldy 481 | # ('Z',) : ['drW', 'drS', 'drC', 'drF', 'drG'], # Z distances # ori, ldy: commented 482 | # ('X', 'Y'): ['rAw', 'rAs', 'rA' , 'rAz'], # Areas in X-Y plane 483 | ('X', 'Z'): ['yA']} # Areas in X-Z plane 484 | 485 | for key, value in metrics.items(): 486 | grid.set_metrics(key, value) 487 | 488 | return dset, grid 489 | 490 | 491 | def equivalent_latitudes(areas, Rearth=Rearth): 492 | """ 493 | Calculate equivalent latitude using the formular: 494 | 2 * pi * a^2 * [sin(latEq) + sin(90)] = area. 495 | This is similar to a EqY(A) table. 496 | 497 | Parameters 498 | ---------- 499 | areas : xarray.DataArray 500 | Contour-enclosed areas. 501 | 502 | Returns 503 | ---------- 504 | latEq : xarray.DataArray 505 | The equivalent latitudes. 506 | """ 507 | ratio = areas/2.0/np.pi/Rearth/Rearth - 1.0 508 | 509 | # clip ratio within [-1, 1] 510 | ratio = xr.where(ratio<-1, -1.0, ratio) 511 | ratio = xr.where(ratio> 1, 1.0, ratio) 512 | 513 | latEq = np.rad2deg(np.arcsin(ratio)).astype(areas.dtype) 514 | 515 | return latEq 516 | 517 | 518 | def latitude_lengths_at(lats, Rearth=Rearth): 519 | """ 520 | Calculate minimum length on a sphere given latitudes. 521 | 522 | Parameters 523 | ---------- 524 | latEq : xarray.DataArray 525 | Equivalent latitude. 526 | 527 | Returns 528 | ---------- 529 | Lmin : xarray.DataArray 530 | The minimum possible length of the contour. 531 | """ 532 | Lmin = (2.0 * np.pi * Rearth * np.cos(np.deg2rad(lats))).astype(lats.dtype) 533 | 534 | return Lmin 535 | 536 | 537 | def contour_area(verts): 538 | """ 539 | Compute the area enclosed by a contour. Copied from 540 | https://github.com/rabernat/floater/blob/master/floater/rclv.py 541 | 542 | Parameters 543 | ---------- 544 | verts : array_like 545 | 2D shape (N,2) array of vertices. 
def contour_area(verts):
    """
    Compute the area enclosed by a contour. Copied from
    https://github.com/rabernat/floater/blob/master/floater/rclv.py

    Parameters
    ----------
    verts : array_like
        2D shape (N,2) array of vertices. Uses scikit image conventions
        (j,i indexing)

    Returns
    ----------
    area : float
        Area of the polygon enclosed by verts; the absolute value is
        taken, so the result is independent of vertex orientation.
    """
    rolled = np.roll(verts, 1, axis=0)

    # shoelace formula in scikit-image (j, i) indexing
    elements = (rolled[:,1] + verts[:,1]) * (rolled[:,0] - verts[:,0])

    return abs(elements.sum())/2.0


def contour_length(segments, xdef, ydef, latlon=True, disp=False, Rearth=Rearth):
    """Compute the length of a contour.

    Parameters
    ----------
    segments: numpy.array
        Segments of a single contour position returned by
        `measure.find_contours`.
    xdef : numpy.array
        X-coordinates
    ydef : numpy.array
        Y-coordinates
    latlon : boolean
        Is coordinates latlon in radian or cartesian
    disp : boolean
        Unused; kept for interface compatibility.
    Rearth : float
        Radius of the sphere (scales the lat/lon result to meters).

    Returns
    ----------
    Perimeter: float
        Perimeter of a contour (NaN when no segment has any length).
    """
    yidx = np.arange(len(ydef))
    xidx = np.arange(len(xdef))

    # pick the metric once, then accumulate over all segments
    measure_fn = __segment_length_latlon if latlon \
                 else __segment_length_cartesian

    perimeter = 0

    for seg in segments:
        # map fractional grid indices to physical coordinates
        ypos = np.interp(seg[:,0], yidx, ydef)
        xpos = np.interp(seg[:,1], xidx, xdef)

        perimeter = perimeter + measure_fn(xpos, ypos)

    if perimeter == 0:
        return np.nan

    # lat/lon lengths are in radians on the unit sphere; scale by radius
    return perimeter * Rearth if latlon else perimeter
618 | 619 | Parameters 620 | ---------- 621 | dlon : xarray.DataArray 622 | longitude differentials 623 | dlat : xarray.DataArray 624 | latitude differentials 625 | lon : xarray.DataArray 626 | longitude values 627 | lat : xarray.DataArray 628 | latitude values 629 | 630 | Return 631 | ------- 632 | dx : xarray.DataArray 633 | Distance inferred from dlon 634 | dy : xarray.DataArray 635 | Distance inferred from dlat 636 | """ 637 | degtom = deg2m(Rearth=Rearth) 638 | dx = np.cos(np.deg2rad(lat)) * dlon * degtom 639 | dy = (dlat + lon - lon) * degtom 640 | 641 | # mini-dong: cos(90) is negative 642 | dx = xr.where(dx<0, -dx, dx) 643 | # cos(+/-90) is not exactly zero, add a threshold 644 | dx = xr.where(dx<1e-15, 0, dx) 645 | 646 | return dx, dy 647 | 648 | def __dll_dist_old(dlon, dlat, lon, lat): 649 | """ 650 | Converts lat/lon differentials into distances in meters. 651 | 652 | Parameters 653 | ---------- 654 | dlon : xarray.DataArray 655 | longitude differentials 656 | dlat : xarray.DataArray 657 | latitude differentials 658 | lon : xarray.DataArray 659 | longitude values 660 | lat : xarray.DataArray 661 | latitude values 662 | 663 | Return 664 | ------- 665 | dx : xarray.DataArray 666 | Distance inferred from dlon 667 | dy : xarray.DataArray 668 | Distance inferred from dlat 669 | """ 670 | degtom = 2.0 * np.pi * Rearth / 360.0 671 | dx = np.cos(np.deg2rad(lat)) * dlon * degtom 672 | dy = (dlat + lon - lon) * degtom 673 | 674 | # cos(+/-90) is not exactly zero, add a threshold 675 | dx = xr.where(dx<1e-15, 0, dx) 676 | 677 | return dx, dy 678 | 679 | def __is_periodic(coord, period): 680 | """ 681 | Whether a given coordinate array is periodic. 
682 | 683 | Parameters 684 | ---------- 685 | coord : xarray.DataArray 686 | A given coordinate e.g., longitude 687 | period : float 688 | Period used to justify the coordinate, e.g., 360 for longitude 689 | """ 690 | # assume it is linear increasing 691 | if coord.size == 1: 692 | return False 693 | 694 | delta = coord[1] - coord[0] 695 | 696 | start = coord[-1] + delta - period; 697 | 698 | if np.abs((start - coord[0]) / delta) > 1e-4: 699 | return False; 700 | 701 | return True 702 | 703 | 704 | # @nb.jit(nopython=True, cache=False) 705 | def __segment_length_latlon(xpos, ypos): 706 | n = len(xpos) 707 | 708 | if n <= 1: 709 | return np.nan 710 | 711 | total = 0 712 | 713 | for i in range(n-1): 714 | total += __geodist(xpos[i], xpos[i+1], ypos[i], ypos[i+1]) 715 | 716 | return total 717 | 718 | 719 | @nb.jit(nopython=True, cache=False) 720 | def __segment_length_cartesian(xpos, ypos): 721 | n = len(xpos) 722 | 723 | if n <= 1: 724 | return np.nan 725 | 726 | total = 0 727 | 728 | for i in range(n-1): 729 | total += np.hypot(xpos[i]-xpos[i+1], ypos[i]-ypos[i+1]) 730 | 731 | return total 732 | 733 | 734 | @nb.jit(nopython=True, cache=False) 735 | def __geodist(lon1, lon2, lat1, lat2): 736 | """Calculate great-circle distance on a sphere. 737 | 738 | Parameters 739 | ---------- 740 | lon1: float 741 | Longitude for point 1 in radian. 742 | lon2: float 743 | Longitude for point 2 in radian. 744 | lat1: float 745 | Latitude for point 1 in radian. 746 | lat2: float 747 | Latitude for point 2 in radian. 
748 | 749 | Returns 750 | ------- 751 | dis: float 752 | Great circle distance 753 | """ 754 | dlon = lon2 - lon1 755 | dlat = lat2 - lat1 756 | 757 | a = np.sin(dlat/2.0)**2.0 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon/2)**2.0 758 | 759 | dis = 2.0 * np.arcsin(np.sqrt(a)) 760 | 761 | return dis 762 | 763 | 764 | ''' 765 | Testing codes for each class 766 | ''' 767 | if __name__ == '__main__': 768 | print('start testing in ContourUtils.py') 769 | 770 | 771 | 772 | --------------------------------------------------------------------------------