├── .github
└── workflows
│ ├── cleancode.yml
│ └── pylint.yml
├── .gitignore
├── LICENSE
├── MANIFEST.in
├── README.md
├── cpol_processing
├── __init__.py
├── attenuation.py
├── cfmetadata.py
├── data
│ └── GM_model_CPOL.pkl.gz
├── filtering.py
├── hydrometeors.py
├── phase.py
├── production.py
├── radar_codes.py
└── velocity.py
├── output_11_0.png
├── output_17_0.png
├── output_24_0.png
├── scripts
├── dask_pack.py
├── proc_missings.py
├── radar_pack.py
└── radar_single.py
└── setup.py
/.github/workflows/cleancode.yml:
--------------------------------------------------------------------------------
1 | # name: Check code quality
2 | # on: [push]
3 |
4 | # jobs:
5 | # code-quality:
6 | # strategy:
7 | # fail-fast: false
8 | # matrix:
9 | # python-version: [3.9]
10 | # poetry-version: [1.1.8]
11 | # os: [ubuntu-latest]
12 | # runs-on: ${{ matrix.os }}
13 | # steps:
14 | # - uses: actions/checkout@v2
15 | # - uses: actions/setup-python@v2
16 | # with:
17 | # python-version: ${{ matrix.python-version }}
18 | # - name: Run image
19 | # uses: abatilo/actions-poetry@v2.0.0
20 | # with:
21 | # poetry-version: ${{ matrix.poetry-version }}
22 | # - name: Install dep
23 | # run: poetry install
24 | # - name: Run black
25 | # run: poetry run black . --check -l 120
26 | # - name: Run isort
27 | # run: poetry run isort . --check-only --profile black
28 | # - name: Run flake8
29 | # run: poetry run flake8 .
30 | # - name: Run bandit
31 | # run: poetry run bandit .
32 | # - name: Run safety
33 | # run: poetry run safety check
34 |
35 |
36 |
--------------------------------------------------------------------------------
/.github/workflows/pylint.yml:
--------------------------------------------------------------------------------
1 | name: Pylint
2 |
3 | on: [push]
4 |
5 | jobs:
6 | build:
7 |
8 | runs-on: ubuntu-latest
9 |
10 | steps:
11 | - uses: actions/checkout@v2
12 | - name: Set up Python 3.9
13 | uses: actions/setup-python@v2
14 | with:
15 | python-version: 3.9
16 | - name: Install dependencies
17 | run: |
18 | python -m pip install --upgrade pip
19 | pip install black isort flake8
20 | # - name: Analysing the code with pylint
21 | # run: |
22 | # pylint `ls -R|grep .py$|xargs`
23 | - name: Analysing the code with black
24 | run: |
25 | black -l 120 .
26 | - name: Analysing the code with isort
27 | run: |
28 | isort . --check-only --profile black
29 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 |
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 |
63 | # Scrapy stuff:
64 | .scrapy
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # PyBuilder
70 | target/
71 |
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 |
75 | # pyenv
76 | .python-version
77 |
78 | # celery beat schedule file
79 | celerybeat-schedule
80 |
81 | # SageMath parsed files
82 | *.sage.py
83 |
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 | env.bak/
91 | venv.bak/
92 |
93 | # Spyder project settings
94 | .spyderproject
95 | .spyproject
96 |
97 | # Rope project settings
98 | .ropeproject
99 |
100 | # mkdocs documentation
101 | /site
102 |
103 | # mypy
104 | .mypy_cache/
105 |
106 | scripts/pbs/
107 |
108 | .DS*
109 | *.pbs
110 | *.pbs.*
111 | .f
112 | *.pyc
113 | .vscode/
114 | settings.json
115 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2017 CPOL-dataset
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include README.md LICENSE
2 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | # Disclaimer
3 |
4 | This dataset is supported by a funding from the U.S. Department of Energy as part of the Atmospheric Radiation Measurement (ARM) Climate Research Facility, an Office of Science user facility.
5 |
6 | If you use this dataset to prepare a publication, please consider offering me (Valentin Louf) co-authorship and add the following line in the acknowledgments:
7 |
8 | > This work has been supported by the U.S. Department of Energy Atmospheric Systems Research Program through the grant DE-SC0014063.
9 |
10 | # CPOL general information
11 |
12 | (Excerpt of my calibration paper)
13 |
14 | > CPOL is a dual-polarization, Doppler radar, working at a frequency of 5.6 GHz with a pulse repetition frequency of 1000 Hz and a beamwidth of 1°. CPOL is located at Gunn Pt (-12.245°N, 131.045°E), about 25 km North-East from Darwin airport. CPOL performs a set of scans with an update time of 10 minutes. This includes, nominally, a volume scan, vertically pointing scan and two RHI scans. The scan comprises 15 elevations: 0.5°, 0.9°, 1.3°, 1.8°, 2.4°, 3.1°, 4.2°, 5.6°, 7.4°, 10°, 13.3°, 17.9°, 23.9°, 32°, and 43.1°. An additional series of scans at 90° is also performed regularly. The periodicity of the vertically pointing scan changes from season to season, and there are no such scans for seasons 2009/2010 and 2010/2011. The observed parameters are Zh, Zdr, Doppler velocity (v), differential phase φdp, spectrum width σv, and cross-correlation coefficient at zero lag ρhv. The maximum unambiguous range of the volume scan is 140 km with a range gate spacing of 250 m and an azimuthal beam spacing of 1°. Between 2001 and 2007, to reduce the data size and allow real-time transmission to the regional forecasting office, the radar gate range was changed to 300 m, and data were sampled with an azimuthal resolution of 1.5°. Before 2007, the azimuthal indexing had to be corrected while, after 2007, the data are generated with the data synced to the azimuthal sampling. CPOL has produced more than 350,000 plan position indicator scans over 17 wet seasons (November to May) between 1998 to 2017. During that period, three seasons are missing: 2000/2001, 2007/2008, and 2008/2009. The first season is missing because the radar was moved to Sydney to support the 2000 Sydney Olympic games. The two latter seasons are missing because the radar antenna and receiver needed replacement.
15 |
16 | The CPOL data are available at several levels. If you're not a radar guy, then you are probably interested in the level 2 data, as knowledge in radars and in signal processing are implicitly required to use the level 1 data. All levels use the netCDF format. The level 1 uses the CF/radial convention, while the level 2 tries to follow the CF convention.
17 |
18 | ## Level 1a
19 |
 20 | The level 1a is for the radar people who want to play with the raw data. At this level, nothing has been processed, nothing has been removed (artifacts, noise, and clutter are present), and nothing has passed through any quality check. Only use it if you know what you're doing. $\rho_{hv}$ is missing from season 09/10, seasons after 2012 might contain a normalized coherent power field. The signal-to-noise ratio has to be computed for all seasons (radiosoundings are used for this).
21 |
22 | On Raijin, the data are found here: `/g/data2/rr5/CPOL_radar/CPOL_level_1a`
23 |
24 | ## Level 1b
25 |
26 | These are the processed data. It comes in 3 versions:
27 | - PPI (radar coordinates, i.e. polar). On Raijin, it is found here: `/g/data2/rr5/CPOL_radar/CPOL_level_1b/PPI`
28 | - GRIDDED (In Cartesian coordinates). For the gridded data, the z-axis goes from 0 to 20 km of altitude by step of 0.5 km.
29 | - GRID_150km_2500m: Full domain, with a 2.5 km grid resolution: `/g/data2/rr5/CPOL_radar/CPOL_level_1b/GRIDDED/GRID_150km_2500m`
30 | - GRID_70km_1000m: Half the domain, with a 1 km grid resolution: `/g/data2/rr5/CPOL_radar/CPOL_level_1b/GRIDDED/GRID_70km_1000m`
31 |
32 | The PPIs contain the following fields:
33 | - D0, median diameter
34 | - NW, intercept parameter
35 | - corrected_differential_phase, corrected differential phase
36 | - corrected_differential_reflectivity, corrected differential reflectivity (attenuation and range correction)
37 | - corrected_specific_differential_phase, specific differential phase
38 | - cross_correlation_ratio, corrected cross correlation ratio (range correction).
39 | - differential_phase, raw differential phase
40 | - differential_reflectivity, raw differential reflectivity
41 | - radar_echo_classification, echo classification.
42 | - radar_estimated_rain_rate, rainfall rate (see appropriate citation down below)
43 | - raw_velocity, velocity of scatterers away from instrument
44 | - reflectivity, corrected reflectivity (noise and clutter remove, and attenuation corrected)
45 | - region_dealias_velocity, Doppler velocity dealiased using the region based algorithm
46 | - signal_to_noise_ratio, Signal to noise ratio, computed using radiosoundings from YPDN site.
47 | - specific_attenuation_differential_reflectivity, differential reflectivity specific attenuation, using Bringi's coefficients.
48 | - specific_attenuation_reflectivity, reflectivity specific attenuation, using Wu et al. relationship.
49 | - spectrum_width
50 | - temperature, from radiosoundings
51 | - total_power, uncorrected reflectivity
52 |
53 | The gridded data are made using the Barnes at al. algorithm from Py-ART with a constant radius of influence of 2.5 km or 1 km (depending on the grid resolution). The 1 km resolution data have these dimensions (z, y, x) -> (41, 141, 141) and the 2.5 km resolution have these ones: (z, y, x) -> (41, 117, 117). The z dimension goes from 0 to 20 km by step of 500 m.
54 | - D0
55 | - NW
56 | - corrected_differential_reflectivity
57 | - cross_correlation_ratio
58 | - latitude
59 | - longitude
60 | - radar_echo_classification
61 | - radar_estimated_rain_rate
62 | - reflectivity
63 | - region_dealias_velocity
64 | - temperature
65 | - total_power
66 | - velocity
67 |
68 | If you are interested in the gridding technique, please read Rob's post on github: [https://github.com/ARM-DOE/pyart/issues/753]
69 |
70 | Quicklooks and animation for the lowest elevation of the PPI data are available in the folders `FIGURE_CHECK` and `ANIMATED_GIF`, respectively.
71 |
72 | ## Level 2
73 |
 74 | The level 2 are daily files, i.e. all the data for one day have been copied into a single file. Because the radar temporal resolution is 10 minutes, the time dimension length is 144 and it is set as constant in these files. These are 2D fields, but if 3D fields are a thing of interest, then I'll consider adding them.
75 |
76 | The data have constant dimensions:
77 |
78 | - (time, y, x) -> (144, 117, 117) for the 2.5 km resolution files: `/g/data2/rr5/CPOL_radar/CPOL_level_2`
79 | - (time, y, x) -> (144, 141, 141) for the 1 km resolution files: `/g/data2/rr5/CPOL_radar/CPOL_level_2_1km`
80 |
81 | The following fields are available:
82 | - Reflectivity at 2500 m, copied from the level 1b gridded, in: `/g/data2/rr5/CPOL_radar/CPOL_level_2/REFLECTIVITY`
83 | - Rainfall rate, copied, and NaN values put to zero, from the level 1b gridded: `/g/data2/rr5/CPOL_radar/CPOL_level_2/RADAR_ESTIMATED_RAIN_RATE`
84 | - Steiner echo classification (0: nothing, 1: stratiform, 2: convective) at 2500 m, computed using the reflectivity at 2500m:
85 | `/g/data2/rr5/CPOL_radar/CPOL_level_2/STEINER_ECHO_CLASSIFICATION`
86 | - The 0dB echo top height, computed using Lakshman algorithm on the level 1B PPIs and then gridded manually, in: `/g/data2/rr5/CPOL_radar/CPOL_level_2/ECHO_TOP_HEIGHT`
87 |
88 |
89 | If you want more level 2 fields (that are already present on level 1b), like velocity, D0, $Z_{dr}$, or else, just ask for it ;-)
90 |
91 | IMPORTANT: because we use a set of constant dimensions for all level 2 files, some time slices may be empty if no measurements have been made at this particular timestep. The variable called `isfile` will help you to know if measurements exist or not at a particular timestep. It is equal to 0 if there is no data and 1 if there is data.
92 |
93 | # Georeferencing information
94 |
95 | The gridded product in level 1b contains 2D array of latitude/longitude. Here are the georeferencing information that you can find in the level 2 files metadata:
96 |
97 | ```
98 | :geospatial_bounds = "(129.70320368213441, 132.3856852067545, -13.552905831511362, -10.941777804922253)" ;
99 | :geospatial_lat_min = "-13.552905831511362" ;
100 | :geospatial_lat_max = "-10.941777804922253" ;
101 | :geospatial_lat_units = "degrees_north" ;
102 | :geospatial_lon_min = "129.70320368213441" ;
103 | :geospatial_lon_max = "132.3856852067545" ;
104 | :geospatial_lon_units = "degrees_east" ;
105 | :geospatial_vertical_min = "0" ;
106 | :geospatial_vertical_max = "20000" ;
107 | :geospatial_vertical_resolution = "500" ;
108 | :geospatial_vertical_units = "meters" ;
109 | :origin_latitude = "-12.249" ;
110 | :origin_longitude = "131.044" ;
111 | :origin_altitude = "50" ;
112 | :geospatial_projection = "Azimuthal equidistant projection" ;
113 | ```
114 |
115 | In the next version of the CPOL level 2 data, these 2D lat/lon arrays will also be included.
116 |
117 |
118 | ## Important note about rainfalls
119 |
120 | The level 1b is a 3D fields, because that's how the algorithm that compute the rainfall rate works, BUT the rainfall rate is a surface field, it does not have meaning after the first elevation/first slice. If you were to use the level 1b, then only use the first elevation/slice of the `radar_estimated_rain_rate`.
121 |
 122 | At level 1b: the hydrometeor retrievals, namely D0, NW, and the rainfall rate at this level don't have any sort of post-processing applied to them; they are direct outputs from the algorithm that has been used to compute these fields. Meaning that if the radar did not measure anything, then the rainfall rate is equal to NaN, not zero, even inside the radar domain.
123 |
124 | At level 2: a post processing has been applied, the rainfalls has been put to zero instead of NaN. What is NaN is outside the CPOL domain. Inside the domain the rainfall rates have real values.
125 |
 126 | Moreover, [Bobby Jackson](https://github.com/rcjackson) (Argonne Laboratory) recently worked on CPOL rainfall rate retrievals and made comparisons with impact and video disdrometers. Here is his conclusion:
 127 | > The best estimate we have for rainfall so far is from the Thompson et al. blended technique. Thankfully, this is the one that Valentin already has in his processing code, so literally no work needs to be done to put that in the latest version. The only thing we are working on now is collaborating with Elizabeth Thompson on using her disdrometer data to estimate the retrieval random uncertainty by looking at the P.D.F. of rainfall rate as a function of the various moments and calculating the quartiles of the P.D.F. at the given moments.
128 |
129 | ## How to cite CPOL and/or the different products
130 |
131 | CPOL instrument:
132 |
133 | > Keenan, T. et al. The BMRC/NCAR C-Band Polarimetric (C-POL) Radar System. J. Atmos. Ocean. Technol. 15, 871–886 (1998).
134 |
135 | CPOL calibration:
136 |
137 | > Louf, V. et al. An integrated approach to weather radar calibration and monitoring using ground clutter and satellite comparisons. J. Atmos. Ocean. Technol. (Under review).
138 |
139 | The level 1b were produced using the Py-ART software, it should be cited:
140 |
141 | > Helmus, J. J. & Collis, S. M. The Python ARM Radar Toolkit (Py-ART ), a Library for Working with Weather Radar Data in the Python Programming Language. J. Open Res. Softw. 4, e25 (2016).
142 |
143 | The following products are not "raw measurements" from the radar, they have been retrieved using various algorithm. If you use one of these products, the correct citations are:
144 |
145 | Rainfall rate estimates:
146 |
147 | > Thompson, E. J., Rutledge, S. A., Dolan, B., Thurai, M. & Chandrasekar, V. Dual-Polarization Radar Rainfall Estimation over Tropical Oceans. J. Appl. Meteorol. Climatol. 57, 755–775 (2018).
148 |
149 | Steiner echo classification:
150 |
151 | > Steiner, M., Houze, R. A. & Yuter, S. E. Climatological Characterization of Three-Dimensional Storm Structure from Operational Radar and Rain Gauge Data. J. Appl. Meteorol. 34, 1978–2007 (1995).
152 |
153 | Echo top height:
154 |
155 | > Lakshmanan, V., Hondl, K., Potvin, C. K. & Preignitz, D. An Improved Method for Estimating Radar Echo-Top Height. Weather Forecast. 28, 481–488 (2013).
156 |
 157 | Here is a recent paper where we verify the echo top height:
158 |
159 | > Jackson, R. C., Collis, S. M., Louf, V., Protat, A. & Majewski, L. A 17 year climatology of convective cloud top heights in Darwin. Atmos. Chem. Phys. Discuss. 1–26 (2018). doi:10.5194/acp-2018-408
160 |
161 | For other products, like the dual-polarisation radar products, just contact me (I haven't listed them as I suppose very few people might know about them).
162 |
163 | # Known issues / future version
164 |
165 | Level 2:
 166 | - Only 1D arrays of latitude/longitude (the lat/lon crossing the radar origin) are available in the level 2; this should be fixed in the next version. For now, use the georeferencing information in the metadata to rebuild the lat/lon arrays, or read the lat/lon arrays from the gridded level 1b files (next version).
167 |
168 | Level 1b and potentially future level 2:
 169 | - There are problems with the velocity dealiasing, due to a lack of competent dealiasing algorithms. We developed a new and promising method of dealiasing that hopefully will solve this issue (next version).
170 |
171 | Level 1b:
 172 | - Processing of dual-polar products is always a struggle that is incrementally improved; the next version should use a better-performing PHIDP/KDP processing.
 173 | - The level 1b rainfall rates don't contain true zero values, but NaN values instead; it's not a bug, it's a feature (direct processing output). Use the level 2 rainfalls, or fill the array with zero and put to NaN all values outside of the radar domain.
174 |
175 | # Other libraries that use CPOL_processing
176 |
177 | - [PyHail](https://github.com/joshua-wx/PyHail)
178 |
179 | # Example with the level 1a
180 |
181 | Let's read, display information and plot some raw data.
182 |
183 |
184 | ```python
185 | %matplotlib inline
186 | import glob
187 | import warnings
188 | import netCDF4
189 | import pyart
190 | import numpy as np
191 | import matplotlib.pyplot as pl
192 | import dask
193 | import dask.bag as db
194 | from dask.diagnostics import ProgressBar
195 | warnings.simplefilter('ignore')
196 | ```
197 |
198 | ```python
199 | radar = pyart.io.read('/g/data2/rr5/CPOL_radar/CPOL_level_1a/2006/20060103/cfrad.20060103_001000.000_to_20060103_001816.000_CPOL_PPI_level1a.nc')
200 | ```
201 |
202 |
203 | ```python
204 | radar.info('compact')
205 | ```
206 |
207 | altitude:
208 | altitude_agl: None
209 | antenna_transition: None
210 | azimuth:
211 | elevation:
212 | fields:
213 | DBZ:
214 | VEL:
215 | WIDTH:
216 | ZDR:
217 | PHIDP:
218 | RHOHV:
219 | fixed_angle:
220 | instrument_parameters:
221 | frequency:
222 | pulse_width:
223 | prt:
224 | polarization_mode:
225 | radar_beam_width_h:
226 | radar_beam_width_v:
227 | latitude:
228 | longitude:
229 | nsweeps: 17
230 | ngates: 480
231 | nrays: 4148
232 | radar_calibration: None
233 | range:
234 | scan_rate:
235 | scan_type: ppi
236 | sweep_end_ray_index:
237 | sweep_mode:
238 | sweep_number:
239 | sweep_start_ray_index:
240 | target_scan_rate: None
241 | time:
242 | metadata:
243 | Conventions: CF/Radial instrument_parameters
244 | version: 2017-10
245 | title: PPI volume from Australia Bureau of Meteorology
246 | institution: Australia Bureau of Meteorology
247 | references: If you use this dataset, please cite: 'An integrated approach to weather radar calibration and monitoring using ground clutter and satellite comparisons' by Louf et al. 2018 Journal of Atmospheric and Oceanic Technology.
248 | source:
249 | comment: This dataset has been created by Valentin Louf at the Bureau of Meteorology for Monash University and ARM.
250 | instrument_name: CPOL
251 | original_container: UF
252 | site_name: Gunn_Pt
253 | radar_name: CPOL
254 | field_names: DBZ VEL WIDTH ZDR PHIDP RHOHV
255 | NCO: "4.6.4"
256 | author: Valentin Louf
257 | author_email: valentin.louf@bom.gov.au
258 | calibration: Calibration offsets applied: ZH = -1.2 dB, and ZDR = -0.13 dB.
259 | country: Australia
260 | created: 2017-10-26T13:04:17.512932
261 | project: CPOL
262 | description: This dataset was processed using the ARM PyART toolbox, the trmm_rsl library, the NCO toolbox, and RadX from NCAR.
263 | state: NT
264 | history: October 2017 recalibration: Thu Aug 3 11:40:07 2017: ncrename -v .total_power,DBZ -v velocity,VEL -v spectrum_width,WIDTH -v differential_reflectivity,ZDR -v differential_phase,PHIDP -v cross_correlation_ratio,RHOHV -v .corrected_reflectivity,Refl cfrad.20060103_001000.000_to_20060103_001816.000_CPOL_PPI_level0.nc
265 |
266 | volume_number: 0
267 | platform_type: fixed
268 | instrument_type: radar
269 | primary_axis: axis_z
270 |
271 |
272 |
273 | ```python
274 | # Note that Bathurst Island is missing from these plots, this is an issue coming from Basemap.
275 | gr = pyart.graph.RadarMapDisplay(radar)
276 | fig, ax = pl.subplots(3, 2, figsize=(12, 15), sharex=True, sharey=True)
277 | ax = ax.flatten()
278 | sw = 0
279 | gr.plot_ppi_map('DBZ', ax=ax[0], sweep=sw, vmin=-35, vmax=65, cmap='pyart_NWSRef')
280 | gr.plot_ppi_map('ZDR', ax=ax[1], sweep=sw, vmin=-2, vmax=8, cmap='pyart_RefDiff')
281 | gr.plot_ppi_map('WIDTH', ax=ax[2], sweep=sw, vmin=0, vmax=30, cmap='pyart_NWS_SPW')
282 | gr.plot_ppi_map('RHOHV', ax=ax[3], sweep=sw, vmin=0.5, vmax=1.05, cmap='pyart_RefDiff')
283 | gr.plot_ppi_map('PHIDP', ax=ax[4], sweep=sw, cmap='pyart_Wild25', vmin=-180, vmax=180)
284 | gr.plot_ppi_map('VEL', ax=ax[5], sweep=sw, cmap='pyart_NWSVel')
285 |
286 | for ax_sl in ax:
287 | gr.plot_range_rings([50, 100, 150], ax=ax_sl)
288 | ax_sl.set_aspect(1)
289 |
290 | pl.show()
291 | ```
292 |
293 |
294 | 
295 |
296 |
297 | # Example with the level 1b
298 |
299 | Read all files during a day and plot some QVPs.
300 |
301 |
302 | ```python
303 | def retrieve_qvp(filename, fields=None, desired_angle=20.0):
304 | if fields == None:
305 | fields = ['differential_phase', 'cross_correlation_ratio', 'total_power', 'differential_reflectivity']
306 |
307 | try:
308 | radar = pyart.io.read(filename)
309 | except Exception:
310 | return None
311 | index = abs(radar.fixed_angle['data'] - desired_angle).argmin()
312 |
313 | qvp = {}
314 | for field in fields:
315 | data = radar.get_field(index, field).mean(axis = 0)
316 | qvp.update({field: data})
317 |
318 | qvp.update({'range': radar.range['data'], 'time': radar.time})
319 | z = qvp['range'] / 1000.0 * np.sin(radar.fixed_angle['data'][index] * np.pi / 180)
320 | qvp.update({'height': z})
321 |
322 | del radar
323 | return qvp
324 | ```
325 |
326 |
327 | ```python
328 | flist = sorted(glob.glob('/g/data2/rr5/CPOL_radar/CPOL_level_1b/PPI/2017/20170304/*.nc'))
329 | ```
330 |
331 |
332 | ```python
333 | bag = db.from_sequence(flist)
334 | dbobj = bag.map(retrieve_qvp)
335 | with ProgressBar():
336 | rslt = dbobj.compute()
337 |
338 | rslt = [r for r in rslt if r is not None]
339 | ```
340 |
341 | [########################################] | 100% Completed | 41.8s
342 |
343 |
344 |
345 | ```python
346 | # Unpack data
347 | differential_phase = np.zeros((599, len(rslt)))
348 | cross_correlation_ratio = np.zeros((599, len(rslt)))
349 | reflectivity = np.zeros((599, len(rslt)))
350 | differential_reflectivity = np.zeros((599, len(rslt)))
351 | time = [None] * len(rslt)
352 |
353 | for i, r in enumerate(rslt):
354 | differential_phase[:, i] = r['differential_phase']
355 | cross_correlation_ratio[:, i] = r['cross_correlation_ratio']
356 | reflectivity[:, i] = r['total_power']
357 | differential_reflectivity[:, i] = r['differential_reflectivity']
358 | time[i] = netCDF4.num2date(r['time']['data'][0], r['time']['units'])
359 | height = r['height']
360 | ```
361 |
362 |
363 | ```python
364 | titles = ['Reflectivity (dBZ)', r'$Z_{dr}$ (dB)', r'$\phi_{dp}\,(^\circ)$', r'$\rho_{hv}$', r'$\omega$ (m/s)']
365 | with pl.style.context('bmh'):
366 | fig, ax = pl.subplots(4, 1, figsize=(10, 12), sharex=True)
367 | ax = ax.flatten()
368 | cax = [None] * len(ax)
369 |
370 | cax[0] = ax[0].pcolormesh(time, height, reflectivity, cmap='pyart_NWSRef', vmin=-15, vmax=75)
371 | ax[0].set_title('Evolution of tropical cyclone Blanche\nCPOL 2017-03-04')
372 | cax[1] = ax[1].pcolormesh(time, height, differential_reflectivity, cmap='pyart_RefDiff', vmin=-1, vmax=8)
373 | cax[2] = ax[2].pcolormesh(time, height, 90 + differential_phase, cmap='pyart_Wild25', vmin=-180, vmax=180)
374 | cax[3] = ax[3].pcolormesh(time, height, cross_correlation_ratio, cmap='pyart_RefDiff', vmin=0.5, vmax=1.05)
375 |
376 | for i in range(len(ax)):
377 | ax[i].set_ylim(0, 20)
378 | ax[i].set_ylabel('Height (km)')
379 | cbar = pl.colorbar(cax[i], ax=ax[i])
380 | cbar.set_label(titles[i])
381 |
382 | fig.tight_layout()
383 | pl.show()
384 | ```
385 |
386 |
387 | 
388 |
389 |
390 | # Example with the level 2
391 |
392 | In this example we remake the Fig. 3 from Kumar et al. (2013) The four cumulus cloud modes and their progression during rainfall events: A C-band polarimetric radar perspective .
393 |
394 |
395 | ```python
396 | def read_data(infile, key='radar_estimated_rain_rate'):
397 | with netCDF4.Dataset(infile) as ncid:
398 | data = np.squeeze(ncid[key][:])
399 | return data
400 | ```
401 |
402 |
403 | ```python
404 | def proc_file(inargs):
405 | file_eth, file_stein, file_refl = inargs
406 |
407 | eth = read_data(file_eth, 'echo_top_height')
408 | stein = read_data(file_stein, 'steiner_echo_classification')
409 | refl = read_data(file_refl, 'reflectivity')
410 | pos = stein == 2
411 |
412 | return eth[pos], refl[pos]
413 | ```
414 |
415 |
416 | ```python
417 | flist_eth = sorted(glob.glob('/g/data2/rr5/CPOL_radar/CPOL_level_2/ECHO_TOP_HEIGHT/*.nc'))
418 | flist_stein = sorted(glob.glob('/g/data2/rr5/CPOL_radar/CPOL_level_2/STEINER_ECHO_CLASSIFICATION/*.nc'))
419 | flist_refl = sorted(glob.glob('/g/data2/rr5/CPOL_radar/CPOL_level_2/REFLECTIVITY/*.nc'))
420 | arg_list = [(a, b, c) for a, b, c in zip(flist_eth, flist_stein, flist_refl)]
421 | ```
422 |
423 |
424 | ```python
425 | bag = db.from_sequence(arg_list)
426 | bagobj = bag.map(proc_file)
427 | with ProgressBar():
428 | rslt = bagobj.compute()
429 | ```
430 |
431 | [########################################] | 100% Completed | 42.7s
432 |
433 |
434 |
435 | ```python
436 | # Unpacking results
437 | ct = np.sum([len(a) for a, b in rslt])
438 | eth = np.zeros((ct))
439 | refl = np.zeros((ct))
440 | pos = 0
441 | for a, b in rslt:
442 | if len(a) == 0:
443 | continue
444 | eth[pos: pos + len(a)] = a
445 | refl[pos: pos + len(a)] = b
446 | pos += len(a)
447 |
448 | pos = refl > 0
449 | eth = eth[pos]
450 | refl = refl[pos]
451 | ```
452 |
453 |
454 | ```python
455 | with pl.style.context('bmh'):
456 | pl.hist2d(refl, eth, range=[[10, 60], [3e3, 20e3]], bins=[50, 40], cmap='jet')
457 | pl.xlabel('Reflectivity at 2.5 km (dBZ)')
458 | pl.ylabel('0dB Echo Top Height (m)')
459 | pl.hlines(6.5e3, 10, 60, 'w', 'dashed')
460 | pl.hlines(15e3, 10, 60, 'w', 'dashed')
461 | pl.title('CPOL 1998-2017.\n{:,} elements.'.format(ct))
462 | pl.colorbar()
463 | pl.show()
464 | ```
465 |
466 |
467 | 
468 |
469 |
--------------------------------------------------------------------------------
/cpol_processing/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | CPOL Level 1b main production line.
3 |
4 | @title: CPOL_PROD_1b
5 | @author: Valentin Louf
6 | @institution: Bureau of Meteorology
7 |
8 | .. autosummary::
9 | :toctree: generated/
10 |
11 | production_line
12 | """
13 |
14 | from .production import production_line
15 | from .production import process_and_save
16 |
--------------------------------------------------------------------------------
/cpol_processing/attenuation.py:
--------------------------------------------------------------------------------
1 | """
2 | Codes for correcting and estimating attenuation on ZH and ZDR.
3 |
4 | @title: attenuation
5 | @author: Valentin Louf
6 | @institutions: Monash University and the Australian Bureau of Meteorology
7 | @date: 24/02/2021
8 |
9 | .. autosummary::
10 | :toctree: generated/
11 |
12 | correct_attenuation_zdr
13 | correct_attenuation_zh_pyart
14 | """
15 | from typing import Dict
16 |
17 | import pyart
18 | import numpy as np
19 | from scipy.integrate import cumtrapz
20 |
21 |
def correct_attenuation_zdr(
    radar, gatefilter, zdr_name: str = "ZDR_CORR", phidp_name: str = "PHIDP_GG", alpha: float = 0.016
) -> Dict:
    """
    Correct attenuation on differential reflectivity using the linear
    ZDR/PHIDP relationship. PHIDP_GG has been cleaned of noise, that's
    why we use it.

    V. N. Bringi, T. D. Keenan and V. Chandrasekar, "Correcting C-band radar
    reflectivity and differential reflectivity data for rain attenuation: a
    self-consistent method with constraints," in IEEE Transactions on Geoscience
    and Remote Sensing, vol. 39, no. 9, pp. 1906-1915, Sept. 2001.
    doi: 10.1109/36.951081

    Parameters:
    ===========
    radar:
        Py-ART radar structure.
    gatefilter: GateFilter
        Filter excluding non meteorological echoes.
    zdr_name: str
        Differential reflectivity field name.
    phidp_name: str
        PHIDP field name.
    alpha: float
        Z-PHI coefficient (Bringi et al. 2001 value for C band).

    Returns:
    ========
    zdr_meta: dict
        Py-ART field dictionary; the attenuation corrected differential
        reflectivity is stored under its "data" key.
    """
    zdr = radar.fields[zdr_name]["data"].copy()
    phi = radar.fields[phidp_name]["data"].copy()

    # Linear Z-PHI correction: attenuation is proportional to differential phase.
    zdr_corr = zdr + alpha * phi
    # Remove non-meteorological gates, then mask any non-finite values.
    # np.nan (lowercase): the np.NaN alias was removed in NumPy 2.0.
    zdr_corr[gatefilter.gate_excluded] = np.nan
    zdr_corr = np.ma.masked_invalid(zdr_corr)
    np.ma.set_fill_value(zdr_corr, np.nan)

    # Wrap the corrected field in standard Py-ART metadata.
    zdr_meta = pyart.config.get_metadata("differential_reflectivity")
    zdr_meta["description"] = "Attenuation corrected differential reflectivity using Bringi et al. 2001."
    zdr_meta["_FillValue"] = np.nan
    zdr_meta["_Least_significant_digit"] = 2
    zdr_meta["data"] = zdr_corr

    return zdr_meta
69 |
70 |
def correct_attenuation_zh_pyart(
    radar, gatefilter, refl_field: str = "DBZ", rhv_field: str = "RHOHV_CORR", phidp_field: str = "PHIDP_GG",
) -> np.ndarray:
    """
    Correct attenuation on reflectivity using Py-ART tool. The attenuation from
    atmospheric gases is also corrected.

    Parameters:
    ===========
    radar:
        Py-ART radar structure.
    gatefilter: GateFilter
        Filter excluding non meteorological echoes.
    refl_field: str
        Reflectivity field name.
    rhv_field: str
        RHOHV field name.
    phidp_field: str
        PHIDP field name.

    Returns:
    ========
    refl_corr: np.ndarray
        Attenuation corrected reflectivity.
    """
    # `cumtrapz` was renamed `cumulative_trapezoid` in SciPy 1.6 and the old
    # alias was removed in SciPy 1.14; fall back for very old SciPy.
    try:
        from scipy.integrate import cumulative_trapezoid
    except ImportError:
        from scipy.integrate import cumtrapz as cumulative_trapezoid

    # Compute specific attenuation (dB/km).
    spec_atten, _ = pyart.correct.calculate_attenuation(
        radar, 0, rhv_min=0.3, refl_field=refl_field, ncp_field=rhv_field, rhv_field=rhv_field, phidp_field=phidp_field,
    )

    specific_atten = np.ma.masked_invalid(spec_atten["data"])

    # Range in km; gate spacing assumed constant along the ray.
    r = radar.range["data"] / 1000
    dr = r[2] - r[1]

    # Two-way path-integrated attenuation: 2x the cumulative integral of the
    # specific attenuation along range. The cumulative integral has one fewer
    # gate, so the last gate keeps zero attenuation.
    na, nr = radar.fields[refl_field]["data"].shape
    attenuation = np.zeros((na, nr))
    attenuation[:, :-1] = 2 * cumulative_trapezoid(specific_atten, dx=dr)
    refl_corr = radar.fields[refl_field]["data"].copy() + attenuation
    refl_corr = np.ma.masked_where(gatefilter.gate_excluded, refl_corr)

    return refl_corr.astype(np.float32)
112 |
--------------------------------------------------------------------------------
/cpol_processing/cfmetadata.py:
--------------------------------------------------------------------------------
1 | """
2 | Codes for correcting and estimating various radar and meteorological parameters.
3 |
4 | @title: cfmetadata
5 | @author: Valentin Louf
6 | @institutions: Australian Bureau of Meteorology
7 | @creation: 26/05/2020
8 | @date: 02/06/2020
9 |
10 | .. autosummary::
11 | :toctree: generated/
12 |
13 | correct_standard_name
14 | correct_units
15 | coverage_content_type
16 | """
17 |
18 |
def correct_standard_name(radar):
    """
    'standard_name' is a protected keyword for metadata in the CF conventions.
    To respect the CF conventions we can only use the standard_name field that
    exists in the CF table.

    Parameter:
    ==========
    radar: Radar object
        Py-ART data structure.
    """
    # Pop each key individually with a default: grouping several pops inside
    # one try-block means a single missing key silently skips the remaining
    # pops (e.g. a missing fixed_angle key would leave sweep_mode untouched).
    radar.range.pop("standard_name", None)
    radar.azimuth.pop("standard_name", None)
    radar.elevation.pop("standard_name", None)

    radar.range.pop("axis", None)
    # NOTE(review): azimuth/elevation are tagged with axis "T" here — kept
    # as in the original; confirm this is the intended CF axis attribute.
    radar.azimuth["axis"] = "T"
    radar.elevation["axis"] = "T"

    radar.sweep_number.pop("standard_name", None)
    radar.fixed_angle.pop("standard_name", None)
    radar.sweep_mode.pop("standard_name", None)

    # Fields whose standard_name is valid per the CF table and must be kept.
    good_keys = [
        "corrected_reflectivity",
        "total_power",
        "radar_estimated_rain_rate",
        "corrected_velocity",
    ]
    for k in radar.fields.keys():
        if k not in good_keys:
            radar.fields[k].pop("standard_name", None)

    if "velocity" in radar.fields:
        radar.fields["velocity"]["standard_name"] = "radial_velocity_of_scatterers_away_from_instrument"
        radar.fields["velocity"]["long_name"] = "Doppler radial velocity of scatterers away from instrument"

    radar.latitude["standard_name"] = "latitude"
    radar.longitude["standard_name"] = "longitude"
    radar.altitude["standard_name"] = "altitude"

    return None
79 |
80 |
def correct_units(radar):
    """
    Correct units according to CF/convention.

    Parameter:
    ==========
    radar: Radar object
        Py-ART data structure.
    """
    # CF-compliant units for fields that are commonly mislabelled; silently
    # skip any field that is not present on this radar.
    for field_name, cf_units in (("cross_correlation_ratio", "1"), ("spectrum_width", "m s-1")):
        if field_name in radar.fields:
            radar.fields[field_name]["units"] = cf_units

    radar.sweep_mode["units"] = " "
    radar.scan_rate["units"] = "degree s-1"
    radar.instrument_parameters["nyquist_velocity"]["units"] = "m s-1"

    return None
103 |
104 |
def coverage_content_type(radar):
    """
    Adding metadata for compatibility with ACDD-1.3

    Parameter:
    ==========
    radar: Radar object
        Py-ART data structure.
    """
    # Coordinate variables.
    for coord in (radar.range, radar.azimuth, radar.elevation, radar.latitude, radar.longitude, radar.altitude):
        coord["coverage_content_type"] = "coordinate"

    # Sweep bookkeeping variables.
    for aux in (radar.sweep_number, radar.fixed_angle, radar.sweep_mode):
        aux["coverage_content_type"] = "auxiliaryInformation"

    for params in radar.instrument_parameters.values():
        params["coverage_content_type"] = "auxiliaryInformation"

    # Data fields: classification, quality and physical measurements.
    for name, field in radar.fields.items():
        if name == "radar_echo_classification":
            field["coverage_content_type"] = "thematicClassification"
        elif name in ("normalized_coherent_power", "normalized_coherent_power_v"):
            field["coverage_content_type"] = "qualityInformation"
        else:
            field["coverage_content_type"] = "physicalMeasurement"

    return None
142 |
--------------------------------------------------------------------------------
/cpol_processing/data/GM_model_CPOL.pkl.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/vlouf/cpol_processing/097994422c46773754e04a3d4911b81c01673fa5/cpol_processing/data/GM_model_CPOL.pkl.gz
--------------------------------------------------------------------------------
/cpol_processing/filtering.py:
--------------------------------------------------------------------------------
1 | """
2 | Codes for creating and manipulating gate filters. New functions: use of trained
3 | Gaussian Mixture Models to remove noise and clutter from CPOL data before 2009.
4 |
5 | @title: filtering.py
6 | @author: Valentin Louf
7 | @institutions: Monash University and the Australian Bureau of Meteorology
8 | @created: 20/11/2017
9 | @date: 25/02/2021
10 |
11 | .. autosummary::
12 | :toctree: generated/
13 |
14 | texture
15 | get_clustering
16 | get_gatefilter_GMM
17 | do_gatefilter_cpol
18 | do_gatefilter
19 | """
20 | # Libraries
21 | import os
22 | import gzip
23 | import pickle
24 |
25 | import pyart
26 | import cftime
27 | import numpy as np
28 | import pandas as pd
29 |
30 |
def texture(data: np.ndarray) -> np.ndarray:
    """
    Compute the texture of data.

    The texture is the RMS difference between a gate and its eight
    neighbours in a 3x3 window (based on :cite:`Gourley2007`). NaN values
    in the original array have NaN textures. (Wradlib function)

    Parameters:
    ==========
    data : :class:`numpy:numpy.ndarray`
        multi-dimensional array with shape (..., number of beams, number
        of range bins)

    Returns:
    =======
    texture : :class:`numpy:numpy.ndarray`
        array of textures with the same shape as data
    """
    # The eight (row, col) shifts of a 3x3 neighbourhood; np.roll wraps
    # around the array edges, as in the original wradlib implementation.
    shifts = ((1, 0), (0, 1), (-1, 0), (0, -1), (1, 1), (1, -1), (-1, -1), (-1, 1))

    valid_count = np.zeros(data.shape)
    squared_diff_sum = np.zeros(data.shape)

    for row_shift, col_shift in shifts:
        neighbour = np.roll(np.roll(data, row_shift, axis=-2), col_shift, axis=-1)
        # NaN neighbours do not count towards the averaging denominator.
        valid_count += ~np.isnan(neighbour)
        delta = data - neighbour
        # A difference involving a NaN must not affect the summation.
        delta[np.isnan(delta)] = 0
        squared_diff_sum += delta ** 2

    # Reinforce that NaN values should have NaN textures.
    squared_diff_sum[np.isnan(data)] = np.nan

    return np.sqrt(squared_diff_sum / valid_count)
80 |
81 |
def get_clustering(radar, vel_name: str = "VEL", phidp_name: str = "PHIDP", zdr_name: str = "ZDR"):
    """
    Create cluster using a trained Gaussian Mixture Model (scikit-learn)
    to cluster the radar data. Cluster 5 is clutter and 2 is noise. Cluster 1
    corresponds to a high gradient on PHIDP (folding), so it may correspond to
    either real data that fold or noise. A threshold on reflectivity should be
    used on cluster 1.

    Parameters:
    ===========
    radar:
        Py-ART radar structure.
    vel_name: str
        Velocity field name.
    phidp_name: str
        Name of the PHIDP field.
    zdr_name: str
        Name of the differential_reflectivity field.

    Returns:
    ========
    cluster: ndarray
        Data ID using GMM (5: clutter, 2: noise, and 1: high-phidp gradient).
    """
    # Load and deserialize the trained GMM shipped with the package.
    # NOTE: pickle is acceptable here because the file is bundled package
    # data, not untrusted external input.
    location = os.path.dirname(os.path.realpath(__file__))
    my_file = os.path.join(location, "data", "GM_model_CPOL.pkl.gz")
    with gzip.GzipFile(my_file, "r") as gzid:
        gmm = pickle.load(gzid)

    # Features are the local textures of VEL, PHIDP and ZDR, one row per gate.
    df_orig = pd.DataFrame(
        {
            "VEL": texture(radar.fields[vel_name]["data"]).flatten(),
            "PHIDP": texture(radar.fields[phidp_name]["data"]).flatten(),
            "ZDR": texture(radar.fields[zdr_name]["data"]).flatten(),
        }
    )

    # Drop gates with any missing texture once (the original called dropna()
    # twice), keeping the surviving index to map labels back onto the grid.
    df = df_orig.dropna()
    pos_droped = df.index
    clusters = gmm.predict(df)

    r = radar.range["data"]
    time = radar.time["data"]
    R, _ = np.meshgrid(r, time)

    # 0 marks gates that had no valid texture; GMM labels are shifted by +1.
    clus = np.zeros_like(R.flatten())
    clus[pos_droped] = clusters + 1
    cluster = clus.reshape(R.shape)

    return cluster
133 |
134 |
def get_gatefilter_GMM(
    radar, refl_name: str = "DBZ", vel_name: str = "VEL", phidp_name: str = "PHIDP", zdr_name: str = "ZDR"
):
    """
    Filtering function adapted to CPOL before 2009 using a trained Gaussian
    Mixture Model. It: 1) clusters gates with the GMM textures, 2) rejects
    ambiguous cluster-1 gates below 20 dBZ, 3) builds a GateFilter from the
    results, and 4) removes the temporary fields from the radar object.

    Parameters:
    ===========
    radar:
        Py-ART radar structure.
    refl_name: str
        Reflectivity field name.
    vel_name: str
        Velocity field name.
    phidp_name: str
        Name of the PHIDP field.
    zdr_name: str
        Name of the differential_reflectivity field.

    Returns:
    ========
    gatefilter: GateFilter
        Gate filter (excluding all bad data).
    """
    # GMM clustering (independent from any reflectivity cutoff).
    cluster = get_clustering(radar, vel_name=vel_name, phidp_name=phidp_name, zdr_name=zdr_name)
    radar.add_field_like(refl_name, "CLUS", cluster, replace_existing=True)

    # Cluster 1 is ambiguous (PHIDP folding): reject it only where the
    # reflectivity is weak (< 20 dBZ).
    weak_fold = (cluster == 1) & (radar.fields[refl_name]["data"] < 20)
    radar.add_field_like(refl_name, "TPOS", weak_fold, replace_existing=True)

    # Build the gatefilter from the GMM results.
    gatefilter = pyart.filters.GateFilter(radar)
    gatefilter.exclude_equal("CLUS", 5)  # clutter
    gatefilter.exclude_equal("CLUS", 2)  # noise
    gatefilter.exclude_equal("TPOS", 1)
    gatefilter = pyart.correct.despeckle_field(radar, refl_name, gatefilter=gatefilter)

    # Drop the temporary fields used to drive the gatefilter.
    for tmp_name in ("TPOS", "CLUS"):
        radar.fields.pop(tmp_name, None)

    return gatefilter
186 |
187 |
def do_gatefilter_cpol(
    radar,
    refl_name: str = "DBZ",
    phidp_name: str = "PHIDP",
    rhohv_name: str = "RHOHV_CORR",
    zdr_name: str = "ZDR",
    snr_name: str = "SNR",
    vel_name: str = "VEL",
):
    """
    Filtering function adapted to CPOL.

    Parameters:
    ===========
    radar:
        Py-ART radar structure.
    refl_name: str
        Reflectivity field name.
    phidp_name: str
        PHIDP field name.
    rhohv_name: str
        Cross correlation ratio field name.
    zdr_name: str
        Name of the differential_reflectivity field.
    snr_name: str
        Signal-to-noise ratio field name.
    vel_name: str
        Velocity field name.

    Returns:
    ========
    gf_despeckeld: GateFilter
        Gate filter (excluding all bad data).
    """
    radar_start_date = cftime.num2pydate(radar.time["data"][0], radar.time["units"])

    # GMM-based clutter/noise identification.
    gf = get_gatefilter_GMM(
        radar, refl_name=refl_name, vel_name=vel_name, phidp_name=phidp_name, zdr_name=zdr_name,
    )

    rng = radar.range["data"]
    azimuth = radar.azimuth["data"]
    range2d, _ = np.meshgrid(rng, azimuth)

    # Hard thresholds on SNR and on obviously wrong ZDR/DBZ values.
    gf.exclude_below(snr_name, 9)
    gf.exclude_outside(zdr_name, -3.0, 7.0)
    gf.exclude_outside(refl_name, -20.0, 80.0)

    # Reject low RHOHV, but only within 90 km of the radar.
    rhohv = radar.fields[rhohv_name]["data"]
    keep_flag = np.ones_like(rhohv)
    keep_flag[(range2d < 90e3) & (rhohv < 0.7)] = 0
    radar.add_field_like(refl_name, "TMPRH", keep_flag)
    gf.exclude_equal("TMPRH", 0)

    # Remove rings in march 1999.
    if radar_start_date.year == 1999 and radar_start_date.month == 3:
        radar.add_field_like(refl_name, "RRR", range2d)
        gf.exclude_above("RRR", 140e3)

    gf_despeckeld = pyart.correct.despeckle_field(radar, refl_name, gatefilter=gf)

    # Remove temporary fields.
    for key in ("NDBZ", "RRR", "TMPRH"):
        radar.fields.pop(key, None)

    return gf_despeckeld
264 |
265 |
def do_gatefilter(
    radar,
    refl_name: str = "DBZ",
    phidp_name: str = "PHIDP",
    rhohv_name: str = "RHOHV_CORR",
    zdr_name: str = "ZDR",
    snr_name: str = "SNR",
):
    """
    Basic filtering function for dual-polarisation data.

    Parameters:
    ===========
    radar:
        Py-ART radar structure.
    refl_name: str
        Reflectivity field name.
    phidp_name: str
        PHIDP field name.
    rhohv_name: str
        Cross correlation ratio field name.
    zdr_name: str
        Name of the differential_reflectivity field.
    snr_name: str
        Signal-to-noise ratio field name (currently unused here).

    Returns:
    ========
    gf_despeckeld: GateFilter
        Gate filter (excluding all bad data).
    """
    gatefilter = pyart.filters.GateFilter(radar)

    # Drop gates with implausible polarimetric values.
    gatefilter.exclude_outside(zdr_name, -6.0, 7.0)
    gatefilter.exclude_outside(refl_name, -20.0, 80.0)

    # High PHIDP texture flags noisy/non-meteorological echoes.
    phidp_texture = texture(radar.fields[phidp_name]["data"])
    radar.add_field_like(phidp_name, "PHITXT", phidp_texture)
    gatefilter.exclude_above("PHITXT", 20)
    gatefilter.exclude_below(rhohv_name, 0.6)

    # Despeckle
    gf_despeckeld = pyart.correct.despeckle_field(radar, refl_name, gatefilter=gatefilter)

    # Drop the temporary PHIDP texture field.
    radar.fields.pop("PHITXT", None)

    return gf_despeckeld
318 |
--------------------------------------------------------------------------------
/cpol_processing/hydrometeors.py:
--------------------------------------------------------------------------------
1 | """
2 | Codes for estimating various parameters related to Hydrometeors.
3 |
4 | @title: hydrometeors
5 | @author: Valentin Louf
6 | @institutions: Monash University and the Australian Bureau of Meteorology
7 | @creation: 04/04/2017
8 | @date: 26/08/2020
9 |
10 | .. autosummary::
11 | :toctree: generated/
12 |
13 | dsd_retrieval
14 | hydrometeor_classification
15 | liquid_ice_mass
16 | merhala_class_convstrat
17 | rainfall_rate
18 | """
19 | # Other Libraries
20 | import numpy as np
21 |
22 | from csu_radartools import csu_fhc, csu_blended_rain, csu_dsd
23 |
24 |
def dsd_retrieval(radar, gatefilter, kdp_name, zdr_name, refl_name="DBZ_CORR", band="C"):
    """
    Compute the drop-size distribution retrieval using the csu library.

    Parameters:
    ===========
    radar:
        Py-ART radar structure.
    gatefilter:
        Filter excluding non meteorological echoes.
    kdp_name: str
        KDP field name.
    zdr_name: str
        ZDR field name.
    refl_name: str
        Reflectivity field name.
    band: str
        Radar frequency band.

    Returns:
    ========
    nw_dict: dict
        Normalized Intercept Parameter.
    d0_dict: dict
        Median Volume Diameter.
    """
    refl = radar.fields[refl_name]["data"].copy().filled(np.nan)
    zdr = radar.fields[zdr_name]["data"].copy()
    # KDP may be a plain ndarray (no .filled method) instead of a masked array.
    kdp = radar.fields[kdp_name]["data"].copy()
    if hasattr(kdp, "filled"):
        kdp = kdp.filled(np.nan)

    d0, nw, _ = csu_dsd.calc_dsd(dz=refl, zdr=zdr, kdp=kdp, band=band)

    # Nw is reported as log10; blank filtered gates and mask invalid values.
    nw = np.log10(nw)
    nw[gatefilter.gate_excluded] = np.nan
    nw = np.ma.masked_invalid(nw).astype(np.float32)
    np.ma.set_fill_value(nw, np.nan)

    d0[gatefilter.gate_excluded] = np.nan
    d0 = np.ma.masked_invalid(d0).astype(np.float32)
    np.ma.set_fill_value(d0, np.nan)

    nw_dict = {
        "data": nw,
        "long_name": "normalized_intercept_parameter",
        "units": " ",
        "_FillValue": np.nan,
        "_Least_significant_digit": 2,
        "reference": "doi:10.1175/2009JTECHA1258.1",
    }

    d0_dict = {
        "data": d0,
        "units": "mm",
        "long_name": "median_volume_diameter",
        "_FillValue": np.nan,
        "_Least_significant_digit": 2,
        "reference": "doi:10.1175/2009JTECHA1258.1",
    }

    return nw_dict, d0_dict
86 |
87 |
def hydrometeor_classification(
    radar,
    gatefilter,
    kdp_name,
    zdr_name,
    refl_name="DBZ_CORR",
    rhohv_name="RHOHV_CORR",
    temperature_name="temperature",
    height_name="height",
    band="C",
):
    """
    Compute hydrometeor classification using the CSU fuzzy-logic scheme.

    Parameters:
    ===========
    radar:
        Py-ART radar structure.
    gatefilter:
        Filter excluding non meteorological echoes.
    kdp_name: str
        KDP field name.
    zdr_name: str
        ZDR field name.
    refl_name: str
        Reflectivity field name.
    rhohv_name: str
        RHOHV field name.
    temperature_name: str
        Sounding temperature field name.
    height_name: str
        Gate height field name (unused here, kept for API compatibility).
    band: str
        Radar frequency band.

    Returns:
    ========
    hydro_meta: dict
        Hydrometeor classification.
    """
    refl = radar.fields[refl_name]["data"].copy().filled(np.nan)
    zdr = radar.fields[zdr_name]["data"].copy().filled(np.nan)
    # KDP may be a plain ndarray (no .filled method) instead of a masked array.
    try:
        kdp = radar.fields[kdp_name]["data"].copy().filled(np.nan)
    except AttributeError:
        kdp = radar.fields[kdp_name]["data"].copy()
    rhohv = radar.fields[rhohv_name]["data"]

    # Use the sounding temperature when the field is available.
    try:
        radar_T = radar.fields[temperature_name]["data"]
        use_temperature = True
    except Exception:
        use_temperature = False

    if use_temperature:
        scores = csu_fhc.csu_fhc_summer(dz=refl, zdr=zdr, rho=rhohv, kdp=kdp, use_temp=True, band=band, T=radar_T)
    else:
        scores = csu_fhc.csu_fhc_summer(dz=refl, zdr=zdr, rho=rhohv, kdp=kdp, use_temp=False, band=band)

    # Winning class per gate; 0 is reserved for filtered-out gates and masked.
    hydro = np.argmax(scores, axis=0) + 1
    hydro[gatefilter.gate_excluded] = 0
    hydro_data = np.ma.masked_equal(hydro.astype(np.int16), 0)

    the_comments = (
        "1: Drizzle; 2: Rain; 3: Ice Crystals; 4: Aggregates; "
        + "5: Wet Snow; 6: Vertical Ice; 7: LD Graupel; 8: HD Graupel; 9: Hail; 10: Big Drops"
    )

    hydro_meta = {
        "data": hydro_data,
        "units": " ",
        "long_name": "Hydrometeor classification",
        "_FillValue": np.int16(0),
        "standard_name": "Hydrometeor_ID",
        "comments": the_comments,
    }

    return hydro_meta
163 |
164 |
def merhala_class_convstrat(
    radar, dbz_name="DBZ_CORR", rain_name="radar_estimated_rain_rate", d0_name="D0", nw_name="NW"
):
    """
    Merhala Thurai's criteria for classifying rain as either Stratiform,
    Convective or Mixed, based on the D-Zero value and the log10(Nw) value.
    The classification is 1 for Stratiform, 2 for Convective, 3 for Mixed,
    and 0 if no rain.

    Parameters:
    ===========
    radar:
        Py-ART radar structure.
    dbz_name: str
        Reflectivity field name.
    rain_name: str
        Rainfall rate field name (unused here, kept for API compatibility).
    d0_name: str
        Median volume diameter field name.
    nw_name: str
        Normalized intercept parameter field name.

    Returns:
    ========
    class_meta: dict
        Merhala Thurai classification.
    """
    d0 = radar.fields[d0_name]["data"]
    nw = radar.fields[nw_name]["data"]
    dbz = radar.fields[dbz_name]["data"]

    # Separation index: positive is the convective side, negative stratiform.
    indexa = nw - 6.4 + 1.7 * d0

    # The four regions below are disjoint, so assignment order is irrelevant.
    echo_class = np.zeros(dbz.shape, dtype=np.int16)
    echo_class[indexa < -0.1] = 1
    echo_class[(indexa >= -0.1) & (indexa <= 0.1)] = 3
    convective_side = indexa > 0.1
    echo_class[convective_side & (dbz > 20)] = 2
    echo_class[convective_side & (dbz <= 20)] = 1

    # Gates with out-of-range D0/Nw or masked reflectivity are invalid.
    valid_d0 = (d0 >= -5) & (d0 <= 100)
    valid_nw = (nw >= -10) & (nw <= 100)
    echo_class = np.ma.masked_where(~valid_d0 | ~valid_nw | dbz.mask, echo_class)

    class_meta = {
        "data": echo_class,
        "long_name": "thurai_echo_classification",
        "valid_min": 0,
        "valid_max": 3,
        "comment_1": "Convective-stratiform echo classification based on Merhala Thurai",
        "comment_2": "0 = Undefined, 1 = Stratiform, 2 = Convective, 3 = Mixed",
    }

    return class_meta
220 |
221 |
def rainfall_rate(
    radar,
    gatefilter,
    kdp_name,
    zdr_name,
    refl_name="DBZ_CORR",
    hydro_name="radar_echo_classification",
    temperature_name="temperature",
    band="C",
):
    """
    Rainfall rate algorithm from csu_radartools.

    Parameters:
    ===========
    radar:
        Py-ART radar structure.
    gatefilter:
        Filter excluding non meteorological echoes.
    kdp_name: str
        KDP field name.
    zdr_name: str
        ZDR field name.
    refl_name: str
        Reflectivity field name.
    hydro_name: str
        Hydrometeor classification field name.
    temperature_name: str
        Sounding temperature field name.
    band: str
        Radar frequency band.

    Returns:
    ========
    rainrate: dict
        Rainfall rate.
    """

    def _filled(field_name):
        # Fields may be plain ndarrays instead of masked arrays: only call
        # .filled() when it exists. The original guarded KDP only; DBZ and
        # ZDR now get the same treatment for consistency.
        data = radar.fields[field_name]["data"]
        try:
            return data.filled(np.nan)
        except AttributeError:
            return data

    dbz = _filled(refl_name)
    zdr = _filled(zdr_name)
    kdp = _filled(kdp_name)
    fhc = radar.fields[hydro_name]["data"]

    rain, _ = csu_blended_rain.calc_blended_rain_tropical(dz=dbz, zdr=zdr, kdp=kdp, fhc=fhc, band=band)

    # Zero out filtered, invalid or negative rates.
    rain[(gatefilter.gate_excluded) | np.isnan(rain) | (rain < 0)] = 0

    # No liquid rainfall below freezing (best effort: only if a temperature
    # field is present).
    try:
        temp = radar.fields[temperature_name]["data"]
        rain[temp < 0] = 0
    except Exception:
        pass

    rainrate = {
        "long_name": "Blended Rainfall Rate",
        "units": "mm h-1",
        "standard_name": "rainfall_rate",
        "_Least_significant_digit": 2,
        "_FillValue": np.nan,
        "description": "Rainfall rate algorithm based on Thompson et al. 2016.",
        "data": rain.astype(np.float32),
    }

    return rainrate
284 |
--------------------------------------------------------------------------------
/cpol_processing/phase.py:
--------------------------------------------------------------------------------
1 | """
2 | Codes for correcting the differential phase and estimating KDP.
3 |
4 | @title: phase
5 | @author: Valentin Louf
6 | @institutions: Monash University and the Australian Bureau of Meteorology
7 | @date: 08/02/2020
8 |
9 | .. autosummary::
10 | :toctree: generated/
11 |
12 | _fix_phidp_from_kdp
13 | phidp_bringi
14 | phidp_giangrande
15 | """
16 | import pyart
17 | import numpy as np
18 |
19 | from scipy import integrate
20 | from csu_radartools import csu_kdp
21 |
22 |
23 | def _fix_phidp_from_kdp(phidp, kdp, r, gatefilter):
24 | """
25 | Correct PHIDP and KDP from spider webs.
26 |
27 | Parameters
28 | ==========
29 | r:
30 | Radar range.
31 | gatefilter:
32 | Gate filter.
33 | kdp_name: str
34 | Differential phase key name.
35 | phidp_name: str
36 | Differential phase key name.
37 |
38 | Returns:
39 | ========
40 | phidp: ndarray
41 | Differential phase array.
42 | """
43 | kdp[gatefilter.gate_excluded] = 0
44 | kdp[(kdp < -4)] = 0
45 | kdp[kdp > 15] = 0
46 | interg = integrate.cumtrapz(kdp, r, axis=1)
47 |
48 | phidp[:, :-1] = interg / (len(r))
49 | return phidp, kdp
50 |
51 |
def phidp_bringi(radar, gatefilter, unfold_phidp_name="PHI_UNF", refl_field='DBZ'):
    """
    Compute PHIDP and KDP using the Bringi technique (csu_radartools).

    Parameters
    ==========
    radar:
        Py-ART radar data structure.
    gatefilter:
        Gate filter.
    unfold_phidp_name: str
        Differential phase key name.
    refl_field: str
        Reflectivity key name.

    Returns:
    ========
    phimeta: dict
        Bringi differential phase field dictionary.
    kdpmeta: dict
        Bringi specific differential phase field dictionary.
    """
    dp = radar.fields[unfold_phidp_name]['data'].copy()
    dz = radar.fields[refl_field]['data'].copy().filled(-9999)

    # Offset PHIDP when, on average, the included gates are negative.
    # nanmean raises ValueError on an empty selection: nothing to offset then.
    try:
        if np.nanmean(dp[gatefilter.gate_included]) < 0:
            dp += 90
    except ValueError:
        pass

    # Range/azimuth grids and gate spacing for the CSU routine.
    rng = radar.range['data']
    azi = radar.azimuth['data']
    dgate = rng[1] - rng[0]
    R, A = np.meshgrid(rng, azi)

    # Compute KDP bringi (range expected in km).
    kdpb, phidpb, _ = csu_kdp.calc_kdp_bringi(dp, dz, R / 1e3, gs=dgate, bad=-9999, thsd=12, window=3.0, std_gate=11)

    # -9999 is the CSU sentinel for bad data.
    phidpb = np.ma.masked_where(phidpb == -9999, phidpb)
    kdpb = np.ma.masked_where(kdpb == -9999, kdpb)

    # Wrap results into Py-ART field dictionaries.
    phimeta = pyart.config.get_metadata("differential_phase")
    phimeta['data'] = phidpb
    kdpmeta = pyart.config.get_metadata("specific_differential_phase")
    kdpmeta['data'] = kdpb

    return phimeta, kdpmeta
103 |
104 |
def phidp_giangrande(radar, gatefilter, refl_field='DBZ', ncp_field='NCP',
                     rhv_field='RHOHV_CORR', phidp_field='PHIDP'):
    """
    Phase processing using the LP method in Py-ART. A LP solver is required.

    Parameters:
    ===========
    radar:
        Py-ART radar structure.
    gatefilter:
        Gate filter.
    refl_field: str
        Reflectivity field label.
    ncp_field: str
        Normalised coherent power field label.
    rhv_field: str
        Cross correlation ratio field label.
    phidp_field: str
        Differential phase label.

    Returns:
    ========
    phidp_gg: dict
        Field dictionary containing processed differential phase shifts.
    kdp_gg: dict
        Field dictionary containing recalculated differential phases.
    """
    # Unwrap the raw differential phase first. The velocity dealiaser is
    # reused here by treating PHIDP as a "velocity" with a 90 deg Nyquist.
    unwrapped = pyart.correct.dealias_unwrap_phase(
        radar, gatefilter=gatefilter, skip_checks=True, vel_field=phidp_field, nyquist_vel=90
    )
    radar.add_field_like(phidp_field, 'PHITMP', unwrapped['data'])

    # LP phase processing on the unwrapped field.
    phidp_gg, kdp_gg = pyart.correct.phase_proc_lp(
        radar, 0.0, LP_solver='cylp', ncp_field=ncp_field,
        refl_field=refl_field, rhv_field=rhv_field, phidp_field='PHITMP'
    )

    # Rebuild PHIDP from the cleaned KDP to remove spider webs.
    phidp_gg['data'], kdp_gg['data'] = _fix_phidp_from_kdp(
        phidp_gg['data'], kdp_gg['data'], radar.range['data'], gatefilter
    )

    # Remove temp variables (best effort, mirroring the single try-block).
    try:
        radar.fields.pop('unfolded_differential_phase')
        radar.fields.pop('PHITMP')
    except Exception:
        pass

    phidp_gg['data'] = phidp_gg['data'].astype(np.float32)
    phidp_gg['_Least_significant_digit'] = 4
    kdp_gg['data'] = kdp_gg['data'].astype(np.float32)
    kdp_gg['_Least_significant_digit'] = 4

    return phidp_gg, kdp_gg
165 |
--------------------------------------------------------------------------------
/cpol_processing/production.py:
--------------------------------------------------------------------------------
1 | """
2 | CPOL Level 1b main production line. These are the drivers function.
3 |
4 | @title: production
5 | @author: Valentin Louf
6 | @email: valentin.louf@bom.gov.au
7 | @copyright: Valentin Louf (2017-2021)
8 | @institution: Bureau of Meteorology and Monash University
9 | @date: 30/03/2021
10 |
11 | .. autosummary::
12 | :toctree: generated/
13 |
14 | _mkdir
15 | buffer
16 | process_and_save
17 | production_line
18 | """
19 | # Python Standard Library
20 | import gc
21 | import os
22 | import time
23 | import uuid
24 | import datetime
25 | import traceback
26 | import warnings
27 |
28 | # Other Libraries
29 | import pyart
30 | import cftime
31 | import numpy as np
32 |
33 | # Custom modules.
34 | from . import attenuation
35 | from . import cfmetadata
36 | from . import filtering
37 | from . import hydrometeors
38 | from . import phase
39 | from . import radar_codes
40 | from . import velocity
41 |
42 |
43 | def _mkdir(dir):
44 | """
45 | Make directory. Might seem redundant but you might have concurrency issue
46 | when dealing with multiprocessing.
47 | """
48 | if os.path.exists(dir):
49 | return None
50 |
51 | try:
52 | os.mkdir(dir)
53 | except FileExistsError:
54 | pass
55 |
56 | return None
57 |
58 |
def buffer(func):
    """
    Decorator catching and printing any exception raised by the wrapped
    function, returning None instead of propagating it. Almost want to name
    the function dont_fail.

    Parameter:
    ==========
    func: callable
        Function to wrap.

    Returns:
    ========
    wrapper: callable
        Wrapped function returning None on any exception.
    """
    from functools import wraps

    @wraps(func)  # preserve the wrapped function's name/docstring
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception:
            traceback.print_exc()
            return None

    return wrapper
74 |
75 |
class Chronos:
    """
    Context manager timing the enclosed block and printing the elapsed
    wall-clock time on exit.

    Parameter:
    ==========
    messg: str, optional
        Label printed alongside the elapsed time.
    """

    def __init__(self, messg=None):
        self.messg = messg

    def __enter__(self):
        self.start = time.time()
        return self  # allow `with Chronos(...) as timer:`

    def __exit__(self, ntype, value, traceback):
        self.time = time.time() - self.start
        if self.messg is not None:
            print(f"{self.messg} took {self.time:.2f}s.")
        else:
            print(f"Processed in {self.time:.2f}s.")
87 |
88 |
@buffer
def process_and_save(
    radar_file_name: str, outpath: str, sound_dir: str = None, do_dealiasing: bool = True, instrument: str = "CPOL",
) -> None:
    """
    Call the processing chain (production_line) and write the result to a
    CF/Radial NetCDF file under a date-based directory tree.

    Decorated with @buffer: any exception is printed and swallowed, and the
    call returns None, so a single bad file does not kill a worker pool.

    Parameters:
    ===========
    radar_file_name: str
        Name of the input radar file.
    outpath: str
        Path for saving output data.
    sound_dir: str
        Path to radiosoundings directory (may be None).
    do_dealiasing: bool
        Dealias velocity.
    instrument: str
        Name of radar (only "CPOL" changes the filename convention, the
        ACDD metadata block and the processing options).

    Returns:
    ========
    None in every case (including when processing failed or the output file
    already exists).
    """
    # NOTE(review): utcnow() returns a naive datetime; fine for strftime use
    # here, but deprecated in Python 3.12+ in favour of now(timezone.utc).
    today = datetime.datetime.utcnow()
    if instrument == "CPOL":
        is_cpol = True
    else:
        is_cpol = False

    # Create directories: <outpath>/v<year>/ppi
    _mkdir(outpath)
    outpath = os.path.join(outpath, "v{}".format(today.strftime("%Y")))
    _mkdir(outpath)
    outpath_ppi = os.path.join(outpath, "ppi")
    _mkdir(outpath_ppi)
    tick = time.time()

    # Business start here. Warnings are silenced around the whole chain.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        radar = production_line(radar_file_name, sound_dir, is_cpol=is_cpol, do_dealiasing=do_dealiasing)
    # Business over.
    gc.collect()

    # production_line returns None when the SNR computation failed.
    if radar is None:
        print(f"{radar_file_name} has not been processed. Check logs.")
        return None

    # Scan start/end times from the radar's own time coordinate.
    radar_start_date = cftime.num2pydate(radar.time["data"][0], radar.time["units"])
    radar_end_date = cftime.num2pydate(radar.time["data"][-1], radar.time["units"])
    # Extend the tree to <...>/ppi/<year>/<yyyymmdd>.
    outpath_ppi = os.path.join(outpath_ppi, str(radar_start_date.year))
    _mkdir(outpath_ppi)
    outpath_ppi = os.path.join(outpath_ppi, radar_start_date.strftime("%Y%m%d"))
    _mkdir(outpath_ppi)

    # Generate output file name (ARM-style name for CPOL, cfrad.* otherwise).
    if instrument == "CPOL":
        outfilename = "twp10cpolppi.b1.{}00.nc".format(radar_start_date.strftime("%Y%m%d.%H%M"))
    else:
        outfilename = "cfrad." + radar_start_date.strftime("%Y%m%d_%H%M%S") + ".nc"

    outfilename = os.path.join(outpath_ppi, outfilename)

    # Check if output file already exists: never overwrite.
    if os.path.isfile(outfilename):
        print(f"Output file {outfilename} already exists.")
        return None

    if is_cpol:
        # Lat/lon informations used for the ACDD geospatial bounds below.
        latitude = radar.gate_latitude["data"]
        longitude = radar.gate_longitude["data"]
        maxlon = longitude.max()
        minlon = longitude.min()
        maxlat = latitude.max()
        minlat = latitude.min()
        # Fixed CPOL site coordinates (Gunn Point, NT).
        origin_altitude = "50"
        origin_latitude = "-12.2491"
        origin_longitude = "131.0444"

        # Replace the whole metadata dict with an ACDD-1.3 compliant one.
        # Only radar.metadata["version"] is carried over from the input.
        unique_id = str(uuid.uuid4())
        metadata = {
            "Conventions": "CF-1.6, ACDD-1.3",
            "acknowledgement": "This work has been supported by the U.S. Department of Energy Atmospheric Systems Research Program through the grant DE-SC0014063. Data may be freely distributed.",
            "country": "Australia",
            "creator_email": "CPOL-support@bom.gov.au",
            "creator_name": "Commonwealth of Australia, Bureau of Meteorology, Science and Innovation, Research, Weather and Environmental Prediction, Radar Science and Nowcasting",
            "creator_url": "http://www.bom.gov.au/australia/radar/",
            "date_created": today.isoformat(),
            "geospatial_bounds": f"POLYGON(({minlon:0.6} {minlat:0.6},{minlon:0.6} {maxlat:0.6},{maxlon:0.6} {maxlat:0.6},{maxlon:0.6} {minlat:0.6},{minlon:0.6} {minlat:0.6}))",
            "geospatial_lat_max": f"{maxlat:0.6}",
            "geospatial_lat_min": f"{minlat:0.6}",
            "geospatial_lat_units": "degrees_north",
            "geospatial_lon_max": f"{maxlon:0.6}",
            "geospatial_lon_min": f"{minlon:0.6}",
            "geospatial_lon_units": "degrees_east",
            "history": "created by Valentin Louf on raijin.nci.org.au at " + today.isoformat() + " using Py-ART",
            "id": unique_id,
            "institution": "Bureau of Meteorology",
            "instrument": "radar",
            "instrument_name": "CPOL",
            "instrument_type": "radar",
            "keywords": "radar, tropics, Doppler, dual-polarization",
            "license": "CC BY-NC-SA 4.0",
            "naming_authority": "au.gov.bom",
            "origin_altitude": origin_altitude,
            "origin_latitude": origin_latitude,
            "origin_longitude": origin_longitude,
            "platform_is_mobile": "false",
            "processing_level": "b1",
            "project": "CPOL",
            "publisher_name": "NCI",
            "publisher_url": "nci.gov.au",
            "product_version": f"v{today.year}.{today.month:02}",
            "references": "doi:10.1175/JTECH-D-18-0007.1",
            "site_name": "Gunn Pt",
            "source": "radar",
            "state": "NT",
            "standard_name_vocabulary": "CF Standard Name Table v71",
            "summary": "Volumetric scan from CPOL dual-polarization Doppler radar (Darwin, Australia)",
            "time_coverage_start": radar_start_date.isoformat(),
            "time_coverage_end": radar_end_date.isoformat(),
            "time_coverage_duration": "P10M",
            "time_coverage_resolution": "PT10M",
            "title": "radar PPI volume from CPOL",
            "uuid": unique_id,
            "version": radar.metadata["version"],
        }

        radar.metadata = metadata

    # Write results (timed and reported by Chronos).
    with Chronos(f"Writing {outfilename}"):
        pyart.io.write_cfradial(outfilename, radar, format="NETCDF4")
    print("%s processed in %0.2fs." % (os.path.basename(radar_file_name), (time.time() - tick)))

    # Free memory
    del radar

    return None
226 |
227 |
def production_line(
    radar_file_name: str, sound_dir: str, is_cpol: bool = True, do_dealiasing: bool = True
) -> pyart.core.radar.Radar:
    """
    Production line for correcting and estimating CPOL data radar parameters.
    The naming convention for these parameters is assumed to be DBZ, ZDR, VEL,
    PHIDP, KDP, SNR, RHOHV, and NCP. KDP, NCP, and SNR are optional and can be
    recalculated.

    Parameters:
    ===========
    radar_file_name: str
        Name of the input radar file.
    sound_dir: str
        Path to radiosounding directory. May be None, in which case the
        sounding-derived temperature/height/SNR fields are not added.
    is_cpol: bool
        True when the radar is CPOL (selects reader, gatefilter, Nyquist
        velocity and final field pruning).
    do_dealiasing: bool
        Dealias velocity.

    Returns:
    ========
    radar: pyart.core.radar.Radar
        Py-ART radar structure, or None if the SNR computation failed.

    PLAN:
    =====
    01/ Read input radar file.
    02/ Check if radar file OK (no problem with azimuth and reflectivity).
    03/ Get radar date.
    04/ Check if NCP field exists (creating a fake one if it doesn't)
    05/ Check if RHOHV field exists (creating a fake one if it doesn't)
    06/ Compute SNR and temperature using radiosoundings.
    07/ Correct RHOHV using Ryzhkov algorithm.
    08/ Create gatefilter (remove noise and incorrect data).
    09/ Correct ZDR using Ryzhkov algorithm.
    10/ Compute Giangrande's PHIDP using pyart.
    11/ Unfold velocity.
    12/ Compute attenuation for ZH
    13/ Compute attenuation for ZDR
    14/ Estimate Hydrometeors classification using csu toolbox.
    15/ Estimate Rainfall rate using csu toolbox.
    16/ Removing fake/temporary fieds.
    17/ Rename fields to pyart standard names.
    """
    # Mapping (internal working name -> CF/Radial output name), applied near
    # the end of the chain. Missing source fields are silently skipped.
    FIELDS_NAMES = [
        ("VEL", "velocity"),
        ("VEL_UNFOLDED", "corrected_velocity"),
        ("DBZ", "total_power"),
        ("DBZ_CORR", "corrected_reflectivity"),
        ("RHOHV_CORR", "cross_correlation_ratio"),
        ("ZDR", "differential_reflectivity"),
        ("ZDR_CORR_ATTEN", "corrected_differential_reflectivity"),
        ("PHIDP", "differential_phase"),
        ("PHIDP_BRINGI", "bringi_differential_phase"),
        ("PHIDP_GG", "giangrande_differential_phase"),
        ("PHIDP_VAL", "corrected_differential_phase"),
        ("KDP", "specific_differential_phase"),
        ("KDP_BRINGI", "bringi_specific_differential_phase"),
        ("KDP_GG", "giangrande_specific_differential_phase"),
        ("KDP_VAL", "corrected_specific_differential_phase"),
        ("WIDTH", "spectrum_width"),
        ("SNR", "signal_to_noise_ratio"),
        ("NCP", "normalized_coherent_power"),
        ("DBZV", "reflectivity_v"),
        ("WRADV", "spectrum_width_v"),
        ("SNRV", "signal_to_noise_ratio_v"),
        ("SQIV", "normalized_coherent_power_v"),
    ]

    # List of keys that we'll keep in the output radar dataset.
    OUTPUT_RADAR_FLD = [
        "corrected_differential_phase",
        "corrected_differential_reflectivity",
        "corrected_reflectivity",
        "corrected_specific_differential_phase",
        "corrected_velocity",
        "cross_correlation_ratio",
        "differential_phase",
        "differential_reflectivity",
        "radar_echo_classification",
        "radar_estimated_rain_rate",
        "signal_to_noise_ratio",
        "spectrum_width",
        "total_power",
        "velocity",
    ]

    # !!! READING THE RADAR !!!
    # Non-CPOL files go through read_radar, which also remaps ODIM-style
    # field names onto the DBZ/VEL/... convention used below.
    if is_cpol:
        radar = pyart.io.read(radar_file_name)
    else:
        radar = radar_codes.read_radar(radar_file_name)

    # Blank every field within the first 3 km of range.
    # NOTE(review): np.NaN was removed in NumPy 2.0 — use np.nan if upgrading.
    pos = radar.range['data'] < 3e3
    for k in radar.fields.keys():
        radar.fields[k]['data'][:, pos] = np.NaN

    # Correct data type manually (best effort; failures are ignored).
    try:
        radar.longitude["data"] = np.ma.masked_invalid(radar.longitude["data"].astype(np.float32))
        radar.latitude["data"] = np.ma.masked_invalid(radar.latitude["data"].astype(np.float32))
        radar.altitude["data"] = np.ma.masked_invalid(radar.altitude["data"].astype(np.int32))
    except Exception:
        pass

    # Check if radar reflecitivity field is correct.
    if not radar_codes.check_reflectivity(radar):
        raise TypeError(f"Reflectivity field is empty in {radar_file_name}.")

    if not radar_codes.check_azimuth(radar):
        raise TypeError(f"Azimuth field is empty in {radar_file_name}.")

    # check_year fixes the "year 2098 instead of 1998" century bug in place.
    if not radar_codes.check_year(radar):
        print(f"{radar_file_name} date probably wrong. Had to correct century.")

    new_azimuth, azi_has_changed = radar_codes.correct_azimuth(radar)
    if azi_has_changed:
        radar.azimuth["data"] = new_azimuth

    # Getting radar's date and time.
    radar_start_date = cftime.num2pydate(radar.time["data"][0], radar.time["units"])
    # NOTE(review): this replace always inserts a space after "since"; if the
    # units string already reads "since ", a double space results — confirm
    # the raw files' time-units format.
    radar.time["units"] = radar.time["units"].replace("since", "since ")

    # Correct Doppler velocity units.
    try:
        radar.fields["VEL"]["units"] = "m s-1"
        vel_missing = False
    except KeyError:
        vel_missing = True

    # Looking for RHOHV field
    # For CPOL, season 09/10, there are no RHOHV fields before March!!!!
    try:
        radar.fields["RHOHV"]
        fake_rhohv = False  # Don't need to delete this field cause it's legit.
    except KeyError:
        # Creating a fake RHOHV field (all ones).
        fake_rhohv = True  # We delete this fake field later.
        rho = pyart.config.get_metadata("cross_correlation_ratio")
        rho["data"] = np.ones_like(radar.fields["DBZ"]["data"])
        # NOTE(review): the same `rho` dict object is registered under both
        # names, so RHOHV and RHOHV_CORR share their data array here.
        radar.add_field("RHOHV", rho)
        radar.add_field("RHOHV_CORR", rho)

    # Compute SNR and extract radiosounding temperature.
    # Requires radiosoundings
    if sound_dir is not None:
        radiosonde_fname = radar_codes.get_radiosoundings(sound_dir, radar_start_date)
        try:
            height, temperature, snr = radar_codes.snr_and_sounding(radar, radiosonde_fname)
            radar.add_field("temperature", temperature, replace_existing=True)
            radar.add_field("height", height, replace_existing=True)
        except ValueError:
            traceback.print_exc()
            print(f"Impossible to compute SNR {radar_file_name}")
            return None

        # Looking for SNR. `snr` is guaranteed bound here: a ValueError above
        # returns early.
        try:
            radar.fields["SNR"]
        except KeyError:
            radar.add_field("SNR", snr, replace_existing=True)

    # Correct RHOHV (noise correction; skipped for the all-ones fake field).
    if not fake_rhohv:
        rho_corr = radar_codes.correct_rhohv(radar)
        radar.add_field_like("RHOHV", "RHOHV_CORR", rho_corr, replace_existing=True)

    # Correct ZDR
    corr_zdr = radar_codes.correct_zdr(radar)
    radar.add_field_like("ZDR", "ZDR_CORR", corr_zdr, replace_existing=True)

    # GateFilter (CPOL uses its own tuned filter).
    if is_cpol:
        gatefilter = filtering.do_gatefilter_cpol(
            radar, refl_name="DBZ", phidp_name="PHIDP", rhohv_name="RHOHV_CORR", zdr_name="ZDR"
        )
    else:
        gatefilter = filtering.do_gatefilter(
            radar, refl_name="DBZ", phidp_name="PHIDP", rhohv_name="RHOHV_CORR", zdr_name="ZDR"
        )

    # Check if NCP exists. If not, fake one from the gatefilter: 1 where the
    # gate is included, 0 elsewhere.
    try:
        radar.fields["NCP"]
        fake_ncp = False
    except KeyError:
        fake_ncp = True
        ncp = pyart.config.get_metadata("normalized_coherent_power")
        ncp["data"] = np.zeros_like(radar.fields["RHOHV"]["data"])
        ncp["data"][gatefilter.gate_included] = 1
        radar.add_field("NCP", ncp)

    # Giangrande PHIDP/KDP retrieval (timed).
    with Chronos(f"PHIDP for {os.path.basename(radar_file_name)}"):
        phidp, kdp = phase.phidp_giangrande(radar, gatefilter)
        radar.add_field("PHIDP_VAL", phidp)
        radar.add_field("KDP_VAL", kdp)
        kdp_field_name = "KDP_VAL"
        phidp_field_name = "PHIDP_VAL"

    # Unfold VELOCITY (CPOL has a fixed, known Nyquist co-interval).
    if do_dealiasing:
        with Chronos(f"UNRAVEL for {os.path.basename(radar_file_name)}"):
            if not vel_missing:
                if is_cpol:
                    vdop_unfold = velocity.unravel(radar, gatefilter, nyquist=13.3)
                else:
                    vdop_unfold = velocity.unravel(radar, gatefilter)
                radar.add_field("VEL_UNFOLDED", vdop_unfold, replace_existing=True)

    # Correct attenuation ZH and ZDR and hardcode gatefilter
    zh_corr = attenuation.correct_attenuation_zh_pyart(radar, gatefilter, phidp_field=phidp_field_name)
    radar.add_field_like("DBZ", "DBZ_CORR", zh_corr)

    zdr_corr = attenuation.correct_attenuation_zdr(radar, gatefilter, phidp_name=phidp_field_name)
    radar.add_field("ZDR_CORR_ATTEN", zdr_corr)

    # Hydrometeors classification
    hydro_class = hydrometeors.hydrometeor_classification(
        radar, gatefilter, kdp_name=kdp_field_name, zdr_name="ZDR_CORR_ATTEN"
    )

    radar.add_field("radar_echo_classification", hydro_class, replace_existing=True)

    # Rainfall rate
    rainfall = hydrometeors.rainfall_rate(
        radar, gatefilter, kdp_name=kdp_field_name, refl_name="DBZ_CORR", zdr_name="ZDR_CORR_ATTEN"
    )
    radar.add_field("radar_estimated_rain_rate", rainfall)

    # Removing fake and useless fields.
    if fake_ncp:
        radar.fields.pop("NCP")

    if fake_rhohv:
        radar.fields.pop("RHOHV")
        radar.fields.pop("RHOHV_CORR")

    # Remove obsolete fields:
    for obsolete_key in ["Refl", "PHI_UNF", "PHI_CORR", "height", "TH", "TV", "ZDR_CORR", "RHOHV"]:
        try:
            radar.fields.pop(obsolete_key)
        except KeyError:
            continue

    # Change the temporary working name of fields to the one define by the user.
    for old_key, new_key in FIELDS_NAMES:
        try:
            radar.add_field(new_key, radar.fields.pop(old_key), replace_existing=True)
        except KeyError:
            continue

    # Delete working variables: for CPOL, keep only the whitelisted fields.
    if is_cpol:
        for k in list(radar.fields.keys()):
            if k not in OUTPUT_RADAR_FLD:
                radar.fields.pop(k)

    # Correct the standard_name metadata:
    cfmetadata.correct_standard_name(radar)
    # ACDD-1.3 compliant metadata:
    cfmetadata.coverage_content_type(radar)
    cfmetadata.correct_units(radar)

    return radar
493 |
--------------------------------------------------------------------------------
/cpol_processing/radar_codes.py:
--------------------------------------------------------------------------------
1 | """
2 | Codes for correcting and estimating various radar and meteorological parameters.
3 |
4 | @title: radar_codes
5 | @author: Valentin Louf
6 | @institutions: Monash University and the Australian Bureau of Meteorology
7 | @creation: 04/04/2017
8 | @date: 24/02/2021
9 |
10 | .. autosummary::
11 | :toctree: generated/
12 |
13 | _my_snr_from_reflectivity
14 | _nearest
15 | check_azimuth
16 | check_reflectivity
17 | check_year
18 | correct_rhohv
19 | correct_zdr
20 | get_radiosoundings
21 | read_radar
22 | snr_and_sounding
23 | """
24 | # Python Standard Library
25 | import os
26 | import re
27 | import fnmatch
28 | import datetime
29 |
30 | # Other Libraries
31 | import pyart
32 | import cftime
33 | import netCDF4
34 | import numpy as np
35 |
36 |
def _my_snr_from_reflectivity(radar, refl_field="DBZ"):
    """
    Fallback SNR computation: pyart.retrieve.calculate_snr_from_reflectivity
    can fail (its noise-floor estimate), so this derives the signal-to-noise
    ratio 'by hand' from reflectivity and range with a fixed noise floor.

    Parameter:
    ===========
    radar:
        Py-ART radar structure.
    refl_field: str
        Name of the reflectivity field.

    Return:
    =======
    snr_dict: dict
        Signal to noise ratio.
    """
    # Gate range broadcast along the azimuth dimension; offset by one metre
    # so that log10 is never evaluated at zero range.
    range_grid, _ = np.meshgrid(radar.range["data"], radar.azimuth["data"])
    range_grid = range_grid + 1

    # Remove the range dependence: this is basically the radar-constant
    # scaled dBm.
    pseudo_power = radar.fields[refl_field]["data"] - 20.0 * np.log10(range_grid / 1000.0)

    # Fixed noise floor estimate (dBm), used instead of pyart's estimator.
    noise_floor_estimate = -40

    snr_dict = pyart.config.get_metadata(pyart.config.get_field_name("signal_to_noise_ratio"))
    snr_dict["data"] = pseudo_power - noise_floor_estimate
    return snr_dict
67 |
68 |
69 | def _nearest(items, pivot):
70 | """
71 | Find the nearest item.
72 |
73 | Parameters:
74 | ===========
75 | items:
76 | List of item.
77 | pivot:
78 | Item we're looking for.
79 |
80 | Returns:
81 | ========
82 | item:
83 | Value of the nearest item found.
84 | """
85 | return min(items, key=lambda x: abs(x - pivot))
86 |
87 |
def check_azimuth(radar, refl_field_name="DBZ"):
    """
    Verify that the volume contains at least a full circle of rays (a few
    days of CPOL data in 2011 have truncated sweeps).

    Parameters:
    ===========
    radar:
        Py-ART radar structure.
    refl_field_name: str
        Name of the reflectivity field.

    Return:
    =======
    True when the reflectivity field has 360 rays or more.
    """
    nrays = radar.fields[refl_field_name]["data"].shape[0]
    return nrays >= 360
108 |
109 |
110 | def check_reflectivity(radar, refl_field_name="DBZ"):
111 | """
112 | Checking if radar has a proper reflectivity field. It's a minor problem
113 | concerning a few days in 2011 for CPOL.
114 |
115 | Parameters:
116 | ===========
117 | radar:
118 | Py-ART radar structure.
119 | refl_field_name: str
120 | Name of the reflectivity field.
121 |
122 | Return:
123 | =======
124 | True if radar has a non-empty reflectivity field.
125 | """
126 | dbz = radar.fields[refl_field_name]["data"]
127 |
128 | if np.ma.isMaskedArray(dbz):
129 | if dbz.count() == 0:
130 | # Reflectivity field is empty.
131 | return False
132 |
133 | return True
134 |
135 |
def check_year(radar):
    """
    Detect and fix the year-2000 century bug found in some old files (date
    read as 2098 instead of 1998) by rewriting the time units string.

    Parameters:
    ===========
    radar:
        Py-ART radar structure.

    Returns:
    ========
    True if date seems valid and False if date century had to be corrected.
    """
    dtime = cftime.num2pydate(radar.time["data"][0], radar.time["units"])
    year = dtime.year
    if year < 2050:
        # Date seems valid.
        return True

    # Shift the century back by a hundred years directly in the units string.
    radar.time["units"] = radar.time["units"].replace(str(year), str(year - 100))
    return False
160 |
161 |
def correct_azimuth(radar):
    """
    Check if the azimuth is right and repair broken sweeps.

    A sweep is considered broken when more than two of its azimuth values
    read exactly 0. In that case the ray azimuths are rebuilt, walking
    backwards from the sweep's last azimuth and forcing each preceding ray
    to be one degree less than its successor (wrapping 0 -> 359).

    Parameters:
    ===========
    radar: Py-ART radar structure

    Returns:
    ========
    azimuth: ndarray
        Corrected azimuth (same array object as radar.azimuth["data"];
        the correction is done in place).
    has_changed: bool
        Is there any change?
    """
    has_changed = False
    azimuth = radar.azimuth["data"]
    for sl in range(radar.nsweeps):
        azi = azimuth[radar.get_slice(sl)]
        # Fewer than three zero azimuths: sweep looks healthy, skip it.
        if np.sum(azi == 0) <= 2:
            continue

        # Walk backwards from the last ray, enforcing a -1 degree step.
        azi_zero = azi[-1]
        for na in range(len(azi) - 2, -1, -1):
            if azi[na] != azi_zero - 1:
                if azi_zero == 0 and azi[na] == 359:
                    # Legitimate wrap-around 0 -> 359: accept and move on.
                    azi_zero = azi[na]
                    continue
                else:
                    has_changed = True
                    azi[na] = azi_zero - 1
                    azi_zero = azi[na]

        # Fold negative azimuths (produced by the -1 steps below 0) back
        # into [0, 360). NOTE(review): this is applied to the whole azimuth
        # array, not only the current sweep — presumably harmless since
        # valid azimuths are non-negative, but confirm.
        azimuth[azimuth < 0] += 360
        azimuth[radar.get_slice(sl)] = azi

    return azimuth, has_changed
199 |
200 |
def correct_rhohv(radar, rhohv_name="RHOHV", snr_name="SNR"):
    """
    Noise-correct the cross correlation ratio (RHOHV) following the Schuur
    et al. 2003 NOAA report (p7 eq 5).

    Parameters:
    ===========
    radar:
        Py-ART radar structure.
    rhohv_name: str
        Cross correlation field name.
    snr_name: str
        Signal to noise ratio field name.

    Returns:
    ========
    rho_corr: array
        Corrected cross correlation ratio.
    """
    rhohv = radar.fields[rhohv_name]["data"].copy()
    snr = radar.fields[snr_name]["data"].copy()

    # Convert SNR from dB to linear units; masked gates get a large negative
    # sentinel so the 1/SNR correction term vanishes there.
    snr_linear = (10 ** (0.1 * snr)).filled(-9999)
    rho_corr = rhohv * (1 + 1 / snr_linear)

    # Anything invalid or outside [0, 1] is clamped to 1 (the correction is
    # not allowed to produce unphysical values).
    bad = np.isnan(rho_corr) | (rho_corr < 0) | (rho_corr > 1)
    rho_corr[bad] = 1
    try:
        rho_corr = rho_corr.filled(1)
    except Exception:
        pass

    return rho_corr
235 |
236 |
def correct_zdr(radar, zdr_name="ZDR", snr_name="SNR"):
    """
    Noise-correct the differential reflectivity (ZDR) following the Schuur
    et al. 2003 NOAA report (p7 eq 6).

    Parameters:
    ===========
    radar:
        Py-ART radar structure.
    zdr_name: str
        Differential reflectivity field name.
    snr_name: str
        Signal to noise ratio field name.

    Returns:
    ========
    corr_zdr: array
        Corrected differential reflectivity.
    """
    zdr = radar.fields[zdr_name]["data"].copy()
    snr = radar.fields[snr_name]["data"].copy()

    # dB -> linear units.
    zdr_linear = 10 ** (0.1 * zdr)
    snr_linear = 10 ** (0.1 * snr)

    alpha = 1.48
    numerator = alpha * snr_linear * zdr_linear
    denominator = alpha * snr_linear + alpha - zdr_linear
    return 10 * np.log10(numerator / denominator)
264 |
265 |
def get_radiosoundings(sound_dir, radar_start_date):
    """
    Locate the radiosounding file for a radar volume: prefer a file whose
    name contains the exact YYYYMMDD date, otherwise fall back to the file
    with the closest embedded date.
    """

    def _parse_dates(filenames):
        # One entry per file: the first 8-digit run parsed as YYYYMMDD,
        # or None when the name carries no parsable date.
        parsed = [None] * len(filenames)
        for idx, fname in enumerate(filenames):
            try:
                parsed[idx] = datetime.datetime.strptime(re.findall("[0-9]{8}", fname)[0], "%Y%m%d")
            except Exception:
                continue
        return parsed

    all_sonde_files = sorted(os.listdir(sound_dir))
    wanted = radar_start_date.strftime("%Y%m%d")

    # Exact date match first (first match in sorted order wins).
    exact_matches = [f for f in all_sonde_files if fnmatch.fnmatch(f, "*" + wanted + "*")]
    if exact_matches:
        return os.path.join(sound_dir, exact_matches[0])

    # Otherwise pick the file whose embedded date is nearest to the radar date.
    dates = [d for d in _parse_dates(all_sonde_files) if d is not None]
    closest_date = _nearest(dates, radar_start_date)
    candidates = [f for f in all_sonde_files if closest_date.strftime("%Y%m%d") in f]
    if not candidates:
        # No file carries the closest date string: last resort, newest file.
        return os.path.join(sound_dir, all_sonde_files[-1])
    return os.path.join(sound_dir, candidates[0])
306 |
307 |
def read_radar(radar_file_name: str) -> pyart.core.radar.Radar:
    """
    Read the input radar file and normalise field names to the internal
    DBZ/VEL/ZDR/... convention.

    Parameter:
    ==========
    radar_file_name: str
        Radar file name. ODIM HDF5 files (.h5/.hdf/.hdf5) are read with the
        ODIM reader keeping the file's own field names; everything else goes
        through the generic pyart reader.

    Return:
    =======
    radar: pyart.core.radar.Radar
        Py-ART radar structure.

    Raises:
    =======
    Re-raises whatever the pyart reader raised, after printing the file name.
    """
    # Read the input radar file.
    try:
        if radar_file_name.lower().endswith((".h5", ".hdf", ".hdf5")):
            radar = pyart.aux_io.read_odim_h5(radar_file_name, file_field_names=True)
        else:
            radar = pyart.io.read(radar_file_name)
    except Exception:
        print(f"!!!! Problem with {radar_file_name} !!!!")
        raise

    # SEAPOL hack change fields key: only applied when no "DBZ" field exists.
    # Several source names map to the same target; once a target exists,
    # add_field raises (no replace_existing) and the later mapping is skipped,
    # so the first successful rename wins.
    # NOTE(review): "sprectrum_width" below is spelled as found in the source
    # data — do not "fix" it without checking the input files.
    try:
        radar.fields["DBZ"]
    except KeyError:
        myfields = [
            ("SQIH", "NCP"),
            ("NCPH", "NCP"),
            ("SNRH", "SNR"),
            ("normalized_coherent_power", "NCP"),
            ("DBZH", "DBZ"),
            ("DBZH_CLEAN", "DBZ"),
            ("reflectivity", "DBZ"),
            ("WRADH", "WIDTH"),
            ("WIDTHH", "WIDTH"),
            ("sprectrum_width", "WIDTH"),
            ("UH", "DBZ"),
            ("total_power", "DBZ"),
            ("differential_reflectivity", "ZDR"),
            ("VRADH", "VEL"),
            ("VELH", "VEL"),
            ("velocity", "VEL"),
            ("cross_correlation_ratio", "RHOHV"),
            ("differential_phase", "PHIDP"),
            ("specific_differential_phase", "KDP"),
        ]
        for mykey, newkey in myfields:
            try:
                radar.add_field(newkey, radar.fields.pop(mykey))
            except Exception:
                continue

    return radar
364 |
365 |
def snr_and_sounding(radar, sonde_name, refl_field_name="DBZ", temp_field_name="temp"):
    """
    Compute the signal-to-noise ratio as well as interpolating the radiosounding
    temperature on to the radar grid. The function looks for the radiosoundings
    that happened at the closest time from the radar. There is no time
    difference limit.

    Parameters:
    ===========
    radar:
        Py-ART radar structure.
    sonde_name: str
        Path to the radiosoundings.
    refl_field_name: str
        Name of the reflectivity field.
    temp_field_name: str
        Name of the temperature variable inside the sounding file.

    Returns:
    ========
    z_dict: dict
        Altitude in m, interpolated at each radar gates.
    temp_info_dict: dict
        Temperature in Celsius, interpolated at each radar gates.
    snr: dict
        Signal to noise ratio.
    """
    radar_start_date = cftime.num2pydate(radar.time["data"][0], radar.time["units"])
    # Altitude hack: temporarily zero the radar altitude so the sounding
    # profile is mapped relative to ground level; restored further down.
    true_alt = radar.altitude["data"].copy()
    radar.altitude["data"] = np.array([0])

    # print("Reading radiosounding %s" % (sonde_name))
    with netCDF4.Dataset(sonde_name) as interp_sonde:
        temperatures = interp_sonde[temp_field_name][:]
        # Physically impossible temperatures are treated as missing.
        # NOTE(review): np.NaN was removed in NumPy 2.0 — use np.nan if upgrading.
        temperatures[(temperatures < -100) | (temperatures > 100)] = np.NaN
        try:
            temperatures = temperatures.filled(np.NaN)
        except AttributeError:
            # Already a plain ndarray (no mask to fill).
            pass
        # Height profile corresponding to radar.
        my_profile = pyart.retrieve.fetch_radar_time_profile(interp_sonde, radar)

    # CPOL altitude is 50 m.
    good_altitude = my_profile["height"] >= 0
    # Getting the temperature
    z_dict, temp_dict = pyart.retrieve.map_profile_to_gates(
        temperatures[good_altitude], my_profile["height"][good_altitude], radar
    )

    temp_info_dict = {
        "data": temp_dict["data"],
        "long_name": "Sounding temperature at gate",
        "standard_name": "temperature",
        "valid_min": -100,
        "valid_max": 100,
        "units": "degrees Celsius",
        "comment": "Radiosounding date: %s" % (radar_start_date.strftime("%Y/%m/%d")),
    }

    # Altitude hack: restore the true radar altitude.
    radar.altitude["data"] = true_alt

    # Calculate SNR
    snr = pyart.retrieve.calculate_snr_from_reflectivity(radar, refl_field=refl_field_name)
    # Sometimes the SNR is an empty array, this is due to the toa parameter.
    # Here we try to recalculate the SNR with a lower value for toa (top of atm).
    if snr["data"].count() == 0:
        snr = pyart.retrieve.calculate_snr_from_reflectivity(radar, refl_field=refl_field_name, toa=20000)

    if snr["data"].count() == 0:
        # If it fails again, then we compute the SNR with the noise value
        # given by the CPOL radar manufacturer.
        snr = _my_snr_from_reflectivity(radar, refl_field=refl_field_name)

    return z_dict, temp_info_dict, snr
439 |
--------------------------------------------------------------------------------
/cpol_processing/velocity.py:
--------------------------------------------------------------------------------
1 | """
2 | Codes for correcting Doppler velocity.
3 |
4 | @title: velocity
5 | @author: Valentin Louf
6 | @institutions: Monash University and the Australian Bureau of Meteorology
7 | @creation: 11/12/2017
8 | @date: 26/08/2020
9 |
10 | .. autosummary::
11 | :toctree: generated/
12 |
13 | _check_nyquist_velocity
14 | unravel
15 | """
16 | import pyart
17 | import numpy as np
18 |
19 |
20 | def _check_nyquist_velocity(radar, vel_name="VEL"):
21 | """
22 | Check if Nyquist velocity is present in the instrument parameters. If not,
23 | then it is created.
24 | """
25 | try:
26 | vnyq = radar.instrument_parameters["nyquist_velocity"]["data"]
27 | if vnyq is None:
28 | raise KeyError("Nyquist velocity does not exists.")
29 | except KeyError:
30 | vnyq = np.nanmax(radar.fields[vel_name]["data"])
31 | nray = len(radar.azimuth["data"])
32 | vnyq_array = np.array([vnyq] * nray, dtype=np.float32)
33 | nyquist_velocity = pyart.config.get_metadata("nyquist_velocity")
34 | nyquist_velocity["data"] = vnyq_array
35 | nyquist_velocity["_Least_significant_digit"] = 2
36 | radar.instrument_parameters["nyquist_velocity"] = nyquist_velocity
37 |
38 | return vnyq
39 |
40 |
def unravel(radar, gatefilter, vel_name="VEL", dbz_name="DBZ", nyquist=None):
    """
    Unfold Doppler velocity using the UNRAVEL algorithm (3D, long-range
    strategy), masking out gates excluded by the gatefilter.

    Parameters:
    ===========
    radar:
        Py-ART radar structure.
    gatefilter:
        Filter excluding non meteorological echoes.
    vel_name: str
        Name of the (original) Doppler velocity field.
    dbz_name: str
        Name of the reflecitivity field.
    nyquist: float
        Nyquist velocity co-interval. If None, the value stored in (or
        derived from) the radar's instrument parameters is used when scalar.

    Returns:
    ========
    vel_meta: dict
        Unfolded Doppler velocity field dictionary.
    """
    # Local import: unravel is an optional dependency, only needed when
    # dealiasing is actually requested.
    import unravel

    # Ensure a Nyquist velocity exists on the radar (created if missing).
    vnyq = _check_nyquist_velocity(radar, vel_name)
    if nyquist is None:
        # Only a scalar Nyquist can be passed through; a per-ray array is
        # left as None so unravel uses the radar's instrument parameters.
        if np.isscalar(vnyq):
            nyquist = vnyq

    unfvel = unravel.unravel_3D_pyart(
        radar, vel_name, dbz_name, gatefilter=gatefilter, alpha=0.8, nyquist_velocity=nyquist, strategy="long_range"
    )

    # Wrap the result in a pyart field dict, masking excluded gates.
    # NOTE(review): np.NaN was removed in NumPy 2.0 — use np.nan if upgrading.
    vel_meta = pyart.config.get_metadata("velocity")
    vel_meta["data"] = np.ma.masked_where(gatefilter.gate_excluded, unfvel).astype(np.float32)
    vel_meta["_Least_significant_digit"] = 2
    vel_meta["_FillValue"] = np.NaN
    vel_meta["comment"] = "UNRAVEL algorithm."
    vel_meta["long_name"] = "Doppler radial velocity of scatterers away from instrument"
    vel_meta["units"] = "m s-1"

    return vel_meta
84 |
--------------------------------------------------------------------------------
/output_11_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/vlouf/cpol_processing/097994422c46773754e04a3d4911b81c01673fa5/output_11_0.png
--------------------------------------------------------------------------------
/output_17_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/vlouf/cpol_processing/097994422c46773754e04a3d4911b81c01673fa5/output_17_0.png
--------------------------------------------------------------------------------
/output_24_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/vlouf/cpol_processing/097994422c46773754e04a3d4911b81c01673fa5/output_24_0.png
--------------------------------------------------------------------------------
/scripts/dask_pack.py:
--------------------------------------------------------------------------------
1 | """
2 | Raw radar PPIs processing. Quality control, filtering, attenuation correction,
3 | dealiasing, unfolding, hydrometeors calculation, rainfall rate estimation.
4 |
5 | @title: cpol_processing
6 | @author: Valentin Louf
7 | @institution: Monash University and Bureau of Meteorology
8 | @date: 10/02/2021
9 |
10 | .. autosummary::
11 | :toctree: generated/
12 |
13 | chunks
14 | main
15 | welcome_message
16 | """
17 | # Python Standard Library
18 | import gc
19 | import os
20 | import sys
21 | import glob
22 | import argparse
23 | import datetime
24 |
25 | import pandas as pd
26 | import dask.bag as db
27 | import crayons
28 | import cpol_processing
29 |
30 |
def chunks(l, n):
    """
    Yield successive n-sized chunks from l.
    From http://stackoverflow.com/a/312464
    """
    start = 0
    while start < len(l):
        yield l[start : start + n]
        start += n
38 |
39 |
def welcome_message():
    """
    Print a banner summarising the configuration of this processing run.
    Reads the module-level INPATH/OUTPATH/SOUND_DIR/START_DATE/END_DATE globals.
    """
    banner = "#" * 79
    print(banner)
    print("")
    print(" " * 25 + crayons.red("Raw radar PPIs production line.\n", bold=True))
    print(" - Input data directory path is: " + crayons.yellow(INPATH))
    print(" - Output data directory path is: " + crayons.yellow(OUTPATH))
    print(" - Radiosounding directory path is: " + crayons.yellow(SOUND_DIR))
    print(f" - The process will occur between {crayons.yellow(START_DATE)} and {crayons.yellow(END_DATE)}.")
    print("\n" + banner + "\n")
52 |
53 |
def main(start: datetime.datetime, end: datetime.datetime) -> None:
    """
    Drive the day-by-day processing of CPOL radar files.

    For every day in [start, end], matching input files are located and fed
    in batches of 32 through a dask bag calling
    cpol_processing.process_and_save.

    Parameters:
    ===========
    start: datetime.datetime
        First date to process CPOL data
    end: datetime.datetime
        End date of processing CPOL data
    """
    for day in pd.date_range(start, end):
        pattern = os.path.join(INPATH, str(day.year), day.strftime("%Y%m%d"), "*.*")
        flist = sorted(glob.glob(pattern))
        if not flist:
            print("No file found for {}.".format(day.strftime("%Y-%b-%d")))
            continue

        argslist = [(f, OUTPATH, SOUND_DIR, DO_DEALIASING) for f in flist]

        print(f"{len(flist)} files found for " + day.strftime("%Y-%b-%d"))

        # Batches of 32 keep the dask graph (and memory footprint) bounded.
        for chunk in chunks(argslist, 32):
            bag = db.from_sequence(chunk).starmap(cpol_processing.process_and_save)
            _ = bag.compute()

        del bag
        gc.collect()
    return None
88 |
89 |
90 | if __name__ == "__main__":
91 | """
92 | Global variables definition.
93 | """
94 | # Main global variables (Path directories).
95 | INPATH = "/g/data/hj10/admin/cpol_level_1a/v2019/ppi/"
96 | OUTPATH = "/scratch/kl02/vhl548/cpol_level_1b/v2020/"
97 | SOUND_DIR = "/g/data/kl02/vhl548/darwin_ancillary/DARWIN_radiosonde"
98 |
99 | # Parse arguments
100 | parser_description = """Raw radar PPIs processing. It provides Quality
101 | control, filtering, attenuation correction, dealiasing, unfolding, hydrometeors
102 | calculation, and rainfall rate estimation."""
103 | parser = argparse.ArgumentParser(description=parser_description)
104 | parser.add_argument(
105 | "-s", "--start-date", dest="start_date", default=None, type=str, help="Starting date.", required=True
106 | )
107 | parser.add_argument("-e", "--end-date", dest="end_date", default=None, type=str, help="Ending date.", required=True)
108 | parser.add_argument("--dealias", dest="dealias", action="store_true")
109 | parser.add_argument("--no-dealias", dest="dealias", action="store_false")
110 | parser.set_defaults(dealias=True)
111 |
112 | args = parser.parse_args()
113 | START_DATE = args.start_date
114 | END_DATE = args.end_date
115 | DO_DEALIASING = args.dealias
116 |
117 | # Check date
118 | try:
119 | start = datetime.datetime.strptime(START_DATE, "%Y%m%d")
120 | end = datetime.datetime.strptime(END_DATE, "%Y%m%d")
121 | if start > end:
122 | parser.error("End date older than start date.")
123 | except ValueError:
124 | parser.error("Invalid dates.")
125 | sys.exit()
126 |
127 | # Display infos
128 | welcome_message()
129 | main(start, end)
130 |
--------------------------------------------------------------------------------
/scripts/proc_missings.py:
--------------------------------------------------------------------------------
1 | """
2 | cpol_processing scripts for missing radar files in Radar archive on NCI.
3 |
4 | @title: cpol_processing
5 | @author: Valentin Louf
6 | @institution: Bureau of Meteorology
7 | @date: 26/03/2021
8 |
9 | .. autosummary::
10 | :toctree: generated/
11 |
12 | chunks
13 | main
14 | """
15 | import os
16 | import glob
17 | import traceback
18 | from typing import Iterable, Any
19 |
20 | import cpol_processing
21 |
22 | from concurrent.futures import TimeoutError
23 | from pebble import ProcessPool, ProcessExpired
24 |
25 |
def chunks(l: Any, n: int) -> Iterable[Any]:
    """
    Split `l` into successive slices holding at most `n` items each.
    From http://stackoverflow.com/a/312464
    """
    length = len(l)
    for start in range(0, length, n):
        yield l[start : start + n]
33 |
34 |
def buffer(infile):
    """
    Single-argument wrapper around cpol_processing.process_and_save so the
    production line can be mapped over a process pool.
    """
    cpol_processing.process_and_save(infile, OUTPATH, sound_dir=SOUND_DIR, do_dealiasing=True)
    return None
38 |
39 |
def main(year: int) -> None:
    """
    Find the radar files of a given year that have no processed counterpart
    and run the production line on them with a process pool.

    Parameters:
    ===========
    year: int
        Year of the CPOL archive to check for missing output files.
        Input/output roots come from the INPATH/OUTPATH module globals.
    """
    flist = glob.glob(os.path.join(INPATH, f"{year}/**/*.nc"))
    outlist = glob.glob(os.path.join(OUTPATH, f"v2021/ppi/{year}/**/*.nc"))

    # Files are matched on the timestamp embedded in the filename
    # (characters [-18:-3] of the path).
    oset = {f[-18:-3] for f in outlist}
    iset = {f[-18:-3] for f in flist}
    # Inputs without a corresponding output. The original used the symmetric
    # difference (oset ^ iset), which also picked up outputs lacking an input
    # and then crashed with IndexError on the `[...][0]` lookup below.
    datelist = sorted(iset - oset)

    if len(datelist) == 0:
        # Was wrongly printing the global YEAR instead of the `year` argument.
        print(f"No file to process for {year}.")
        return None
    print(f"{year}: {len(datelist)} files to process.")

    inflist = []
    for d in datelist:
        inflist.append([f for f in flist if d in f][0])

    # Batch by NCPUS; a fresh pool per batch isolates worker crashes.
    for fchunk in chunks(inflist, NCPUS):
        with ProcessPool() as pool:
            future = pool.map(buffer, fchunk, timeout=360)
            iterator = future.result()

            while True:
                try:
                    _ = next(iterator)
                except StopIteration:
                    break
                except TimeoutError as error:
                    print("function took longer than %d seconds" % error.args[1])
                except ProcessExpired as error:
                    print("%s. Exit code: %d" % (error, error.exitcode))
                except TypeError:
                    continue
                except Exception:
                    traceback.print_exc()
    return None
87 |
88 |
89 | if __name__ == "__main__":
90 | """
91 | Global variables definition.
92 | """
93 | INPATH = "/g/data/hj10/admin/cpol_level_1a/v2019/ppi/"
94 | OUTPATH = "/scratch/kl02/vhl548/cpol_level_1b/"
95 | SOUND_DIR = "/g/data/kl02/vhl548/darwin_ancillary/DARWIN_radiosonde"
96 | NCPUS = 16
97 | for YEAR in range(2009, 2018):
98 | main(YEAR)
99 |
--------------------------------------------------------------------------------
/scripts/radar_pack.py:
--------------------------------------------------------------------------------
1 | """
2 | Raw radar PPIs processing. Quality control, filtering, attenuation correction,
3 | dealiasing, unfolding, hydrometeors calculation, rainfall rate estimation.
4 | Tested on CPOL.
5 |
6 | @title: cpol_processing
7 | @author: Valentin Louf
8 | @institution: Monash University
9 | @date: 08/04/2021
10 | @version: 2.6
11 |
12 | .. autosummary::
13 | :toctree: generated/
14 |
15 | chunks
16 | main
17 | """
18 | # Python Standard Library
19 | import os
20 | import sys
21 | import glob
22 | import argparse
23 | import datetime
24 | import traceback
25 |
26 | import crayons
27 | import cpol_processing
28 |
29 | from concurrent.futures import TimeoutError
30 | from pebble import ProcessPool, ProcessExpired
31 |
32 |
def chunks(l, n):
    """
    Generate consecutive slices of `l`, each with up to `n` elements.
    From http://stackoverflow.com/a/312464
    """
    for begin in range(0, len(l), n):
        end = begin + n
        yield l[begin:end]
40 |
41 |
def main(infile: str) -> None:
    """
    Run the production line on a single radar file. Buffer function that is
    used to catch any problem with the processing line without screwing the
    whole multiprocessing stuff.

    Parameters:
    ===========
    infile: str
        Name of the input radar file. The output path, radiosounding
        directory and dealiasing switch come from the module-level
        OUTPATH/SOUND_DIR/DO_DEALIASING globals, not from parameters.
    """
    cpol_processing.process_and_save(infile, OUTPATH, sound_dir=SOUND_DIR, do_dealiasing=DO_DEALIASING)
    return None
57 |
58 |
def welcome_message():
    """
    Display a start-up banner describing this run's configuration, taken
    from the INPATH/OUTPATH/SOUND_DIR/START_DATE/END_DATE module globals.
    """
    rule = "#" * 79
    print(rule)
    print("")
    print(" " * 25 + crayons.red("Raw radar PPIs production line.\n", bold=True))
    print(" - Input data directory path is: " + crayons.yellow(INPATH))
    print(" - Output data directory path is: " + crayons.yellow(OUTPATH))
    print(" - Radiosounding directory path is: " + crayons.yellow(SOUND_DIR))
    print(f" - The process will occur between {crayons.yellow(START_DATE)} and {crayons.yellow(END_DATE)}.")
    print("\n" + rule + "\n")
71 |
72 |
73 | if __name__ == "__main__":
74 | """
75 | Global variables definition.
76 | """
77 | # Main global variables (Path directories).
78 | INPATH = "/g/data/hj10/admin/cpol_level_1a/v2019/ppi/"
79 | OUTPATH = "/scratch/kl02/vhl548/cpol_level_1b/"
80 | SOUND_DIR = "/g/data/kl02/vhl548/darwin_ancillary/DARWIN_radiosonde"
81 |
82 | # Parse arguments
83 | parser_description = """Raw radar PPIs processing. It provides Quality
84 | control, filtering, attenuation correction, dealiasing, unfolding, hydrometeors
85 | calculation, and rainfall rate estimation."""
86 | parser = argparse.ArgumentParser(description=parser_description)
87 | parser.add_argument(
88 | "-s", "--start-date", dest="start_date", default=None, type=str, help="Starting date.", required=True
89 | )
90 | parser.add_argument("-e", "--end-date", dest="end_date", default=None, type=str, help="Ending date.", required=True)
91 | parser.add_argument("-j", "--ncpus", dest="ncpus", default=16, type=int, help="Number of process.")
92 | parser.add_argument("--dealias", dest="dealias", action="store_true")
93 | parser.add_argument("--no-dealias", dest="dealias", action="store_false")
94 | parser.set_defaults(dealias=True)
95 |
96 | args = parser.parse_args()
97 | START_DATE = args.start_date
98 | END_DATE = args.end_date
99 | DO_DEALIASING = args.dealias
100 | NCPUS = args.ncpus
101 |
102 | # Check date
103 | try:
104 | start = datetime.datetime.strptime(START_DATE, "%Y%m%d")
105 | end = datetime.datetime.strptime(END_DATE, "%Y%m%d")
106 | if start > end:
107 | parser.error("End date older than start date.")
108 | date_range = [start + datetime.timedelta(days=x) for x in range(0, (end - start).days + 1,)]
109 | except ValueError:
110 | parser.error("Invalid dates.")
111 | sys.exit()
112 |
113 | # Display infos
114 | welcome_message()
115 |
116 | for day in date_range:
117 | input_dir = os.path.join(INPATH, str(day.year), day.strftime("%Y%m%d"), "*.*")
118 | flist = sorted(glob.glob(input_dir))
119 | if len(flist) == 0:
120 | print("No file found for {}.".format(day.strftime("%Y-%b-%d")))
121 | continue
122 |
123 | print(f"{len(flist)} files found for " + day.strftime("%Y-%b-%d"))
124 |
125 | for flist_chunk in chunks(flist, NCPUS):
126 | with ProcessPool() as pool:
127 | future = pool.map(main, flist_chunk, timeout=1200)
128 | iterator = future.result()
129 |
130 | while True:
131 | try:
132 | result = next(iterator)
133 | except StopIteration:
134 | break
135 | except TimeoutError as error:
136 | print("function took longer than %d seconds" % error.args[1])
137 | except ProcessExpired as error:
138 | print("%s. Exit code: %d" % (error, error.exitcode))
139 | except Exception:
140 | traceback.print_exc()
141 |
--------------------------------------------------------------------------------
/scripts/radar_single.py:
--------------------------------------------------------------------------------
1 | """
2 | Raw radar PPIs processing. Quality control, filtering, attenuation correction,
3 | dealiasing, unfolding, hydrometeors calculation, rainfall rate estimation.
4 | Tested on CPOL.
5 |
6 | @title: cpol_processing
7 | @author: Valentin Louf
8 | @institution: Monash University and Australian Bureau of Meteorology
9 | @date: 27/06/2020
10 | @version: 2
11 |
12 | .. autosummary::
13 | :toctree: generated/
14 |
15 | main
16 | """
17 | # Python Standard Library
18 | import os
19 | import argparse
20 | import warnings
21 |
22 | import crayons
23 | import cpol_processing
24 |
25 |
def main():
    """
    Print a welcome banner, then run the processing chain on INFILE while
    silencing any warnings the production line emits.
    """
    rule = "#" * 79
    print(rule)
    print("")
    print(" " * 25 + crayons.red("Raw radar PPIs production line.\n", bold=True))
    print(" - Input data directory path is: " + crayons.yellow(INFILE))
    print(" - Output data directory path is: " + crayons.yellow(OUTPATH))
    print(" - Radiosounding directory path is: " + crayons.yellow(SOUND_DIR))
    print("\n" + rule + "\n")

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        cpol_processing.process_and_save(INFILE, OUTPATH, SOUND_DIR)

    print(crayons.green("Process completed."))
    return None
45 |
46 |
47 | if __name__ == "__main__":
48 | """
49 | Global variables definition and logging file initialisation.
50 | """
51 | # Parse arguments
52 | parser_description = """Raw radar PPIs processing. It provides Quality
53 | control, filtering, attenuation correction, dealiasing, unfolding, hydrometeors
54 | calculation, and rainfall rate estimation."""
55 | parser = argparse.ArgumentParser(description=parser_description)
56 | parser.add_argument(
57 | "-i", "--input", dest="infile", type=str, help="Input file", required=True
58 | )
59 | parser.add_argument(
60 | "-o",
61 | "--output",
62 | dest="outdir",
63 | type=str,
64 | help="Output directory.",
65 | required=True,
66 | )
67 | parser.add_argument(
68 | "-r",
69 | "--radiosounds",
70 | dest="rs_dir",
71 | type=str,
72 | help="Radiosoundings directory.",
73 | default="/g/data/kl02/vhl548/darwin_ancillary/DARWIN_radiosonde",
74 | )
75 |
76 | args = parser.parse_args()
77 | INFILE = args.infile
78 | OUTPATH = args.outdir
79 | SOUND_DIR = args.rs_dir
80 |
81 | if not os.path.isfile(INFILE):
82 | parser.error("Invalid input file.")
83 |
84 | if not os.path.isdir(OUTPATH):
85 | parser.error("Invalid (or don't exist) output directory.")
86 |
87 | main()
88 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | import io
4 | import os
5 | import sys
6 | from shutil import rmtree
7 |
8 | from setuptools import find_packages, setup, Command
9 |
10 | # Package meta-data.
11 | NAME = "cpol_processing"
12 | DESCRIPTION = """Radar PPIs data processing, quality control, filtering, attenuation
13 | correction, dealiasing, unfolding, hydrometeors calculation, rainfall rate estimation."""
14 | URL = "https://github.com/vlouf/cpol_processing"
15 | EMAIL = "valentin.louf@bom.gov.au"
16 | AUTHOR = "Valentin Louf"
17 |
18 | # What packages are required for this module to be executed?
19 | REQUIRED = [
20 | "arm_pyart",
21 | "numpy",
22 | "csu_radartools",
23 | "crayons",
24 | "netCDF4",
25 | "scipy",
26 | "unravel",
27 | ]
28 |
29 | here = os.path.abspath(os.path.dirname(__file__))
30 |
31 | with io.open(os.path.join(here, "README.md"), encoding="utf-8") as f:
32 | long_description = "\n" + f.read()
33 |
class PublishCommand(Command):
    """Support setup.py publish."""

    # Shown by `python setup.py --help-commands`.
    description = "Build and publish the package."
    # No command-line options for this command.
    user_options = []

    @staticmethod
    def status(s):
        """Prints things in bold."""
        print("\033[1m{0}\033[0m".format(s))

    def initialize_options(self):
        # Required by the setuptools Command interface; nothing to set up.
        pass

    def finalize_options(self):
        # Required by the setuptools Command interface; nothing to validate.
        pass

    def run(self):
        # Remove any previous build artefacts so stale wheels are not uploaded.
        try:
            self.status("Removing previous builds…")
            rmtree(os.path.join(here, "dist"))
        except FileNotFoundError:
            pass

        # Rebuild the sdist and universal wheel with the current interpreter.
        self.status("Building Source and Wheel (universal) distribution…")
        os.system("{0} setup.py sdist bdist_wheel --universal".format(sys.executable))

        # Upload everything in dist/ to PyPI (twine must be installed and
        # configured with credentials).
        self.status("Uploading the package to PyPi via Twine…")
        os.system("twine upload dist/*")

        sys.exit()
65 |
66 |
# Package definition. The license field declares ISC, so the trove classifier
# now matches it (it previously advertised the MIT license, which contradicted
# license="ISC" below).
setup(
    name=NAME,
    version="2.6.2",
    description=DESCRIPTION,
    long_description=long_description,
    long_description_content_type="text/markdown",
    author=AUTHOR,
    author_email=EMAIL,
    url=URL,
    packages=find_packages(exclude=["contrib", "docs", "tests"]),
    # Ship the trained hydrometeor-classification model with the package.
    package_data={"cpol_processing": ["data/GM_model_CPOL.pkl.gz"]},
    install_requires=REQUIRED,
    include_package_data=True,
    license="ISC",
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "License :: OSI Approved :: ISC License (ISCL)",
        "Intended Audience :: Science/Research",
        "Topic :: Scientific/Engineering :: Atmospheric Science",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
    ],
    keywords="radar weather meteorology dual-polarization hydrometeors rainfall",
    # Enables `python setup.py publish` (build + twine upload).
    cmdclass={"publish": PublishCommand},
)
94 |
--------------------------------------------------------------------------------