├── OWZ_Documentation_v1,1.docx ├── README.md ├── data_info_file ├── input_thresh_NH ├── input_thresh_SH ├── owdata_1979010100_NH.nc ├── owdata_1979010100_SH.nc ├── owdata_1979010112_NH.nc ├── owdata_1979010112_SH.nc ├── owz_tracker.py ├── thrsh.py ├── thrsh.pyc ├── topog ├── topogNH.nc └── topogSH.nc ├── tracker.py └── tracker.pyc /OWZ_Documentation_v1,1.docx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/savinchand/owz_python/a9dbece7903889d0221d528a986463e319310200/OWZ_Documentation_v1,1.docx -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # owz_python -------------------------------------------------------------------------------- /data_info_file: -------------------------------------------------------------------------------- 1 | 1.0 !dlon = Longitude increment (degrees) 2 | 1.0 !dlat = Latitude increment (degrees) 3 | 12.0 !dtim = Time increment (hours) 4 | 2 !e_min= Minimum number of neighbouring events for a clump to be considered 5 | 5 !TC_min= Minimum number of True links before TC declared 6 | 2 !sea_min= Minimum number of sea points to make a land influenced clump True 7 | 0.09 !land_lim= Topography value above which grid point is considered land (m) 8 | 550.0 !srch_rad= Search factor to determine links in CT strings (km) 9 | 550.0 !clmp_rad= Clump radius, distance in which two clumps are to be combined 10 | 60.0 !TH_OWZ850 = Overiding thresholds 11 | 50.0 !TH_OWZ500 12 | 85.0 !TH_rh950 13 | 70.0 !TH_rh700 14 | 12.5 !TH_wsh 15 | 14.0 !TH_sh950 16 | -------------------------------------------------------------------------------- /input_thresh_NH: -------------------------------------------------------------------------------- 1 | -5.0 ! -60.0 Min latitude for output window 2 | 60.0 ! 5.0 Max latitude for output window 3 | 20.0 ! 
20.0 Min longitude for output window 4 | 350.0 ! 350.0 Max longitude for output window 5 | 3 ! Number of smoothing operations for windshear 6 | False ! Write threshold array to NetCDF file if TRUE 7 | True ! Write lat/lon of thrsh=1.0 locations if TRUE 8 | 1 ! Number of threshld combinations 9 | 50.0, 40.0, 70.0, 50.0, 25.0, 10.0, ! 850 OWZ, 500 OWZ, 950RH, 700 RH, 850-200 shear, SH 950 10 | -------------------------------------------------------------------------------- /input_thresh_SH: -------------------------------------------------------------------------------- 1 | -60.0 ! -60.0 Min latitude for output window 2 | 5.0 ! 5.0 Max latitude for output window 3 | 20.0 ! 20.0 Min longitude for output window 4 | 350.0 ! 350.0 Max longitude for output window 5 | 3 ! Number of smoothing operations for windshear 6 | False ! Write threshold array to NetCDF file if TRUE 7 | True ! Write lat/lon of thrsh=1.0 locations if TRUE 8 | 1 ! Number of threshld combinations 9 | 50.0, 40.0, 70.0, 50.0, 25.0, 10.0, ! 
850 OWZ, 500 OWZ, 950RH, 700 RH, 850-200 shear, SH 950 10 | -------------------------------------------------------------------------------- /owdata_1979010100_NH.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/savinchand/owz_python/a9dbece7903889d0221d528a986463e319310200/owdata_1979010100_NH.nc -------------------------------------------------------------------------------- /owdata_1979010100_SH.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/savinchand/owz_python/a9dbece7903889d0221d528a986463e319310200/owdata_1979010100_SH.nc -------------------------------------------------------------------------------- /owdata_1979010112_NH.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/savinchand/owz_python/a9dbece7903889d0221d528a986463e319310200/owdata_1979010112_NH.nc -------------------------------------------------------------------------------- /owdata_1979010112_SH.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/savinchand/owz_python/a9dbece7903889d0221d528a986463e319310200/owdata_1979010112_SH.nc -------------------------------------------------------------------------------- /owz_tracker.py: -------------------------------------------------------------------------------- 1 | import os, fnmatch 2 | from calendar import monthrange 3 | import thrsh as Threshold 4 | import tracker as Tracker 5 | import sys, getopt 6 | 7 | indir = "/var/climatenas/ERA5/OWZ_Tracker/data" 8 | outdir = "" 9 | var1="rh" 10 | var2="mrsh" 11 | var3="temp" 12 | var4="uwnd" 13 | var5="vwnd" 14 | 15 | 16 | # 17 | # Index number of level in netcdf file 18 | # Change this index number according to the data 19 | # e.g. 
ERA5 data have levels in ascending order 200, 500,700,850,950 20 | # So I have set the index values according to the data
in sequence 950 to 200 for timestamp 57 | os.system("cdo merge "+varName+"950_"+timestamp+"_tmp.nc "+varName+"850_"+timestamp+"_tmp.nc "+varName+"700_"+timestamp+"_tmp.nc "+varName+"500_"+timestamp+"_tmp.nc "+varName+"200_"+timestamp+"_tmp.nc "+varName+"_"+timestamp+".nc") 58 | os.system("rm *_tmp.nc") 59 | 60 | 61 | 62 | # 63 | # Append all variables into a single file 64 | # 65 | def appendFiles(timestamp): 66 | print("appending Files " + timestamp) 67 | #Append all variables of 00:00 hr into a single file 68 | os.system("ncks -A "+var2+"_"+timestamp+".nc "+ var1+"_"+timestamp+".nc") 69 | os.system("ncks -A "+var3+"_"+timestamp+".nc "+ var1+"_"+timestamp+".nc") 70 | os.system("ncks -A "+var4+"_"+timestamp+".nc "+ var1+"_"+timestamp+".nc") 71 | os.system("ncks -A "+var5+"_"+timestamp+".nc "+ var1+"_"+timestamp+".nc") 72 | 73 | os.system("rm "+var2+"_"+timestamp+".nc") 74 | os.system("rm "+var3+"_"+timestamp+".nc") 75 | os.system("rm "+var4+"_"+timestamp+".nc") 76 | os.system("rm "+var5+"_"+timestamp+".nc") 77 | 78 | 79 | 80 | # 81 | # Split NH and SH data and save in separate files for further processing 82 | # 83 | def split_NH_SH_data(timestamp): 84 | print("split_NH_SH_data Files " + timestamp) 85 | 86 | #NH 87 | os.system("ncks -F -d lon,21,351 -d lat,86,151 "+var1+"_"+timestamp+".nc "+ var1+"_"+timestamp+"NH_tmp.nc") 88 | 89 | #SH 90 | os.system("ncks -F -d lon,21,351 -d lat,31,96 "+var1+"_"+timestamp+".nc "+ var1+"_"+timestamp+"SH_tmp.nc") 91 | 92 | os.system("rm "+var1+"_"+timestamp+".nc") 93 | 94 | 95 | 96 | # 97 | # Append togography data to NH and SH files 98 | # 99 | def appendTopography(timestamp): 100 | print("appendTopography Files " + timestamp) 101 | 102 | #NH 103 | os.system("ncks -A "+topog_NH+" "+var1+"_"+timestamp+"NH_tmp.nc") 104 | #rename file 105 | os.system("mv "+var1+"_"+timestamp+"NH_tmp.nc owdata_"+timestamp+"_NH.nc") 106 | 107 | #SH 108 | os.system("ncks -A "+topog_SH+" "+var1+"_"+timestamp+"SH_tmp.nc") 109 | #rename file 110 | 
os.system("mv "+var1+"_"+timestamp+"SH_tmp.nc owdata_"+timestamp+"_SH.nc") 111 | 112 | 113 | 114 | # 115 | # Concat individual theshold data of individual days of year into single file (OWZ2) 116 | # This file will be used as an input for the OWZ Tracker 117 | # 118 | def concatThresholdFiles(year): 119 | 120 | #Remove Old OWZ and SubJ Files 121 | os.system("rm OWZ2tracker_"+str(year)+"*.txt") 122 | os.system("rm SubJ_"+str(year)+"*.txt") 123 | 124 | 125 | #Concat TH files for this year to singlge OWZ2 file which will be used for tracking 126 | os.system("cat TH_001_"+str(year)+"*_NH*.txt >OWZ2tracker_"+str(year)+"_NH.txt") 127 | os.system("cat STJ_"+str(year)+"*_NH*.txt >SubJ_"+str(year)+"_NH.txt") 128 | 129 | os.system("cat TH_001_"+str(year)+"*_SH*.txt >OWZ2tracker_"+str(year)+"_SH.txt") 130 | os.system("cat STJ_"+str(year)+"*_SH*.txt >SubJ_"+str(year)+"_SH.txt") 131 | 132 | os.system("rm STJ* TH*") 133 | 134 | # 135 | # Run threshold detector with preprocessed files for NH and SH 136 | # 137 | def runThresholdDetector(date,time, hem,file): 138 | 139 | timestamp = date+time 140 | 141 | if hem != "both": 142 | Threshold.process(file,date,time,hem) 143 | else: 144 | #NH 145 | Threshold.process("owdata_"+timestamp+"_NH.nc",date,time,"NH") 146 | 147 | #SH 148 | Threshold.process("owdata_"+timestamp+"_SH.nc",date,time,"SH") 149 | # END OF runThresholdDetector 150 | 151 | 152 | 153 | 154 | # 155 | # Run tracker on the threshold from OWZ2 file and save output in Clump, S_CT, S_CT2 and S_CT3 156 | # 157 | def runTracker(year,hem): 158 | Tracker.start("OWZ2tracker_"+str(year)+"_"+hem+".txt") 159 | os.system("mv Clump_out_py.txt "+outdir+"Clump_out_"+str(year)+"_"+hem+".txt") 160 | os.system("mv S_CT_out_py.txt "+outdir+"S_CT_out_"+str(year)+"_"+hem+".txt") 161 | os.system("mv S_CT_out2_py.txt "+outdir+"S_CT_out2_"+str(year)+"_"+hem+".txt") 162 | os.system("mv S_CT_out3_py.txt "+outdir+"S_CT_out3_"+str(year)+"_"+hem+".txt") 163 | # END OF runTracker 164 | 165 | 166 | 167 
| 168 | 169 | 170 | # 171 | # 172 | # 173 | def preprocessData(fromYear, toYear): 174 | 175 | time00 = 1 176 | time12 = 2 177 | for year in range(fromYear,toYear+1) : 178 | for month in range(1,13): 179 | mm = '{:02d}'.format(month) 180 | for day in range(1,monthrange(year, month)[1]+1): 181 | dd = '{:02d}'.format(day) 182 | 183 | timestamp00= str(year)+mm+dd+"00" 184 | timestamp12= str(year)+mm+dd+"12" 185 | #Extract RH 186 | extractLevelData(var1,timestamp00,time00,"RH_"+str(year)+".nc") 187 | extractLevelData(var1,timestamp12,time12,"RH_"+str(year)+".nc") 188 | 189 | #Extract SPFH 190 | extractLevelData(var2,timestamp00,time00,"SPFH_"+str(year)+".nc") 191 | extractLevelData(var2,timestamp12,time12,"SPFH_"+str(year)+".nc") 192 | 193 | #Extract TEMP 194 | extractLevelData(var3,timestamp00,time00,"TEMP_"+str(year)+".nc") 195 | extractLevelData(var3,timestamp12,time12,"TEMP_"+str(year)+".nc") 196 | 197 | #Extract UWIND 198 | extractLevelData(var4,timestamp00,time00,"UWND_"+str(year)+".nc") 199 | extractLevelData(var4,timestamp12,time12,"UWND_"+str(year)+".nc") 200 | 201 | #Extract VWIND 202 | extractLevelData(var5,timestamp00,time00,"VWND_"+str(year)+".nc") 203 | extractLevelData(var5,timestamp12,time12,"VWND_"+str(year)+".nc") 204 | 205 | #Append all variables in single file for timestamp 206 | appendFiles(timestamp00) 207 | appendFiles(timestamp12) 208 | 209 | 210 | #Extract NH and SH data from source file 211 | split_NH_SH_data(timestamp00) 212 | split_NH_SH_data(timestamp12) 213 | 214 | #Append topography data to NH and SH 215 | appendTopography(timestamp00) 216 | appendTopography(timestamp12) 217 | 218 | #Detect threshold for NH and SH 219 | runThresholdDetector(str(year)+mm+dd,"00","both","") 220 | runThresholdDetector(str(year)+mm+dd,"12","both","") 221 | 222 | 223 | time00=time00 + 2 224 | time12=time12 + 2 225 | 226 | concatThresholdFiles(year) 227 | runTracker(year,"NH") 228 | runTracker(year,"SH") 229 | 230 | 231 | 232 | ## 233 | def 
processTestData(dataPath): 234 | listOfFiles = os.listdir(dataPath) 235 | years = [] 236 | for entry in listOfFiles: 237 | if fnmatch.fnmatch(entry, "*.nc"): 238 | timestamp = entry.split("_")[1] 239 | year = timestamp[0:4] 240 | if(year not in years): 241 | years.append(year) 242 | date = timestamp[0:8] 243 | time = str(timestamp[-2:]) 244 | hem = entry.split("_")[2].split(".")[0] 245 | path = "" 246 | if path != ".": 247 | path = dataPath 248 | if path[-1] != "/": 249 | path += "/" 250 | 251 | path = path+entry 252 | print path,timestamp,date,time,hem 253 | runThresholdDetector(date,time,hem,path) 254 | 255 | runTrackerOnTestData(years) 256 | 257 | def runTrackerOnTestData(years): 258 | for year in years: 259 | concatThresholdFiles(year) 260 | runTracker(year,"NH") 261 | runTracker(year,"SH") 262 | 263 | 264 | 265 | 266 | 267 | 268 | def main(argv): 269 | global indir, outdir 270 | indir = '' 271 | outdir = '' 272 | yearFrom = 0 273 | yearTo = 0 274 | test = False 275 | testFiles = [] 276 | if len(argv) == 0: 277 | print 'usage: owz_tracker.py -i -o -f - t ' 278 | print 'For Testing preprocessed owd_data file, type --test followed by name of files separated by , ' 279 | print 'E.g --test file1,file2,file3' 280 | sys.exit() 281 | 282 | if argv[0] == "--test": 283 | 284 | #Run on preprocessed owz_data files for testing 285 | testFiles = argv[1] #.split(",") 286 | print testFiles 287 | processTestData(testFiles) 288 | # testOWZ(testFiles) 289 | else: 290 | 291 | try: 292 | opts, args = getopt.getopt(argv,"hi:o:f:t:",["ifile=","ofile=","yearFrom=","yearTo="]) 293 | except getopt.GetoptError: 294 | print 'owz_tracker.py -i -o -f - t ' 295 | print 'For Testing preprocessed owd_data file, type --test followed by name of files separated by , ' 296 | print 'E.g --test file1,file2,file3' 297 | 298 | sys.exit(2) 299 | 300 | 301 | # Run OWZ on dataset 302 | for opt, arg in opts: 303 | if opt == '-h': 304 | print 'usage: owz_tracker.py -i -o -f - t ' 305 | sys.exit() 306 | 
elif opt in ("-i", "--ifile"): 307 | indir = arg 308 | elif opt in ("-o", "--ofile"): 309 | outdir = arg 310 | elif opt in ("-f", "--yearFrom"): 311 | yearFrom = arg 312 | elif opt in ("-t", "--yearTo"): 313 | yearTo = arg 314 | 315 | 316 | 317 | if yearFrom == 0 or yearTo == 0 or indir == '': 318 | print 'Invalid arguments' 319 | print 'usage: owz_tracker.py -i -o -f - t ' 320 | sys.exit() 321 | 322 | if indir != "" and os.path.isdir(indir) == False: 323 | print "Input directory doesn't exist" 324 | sys.exit() 325 | 326 | if outdir != "" and os.path.isdir(outdir) == False: 327 | print "Output directory doesn't exist" 328 | sys.exit() 329 | 330 | preprocessData(int(yearFrom),int(yearTo)) 331 | 332 | if __name__ == "__main__": 333 | main(sys.argv[1:]) 334 | -------------------------------------------------------------------------------- /thrsh.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Please refer to the Documentation for citation and acknowledgement 4 | # 5 | 6 | import sys 7 | import numpy as np 8 | from netCDF4 import Dataset 9 | 10 | 11 | 12 | owz = [] # Normalized Okubo-Weiss * abs vort (s^-1) 13 | rh = [] # Relative humidity (%) 14 | mrsh = [] # Mixing ratio or spec hum (In kg/kg, Out g/kg) 15 | wsh = [] # Smoothed wind shear (m/s) 16 | avor = [] # Absolute vorticity (s^-1) 17 | thrsh = [] # Combined threshold (value 1.0 when threshold met) 18 | fld1 = [] #Temporary threshold variable arrays 19 | fld2 = [] #Temporary threshold variable arrays 20 | fld3 = [] #Temporary threshold variable arrays 21 | fld4 = [] #Temporary threshold variable arrays 22 | fld5 = [] #Temporary threshold variable arrays 23 | fld6 = [] # Temporary threshold variable arrays 24 | uwnd = [] # Zonal wind (m/s) 25 | vwnd = [] # Meridional wind (m/s) 26 | temp = [] # Air temperature (K) 27 | topo3d = [] # Topography (when determined from a 3-d field) 28 | topo = [] # Topography 29 | 30 | lat = [] # latitude 
(degrees) 31 | lon = [] # longitude (degrees) 32 | lvl = [] # pressure levels 33 | date = [] # date (YYYYMMDD) 34 | time = [] # time (HHMM) 35 | 36 | xd = 0 # length of longitude dimension 37 | yd = 0 # length of latitude dimension 38 | zd = 0 # length of height dimension 39 | td = 0 # length of time dimension 40 | 41 | k950 = -1 # pressure level integers 42 | k850 = -1 # pressure level integers 43 | k700 = -1 # pressure level integers 44 | k500 = -1 # pressure level integers 45 | k200 = -1 # pressure level integers 46 | i1 = 0 47 | il = 0 48 | j1 = 0 49 | jl = 0 # First and last i and j indices that define the output window 50 | 51 | 52 | Hem = "SH" 53 | 54 | 55 | Lowz = True 56 | Lwsh = True 57 | Lrh = True # ! Flags=T if fields (OWZ, wind shear, relative humidity) in input file 58 | Lmr = True 59 | Lsh = True #! Flags=T if fields (mixing ratio, specific humidity) in input file 60 | Ltop = True 61 | Lsfg = True # ! Flags=T if fields (topography, surface geopotential) in input file 62 | Lav = True # Flags=T if fields (absolute vorticity) in input file 63 | 64 | 65 | num_th_comb = 0 # ! Number of threshold combinations 66 | Towz850 = 0.0 # ! Threshold owz value for 850 hPa 67 | Towz500 = 0.0 # ! Threshold owz value for 500 hPa 68 | Trh950 = 0.0 # ! Threshold rh or sd value for 950 hPa 69 | Trh700 = 0.0 # ! Threshold rh or sd value for 700 hPa 70 | Twsh = 0.0 # ! Threshold wsh value for 850 - 200 hPa wind shear 71 | Tsh950 = 0.0 # ! Threshold sh value for 950 hPa 72 | 73 | Wltmin = 0.0 # ! Minimum latitude for output Window 74 | Wltmax = 0.0 # ! Maximum latitude for output Window 75 | Wlnmin = 0.0 # ! Minimum longitude for output Window 76 | Wlnmax = 0.0 # ! Maximum longitude for output Window 77 | 78 | num_smth = 0 # ! Number of smoothing operations for wind shear 79 | 80 | Lwrite = False # ! If true, write threshold or other arrays to input NetCDF file 81 | Loutput = False #! 
If true, write lat/lon of every thrsh satisfied gridpoint to ASCII file 82 | Lsd = False # ! If true, RH arrays will carry saturation deficit instead of RH 83 | 84 | 85 | 86 | 87 | # 88 | # Load configuration from input_thrsh_NH/SH file 89 | # Read the input file to determine: the "window of interest" in which the combined 90 | # thresholds are to be calculated 91 | 92 | def input_read(input_thrsh_file): 93 | global num_th_comb,num_smth, Towz850, Towz850, Towz500, Trh950,Trh700, Twsh, Tsh950, Wltmin,Wltmax,Wlnmin ,Wlnmax,num_smth 94 | global Lwrite,Loutput,Lsd 95 | 96 | 97 | file = open(input_thrsh_file, "r") 98 | Wltmin = float(file.readline().split(" ")[0]) #Minimum latitude for output Window 99 | Wltmax = float(file.readline().split(" ")[0]) #Maximum latitude for output Window 100 | Wlnmin = float(file.readline().split(" ")[0]) #Minimum longitude for output Window 101 | Wlnmax = float(file.readline().split(" ")[0]) #Maximum longitude for output Window 102 | num_smth = int(file.readline().split(" ")[0]) #Number of smoothing operations for wind shear 103 | Lwrite = bool(file.readline().split(" ")[0]) #If true, write threshold or other arrays to input NetCDF file 104 | Loutput = bool(file.readline().split(" ")[0]) #If true, write lat/lon of every thrsh satisfied gridpoint to ASCII file 105 | num_th_comb = int(file.readline().split(" ")[0]) # Number of threshold combinations 106 | 107 | 108 | Towz850 = np.zeros(num_th_comb) #Threshold owz value for 850 hPa 109 | Towz500 = np.zeros(num_th_comb) #Threshold owz value for 500 hPa 110 | Trh950 = np.zeros(num_th_comb) #Threshold rh or sd value for 950 hPa 111 | Trh700 = np.zeros(num_th_comb) #Threshold rh or sd value for 700 hPa 112 | Twsh = np.zeros(num_th_comb) #Threshold wsh value for 850 - 200 hPa wind shear 113 | Tsh950 = np.zeros(num_th_comb) #Threshold sh value for 950 hPa 114 | 115 | 116 | for i in range(num_th_comb): 117 | line = file.readline() 118 | Towz850[i] = float(line.split(",")[0]) 119 | Towz500[i] = 
float(line.split(",")[1]) 120 | Trh950[i] = float(line.split(",")[2]) 121 | Trh700[i] = float(line.split(",")[3]) 122 | Twsh[i] = float(line.split(",")[4]) 123 | Tsh950[i] = float(line.split(",")[5]) 124 | 125 | print("Check threshold values") 126 | print("Towz850 "+str(Towz850)+", Towz500 "+str(Towz500)+", Trh950 "+str(Trh950)+", Trh700 "+str(Trh700)+", Twsh "+str(Twsh)+", Tsh950 "+str(Tsh950)) 127 | 128 | print("If RH thresholds < 0, SD will be used instead.") 129 | if(Trh950[0] < 0 and Trh700[0] < 0): 130 | Lsd = True 131 | Lwrite = False 132 | print("Sturation deficit thresholds in use.") 133 | else: 134 | Lsd = False 135 | print("Relative humidity thresholds in use.") 136 | 137 | file.close() 138 | 139 | 140 | # 141 | # Write to an ascii text file threshold information everywhere the combined 142 | # threshold is satisfied 143 | # 144 | def output(idcmb): 145 | print "** writing TH output ** "+date+" - "+time 146 | 147 | st_u = 0.0 148 | st_v = 0.0 149 | 150 | # Calculate yr and md 151 | store = int(date)/10000. 
152 | yr = int(store) 153 | md = int(date) - yr*10000 154 | 155 | print("Date " + str(date)) 156 | 157 | if(time[0] < 24): 158 | time[0] = time[0]*100 159 | 160 | avlat = np.sum(lat)/yd 161 | 162 | # Create output file 163 | output_file = "TH_00"+str(idcmb+1)+"_"+str(date)+"_"+str(time)+"_"+Hem+"_00.txt" 164 | file = open(output_file, "w") 165 | 166 | # Loop through thrsh array and write out location where threshold is met 167 | 168 | for n in range(td): 169 | for i in range(i1, il+1): 170 | for j in range(j1, jl+1): 171 | if(thrsh[i][j][n] == 1.0): 172 | print("i = " + str(i) + " j = " + str(j) + " n = " + str(n)) 173 | st_u,st_v = steer_vel(st_u,st_v,i,j,n) 174 | # Check for unrealistic steering velocities caused by high topography in climate data 175 | if( (st_u < 1.e5) and (st_u > -1.e5) and (st_v < 1.e5) and (st_v > -1.e5) ): 176 | speed = np.sqrt( (uwnd[i][j][k850][n])**2 + (vwnd[i][j][k850][n])**2 ) 177 | 178 | file.write(str(date)+" " 179 | +str(time)+" " 180 | +str(lat[j])+" "+str(lon[i])+" " 181 | +str(st_u)+" " 182 | +str(st_v)+" " 183 | +str(speed)+" " 184 | +str(topo[i][j])+" " 185 | +str(fld1[i][j][n])+" " 186 | +str(fld2[i][j][n])+" " 187 | +str(fld3[i][j][n])+" " 188 | +str(fld4[i][j][n]) + " " 189 | +str(fld5[i][j][n]) +" " 190 | +str(fld6[i][j][n])+"\n") 191 | file.close() 192 | 193 | 194 | # The steering velocity is approximated by the average wind at 700 hPa 195 | # over box of size box_size 196 | # 197 | # st_u and st_v = Zonal and meridional components of steering velocity 198 | # ii,jj = Grid positions where steering velocity is to be calculated 199 | # nn = Time level when steering velocity is to be calculated 200 | def steer_vel(st_u,st_v,ii,jj,nn): 201 | box_size = 4.1 #Size of averaging box (degrees) 202 | 203 | #Distance between neighbouring grid points (degrees) 204 | dlon = lon[1] - lon[0] 205 | dlat = lat[1] - lat[0] 206 | 207 | 208 | #Number of grid points surrounding ii,jj in averaging box 209 | ibox = int(box_size * 0.5 / dlon) + 1 
210 | jbox = int(box_size * 0.5 / dlat) + 1 211 | 212 | 213 | #The number of grid points plus/minus to be included in averaging 214 | ip = ii + ibox 215 | im = ii - ibox 216 | jp = jj + jbox 217 | jm = jj - jbox 218 | if(ip > xd): 219 | ip = xd 220 | 221 | if(im < 0): 222 | im = 0 223 | 224 | if(jp > yd): 225 | jp = yd 226 | 227 | 228 | if(jm < 0): 229 | jm = 0 230 | 231 | count = 0 #Counter to determine how many grid points used in wind averaging 232 | st_u = 0.0 233 | st_v = 0.0 234 | 235 | 236 | # Calculate average winds in box 237 | for i in range(im, ip+1): 238 | for j in range(jm, jp+1): 239 | if(i != xd): 240 | if(j != yd): 241 | count = count + 1 242 | st_u = st_u + uwnd[i][j][k700][nn] 243 | st_v = st_v + vwnd[i][j][k700][nn] 244 | 245 | 246 | 247 | 248 | st_u = st_u/float(count) 249 | st_v = st_v/float(count) 250 | 251 | print("st_u = " + str(st_u) + " --- st_v = " + str(st_v) ) 252 | 253 | 254 | return st_u,st_v 255 | 256 | 257 | # Put threshold fields into 3-d arrays for efficient array manipulation 258 | def fill_fields(): 259 | global fild1 260 | global fild2 261 | global fild3 262 | global fild4 263 | global fild5 264 | global fild6 265 | 266 | print("Inside fill_fields") 267 | 268 | #x! Fill fields 269 | 270 | smth = np.zeros([xd, yd],float) 271 | for i in range(xd): 272 | for j in range(yd): 273 | for n in range(td): 274 | fld1[i][j][n] = owz[i][j][k850][n] 275 | fld2[i][j][n] = owz[i][j][k500][n] 276 | fld3[i][j][n] = rh[i][j][k950][n] 277 | fld4[i][j][n] = rh[i][j][k700][n] 278 | fld5[i][j][n] = wsh[i][j][k200][n] 279 | fld6[i][j][n] = mrsh[i][j][k950][n] 280 | 281 | 282 | 283 | #If large values of OWZ are encountered the format specification of f5.1 means "*****" is written 284 | # to the output file. To avoid this set a maximum value of 999.9 for OWZ. 
12-05-2016 285 | 286 | np.where(fld1 > 999.9, 999.9,fld1) 287 | np.where(fld2 > 999.9, 999.9, fld2) 288 | 289 | print("End of fill_fields") 290 | 291 | 292 | 293 | # By first setting the combined threshold array (thrsh) to everywhere 294 | # represent a satisfied threshold (thrsh = 1.0), the unsatisfied locations 295 | # for each field can be progressively set to 0.0, to effectively mask all 296 | # unsatisfied grid points. This leaves only those points with all threshold 297 | # variables satisfied, with a value of 1.0. 298 | # idcbm = Threshold combination ID 299 | def find_threshold(idcmb): 300 | print "** finding threshold ** "+date+" - "+time 301 | 302 | global thrsh 303 | 304 | RHmax=120.0 # Maximum RH limit to ensure no highly unrealistic values appear 305 | RHmin=-10.0 # Minimum RH limit to ensure no highly unrealistic values appear 306 | SDmax=1.0 # Maximum SD limit to ensure no highly unrealistic values appear 307 | SDmin=-10.0 # Minimum SD limit to ensure no highly unrealistic values appear 308 | 309 | fldmax = 0 # Maximum 950 hPa RH or SD limit 310 | fldmin = 0 # Minimum 950 hPa RH or SD limit 311 | 312 | print("Inside find_threshold") 313 | 314 | # Reset thrsh array to 1.0 315 | thrsh = np.ones([xd,yd,td],float) 316 | 317 | print(thrsh) 318 | 319 | 320 | # Search for threshold failure locations and set thrsh values to zero 321 | thrsh[fld1 < Towz850[idcmb]] = 0.0 322 | thrsh[fld2 < Towz500[idcmb]] = 0.0 323 | thrsh[fld3 < Trh950[idcmb]] = 0.0 324 | thrsh[fld4 < Trh700[idcmb]] = 0.0 325 | thrsh[fld5 > Twsh[idcmb]] = 0.0 326 | thrsh[fld6 < Tsh950[idcmb]] = 0.0 327 | 328 | 329 | # Search for unreasonable fld values and set thrsh values to zero 330 | # Climate data have missing values where fields intersect topography. 331 | # The windowing program creates dodgy numbers when encountering these fields. 332 | 333 | # Set fld3max etc. 
334 | if(Lsd): 335 | fldmax = SDmax 336 | fldmin = SDmin 337 | else: 338 | fldmax = RHmax 339 | fldmin = RHmin 340 | 341 | 342 | thrsh[fld1 > 1e5] = 0.0 343 | thrsh[fld2 > 1e5] = 0.0 344 | thrsh[fld3 > fldmax] = 0.0 345 | thrsh[fld4 > fldmax] = 0.0 346 | thrsh[fld5 > 1e5] = 0.0 347 | thrsh[fld6 > 120] = 0.0 348 | 349 | 350 | 351 | thrsh[fld1 < -1e5] = 0.0 352 | thrsh[fld2 < -1e5] = 0.0 353 | thrsh[fld3 < fldmin] = 0.0 354 | thrsh[fld4 < fldmin] = 0.0 355 | thrsh[fld5 < -1e5] = 0.0 356 | thrsh[fld6 < -10.0] = 0.0 357 | 358 | print("End of find_threshold") 359 | 360 | 361 | 362 | # 363 | # Calculate OWZ 364 | # 365 | def owz_calc(): 366 | print "** Inside owz_calc ** "+date+" - "+time 367 | 368 | global owz 369 | global avor 370 | 371 | DVDX = 0.0 # 372 | DUDY = 0.0 # Wind gradients 373 | DVDY = 0.0 # 374 | DUDX = 0.0 # 375 | 376 | XX = 0.0 # Normalized OW 377 | 378 | scale = 1.0e6 # Scale parameter for OWZ 379 | 380 | #Reciprocal of two times the grid separation distance (m) 381 | idx = 0.0 382 | idy = 0.0 383 | 384 | #Calculate corioils limit equivalent to a latitude of 20 degrees 385 | Cormax = 2.0 * 7.292116e-5 * np.sin(20.0 * 0.01745329) #Maximum contribution of the coriolis paramter to OWZ 386 | 387 | #dlat/dlon = Longitude/Latitude increments 388 | dlon = lon[1] - lon[0] 389 | dlat = lat[1] - lat[0] 390 | idy = 1.0 / (2.0 * 111.2 * 1000.0 * dlat) 391 | 392 | 393 | for j in range(yd): 394 | Cor = 2.0 * 7.292116e-5 * np.sin(lat[j] * 0.01745329) 395 | if(lat[j] < 0.0 ): 396 | SgnCor = -1.0 397 | if (Cor < -Cormax): 398 | Cor = -Cormax 399 | else: 400 | if (Cor > Cormax): 401 | Cor = Cormax 402 | SgnCor = 1.0 403 | 404 | idx = 1.0 / (2.0 * 111.2 * 1000.0 * dlon * np.cos(lat[j] * 0.01745329)) 405 | 406 | 407 | for n in range(td): 408 | for k in range(zd): 409 | for i in range(xd): 410 | 411 | if(j == 0): 412 | DUDY = 2.0 * idy * (uwnd[i][j+1][k][n] - uwnd[i][j][k][n]) 413 | DVDY = 2.0 * idy *(vwnd[i][j+1][k][n] - vwnd[i][j][k][n] ) 414 | elif(j+1 == yd): 415 | DUDY 
= 2.0 * idy * (uwnd[i][j][k][n] - uwnd[i][j-1][k][n]) 416 | DVDY = 2.0 * idy * (vwnd[i][j][k][n] - vwnd[i][j-1][k][n]) 417 | else: 418 | DVDY = idy * (vwnd[i][j+1][k][n] - vwnd[i][j-1][k][n]) 419 | DUDY = idy * (uwnd[i][j+1][k][n] - uwnd[i][j-1][k][n]) 420 | 421 | 422 | if(i == 0 ): 423 | DVDX = 2.0 * idx * ( vwnd[i+1][j][k][n] - vwnd[i][j][k][n]) 424 | DUDX = 2.0 * idx * (uwnd[i+1][j][k][n] - uwnd[i][j][k][n]) 425 | 426 | elif( i + 1 == xd ): 427 | DVDX = 2.0 * idx * (vwnd[i][j][k][n] - vwnd[i-1][j][k][n]) 428 | DUDX = 2.0 * idx * (uwnd[i][j][k][n] - uwnd[i-1][j][k][n]) 429 | 430 | else: 431 | DVDX = idx * (vwnd[i+1][j][k][n] - vwnd[i-1][j][k][n]) 432 | DUDX = idx * (uwnd[i+1][j][k][n] - uwnd[i-1][j][k][n]) 433 | 434 | 435 | 436 | 437 | ZZ = (DVDX - DUDY)**2 # Relativevorticity 438 | EE = (DUDX - DVDY)**2 # EE & EF = Shear and strain defrmation 439 | FF = (DVDX + DUDY)**2 # 440 | ZZA = DVDX - DUDY + Cor #absolute vorticity 441 | 442 | XX = (ZZ - EE - FF)/(ZZ+1.0e-20) 443 | 444 | # XX = np.amax(XX,0) 445 | if(XX < 0): 446 | XX = 0.0 447 | 448 | avor[i][j][k][n] = ZZA*scale 449 | owz[i][j][k][n] = XX * SgnCor * ZZA * scale 450 | 451 | print "end of owz_calc" 452 | 453 | 454 | 455 | # Calculate the magnitude of the wind shear at every level relative to the 850 hPa level. 
456 | # Sub-tropical jet (STJ) calculation 457 | def wsh_calc(): 458 | print "** Inside wsh_calc ** "+date+" - "+time 459 | 460 | global uwnd 461 | global vwnd 462 | global wsh 463 | 464 | smth = np.zeros([xd, yd],float) # 2-d array to be passed to smoothing subroutine 465 | 466 | u850 = 0 467 | v850 = 0 # wind components at 850 hPa 468 | 469 | yr = 0 470 | md = 0 # Year number, monthday number 471 | store = 0 # Temporary variable for calculating yr and md 472 | avlat = 0.0 # Average of all latitudes in lat array (used to find hemisphere) 473 | latjet = 0.0 # Latitude of the STJ 474 | speed = np.zeros(yd) # Wind speed at 200 hPa 475 | shear = 0.0 # meridional shear of the 200 hPa wind 476 | 477 | 478 | print("num_smth = " + str(num_smth)) 479 | 480 | #Smooth uwnd 481 | if (num_smth > 0): 482 | for n in range(td): 483 | for k in range(zd): 484 | for i in range(xd): 485 | for j in range(yd): 486 | smth[i][j] = uwnd[i][j][k][n] 487 | 488 | smth = smoother(smth,xd,yd,num_smth) 489 | 490 | for i in range(xd): 491 | for j in range(yd): 492 | uwnd[i][j][k][n] = smth[i][j] 493 | 494 | #! Smooth vwnd 495 | print("**** Smooth vwnd *****") 496 | if (num_smth > 0): 497 | print("Smooth vwnd") 498 | for n in range(td): 499 | for k in range(zd): 500 | for i in range(xd): 501 | for j in range(yd): 502 | smth[i][j] = vwnd[i][j][k][n] 503 | smth = smoother(smth,xd,yd,num_smth) 504 | 505 | for i in range(xd): 506 | for j in range(yd): 507 | vwnd[i][j][k][n] = smth[i][j] 508 | 509 | #! 
Calculate wind shear 510 | for n in range(td): 511 | for i in range(xd): 512 | for j in range(yd): 513 | u850 = uwnd[i][j][k850][n] 514 | v850 = vwnd[i][j][k850][n] 515 | for k in range(zd): 516 | wsh[i][j][k][n] = np.sqrt( (uwnd[i][j][k][n]-u850)**2 + (vwnd[i][j][k][n]-v850)**2 ) 517 | 518 | 519 | 520 | # Determine the position of the sub-tropical jet (STJ) if it exists or the position 521 | # of the closest jet to the equator (more than 7 degrees), that is stronger than 522 | # 25 m/s, and the zonal component is greater than 15 m/s. 523 | 524 | # Calculate yr and md 525 | store = int(date)/10000 526 | yr = int(store) 527 | md = int(date) - yr*10000 528 | if(time[0] < 24): 529 | time[0] = time[0]*100 530 | 531 | 532 | # Determine the hemisphere (assume the sign of the average latitude determines the hemisphere) 533 | avlat = np.sum(lat)/yd 534 | print("sum of lat = " + str(avlat)) 535 | 536 | 537 | #! Open output file to write STJ data 538 | file = open("STJ_"+str(yr)+str(md)+str(time[0])+"_"+Hem+"_00.txt", "w") 539 | 540 | for n in range(td): 541 | for i in range(xd): 542 | for j in range(yd): 543 | speed[j] = np.sqrt( (uwnd[i][j][k200][n])**2 + (vwnd[i][j][k200][n])**2 ) 544 | 545 | if (Hem == "NH"): 546 | for j in range(yd-1): 547 | latjet = lat[yd-1] 548 | if ( (lat[j] > 7.0) and (uwnd[i][j][k200][n] > 15.0) ) : 549 | shear = speed[j+1]-speed[j-1] 550 | if ( (shear < 0.0) and (speed[j] > 25.0) ): 551 | latjet = lat[j] 552 | elif (Hem == "SH"): 553 | for j in range(yd-1,0, -1): 554 | latjet = lat[0] 555 | if ( (lat[j] < -7.0) and (uwnd[i][j][k200][n] > 15.0) ) : 556 | shear = speed[j-1]-speed[j+1] 557 | if ( (shear < 0.0) and (speed[j] > 25.0) ): 558 | latjet = lat[j] 559 | file.write(str(lon[i])+" , "+str(latjet)+" "+str(yr)+str(md)+str(time[0])+"\n") 560 | file.close() 561 | 562 | 563 | # This subroutine smooths by averaging values using a 2-dimensional 2-4-2 weighting, plus 1 564 | # for the diagonals. 
def smoother(smth, xd, yd, num_smth):
    """Apply num_smth passes of a 2-4-2 (plus-1 diagonal) smoothing stencil.

    Interior points use weights 4 (centre), 2 (N/S/E/W neighbours) and
    1 (diagonals), normalised to 1/4, 1/8 and 1/16.  Edge and corner points
    use reduced stencils whose weights also sum to one, so a constant field
    is preserved exactly.

    smth     -- 2-d field to smooth (xd by yd); not modified in place
    xd, yd   -- grid dimensions
    num_smth -- number of smoothing passes
    Returns the smoothed field.
    """
    # BUG FIX: the per-call debug print was removed -- this routine is called
    # once per (level, time) plane inside 4-level loops, so it flooded stdout,
    # and it crashed with a NameError when the module globals date/time were
    # not yet set.

    third = 1.0 / 3.0
    sixth = 1.0 / 6.0
    ninth = 1.0 / 9.0
    twelfth = 1.0 / 12.0

    for _ in range(num_smth):
        tmp = np.zeros([xd, yd], float)

        # Interior points: full 9-point stencil.
        for i in range(1, xd - 1):
            for j in range(1, yd - 1):
                tmp[i][j] = (0.25 * smth[i][j]
                             + 0.125 * (smth[i-1][j] + smth[i+1][j] + smth[i][j-1] + smth[i][j+1])
                             + 0.0625 * (smth[i-1][j-1] + smth[i-1][j+1] + smth[i+1][j+1] + smth[i+1][j-1]))

        # Edge points (excluding corners): 6-point stencil.
        for i in range(1, xd - 1):
            tmp[i][0] = (third * smth[i][0]
                         + sixth * (smth[i-1][0] + smth[i+1][0] + smth[i][1])
                         + twelfth * (smth[i-1][1] + smth[i+1][1]))
            tmp[i][yd-1] = (third * smth[i][yd-1]
                            + sixth * (smth[i-1][yd-1] + smth[i+1][yd-1] + smth[i][yd-2])
                            + twelfth * (smth[i-1][yd-2] + smth[i+1][yd-2]))

        for j in range(1, yd - 1):
            tmp[0][j] = (third * smth[0][j]
                         + sixth * (smth[0][j-1] + smth[0][j+1] + smth[1][j])
                         + twelfth * (smth[1][j-1] + smth[1][j+1]))
            tmp[xd-1][j] = (third * smth[xd-1][j]
                            + sixth * (smth[xd-1][j-1] + smth[xd-1][j+1] + smth[xd-2][j])
                            + twelfth * (smth[xd-2][j-1] + smth[xd-2][j+1]))

        # Corner points: 4-point stencil.
        tmp[0][0] = 4*ninth*smth[0][0] + 2*ninth*(smth[0][1] + smth[1][0]) + ninth*smth[1][1]
        tmp[0][yd-1] = 4*ninth*smth[0][yd-1] + 2*ninth*(smth[0][yd-2] + smth[1][yd-1]) + ninth*smth[1][yd-2]
        tmp[xd-1][0] = 4*ninth*smth[xd-1][0] + 2*ninth*(smth[xd-2][0] + smth[xd-1][1]) + ninth*smth[xd-2][1]
        tmp[xd-1][yd-1] = (4*ninth*smth[xd-1][yd-1]
                           + 2*ninth*(smth[xd-2][yd-1] + smth[xd-1][yd-2])
                           + ninth*smth[xd-2][yd-2])

        smth = tmp

    return smth
# Ensure both the relative-humidity (rh) and specific-humidity (mrsh) arrays
# are filled, whichever moisture variable was read from the input file:
#   Lrh = true --> SH is derived from RH into "mrsh".
#   Lmr = true --> SH (from mixing ratio) into "mrsh", RH derived into "rh".
#   Lsh = true --> RH is derived from SH into "rh".
# If the module flag Lsd is set, "rh" is replaced by the saturation deficit
# in g/kg (values should be negative).  mrsh is converted to g/kg on exit.
def rh_calc(Lrh, Lmr, Lsh):
    print("** inside rh_calc ** " + date + " - " + time)

    global rh
    global mrsh

    # Saturation vapour-pressure constants.
    vps1 = 6.112
    vps2 = 17.67
    vps3 = 243.5
    vps4 = 273.15   # Kelvin -> Celsius offset
    vp1 = 0.622     # vapour-pressure constant = Rd/Rv

    # Factor distinguishing vapour pressure as a function of mixing ratio
    # (1.0) versus specific humidity (1 - Rd/Rv).
    fac = 1.0 if Lmr else 1.0 - vp1

    vpsat = np.zeros([xd, yd, zd, td], float)   # saturation vapour pressure

    # Saturation vapour pressure from temperature.
    print("Inside rh_calc - Lrh, Lmr, Lsh", Lrh, Lmr, Lsh)
    for n in range(td):
        for k in range(zd):
            for i in range(xd):
                for j in range(yd):
                    t_c = temp[i][j][k][n] - vps4
                    vpsat[i][j][k][n] = vps1 * np.exp((vps2 * t_c) / (t_c + vps3))

    if Lrh:
        # RH was read from file: derive specific humidity.
        print("In LRH")
        for n in range(td):
            for k in range(zd):
                for i in range(xd):
                    for j in range(yd):
                        vp = rh[i][j][k][n] * vpsat[i][j][k][n] / 100.0
                        mrsh[i][j][k][n] = vp1 * vp / (lvl[k] - vp * fac)
    else:
        # SH or mixing ratio was read: derive relative humidity.
        for n in range(td):
            for k in range(zd):
                for i in range(xd):
                    for j in range(yd):
                        vp = mrsh[i][j][k][n] * lvl[k] / (vp1 + mrsh[i][j][k][n] * fac)
                        rh[i][j][k][n] = 100.0 * vp / vpsat[i][j][k][n]

    # Saturation deficit if required (module flag Lsd): overwrite rh and
    # convert to g/kg.
    if Lsd:
        print("in LSD")
        for n in range(td):
            for k in range(zd):
                for i in range(xd):
                    for j in range(yd):
                        qsat = vp1 * vpsat[i][j][k][n] / (lvl[k] - vpsat[i][j][k][n] * fac)
                        rh[i][j][k][n] = mrsh[i][j][k][n] - qsat
        rh = np.multiply(rh, 1000.0)

    # Replace mixing ratio with specific humidity if required.
    if Lmr:
        mrsh = mrsh / (1 + mrsh)

    # Change units of mrsh to g/kg.
    mrsh = np.multiply(mrsh, 1000.0)
#
# Record the date/time of the data currently being processed (module globals
# used for log messages and output file names).
#
def setDateTime(d, t):
    global date, time
    date = d
    time = t


# Open and read the NetCDF input file: fill the module-level dimension and
# field arrays, derive any fields missing from the file (OWZ, RH/SH, wind
# shear) and locate the standard-level and output-window indices.
# Returns -1 on any unrecoverable problem with the file contents.
def read_data(nc_file):
    print("** reading data ** " + date + " - " + time)
    global yd, xd, zd, td
    global Lowz, Lav, Lrh, Lwsh, Lmr, Lsh, Ltop, Lsfg
    global owz, rh, wsh, thrsh, uwnd, vwnd, temp, fld1, fld2, fld3, fld4, fld5, fld6, topo, topo3d, avor, mrsh
    global lat, lon, lvl
    global i1, il, j1, jl
    global k950, k850, k700, k500, k200

    # Flags describing which fields are present in the file; they start at
    # their defaults and are switched off as variables are found missing.
    Lowz = False
    Lrh = True
    Lwsh = False
    Lmr = False
    Lsh = True
    Ltop = True
    Lsfg = True

    nc_fid = Dataset(nc_file, 'r')

    print("*************************")
    print("Getting dimension ID's")

    if "lat" not in nc_fid.dimensions:
        print("Error: lat dimension not available")
        return -1
    latid = nc_fid.dimensions["lat"]

    if "lon" not in nc_fid.dimensions:
        print("Error: lon dimension not available")
        return -1
    lonid = nc_fid.dimensions["lon"]

    if "time" not in nc_fid.dimensions:
        print("Error: time dimension not available")
        return -1
    timid = nc_fid.dimensions["time"]

    if "lvl" not in nc_fid.dimensions:
        print("Error: lvl dimension not available")
        return -1
    lvlid = nc_fid.dimensions["lvl"]

    print("Getting dimension lengths")
    yd = latid.size
    xd = lonid.size
    zd = lvlid.size
    td = timid.size

    print("Getting variable ID's")

    # Moisture: accept rh (%), specific humidity (kg/kg) or mixing ratio
    # (kg/kg); without any of them the program cannot proceed.
    if "rh" not in nc_fid.variables:
        print("rh variable not available")
        Lrh = False
        if "mrsh" not in nc_fid.variables:
            print("spec_hum variable not available")
            Lmr = False
            print("No recognised moisture parameter available.")
            print("Program can use relative humidity(%),")
            print("specific humidity (kg/kg) and mixing ratio (kg/kg).")
            print("Check variable names match those in the data file.")
            return -1
        else:
            mrshid = nc_fid.variables["mrsh"]
            Lsh = False
    else:
        Lmr = False
        Lsh = False
        rhid = nc_fid.variables["rh"]

    if "mrsh" not in nc_fid.variables:
        print("Error: mrsh variable not available")
        return -1
    mrshid = nc_fid.variables["mrsh"]

    if "uwnd" not in nc_fid.variables:
        print("Error: uwnd variable not available")
        return -1
    uid = nc_fid.variables["uwnd"]

    if "vwnd" not in nc_fid.variables:
        print("Error: vwnd variable not available")
        return -1
    vid = nc_fid.variables["vwnd"]

    if "temp" not in nc_fid.variables:
        print("Error: temp variable not available")
        return -1
    tdid = nc_fid.variables["temp"]

    # Topography: either a 2-d "topog" field or a 3-d "topo" field.
    if "topog" not in nc_fid.variables:
        Ltop = False
        if "topo" not in nc_fid.variables:
            print("Error: topog variable not available")
            Ltop = False
            print("No recognised topography field available.")
            print("Program can use 'topog' or 'sfc_geop'.")
            return -1
        else:
            topid = nc_fid.variables["topo"]
    else:
        Lsfg = False
        topid = nc_fid.variables["topog"]

    if "lat" not in nc_fid.variables:
        print("Error: lat variable not available")
        return -1
    if "lon" not in nc_fid.variables:
        print("Error: lon variable not available")
        return -1
    if "lvl" not in nc_fid.variables:
        print("Error: lvl variable not available")
        return -1

    print("Allocating space for field variables")
    owz = np.zeros([xd, yd, zd, td], float)
    rh = np.zeros([xd, yd, zd, td], float)
    wsh = np.zeros([xd, yd, zd, td], float)
    thrsh = np.zeros([xd, yd, td], float)
    uwnd = np.zeros([xd, yd, zd, td], float)
    vwnd = np.zeros([xd, yd, zd, td], float)
    temp = np.zeros([xd, yd, zd, td], float)
    fld1 = np.zeros([xd, yd, td], float)
    fld2 = np.zeros([xd, yd, td], float)
    fld3 = np.zeros([xd, yd, td], float)
    fld4 = np.zeros([xd, yd, td], float)
    fld5 = np.zeros([xd, yd, td], float)
    fld6 = np.zeros([xd, yd, td], float)
    topo = np.zeros([xd, yd], float)
    avor = np.zeros([xd, yd, zd, td], float)
    mrsh = np.zeros([xd, yd, zd, td], float)

    print("Allocating space for dimension variables")
    lat = np.zeros([yd], float)
    lon = np.zeros([xd], float)
    lvl = np.zeros([zd], float)

    print("Filling dimension arrays")
    lat = nc_fid.variables["lat"][:]
    lon = nc_fid.variables["lon"][:]
    lvl = nc_fid.variables["lvl"][:]

    print(date)
    print(time)

    # Convert lvl units from Pa to hPa if necessary.
    max_lvl = 0.0
    for j in range(zd):
        if lvl[j] > max_lvl:
            max_lvl = lvl[j]
    if max_lvl > 1100.0:
        lvl = lvl * 0.01

    print("Filling 4-d variable arrays")
    _rh = np.zeros([xd, yd, zd, td], float)
    if Lrh:
        _rh = rhid[:]

    _mrsh = np.zeros([xd, yd, zd, td], float)
    if Lmr or Lsh:
        _mrsh = mrshid[:]
    _uwnd = uid[:]
    _vwnd = vid[:]
    _temp = tdid[:]

    # Transpose the file's (t,z,y,x) ordering into the (x,y,z,t) ordering
    # used throughout this module.
    for i in range(xd):
        for j in range(yd):
            for k in range(zd):
                for n in range(td):
                    rh[i][j][k][n] = _rh[n][k][j][i]
                    uwnd[i][j][k][n] = _uwnd[n][k][j][i]
                    vwnd[i][j][k][n] = _vwnd[n][k][j][i]
                    temp[i][j][k][n] = _temp[n][k][j][i]
                    if Lmr or Lsh:
                        mrsh[i][j][k][n] = _mrsh[n][k][j][i]

    print("xd " + str(xd))
    print("yd " + str(yd))
    print("zd " + str(zd))
    print("td " + str(td))

    # Find the standard-level indices.  BUG FIX: these were previously left
    # uninitialised, so a missing level raised a NameError instead of
    # reaching the intended "== -1 -> not found" handling below.
    k950 = k850 = k700 = k500 = k200 = -1
    for k in range(zd):
        if np.abs(lvl[k] - 950.) < 0.001:
            k950 = k
        if np.abs(lvl[k] - 850.) < 0.001:
            k850 = k
        if np.abs(lvl[k] - 700.) < 0.001:
            k700 = k
        if np.abs(lvl[k] - 500.) < 0.001:
            k500 = k
        if np.abs(lvl[k] - 200.) < 0.001:
            k200 = k

    print('k950 = ', k950, lvl[k950])
    print('k850 = ', k850, lvl[k850])
    print('k700 = ', k700, lvl[k700])
    print('k500 = ', k500, lvl[k500])
    print('k200 = ', k200, lvl[k200])

    # If the 950 hPa level does not exist, fall back to 925 hPa (climate data).
    if k950 == -1:
        print("950 hPa pressure level does not exist in this data file.")
        print("Replace with 925 hPa level.")
        for k in range(zd):
            if np.abs(lvl[k] - 925.) < 0.001:
                k950 = k
        # BUG FIX: this previously tested "k950 == 0", which mis-reported
        # failure whenever 925 hPa happened to be the first level.
        if k950 == -1:
            print("Could not find 925 hPa pressure level either.")
        else:
            print("Found 925 hPa pressure level. k950 = ", k950, lvl[k950])

    # Fill derived fields that were not present in the file.
    if not Lowz:
        owz_calc()
        rh_calc(Lrh, Lmr, Lsh)
    if not Lwsh:
        wsh_calc()

    print("Filling 3-d variable arrays")
    print("Lsfg" + str(Lsfg))
    if Lsfg:
        topo3d = topid[:]
        print(topo3d.shape)
        for i in range(xd):
            for j in range(yd):
                topo[i][j] = topo3d[j][i]

    print("Filling 2-d variable arrays")
    if Ltop:
        print("Ltop")
        topoTemp = topid[:, :]
        for i in range(xd):
            for j in range(yd):
                topo[i][j] = topoTemp[j][i]

    print("Topo check: topo(20,20) =" + str(topo[19][19]))

    nc_fid.close()

    # First/last output-window indices (default to the full grid when the
    # requested window bounds are not found on the grid).
    i1 = 0
    il = xd - 1
    for i in range(xd):
        if np.abs(lon[i] - Wlnmin) < 0.001:
            i1 = i
        if np.abs(lon[i] - Wlnmax) < 0.001:
            il = i

    j1 = 0
    jl = yd - 1
    print("Wltmin " + str(Wltmin))
    print("Wltmax " + str(Wltmax))
    for j in range(yd):
        if np.abs(lat[j] - Wltmin) < 0.001:
            j1 = j
        if np.abs(lat[j] - Wltmax) < 0.001:
            jl = j
#
# Module to print ncdump
#
def ncdump(nc_fid, verb=True):
    '''
    ncdump outputs dimensions, variables and their attribute information,
    similar to NCAR's ncdump utility.  Requires a valid Dataset instance.

    Parameters
    ----------
    nc_fid : netCDF4.Dataset
        A netCDF4 dateset object
    verb : Boolean
        whether or not nc_attrs, nc_dims, and nc_vars are printed

    Returns
    -------
    nc_attrs : list
        A Python list of the NetCDF file global attributes
    nc_dims : list
        A Python list of the NetCDF file dimensions
    nc_vars : list
        A Python list of the NetCDF file variables
    '''
    def show_attrs(key):
        """Print the type and attributes of variable *key*, if it exists."""
        try:
            print ("\t\ttype:", repr(nc_fid.variables[key].dtype))
            for ncattr in nc_fid.variables[key].ncattrs():
                print ('\t\t%s:' % ncattr,\
                    repr(nc_fid.variables[key].getncattr(ncattr)))
        except KeyError:
            print ("\t\tWARNING: %s does not contain variable attributes" % key)

    # Collect the three name lists first; printing is purely optional.
    nc_attrs = nc_fid.ncattrs()
    nc_dims = list(nc_fid.dimensions)
    nc_vars = list(nc_fid.variables)

    if verb:
        print ("NetCDF Global Attributes:")
        for nc_attr in nc_attrs:
            print ('\t%s:' % nc_attr, repr(nc_fid.getncattr(nc_attr)))
        print ("NetCDF dimension information:")
        for dim in nc_dims:
            print ("\tName:", dim)
            print ("\t\tsize:", len(nc_fid.dimensions[dim]))
            show_attrs(dim)
        print ("NetCDF variable information:")
        for var in nc_vars:
            if var not in nc_dims:
                print ('\tName:', var)
                print ("\t\tdimensions:", nc_fid.variables[var].dimensions)
                print ("\t\tsize:", nc_fid.variables[var].size)
                show_attrs(var)
    return nc_attrs, nc_dims, nc_vars
#
# Main entry point of the threshold module: read the hemisphere-specific
# threshold configuration, read the input NetCDF data for the given
# date/time, then apply each threshold combination and write the output.
#
def process(inputFile, date, time, hemisphere):
    global Hem
    Hem = hemisphere

    # BUG FIX: the NH branch previously loaded "input_thresh_SH" as well, so
    # the northern hemisphere always ran with southern-hemisphere settings.
    if Hem == "SH":
        input_read("input_thresh_SH")
    elif Hem == "NH":
        input_read("input_thresh_NH")
    else:
        print("Invalid Hemisphere")   # (typo "Invaid" also fixed)
        return

    setDateTime(date, time)
    read_data(inputFile)

    print("xd,yd,zd,td", xd, yd, zd, td)

    # Spot-check a few derived fields at one grid point.
    print("owz - 850 " + str(owz[1][1][k850][0]))
    print('owz - 500' + str(owz[1][1][k500][0]))
    print('rh - 950 ' + str(rh[1][1][k950][0]))
    print('rh - 700 ' + str(rh[1][1][k700][0]))
    print('wsh - 850-200 ' + str(wsh[1][1][k200][0]))
    print('sh - 950 ' + str(mrsh[1][1][k950][0]))

    fill_fields()

    # Apply each requested threshold combination and write its output.
    for n in range(num_th_comb):
        find_threshold(n)
        if Loutput:
            output(n)
lat = np.empty(1,float)                 # Latitude
lon = np.empty(1,float)                 # Longitude
usteer = np.empty(1,float)              # Zonal steering velocity
vsteer = np.empty(1,float)              # Meridional steering velocity
speed = np.empty(1,float)               # Wind speed at 850 hPa
topo = np.empty(1,float)                # Topography
owz850 = np.empty(1,float)              # OWZ at 850 hPa
owz500 = np.empty(1,float)              # OWZ at 500 hPa
rh950 = np.empty(1,float)               # Relative humidity at 950 hPa
rh700 = np.empty(1,float)               # Relative humidity at 700 hPa
wsh = np.empty(1,float)                 # Smoothed 850 - 200 hPa wind shear
sh950 = np.empty(1,float)               # Specific humidity at 950 hPa
# NOTE: the arrays above are length-1 placeholders; read_data() re-allocates
# them to one entry per input line.


##
## clump_variables -- one entry per clump; re-allocated to cl_arsz elements
## by clump_allocate()
##

cl_num = 0                              # Number of clumps
cl_arsz = 1000000                       # Clump array size increment
C_size = np.empty([1],int)              # Number of gridpoints in the clump
C_size2 = np.empty([1],int)             # Number of gridpoints in the clump that satisfy all thresholds
C_land = np.empty([1],int)              # Number of land points in the clump
C_date = np.empty([1],int)              # Clump date
C_time = np.empty([1],int)              # Clump time
C_flag = np.empty([1],bool)             # Clump flag. False = free = not yet included in a CT string
C_ign_flag = np.empty([1],bool)         # Flag for temporarily ignoring clumps in find_link
C_thrsh = np.empty([1],bool)            # Override threshold flag. True = overriding thrsh satisfied
C_lat = np.empty([1],float)             # Clump latitude
C_lon = np.empty([1],float)             # Clump longitude
C_latp1 = np.empty([1],float)           # Estimated clump latitude at next half time interval
C_lonp1 = np.empty([1],float)           # Estimated clump longitude at next half time interval
C_latm1 = np.empty([1],float)           # Estimated clump latitude at previous half time interval
C_lonm1 = np.empty([1],float)           # Estimated clump longitude at previous half time interval
C_latp2 = np.empty([1],float)           # Estimated clump latitude at next time interval
C_lonp2 = np.empty([1],float)           # Estimated clump longitude at next time interval
C_latm2 = np.empty([1],float)           # Estimated clump latitude at previous time interval
C_lonm2 = np.empty([1],float)           # Estimated clump longitude at previous time interval
C_ust = np.empty([1],float)             # Clump zonal steering velocity
C_vst = np.empty([1],float)             # Clump meridional steering velocity
C_SPmax = np.empty([1],float)           # Maximum value of 850 wind speed in the clump
C_OWZmax = np.empty([1],float)          # Maximum value of owz850 in the clump
C_owz850 = np.empty([1],float)          # Average owz850 in the clump
C_owz500 = np.empty([1],float)          # Average owz500 in the clump
C_rh950 = np.empty([1],float)           # Average rh950 in the clump
C_rh700 = np.empty([1],float)           # Average rh700 in the clump
C_wsh = np.empty([1],float)             # Average wsh in the clump
C_sh950 = np.empty([1],float)           # Average sh950 in the clump
C_flagfail1_owz850 = np.empty([1],int)  # Count number of owz850 failures for invalid points in a clump
C_flagfail1_owz500 = np.empty([1],int)  # Count number of owz500 failures for invalid points in a clump
C_flagfail1_rh950 = np.empty([1],int)   # Count number of rh950 failures for invalid points in a clump
C_flagfail1_rh700 = np.empty([1],int)   # Count number of rh700 failures for invalid points in a clump
C_flagfail1_wsh = np.empty([1],int)     # Count number of wsh failures for invalid points in a clump
C_flagfail1_sh950 = np.empty([1],int)   # Count number of sh950 failures for invalid points in a clump
C_flagfail2_OT_count= np.empty([1],int) # Flag indicating inadequate number of valid points in a clump
C_flagfail2_owz850 = np.empty([1],int)  # Flag indicating clump mean owz850 value below threshold
C_flagfail2_owz500 = np.empty([1],int)  # Flag indicating clump mean owz500 value below threshold
C_flagfail2_rh950 = np.empty([1],int)   # Flag indicating clump mean rh950 value below threshold
C_flagfail2_sh950 = np.empty([1],int)   # Flag indicating clump mean sh950 value below threshold


##
## data_info -- configuration read from the data_info_file by read_info()
##

dlat = 0.0          # Latitude increment (degrees)
dlon = 0.0          # Longitude increment (degrees)
dtim = 0.0          # Time increment (hours)
e_min = 0           # Minimum number of neighbouring events for a clump to be considered
TC_min = 0          # Minimum number of consecutive True links before TC declared
sea_min = 0         # Minimum number of sea points to make a land influenced clump True
land_lim = 0.0      # Topography value above which the grid point is considered to be land [m]
srch_rad = 0.0      # Search radius to determine links in CT strings (km)
clmp_rad = 0.0      # Clump radius, distance in which two CTs are to be combined
TH_owz850 = 0.0     # Overriding 850 hPa OWZ threshold (in case stricter thresholds are desired)
TH_owz500 = 0.0     # Overriding 500 hPa OWZ threshold
TH_rh950 = 0.0      # Overriding 950 hPa RH threshold
TH_rh700 = 0.0      # Overriding 700 hPa RH threshold
TH_wsh = 0.0        # Overriding 850 - 200 hPa windshear threshold
TH_sh950 = 0.0      # Overriding 950 hPa SH threshold


##
## work_variables
##

T_event = np.empty([1],dtype="S")   # Threshold event state per input line: N=new, U=unchecked, C=checked
clbkt_num = 1000                    # Array dimension of clbkt
clbkt = np.zeros([clbkt_num],int)   # Clump bucket (event indices of the clump currently being grown)
def read_data(infile):
    """Read the threshold-event file into the module-level input arrays.

    Each line holds 14 single-space-separated values in this order:
    date, time, lat, lon, usteer, vsteer, speed, topo,
    owz850, owz500, rh950, rh700, wsh, sh950.
    Every event is tagged "N" (new, not yet in a clump) in T_event.
    """
    global nlns, date, time, lat, lon, usteer, vsteer, speed, topo, owz850, owz500, rh950, rh700, wsh, sh950
    global T_event

    fh = open(infile, "r")

    # Number of records = number of lines in the file; rewind afterwards.
    nlns = sum(1 for _ in fh)
    fh.seek(0, 0)

    # Allocate one slot per input line.
    date = np.empty([nlns], int)
    time = np.empty([nlns], int)
    lat = np.empty([nlns], float)
    lon = np.empty([nlns], float)
    usteer = np.empty([nlns], float)
    vsteer = np.empty([nlns], float)
    speed = np.empty([nlns], float)
    topo = np.empty([nlns], float)
    owz850 = np.empty([nlns], float)
    owz500 = np.empty([nlns], float)
    rh950 = np.empty([nlns], float)
    rh700 = np.empty([nlns], float)
    wsh = np.empty([nlns], float)
    sh950 = np.empty([nlns], float)

    # Work array: per-event clump state.
    # NOTE(review): dtype="S" is a zero-width byte string on modern numpy --
    # confirm the "N"/"U"/"C" tags survive the round trip, or widen to "S1".
    T_event = np.empty([nlns], dtype="S")

    for idx, line in enumerate(fh):
        (date[idx], time[idx], lat[idx], lon[idx],
         usteer[idx], vsteer[idx], speed[idx], topo[idx],
         owz850[idx], owz500[idx], rh950[idx], rh700[idx],
         wsh[idx], sh950[idx]) = line.split(" ")
        # "N" (= new) marks an event not yet included in a clump.
        T_event[idx] = "N"
    fh.close()
def read_info(data_info_file):
    """Load the grid/tracking configuration from *data_info_file*.

    Each line begins with the value, followed by a "!" comment; only the
    first space-separated token of each line is used.  The lines are read
    in a fixed order (see the data_info_file template).
    """
    global dlon, dlat, dtim, e_min, TC_min, sea_min, land_lim, srch_rad, clmp_rad
    global TH_owz850, TH_owz500, TH_rh950, TH_rh700, TH_wsh, TH_sh950

    fh = open(data_info_file, "r")

    def next_token():
        # First space-separated token of the next configuration line.
        return fh.readline().split(" ")[0]

    dlon = float(next_token())
    dlat = float(next_token())
    dtim = float(next_token())
    e_min = int(next_token())
    TC_min = int(next_token())
    sea_min = int(next_token())
    land_lim = float(next_token())
    srch_rad = float(next_token())
    clmp_rad = float(next_token())
    TH_owz850 = float(next_token())
    TH_owz500 = float(next_token())
    TH_rh950 = float(next_token())
    TH_rh700 = float(next_token())
    TH_wsh = float(next_token())
    TH_sh950 = float(next_token())

    fh.close()
def clump_allocate():
    """(Re)allocate every per-clump array to the working size cl_arsz.

    All arrays are zero-initialised, so the boolean flag arrays start
    False and the counters/averages start at 0.
    """
    global C_size, C_size2, C_land, C_date, C_time, C_lat, C_lon, C_latp1, C_lonp1, C_latm1, C_lonm1
    global C_latp2, C_lonp2, C_latm2, C_lonm2
    global C_ust, C_vst, C_SPmax, C_OWZmax, C_owz850, C_owz500
    global C_rh950, C_rh700, C_wsh, C_sh950, C_flag, C_ign_flag, C_thrsh, C_flagfail1_owz850, C_flagfail1_owz500
    global C_flagfail1_rh950, C_flagfail1_rh700, C_flagfail1_wsh, C_flagfail1_sh950, C_flagfail2_OT_count
    global C_flagfail2_owz850, C_flagfail2_owz500, C_flagfail2_rh950, C_flagfail2_sh950

    def ints():
        return np.zeros([cl_arsz], int)

    def floats():
        return np.zeros([cl_arsz], float)

    def bools():
        return np.zeros([cl_arsz], bool)

    # Clump bookkeeping.
    C_size = ints()        # gridpoints in the clump
    C_size2 = ints()       # gridpoints satisfying all thresholds
    C_land = ints()        # land points in the clump
    C_date = ints()        # clump date
    C_time = ints()        # clump time
    C_flag = bools()       # False = free = not yet included in a CT string
    C_ign_flag = bools()   # temporarily ignore clump in find_link
    C_thrsh = bools()      # True = overriding thresholds satisfied

    # Clump position now and extrapolated half/full time steps either side.
    C_lat = floats()
    C_lon = floats()
    C_latp1 = floats()     # next half time interval
    C_lonp1 = floats()
    C_latm1 = floats()     # previous half time interval
    C_lonm1 = floats()
    C_latp2 = floats()     # next full time interval
    C_lonp2 = floats()
    C_latm2 = floats()     # previous full time interval
    C_lonm2 = floats()

    # Steering winds and clump-mean / extreme field values.
    C_ust = floats()       # zonal steering velocity
    C_vst = floats()       # meridional steering velocity
    C_SPmax = floats()     # maximum 850 hPa wind speed in the clump
    C_OWZmax = floats()    # maximum owz850 in the clump
    C_owz850 = floats()    # clump-average owz850
    C_owz500 = floats()    # clump-average owz500
    C_rh950 = floats()     # clump-average rh950
    C_rh700 = floats()     # clump-average rh700
    C_wsh = floats()       # clump-average wsh
    C_sh950 = floats()     # clump-average sh950

    # Per-threshold failure counters (invalid points) and flags (clump means).
    C_flagfail1_owz850 = ints()
    C_flagfail1_owz500 = ints()
    C_flagfail1_rh950 = ints()
    C_flagfail1_rh700 = ints()
    C_flagfail1_wsh = ints()
    C_flagfail1_sh950 = ints()
    C_flagfail2_OT_count = ints()   # too few valid points in the clump
    C_flagfail2_owz850 = ints()     # clump-mean owz850 below threshold
    C_flagfail2_owz500 = ints()     # clump-mean owz500 below threshold
    C_flagfail2_rh950 = ints()      # clump-mean rh950 below threshold
    C_flagfail2_sh950 = ints()      # clump-mean sh950 below threshold
def S_CT_allocate():
    """Allocate the string-of-clumps bookkeeping arrays.

    Creates (as module globals):
      S_CT           -- (smax, lmax) int array; S_CT[s][l] is the clump id at
                        link position l of string s
      TCflag         -- (smax,) int array; per-string TC-declared flag
      iTCdeclareflag -- (smax, lmax) int array; 1 from the declaration link onward

    Contents are uninitialised (np.empty); callers fill/initialise them.
    """
    global S_CT, TCflag, iTCdeclareflag

    shape = (smax, lmax)
    S_CT = np.empty(shape, int)
    TCflag = np.empty((smax,), int)
    iTCdeclareflag = np.empty(shape, int)

#-----------END S_CT Allocate-------------------------------
def centroid_threshold():
    """Group threshold events into clumps and reduce each to a centroid.

    Takes the first unprocessed ("N") threshold event, adds it to a clump
    bucket, then repeatedly searches for immediate neighbours of everything in
    the bucket (marking events "U" = unchecked, then "C" = checked) until no
    more are found.  `clump_values` then condenses the bucket into a single
    set of clump values.  The process repeats for every remaining "N" event.

    Reads globals: nlns, T_event, date, time, clbkt_num, and the C_* clump
    arrays for the final report.  Updates globals: T_event, clbkt, cl_num.
    Writes a summary of all clumps to "Clump_out_py.txt".
    """
    global T_event, clbkt, cl_num
    loop = False

    print("Inside centroid_threshold")
    print(nlns)
    for n in range(nlns):
        print("Searching for neighbours to threshold number:" + str(n))
        if (T_event[n] == "C"):
            print(" ...included in a previous clump.")
            # FIX: previously fell through and built a (spurious, size-1)
            # clump for events already absorbed into an earlier clump; the
            # Fortran original skipped them (see the #endif remnants below).
            continue

        # Empty the clump bucket and set the loop flag for the search below
        clbkt = np.zeros([clbkt_num], int)  # Clump bucket
        print("****" + str(n) + "****")

        cb_count = 0
        loop = True

        print(T_event[n])
        if (T_event[n] == "N"):
            # Set to unchecked and add to bucket
            T_event[n] = "U"
            if (cb_count > clbkt_num):
                # Abort loudly rather than silently corrupting the bucket
                print("Insufficient space allocated for clbkt array.")
                print("clbkt_num must be increased. Current value = " + str(clbkt_num))
                raise RuntimeError("clbkt array too small; increase clbkt_num")

            clbkt[cb_count] = n

        # Keep sweeping the bucket until a full pass finds no "U" entries
        while loop:
            icnt = cb_count + 1
            for i in range(icnt):
                loop = False  # Exit do-while loop when all events are checked ("C")
                if (T_event[clbkt[i]] == "U"):
                    # Search for new neighbours, add them to the bucket and
                    # update cb_count
                    cb_count = neighbour_search(clbkt[i], cb_count, date[n], time[n])
                    T_event[clbkt[i]] = "C"
                    loop = True  # Stay in the loop until all events are checked

        # Calculate individual clump values.  The e_min size condition is
        # deliberately NOT applied here (it is applied in clump_values), so
        # that sheared circulations with a single threshold point survive.
        cl_num = clump_values(cb_count + 1, cl_num)
        print("cl_num " + str(cl_num))

    # Write clump information to output file
    with open("Clump_out_py.txt", "w") as file:
        file.write("**************************************\n")
        file.write("Number of clumps identified = " + str(cl_num) + "\n")

        print("**************************************")
        print("Number of clumps identified = " + str(cl_num))

        for k in range(1, (cl_num + 1)):
            print(str(C_date[k]) + " " + str(C_time[k]) + " " + str(C_lat[k]) + " " + str(C_lon[k]) + " " + str(C_size[k]) + " " + str(C_OWZmax[k]) + " " +
                  str(C_owz850[k]) + " " + str(C_owz500[k]) + " " + str(C_rh950[k]) + " " + str(C_rh700[k]) + " " + str(C_wsh[k]) + " " + str(C_sh950[k]) + " " + str(k) + " " + str(C_thrsh[k]) + "\n")

            file.write(str(C_date[k]) + " " + str(C_time[k]) + " " + str(C_lat[k]) + " " + str(C_lon[k]) + " " + str(C_size[k]) + " " + str(C_OWZmax[k]) + " " +
                       str(C_owz850[k]) + " " + str(C_owz500[k]) + " " + str(C_rh950[k]) + " " + str(C_rh700[k]) + " " + str(C_wsh[k]) + " " + str(C_sh950[k]) + " " + str(k) + " " + str(C_thrsh[k]) + "\n")
        print("**************************************")

#---------------End centroid_threshold---------------------------
def neighbour_search(cen, cb_count, cdate, ctime):
    """Find unprocessed ("N") threshold events adjacent to event `cen`.

    An event is a neighbour when it shares `cdate`/`ctime` and lies within
    one grid increment (dlat, dlon) of event `cen`.  Each neighbour found is
    marked "U" (unchecked) in T_event and appended to the clump bucket.

    Parameters:
      cen      -- index of the central event
      cb_count -- current last used index in clbkt
      cdate    -- date of the current clump (neighbours must match)
      ctime    -- time of the current clump (neighbours must match)

    Returns the updated cb_count.
    Raises RuntimeError if the clump bucket (clbkt) would overflow.
    Reads globals: lat, lon, dlat, dlon, nlns, date, time, clbkt_num.
    Updates globals: latp, latm, lonp, lonm, T_event, clbkt.
    """
    global latp, latm, lonp, lonm, T_event, clbkt

    # Set neighbouring lat/lon limits to 10% greater magnitude than the grid
    # increment to avoid round-off issues with the inequalities below
    latp = lat[cen] + dlat * 1.1
    latm = lat[cen] - dlat * 1.1
    lonp = lon[cen] + dlon * 1.1
    lonm = lon[cen] - dlon * 1.1

    # Loop through all events looking for "New" neighbours of `cen`.  Each
    # one found is set to "Unchecked", cb_count is advanced, and the event is
    # recorded in the clump bucket.
    for n in range(nlns):
        if ((T_event[n] == "N") and (date[n] == cdate) and (time[n] == ctime)):
            if ((lat[n] < latp) and (lat[n] > latm) and (lon[n] < lonp) and (lon[n] > lonm)):
                T_event[n] = "U"
                cb_count = cb_count + 1
                if (cb_count > clbkt_num):
                    # FIX: previously bare-returned None, which the caller
                    # assigned back into cb_count and crashed on later; abort
                    # explicitly instead (the Fortran original stopped here).
                    print("Insufficient space allocated for clbkt array.")
                    print("clbkt_num must be increased. Current value = " + str(clbkt_num))
                    raise RuntimeError("clbkt array too small; increase clbkt_num")

                clbkt[cb_count] = n

    return cb_count

#------------------neighbour_search------------------------
def clump_values(cb_count, cl_num_2):
    """Condense the events in the clump bucket into a single clump entry.

    Computes centroid position, mean threshold quantities, maxima, estimated
    past/future positions from the steering wind, and the overriding-threshold
    (core threshold) diagnostics for clump number `cl_num_2 + 1`.

    Parameters:
      cb_count -- number of events in the clump bucket (clbkt[0..cb_count-1])
      cl_num_2 -- current clump count; the new clump gets index cl_num_2 + 1

    Returns the incremented clump count.
    Reads globals: clbkt, date, time, lat, lon, usteer, vsteer, speed,
    owz850/owz500/rh950/rh700/wsh/sh950, topo, cl_arsz, dtim, Rearth,
    rad2deg, deg2rad, e_min, land_lim and the TH_* core thresholds.
    Updates the C_* clump arrays (declared global below).
    """
    global C_size, C_date, C_time, C_flagfail1_owz850, C_flagfail1_owz500, C_flagfail1_rh950, C_flagfail1_rh700, C_flagfail1_wsh
    global C_flagfail1_sh950, C_flagfail2_OT_count, C_flagfail2_owz850, C_flagfail2_owz500, C_flagfail2_rh950, C_flagfail2_sh950
    global C_lat, C_lon, C_ust, C_vst, C_SPmax, C_owz850, C_owz500, C_rh950, C_rh700, C_wsh, C_sh950, C_OWZmax, C_land
    global clbkt
    global C_size2, C_latp1, C_lonp1, C_latm1, C_lonm1
    global C_latp2, C_lonp2, C_latm2, C_lonm2

    # Local variables
    m = 0              # ID number for T_event in the clump bucket
    Xdist = 0.0
    Ydist = 0.0        # Estimated distance [m] clump will move in one time interval
    del_lat = 0.0
    del_lon = 0.0      # Estimated distance (degrees) clump will move in one time interval

    O_TH = np.zeros([cb_count], dtype=bool)  # Overriding threshold flag per event
    OT_count = 0       # Counter for points that satisfy the overriding threshold

    # Update clump count
    cl_num_2 = cl_num_2 + 1

    # Check for sufficient room in clump arrays
    if (cl_num_2 > cl_arsz):
        # NOTE(review): temporary abort until a reallocate_clump routine
        # exists; returns None, which the caller will trip over.
        print("Increasing the size of clump arrays")
        print("Current number of clumps = " + str(cl_num_2))
        print("Current clump array size = " + str(cl_arsz))
        return

    # Set simple clump values
    C_size[cl_num_2] = cb_count
    C_date[cl_num_2] = date[clbkt[0]]
    C_time[cl_num_2] = time[clbkt[0]]

    # Set fail flags to zero
    C_flagfail1_owz850[cl_num_2] = 0
    C_flagfail1_owz500[cl_num_2] = 0
    C_flagfail1_rh950[cl_num_2] = 0
    # FIX: was C_flagfail1_rh700[cl_num]=0 (global cl_num), which reset the
    # PREVIOUS clump's counter and left the current one uninitialised
    C_flagfail1_rh700[cl_num_2] = 0
    C_flagfail1_wsh[cl_num_2] = 0
    C_flagfail1_sh950[cl_num_2] = 0
    C_flagfail2_OT_count[cl_num_2] = 0
    C_flagfail2_owz850[cl_num_2] = 0
    C_flagfail2_owz500[cl_num_2] = 0
    C_flagfail2_rh950[cl_num_2] = 0
    C_flagfail2_sh950[cl_num_2] = 0

    # Zero the averaged and maximised clump values before accumulating
    C_lat[cl_num_2] = 0.0
    C_lon[cl_num_2] = 0.0
    C_ust[cl_num_2] = 0.0
    C_vst[cl_num_2] = 0.0
    C_SPmax[cl_num_2] = 0.0
    C_owz850[cl_num_2] = 0.0
    C_owz500[cl_num_2] = 0.0
    C_rh950[cl_num_2] = 0.0
    C_rh700[cl_num_2] = 0.0
    C_wsh[cl_num_2] = 0.0
    C_sh950[cl_num_2] = 0.0
    C_OWZmax[cl_num_2] = 0.0
    C_land[cl_num_2] = 0

    # Accumulate sums (for means) and track maxima over the bucket
    for n in range(cb_count):
        m = clbkt[n]
        C_lat[cl_num_2] = C_lat[cl_num_2] + lat[m]
        C_lon[cl_num_2] = C_lon[cl_num_2] + lon[m]
        C_ust[cl_num_2] = C_ust[cl_num_2] + usteer[m]
        C_vst[cl_num_2] = C_vst[cl_num_2] + vsteer[m]
        if (C_SPmax[cl_num_2] < speed[m]):
            C_SPmax[cl_num_2] = speed[m]
        C_owz850[cl_num_2] = C_owz850[cl_num_2] + owz850[m]
        C_owz500[cl_num_2] = C_owz500[cl_num_2] + owz500[m]
        C_rh950[cl_num_2] = C_rh950[cl_num_2] + rh950[m]
        C_rh700[cl_num_2] = C_rh700[cl_num_2] + rh700[m]
        C_wsh[cl_num_2] = C_wsh[cl_num_2] + wsh[m]
        C_sh950[cl_num_2] = C_sh950[cl_num_2] + sh950[m]
        if (C_OWZmax[cl_num_2] < owz850[m]):
            C_OWZmax[cl_num_2] = owz850[m]
        # C_land is calculated below, so that only points satisfying the
        # overriding threshold are considered

    # Convert sums to clump means
    C_lat[cl_num_2] = C_lat[cl_num_2] / cb_count
    C_lon[cl_num_2] = C_lon[cl_num_2] / cb_count
    C_ust[cl_num_2] = C_ust[cl_num_2] / cb_count
    C_vst[cl_num_2] = C_vst[cl_num_2] / cb_count
    C_owz850[cl_num_2] = C_owz850[cl_num_2] / cb_count
    C_owz500[cl_num_2] = C_owz500[cl_num_2] / cb_count
    C_rh950[cl_num_2] = C_rh950[cl_num_2] / cb_count
    C_rh700[cl_num_2] = C_rh700[cl_num_2] / cb_count
    C_wsh[cl_num_2] = C_wsh[cl_num_2] / cb_count
    C_sh950[cl_num_2] = C_sh950[cl_num_2] / cb_count

    # Estimate future and past clump positions from the steering velocity:
    # half a time interval (dtim hours) in metres, then converted to degrees
    Xdist = C_ust[cl_num_2] * 3600. * dtim * 0.5
    Ydist = C_vst[cl_num_2] * 3600. * dtim * 0.5
    del_lat = Ydist * rad2deg / (Rearth)
    del_lon = Xdist * rad2deg / (Rearth * np.cos(C_lat[cl_num_2] * deg2rad))

    C_latp1[cl_num_2] = C_lat[cl_num_2] + del_lat
    C_latm1[cl_num_2] = C_lat[cl_num_2] - del_lat
    C_latp2[cl_num_2] = C_lat[cl_num_2] + del_lat * 2.0
    C_latm2[cl_num_2] = C_lat[cl_num_2] - del_lat * 2.0
    C_lonp1[cl_num_2] = C_lon[cl_num_2] + del_lon
    C_lonm1[cl_num_2] = C_lon[cl_num_2] - del_lon
    C_lonp2[cl_num_2] = C_lon[cl_num_2] + del_lon * 2.0
    C_lonm2[cl_num_2] = C_lon[cl_num_2] - del_lon * 2.0

    print(str(cl_num_2) + " => C_latp1[cl_num_2] = " + str(C_latp1[cl_num_2]))
    print(str(cl_num_2) + " => C_latm1[cl_num_2] = " + str(C_latm1[cl_num_2]))
    print(str(cl_num_2) + " => C_latp2[cl_num_2] = " + str(C_latp2[cl_num_2]))
    print(str(cl_num_2) + " => C_latm2[cl_num_2] = " + str(C_latm2[cl_num_2]))
    print(str(cl_num_2) + " => C_lonp1[cl_num_2] = " + str(C_lonp1[cl_num_2]))
    print(str(cl_num_2) + " => C_lonm1[cl_num_2] = " + str(C_lonm1[cl_num_2]))
    print(str(cl_num_2) + " => C_lonp2[cl_num_2] = " + str(C_lonp2[cl_num_2]))
    print(str(cl_num_2) + " => C_lonm2[cl_num_2] = " + str(C_lonm2[cl_num_2]))

    # Determine if sufficient points satisfy the overriding thresholds
    # (neighbouring-points requirement disregarded: true if 'e_min' points
    # are satisfied).  Per-point failures increment the flagfail1 counters.
    O_TH = np.zeros([cb_count], dtype=bool)
    OT_count = 0
    for n in range(cb_count):
        m = clbkt[n]
        if ((owz850[m] > TH_owz850) and (owz500[m] > TH_owz500) and (rh950[m] > TH_rh950)
                and (rh700[m] > TH_rh700) and (wsh[m] < TH_wsh) and (sh950[m] > TH_sh950)):
            O_TH[n] = True
            OT_count = OT_count + 1
            if (topo[m] > land_lim):
                C_land[cl_num_2] = C_land[cl_num_2] + 1
        else:
            if (owz850[m] <= TH_owz850):
                C_flagfail1_owz850[cl_num_2] = C_flagfail1_owz850[cl_num_2] + 1
            if (owz500[m] <= TH_owz500):
                C_flagfail1_owz500[cl_num_2] = C_flagfail1_owz500[cl_num_2] + 1
            if (rh950[m] <= TH_rh950):
                C_flagfail1_rh950[cl_num_2] = C_flagfail1_rh950[cl_num_2] + 1
            if (rh700[m] <= TH_rh700):
                C_flagfail1_rh700[cl_num_2] = C_flagfail1_rh700[cl_num_2] + 1
            if (wsh[m] >= TH_wsh):
                C_flagfail1_wsh[cl_num_2] = C_flagfail1_wsh[cl_num_2] + 1
            if (sh950[m] <= TH_sh950):
                C_flagfail1_sh950[cl_num_2] = C_flagfail1_sh950[cl_num_2] + 1

    C_size2[cl_num_2] = OT_count

    # True-clump test: at least e_min points pass the core threshold AND the
    # clump means pass, excluding shear and 700 RH (mean shear and 700 RH are
    # unreliable for intense systems; see the original design note).
    if ((OT_count >= e_min) and (C_owz850[cl_num_2] > TH_owz850)
            and (C_owz500[cl_num_2] > TH_owz500) and (C_rh950[cl_num_2] > TH_rh950)
            and (C_sh950[cl_num_2] > TH_sh950)):
        C_thrsh[cl_num_2] = True
    else:
        C_thrsh[cl_num_2] = False
        if (OT_count < e_min):
            C_flagfail2_OT_count[cl_num_2] = 1
        if (C_owz850[cl_num_2] <= TH_owz850):
            C_flagfail2_owz850[cl_num_2] = 1
        if (C_owz500[cl_num_2] <= TH_owz500):
            C_flagfail2_owz500[cl_num_2] = 1
        if (C_rh950[cl_num_2] <= TH_rh950):
            C_flagfail2_rh950[cl_num_2] = 1
        if (C_sh950[cl_num_2] <= TH_sh950):
            C_flagfail2_sh950[cl_num_2] = 1

    # Write tracking comments
    for n in range(cb_count):
        m = clbkt[n]
        print(str(date[m]) + " " + str(time[m]) + " " + str(lat[m]) + " " + str(lon[m]) + " " + str(m) + " " + str(O_TH[n]))

    return cl_num_2

#------------------------------------------
def CT_strings():
    """Link clump centroids (CTs) across time into strings and flag TCs.

    Three phases:
      1. Merge near-duplicate clumps at the same date/time (within clmp_rad km),
         keeping the one that passes the core threshold, is larger, or has the
         bigger OWZmax; the loser's C_flag is set True (taken).
      2. Build S_CT strings by repeatedly calling find_link to chain each free
         clump to a clump at the next time level.
      3. Scan each string for TC_min consecutive core-threshold-true links
         (resetting on land influence or time gaps) and set TCflag /
         iTCdeclareflag; write results to S_CT_out_py.txt, S_CT_out2_py.txt
         and S_CT_out3_py.txt.

    Reads globals: cl_num, cl_arsz, smax, lmax, sea_min, TC_min and the C_*
    clump arrays; updates C_flag, S_CT, TCflag, iTCdeclareflag.
    """
    global C_lat,C_lon,C_lat,C_lon,clmp_rad,C_date,C_time,C_flag,C_thrsh
    global C_size2,C_OWZmax
    global S_CT,iTCdeclareflag,TCflag
    mj = 0            # m-jump, to jump forward in the m-loop
    mj2 = 0           # Second m-jump used in the recursive call of find_link
    s_num = 1         # String number
    l_num = 0         # Link number
    nxt_dt = 0
    nxt_tm = 0        # Date and time of next time level
    mret = 0          # The value of m to be returned from find_link
    Tcount = 0        # Counter for number of consecutive Ts in a string
    last_time = False # Flag to ensure exit of recursive subroutine
    First1 = False    # Flag to ensure first TC declaration time only goes to output

    print("Inside CT_strings\n")
    # Initialize CT flags for each CT to False (= free) to indicate not
    # included in a string
    C_flag = np.zeros([cl_arsz],bool)
    S_flag = False
    dist = 0.0

    # Phase 1: look for nearby CTs at the same time and set the smaller/weaker
    # one to True (= taken).  Preference order: core threshold satisfied, then
    # size (difference > 1 point), then OWZmax.
    # NOTE(review): this loop runs n over range(cl_num+1) but the "Ignoring
    # clump" report below runs over range(cl_num) -- confirm the intended
    # bounds (clump arrays appear to be 1-based).
    for n in range(cl_num+1):
        if (C_flag[n] == False ):
            for m in range(n+1,cl_num+1):
                if (C_date[n] < C_date[m] ):
                    break # Assumes CTs are ordered in time
                if ( (C_date[n]==C_date[m]) and (C_time[n]==C_time[m]) and (C_flag[m] == False) ) :
                    print("How far apart are clumps"+str(n)+" and "+str(m)+"?\n")

                    S_flag,dist = proximity_search(C_lat[n],C_lon[n],C_lat[m],C_lon[m],clmp_rad,S_flag,dist)

                    if(S_flag):
                        if( (C_thrsh[n]) and (C_thrsh[m] == False) ) :
                            C_flag[m] = True
                        elif( (C_thrsh[m]) and (C_thrsh[n] == False) ) :
                            C_flag[n] = True
                        else:
                            if(C_size2[n] > C_size2[m] + 1) :
                                C_flag[m] = True
                            elif(C_size2[n] < C_size2[m] - 1) :
                                C_flag[n] = True
                            else:
                                if(C_OWZmax[n] > C_OWZmax[m]) :
                                    C_flag[m] = True
                                else:
                                    C_flag[n] = True

                    print("After proximity_search",C_OWZmax[n],C_OWZmax[m],C_flag[n],C_flag[m],n,m)

    print("******************************************\n")
    for n in range(cl_num):
        if(C_flag[n]):
            print("Ignoring clump:"+str(n)+"\n")

    print("******************************************\n")

    # Initialize S_CT array to -999 to easily identify empty array locations
    S_CT.fill(-999)

    # Phase 2: loop through CTs from each time period and search for a match
    # with a CT from the next period
    print(C_date)
    print("Link search begins.\n")

    for n in range(1 , cl_num+1):
        S_flag = True
        l_num = 1

        # If still free, assign the CT number to the first position on the string
        if (C_flag[n] == False) :
            print("n = " +str(n)+" => Looking for a string beginning with clump:"+str(n) + "\n")

            S_CT[s_num][l_num] = n
            mj = n
            while S_flag:
                # Calculate the next date/time
                nxt_dt,nxt_tm = next_time(C_date[mj],C_time[mj],nxt_dt,nxt_tm)
                print("After next_time. Before "+ str(C_date[mj])+" "+str(C_time[mj])+ " After "+ str(nxt_dt) + str(nxt_tm))
                # Loop through remaining CTs looking for the next CT with
                # nxt_dt, nxt_tm; exit when one is found and record the CT id
                last_time = False
                mj2 = 1
                mj,mj2,nxt_dt,nxt_tm,S_flag,l_num,s_num,mret,last_time = find_link(mj,mj2,nxt_dt,nxt_tm,S_flag,l_num,s_num,mret,last_time)
                if(mret>cl_num):
                    break

            # Only update the string number if a string was identified in the
            # do-while loop
            if (l_num > 1):
                s_num = s_num + 1
                if (s_num > smax):
                    # reallocate_S_CT / TCflag / iTCdeclareflag not implemented
                    print("s_num exceeds array dimensions.\n")
                    print("s_num = " + str(s_num) +" Array = "+str(smax))
                    return

    # Remove the last string if it only has one CT
    if (l_num <= 1):
        s_num = s_num - 1

    # Phase 3: search for strings that satisfy the TC genesis criteria
    TCflag.fill(False)
    iTCdeclareflag.fill(0)

    print("s_num = " + str(s_num))
    for n in range(1,s_num+1):
        Tcount = 0
        for m in range(1,lmax+1):
            print("n = "+str(n)+" m ="+str(m)+" "+str(C_thrsh[S_CT[n][m]])+" " + str(S_CT[n][m])+"\n")

            if(S_CT[n][m] == -999):
                break
            if(C_thrsh[S_CT[n][m]]) : # Current link is true.
                # Check for land influence: reset the consecutive-true count
                # if too few sea points remain
                if( (C_land[S_CT[n][m]] > 0) and (C_size2[S_CT[n][m]] - C_land[S_CT[n][m]] < sea_min) ) :
                    Tcount = 0
                else:
                    Tcount = Tcount + 1
                    # Exit loop once Tcount reaches TC_min: TC declared
                    if(Tcount == TC_min) :
                        TCflag[n] = True
                        print(str(C_date[S_CT[n][m]]) + " " + str(nxt_dt))
                        iTCdeclareflag[n][m:lmax+1].fill(1)
                        break
                # Check for a time gap between this link and the next
                # NOTE(review): the guard tests S_CT[n][m] (already known not
                # -999 here) but the body reads S_CT[n][m+1]; the Fortran
                # comment suggests the guard was meant to be S_CT(n,m+1) --
                # verify, otherwise C_date[-999] wraps via negative indexing.
                if (S_CT[n][m] != -999) :
                    nxt_dt,nxt_tm = next_time(C_date[S_CT[n][m]],C_time[S_CT[n][m]],nxt_dt,nxt_tm)
                    if ( (nxt_dt != C_date[S_CT[n][m+1]] ) or (nxt_tm != C_time[S_CT[n][m+1]]) ) :
                        Tcount = 0
            else:
                Tcount = 0

    # Write S_CT to the three output files
    file_out = open("S_CT_out_py.txt","w")
    file_out2 = open("S_CT_out2_py.txt","w")
    file_out3 = open("S_CT_out3_py.txt","w")

    print("#############################################")
    print(" Begin string information (S_CT)")
    print("#############################################")

    print(" lat , lon ,OW850,OW500, wshr,RH950,RH700,SH950,OWZmx,sz,lnd,clump, date ,tm, CoreThrsh")
    print("Core Thresh vals: " + str(TH_owz850)+" " + str(TH_owz500)+" " + str(TH_wsh)+" " + str(TH_rh950)+" " + str(TH_rh700)+" " + str(TH_sh950))

    file_out.write("#############################################\n")
    file_out.write(" Begin string information (S_CT)\n")
    file_out.write("#############################################\n")
    file_out.write(" lat , lon ,OW850,OW500, wshr,RH950,RH700,SH950,OWZmx,sz,lnd,clump, date ,tm, CoreThrsh\n")
    file_out.write("Core Thresh vals: " + str(TH_owz850)+" " + str(TH_owz500)+" " + str(TH_wsh)+" " + str(TH_rh950)+" " + str(TH_rh700)+" " + str(TH_sh950) + "\n")

    file_out3.write("Date0, Tim0,Lat0,Lon0,OWZmx0,Lat-12,Lon-12,Lat-24,Lon-24\n")
    print("s_num = "+ str(s_num))

    for n in range(1,s_num + 1):
        if(TCflag[n] == False) :
            file_out.write("String number "+str(n) + "\n")
        else:
            file_out.write("String number "+str(n)+" *** TC declared Or test conditions met ***\n")

        print("lmax = "+ str(lmax))
        for m in range(1,lmax+1):
            if(S_CT[n][m] == -999):
                break

            file_out.write(str(C_lat[S_CT[n][m]])+" " +str(C_lon[S_CT[n][m]]) +" "+ str(C_owz850[S_CT[n][m]])+" "+str(C_owz500[S_CT[n][m]]) + " " +str(C_wsh[S_CT[n][m]]) + " "+ str(C_rh950[S_CT[n][m]])+" "+str(C_rh700[S_CT[n][m]]) + " "+ str(C_sh950[S_CT[n][m]]) + " "+str(C_OWZmax[S_CT[n][m]]) + " "+ str(C_size2[S_CT[n][m]]) + " "+ str(C_land[S_CT[n][m]]) + " "+ str(S_CT[n][m]) + " "+str(C_date[S_CT[n][m]]) + " "+ str(C_time[S_CT[n][m]]/100) + " "+ str(C_thrsh[S_CT[n][m]])+"\n")

            # NOTE(review): Python 3 true division makes chtm a float (e.g.
            # 12.0); the Fortran original used int2ch on an integer divide --
            # confirm the intended formatting of the time column.
            chtm = C_time[S_CT[n][m]]/100

            file_out2.write(str(m)+"\t"+str(n)+"\t"+str(iTCdeclareflag[n][m])+"\t"+str(round(C_lat[S_CT[n][m]],2))+"\t"+str(round(C_lon[S_CT[n][m]],2))+"\t"+
            str(round(C_rh950[S_CT[n][m]],2))+"\t"+str(round(C_sh950[S_CT[n][m]],2))+"\t"+
            str(round(C_OWZmax[S_CT[n][m]],2))+"\t"+str(round(C_SPmax[S_CT[n][m]],2))+"\t"+str(C_size[S_CT[n][m]])+"\t"+str(C_size2[S_CT[n][m]])+"\t"+
            str(C_land[S_CT[n][m]])+"\t"+str(S_CT[n][m])+"\t"+str(C_date[S_CT[n][m]])+"\t"+str(chtm)+"\t"+str(C_thrsh[S_CT[n][m]])+"\t"+
            str(C_flagfail1_owz850[S_CT[n][m]])+"\t"+str(C_flagfail1_owz500[S_CT[n][m]])+"\t"+
            str(C_flagfail1_rh950[S_CT[n][m]])+"\t"+str(C_flagfail1_rh700[S_CT[n][m]])+"\t"+
            str(C_flagfail1_wsh[S_CT[n][m]])+"\t"+str(C_flagfail1_sh950[S_CT[n][m]])+"\t"+
            str(C_flagfail2_OT_count[S_CT[n][m]])+"\t"+
            str(C_flagfail2_owz850[S_CT[n][m]])+"\t"+str(C_flagfail2_owz500[S_CT[n][m]])+"\t"+
            str(C_flagfail2_rh950[S_CT[n][m]])+"\t"+str( C_flagfail2_sh950[S_CT[n][m]])+"\t"+
            str(TH_owz850)+"\t"+str(TH_owz500)+"\t"+str(TH_rh950)+"\t"+str(TH_rh700)+"\t"+str(TH_wsh)+"\t"+str(TH_sh950)+"\n")

            # At the last link of a declared string, record the declaration
            # point plus the two preceding positions (t-12h, t-24h) if the
            # last three links are all core-threshold true
            if( (iTCdeclareflag[n][m] == 1) and (S_CT[n][m+1] == -999) ) :
                if( (C_thrsh[S_CT[n][m]]) and (C_thrsh[S_CT[n][m-1]]) and (C_thrsh[S_CT[n][m-2]]) ) :
                    file_out3.write(str(C_date[S_CT[n][m]])+" "+str(C_time[S_CT[n][m]]) + " " +str(C_lat[S_CT[n][m]])+" "+ str(C_lon[S_CT[n][m]])+" "+str(C_OWZmax[S_CT[n][m]])+" "+
                    str(C_lat[S_CT[n][m-1]]) +" "+str(C_lon[S_CT[n][m-1]])+" "+ str(C_lat[S_CT[n][m-2]])+" "+str(C_lon[S_CT[n][m-2]])+"\n")

    file_out.close()
    file_out2.close()
    file_out3.close()

#--------------End CT_strings----------------------------
def int2ch(character,l,integr,num_dig):
    #----------------------------------------------------------------------
    # This subroutine converts an integer to a character string.  It is
    # based on "intXch" created by G.S. Dietachmayer September 1992.
    #
    # character is ignored on input; the function returns the string
    #           representation of the integer.
    # l         is the desired length of the character string and must
    #           satisfy the condition (l >= num_dig).
    # integr    is the (non-negative) integer number to be converted.
    # num_dig   is the number of digits in integr. If (l > num_dig) the
    #           result is padded to the left with zeroes.
    #
    # Returns None (after printing an error) when l < num_dig.
    #
    # FIX: the previous port assigned a single chr() per iteration and
    # looped range(l,1,-1), so it returned only one digit (and skipped the
    # final position) instead of the full zero-padded string.
    #
    # Created: 14-1-00 K. Tory.
    #----------------------------------------------------------------------

    # Check that l is >= num_dig
    if(l < num_dig) :
        print('Error in int2ch. l must be >= num_dig.')
        print('l, num_dig ='+str(l)+" "+str(num_dig))
        return

    # Build up the string digit by digit, least significant first,
    # prepending so the result reads most-significant-first
    character = ""
    store = integr
    for _ in range(l):
        store, digit = divmod(store, 10)
        character = chr(digit + 48) + character

    print('integer =',integr,'character = ',character)
    return character
def proximity_search(C_latc,C_lonc,C_latn,C_lonn,radius,S_flag,dist):
    """Test whether two clump centres lie within `radius` kilometres.

    Uses a flat-earth approximation: the zonal separation is scaled by the
    cosine of the mean latitude.  A tiny constant (1.0e-5 m^2) is added
    under the square root to avoid a zero-distance singularity.

    Parameters:
      C_latc, C_lonc -- first centre (degrees)
      C_latn, C_lonn -- second centre (degrees)
      radius         -- search radius in km
      S_flag, dist   -- in/out placeholders (Fortran-style); overwritten

    Returns (S_flag, dist): S_flag True when within radius; dist in km.
    Reads globals deg2rad and Rearth (m).
    """
    mean_lat = (C_latc + C_latn) * 0.5
    coslat = np.cos(deg2rad * mean_lat)

    # Meridional and zonal separations in metres
    Ydist = (C_latc - C_latn) * deg2rad * Rearth
    Xdist = (C_lonc - C_lonn) * deg2rad * Rearth * coslat

    dist = np.sqrt(Xdist**2 + Ydist**2 + 1.0e-5)

    # Compare in metres, then report in km
    S_flag = bool(dist < radius * 1000.0)
    dist = dist / 1000.0

    return S_flag, dist

#--------------End proximity_search----------------------------
SR = "+ str(radius)) 927 | # write(6,*) "S_flag = ",S_flag 928 | # write(6,*) "C_latc,C_lonc, C_latn, C_lonn",C_latc,C_lonc, C_latn, C_lonn 929 | return S_flag,dist 930 | 931 | #--------------End proximity_search---------------------------- 932 | 933 | 934 | 935 | def next_time(in_dt,in_tm,out_dt,out_tm): 936 | 937 | 938 | hr1 = 0.0 # Input time real hours 939 | ihr1 = 0 # Input time integer hours 940 | imin1 =0 # Input time integer minutes 941 | hr2 = 0.0 # Output time real hours 942 | ihr2 =0 # Output time integer hours 943 | imin2 =0 # Output time integer minutes 944 | 945 | dy1 = 0 946 | dy2 = 0 # In and out days 947 | mn1 = 0 948 | mn2 = 0 # In and out months 949 | yr1 = 0 950 | yr2 = 0 # In and out years 951 | 952 | add_day = 0 # Number of days to be added to in_dt 953 | ldm = 0 # Day number of the last day of the month 954 | 955 | 956 | 957 | # Calculate in_tm in hours (real) 958 | ihr1 = int( float(in_tm)/100.0) 959 | imin1= in_tm - ihr1*100 960 | hr1 = float(ihr1) + float(imin1)/60.0 961 | 962 | # write(6,*) "ihr1, imin1, hr1 ",ihr1, imin1, hr1 963 | 964 | # Add dtim to hr to get out_tm 965 | hr2 = dtim + hr1 966 | add_day = 0 967 | while (hr2 >= 24.0): 968 | hr2 = hr2 - 24.0 969 | add_day = add_day + 1 970 | if(add_day > 10) : 971 | print("Problem with hr2 calculation") 972 | return 973 | 974 | ihr2 = int(hr2) 975 | imin2= int( (hr2 - float(int(hr2)) )*60.0) 976 | out_tm = ihr2*100 + imin2 977 | 978 | # Add days to in_dt 979 | if (add_day > 0) : 980 | # Get input yr, mn, dy 981 | yr1 = int( float(in_dt)/10000.0) 982 | mn1 = int( float(in_dt)/100.0) # Temporary value 983 | dy1 = in_dt - mn1*100 984 | mn1 = mn1 - yr1*100 985 | 986 | # write(6,*) "yr1, mn1, dy1",yr1, mn1, dy1 987 | 988 | # Determine last day of month 989 | if ((mn1==1) or (mn1==3) or (mn1==5) or (mn1==7) or (mn1==8) or (mn1==10) or (mn1==12)) : 990 | ldm = 31 991 | elif ((mn1==4) or (mn1==6) or (mn1==9) or (mn1==11)): 992 | ldm = 30 993 | elif (mn1==2) : 994 | if(np.mod(yr1,4)==0) : 995 | 
ldm = 29 996 | else: 997 | ldm = 28 998 | 999 | if((yr1==1900) or (yr1==2100) or (yr1==2200) or (yr1==2300)): 1000 | ldm = 28 1001 | 1002 | 1003 | # Find dy2,mn2,yr2 1004 | dy2 = dy1 + add_day 1005 | if (dy2 > ldm) : 1006 | dy2 = dy2 - ldm 1007 | mn2 = mn1 + 1 1008 | else: 1009 | mn2 = mn1 1010 | 1011 | if (mn2 > 12) : 1012 | yr2 = yr1 + 1 1013 | mn2 = mn2 - 12 1014 | else: 1015 | yr2 = yr1 1016 | 1017 | 1018 | out_dt = 10000*yr2 + 100*mn2 + dy2 1019 | else: 1020 | out_dt = in_dt 1021 | 1022 | return out_dt,out_tm 1023 | 1024 | 1025 | 1026 | def find_link(mj,mj2,nxt_dt,nxt_tm,S_flag,l_num,s_num,mret,last_time): 1027 | 1028 | global C_size,C_size2,C_land,C_date,C_time,C_lat,C_lon,C_latp1,C_lonp1,C_latm1,C_lonm1 1029 | global C_latp2,C_lonp2,C_latm2,C_lonm2 1030 | global C_ust,C_vst,C_SPmax,C_OWZmax,C_owz850,C_owz500 1031 | global C_rh950,C_rh700,C_wsh,C_sh950,C_flag,C_ign_flag,C_thrsh,C_flagfail1_owz850, C_flagfail1_owz500 1032 | global C_flagfail1_rh950, C_flagfail1_rh700,C_flagfail1_wsh, C_flagfail1_sh950,C_flagfail2_OT_count 1033 | global C_flagfail2_owz850, C_flagfail2_owz500,C_flagfail2_rh950, C_flagfail2_sh950 1034 | global dlon, dlat,dtim,e_min,TC_min,sea_min,land_lim,srch_rad,clmp_rad 1035 | global TH_owz850,TH_owz500,TH_rh950,TH_rh700,TH_wsh,TH_sh950 1036 | 1037 | m = mj+mj2 1038 | m_false = False # Stored 'm' when link to 'false' C_thrsh is found 1039 | crnt_dt = 0 # Value of nxt_dt before recursive call 1040 | crnt_tm = 0 # Value of nxt_tm before recursive call 1041 | dist = 0.0 1042 | dist_store = 0.0 # Distance between points (returned from proximity_search) 1043 | sr1 = 200.0 # Minimum search radius (km) 1044 | sr_inc = 100.0 # Search radius increment 1045 | SR = 0.0 # Variable search radius 1046 | s_rad = 0.0 # maximum search radius (as a function of latitude) 2013-07-30 1047 | modlat = 0.0 # Modulus of the latitude (degrees) 1048 | repeat_srch = True # Flag to determine if search is to be repeated over larger area 1049 | double_exit = False # Flag 
set to true when both do loops are to be exited 1050 | 1051 | # Search for links within an increasing radius, from sr1 to s_rad, at sr_inc increments 1052 | SR = sr1 1053 | repeat_srch = True 1054 | C_ign_flag.fill(False) 1055 | 1056 | while(repeat_srch): 1057 | repeat_srch = False 1058 | 1059 | m_false = 0 1060 | for m in range(mj+mj2,cl_num+1): 1061 | # Set variable search radius based on latitude 1062 | modlat = np.sqrt(C_lat[mj]**2) 1063 | if(modlat > 30.0) : 1064 | s_rad = 400.0 # km 1065 | elif(modlat < 15.0) : 1066 | s_rad = 600.0 # km 1067 | else: 1068 | s_rad = 800.0 - 13.3333*modlat 1069 | 1070 | s_rad = s_rad*srch_rad/600.0 1071 | if (last_time == False) : 1072 | print(" Try clump "+str(m)+" Search radius =" + str(SR)+" "+str(s_rad)) 1073 | else: 1074 | print(" Try clump "+str(m)+" one time-step later. Search radius ="+ str(SR) +" "+str(s_rad)) 1075 | 1076 | #print( "m = " + str(m) + " cl_num = " + str(cl_num)) 1077 | # If C_date > nxt_dt search for a link in the next time period then exit 1078 | # (Assumes CTs are ordered in time) 1079 | # write(6,*) "C_date[m],C_time[m],nxt_dt,nxt_tm,m:",C_date[m],C_time[m],nxt_dt,nxt_tm,m 1080 | if ( (C_date[m] > nxt_dt) or ( (C_date[m] == nxt_dt) and (C_time[m] > nxt_tm) ) or (m == cl_num) ) : 1081 | S_flag = False 1082 | SR = SR + sr_inc 1083 | if( (SR > s_rad) and (SR < s_rad+sr_inc-0.1) ): 1084 | SR = s_rad 1085 | if (SR <= s_rad) : 1086 | repeat_srch = True 1087 | # print("m = "+ str(m) + "cl_num = " + str(cl_num) +" SR ="+str(SR)+" repeat_srch = "+str(repeat_srch)) 1088 | if (m == cl_num) : 1089 | print(" Search stopped because there are no more clumps to search.\n") 1090 | else: 1091 | print(" Search stopped because clump",m,"is from a later time period.\n") 1092 | 1093 | # If a link was found to a C_thrsh=false clump, add it now. 1094 | if ( (m_false != 0) and (repeat_srch == False) ) : 1095 | print(" No C_thrsh = true clumps found. 
Add stored clump:"+str(m_false)) 1096 | C_flag[m_false] = True # T = taken, i.e., it will be excluded from further searches 1097 | l_num = l_num + 1 1098 | S_CT[s_num][l_num] = m_false 1099 | # print(" Clumps "+str(mj)+" "+str(m_false)+" are linked, on string "+str(s_num)) 1100 | mj = m_false 1101 | if (l_num > lmax) : 1102 | #call reallocate_S_CT 1103 | print("l_num exceeds array dimensions.") 1104 | print("l_num = "+str(l_num)+" Array = "+ str(lmax)) 1105 | return 1106 | 1107 | nxt_dt,nxt_tm = next_time(nxt_dt,nxt_tm,nxt_dt,nxt_tm) 1108 | SR = sr1 1109 | double_exit = True 1110 | C_ign_flag.fill(False) 1111 | S_flag = True 1112 | #endif 1113 | break 1114 | #endif 1115 | 1116 | #write(6,*) "C_date,C_time,nxt_dt,nxt_tm",C_date[m],C_time[m],nxt_dt,nxt_tm 1117 | if ( (C_date[m] != nxt_dt) and (C_time[m] != nxt_tm) ): 1118 | C_ign_flag[m] = True 1119 | if ( (C_date[m] == nxt_dt) and (C_time[m] == nxt_tm) and (C_flag[m] == False) and (C_ign_flag[m] == False) ) : 1120 | print(" How far apart are clumps"+str(mj)+" and "+ str(m)+"?") 1121 | #write(6,*) " C_ign_flag[m] = ",C_ign_flag[m] 1122 | #**************************************** 1123 | # Test tracker without half-way storm position estimates. 
1124 | # Comment out the lines between the stars below 1125 | # 1126 | # call proximity_search(C_lat(mj),C_lon(mj),C_lat[m],C_lon[m],SR,S_flag,dist) 1127 | # dist_store = dist 1128 | #************************************** 1129 | 1130 | #************************************* 1131 | # Use half-way storm position estimates 1132 | # Comment out the lines between the stars above 1133 | 1134 | #print("LatMJ "+str(C_latp1[mj-1])+" "+str(C_lonp1[mj-1])+" " +str(C_latp1[mj])+" "+str(C_lonp1[mj])) 1135 | #print("LatM "+str(C_latm1[m-1])+" "+str(C_lonm1[m-1])+" " +str(C_latm1[m])+" "+str(C_lonm1[m])) 1136 | 1137 | dist_store = 0.0 1138 | if ( last_time == False) : 1139 | S_flag,dist = proximity_search(C_latp1[mj],C_lonp1[mj],C_latm1[m],C_lonm1[m],SR,S_flag,dist) 1140 | if (dist < dist_store): 1141 | dist_store = dist 1142 | else: 1143 | S_flag,dist = proximity_search(C_latp2[mj],C_lonp2[mj],C_latm2[m],C_lonm2[m],SR,S_flag,dist) 1144 | if (dist < dist_store): 1145 | dist_store = dist 1146 | 1147 | #************************************* 1148 | if (dist_store > s_rad) : 1149 | C_ign_flag[m] = True 1150 | #write(6,*) "minimum dist > s_rad for clump",m 1151 | #write(6,*) "dist_store =",dist_store 1152 | #endif 1153 | # If link exists, add to string and exit both loops. If link does not exist S_flag will 1154 | # be returned as false. 
It is reset to true before trying next CT 1155 | double_exit = False 1156 | if (S_flag) : 1157 | if (C_thrsh[m] == True) : 1158 | C_flag[m] = True # T = taken, i.e., it will be excluded from further searches 1159 | l_num = l_num + 1 1160 | S_CT[s_num][l_num] = m 1161 | print(" Clumps"+str(mj)+" "+str(m)+" are linked, on string "+ str(s_num)) 1162 | mj = m 1163 | if (l_num > lmax) : 1164 | #call reallocate_S_CT 1165 | print("l_num exceeds array dimensions.") 1166 | print("l_num = "+str(l_num)+" Array = "+ str(lmax)) 1167 | return 1168 | #endif 1169 | nxt_dt,nxt_tm = next_time(nxt_dt,nxt_tm,nxt_dt,nxt_tm) 1170 | SR = sr1 1171 | double_exit = True 1172 | C_ign_flag.fill(False) 1173 | break 1174 | else: 1175 | # A link to a C_thrsh=false clump has been found. Add it later if no 'true' links found 1176 | # Add only the first one found for now. May need to try something more clever later. 1177 | if (m_false == 0): 1178 | m_false = m 1179 | print(" Link found, but C_thrsh=false. Link added later if no true C_thrsh found") 1180 | #endif 1181 | else: 1182 | S_flag = True 1183 | #endif 1184 | #endif 1185 | #end-for 1186 | 1187 | 1188 | #print("*** M = " + str(m)) 1189 | if (double_exit) : 1190 | break 1191 | 1192 | if ( (C_date[m] > nxt_dt) and (repeat_srch == False) and ( last_time == False) ) : 1193 | last_time = True 1194 | crnt_dt = nxt_dt 1195 | crnt_tm = nxt_tm 1196 | mj2 = m - mj 1197 | nxt_dt,nxt_tm = next_time(crnt_dt,crnt_tm,nxt_dt,nxt_tm) 1198 | mj,mj2,nxt_dt,nxt_tm,S_flag,l_num,s_num,mret,last_time = find_link(mj,mj2,nxt_dt,nxt_tm,S_flag,l_num,s_num,mret,last_time) 1199 | nxt_dt = crnt_dt 1200 | nxt_tm = crnt_tm 1201 | if(S_flag == False) : 1202 | print(" Second search stopped because clump"+str(m)+" is from a later time period.") 1203 | break 1204 | #endif 1205 | #endif 1206 | #enddo 1207 | C_ign_flag.fill(False) 1208 | 1209 | mret = m 1210 | print("******************************* mret ="+str(mret)) 1211 | return 
def start(inputFile):
    """Top-level driver: run the full OWZ clump-tracking pipeline on one
    input file.  Each step below mutates module-global state set up by the
    previous one, so the call order is significant."""

    # Read the gridded OWZ/threshold data for this run.
    read_data(inputFile)

    # Read grid/time increments and tracking thresholds (dlon, dlat, dtim, ...).
    print("reading data_info_file")
    read_info("data_info_file")


    # Allocate space for clump variables
    print("Before clump_allocate")
    clump_allocate()

    # Find threshold clumps and calculate centroid threshold (CT)
    # values for each clump
    print("Before centroid_threshold")
    centroid_threshold()



    # Allocate space for S_CT array
    S_CT_allocate()

    # String together CTs in time
    CT_strings()


#if __name__ == "__main__":
#    main()