├── .github └── workflows │ └── python-publish.yml ├── .gitignore ├── .old ├── 2dm2gr3.pl ├── DPackage.py ├── Gen_Hycom_3Dth_old.py ├── Gen_hotstart_hycom.py ├── Interp_hotstart.f90 ├── convert_matfile_format.py ├── date_proc.py ├── gen_bctides.py ├── gen_hycom_3Dth.py ├── gen_hycom_nudge.py ├── get_schism_param.py ├── grd2sms.pl ├── loadz.py ├── mfft.py ├── mpi_test.py ├── netcdf.py ├── pcompute_OLDIO_flux.py ├── pextract_OLDIO_schism_slab.py ├── pextract_OLDIO_schism_xyz.py ├── pextract_schism_slab.py ├── plot_elev_sample.py ├── pylib0.py ├── shpfile.py └── str2num.py ├── LICENSE ├── README.md ├── pylib.py ├── pylib_experimental ├── README.md ├── pylib_experimental │ └── schism_file.py └── setup.py ├── scripts ├── .run.job ├── Ddiff ├── Dparam ├── Harmonic_Analysis │ ├── .gitignore │ ├── compile.py │ ├── nrutil.c │ ├── nrutil.h │ ├── readme.txt │ ├── tidal_analysis.c │ ├── tidal_const.dat │ └── tide_fac_const.npz ├── __init__.py ├── change_ssh_hotstart.py ├── checkrun ├── chmodDir ├── cmake_schism ├── cmb_hotstart ├── cmb_icm ├── copyrun ├── download_AVISO.py ├── download_CMEMS.py ├── download_HFRadar.py ├── download_hycom.py ├── gen_bctides.py ├── gen_fluxflag.py ├── gen_fluxth_USGS.py ├── gen_hycom_3Dth_nudge.py ├── gen_hycom_hotstart.py ├── gen_narr_sflux.py ├── gen_shapiro.py ├── gen_vqs.py ├── gplot ├── grd2sms ├── hotstart_proc.py ├── make_sflux_links.py ├── make_sflux_subdomain.py ├── pextract_OLDIO_schism_fabm_xyz.py ├── pextract_schism.py ├── pextract_schism_flux.py ├── pextract_schism_slab.py ├── pextract_tidal_harmonics.py ├── pload_dem.py ├── pplot ├── prj.npz ├── proj ├── pschism_output_subset.py ├── run.cmb_hotstart ├── run.cmb_outputs ├── run.levante ├── run.schism ├── run_mpi_template.py ├── schismcheck ├── schismview ├── sflux_template.npz ├── sms2grd ├── subset_outputs_parallel.py └── sync_outputs ├── setup.cfg ├── setup.py ├── src ├── __init__.py ├── mylib.py └── schism_file.py └── tutorial.ipynb /.github/workflows/python-publish.yml: 
-------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package using Twine when a release is created 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries 3 | 4 | # This workflow uses actions that are not certified by GitHub. 5 | # They are provided by a third-party and are governed by 6 | # separate terms of service, privacy policy, and support 7 | # documentation. 8 | 9 | name: Upload Python Package 10 | 11 | on: 12 | release: 13 | types: [published] 14 | 15 | permissions: 16 | contents: read 17 | 18 | jobs: 19 | deploy: 20 | 21 | runs-on: ubuntu-latest 22 | 23 | steps: 24 | - uses: actions/checkout@v3 25 | - name: Set up Python 26 | uses: actions/setup-python@v3 27 | with: 28 | python-version: '3.x' 29 | - name: Install dependencies 30 | run: | 31 | python -m pip install --upgrade pip 32 | pip install build 33 | - name: Build package 34 | run: python -m build 35 | - name: Publish package 36 | uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 37 | with: 38 | user: __token__ 39 | password: ${{ secrets.PYPI_API_TOKEN }} 40 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .* 2 | __pycache__/ 3 | !.gitignore 4 | **/pylib.pyc 5 | *egg-info 6 | build 7 | dist 8 | -------------------------------------------------------------------------------- /.old/2dm2gr3.pl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl 2 | # 3 | # Convert SMS 2dm grid to .gr3 format, simplified. Assumes no extra nodes. 
4 | # 5 | 6 | if ( @ARGV != 2 ) { 7 | print "Usage: $0 [SMS grid file] [.gr3 file]\n"; 8 | exit(1); 9 | } 10 | 11 | $file = $ARGV[0]; 12 | $outfile = $ARGV[1]; 13 | 14 | open( IN, "<$file" ) || die "Can't open $file\n"; 15 | open( OUT, ">$outfile" ) || die "Can't open $outfile\n"; 16 | 17 | my @lines = ; 18 | close(IN); 19 | 20 | my $ne = 1; 21 | my $np = 1; 22 | my @nes = (); 23 | my @nps = (); 24 | 25 | for (my $i=0;$i<@lines; $i++) { 26 | if ($lines[$i] =~ /^E3T/) { 27 | my ($junk, $junk, $n1, $n2, $n3, $junk) = split(' ', $lines[$i]); 28 | $nes[$ne - 1] = "$ne 3 $n1 $n2 $n3\n"; 29 | $ne++; 30 | } elsif ($lines[$i] =~ /^E4Q/){ 31 | my ($junk, $junk, $n1, $n2, $n3,$n4, $junk) = split(' ', $lines[$i]); 32 | $nes[$ne - 1] = "$ne 4 $n1 $n2 $n3 $n4\n"; 33 | $ne++; 34 | } elsif ($lines[$i] =~ /^ND/) { 35 | my ($junk, $num, $x, $y, $d) = split(' ', $lines[$i]); 36 | $nps[$np - 1] = "$np $x $y $d\n"; 37 | $np++; 38 | } 39 | } 40 | 41 | $ne--; 42 | $np--; 43 | 44 | print OUT "2dm2gr3\n$ne $np\n";; 45 | print OUT "@nps"; 46 | print OUT "@nes"; 47 | close(OUT); 48 | -------------------------------------------------------------------------------- /.old/DPackage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | ''' 3 | compare package difference for different python verions 4 | usage: DPackage pip3.5 pip3.7 (only shows missing pkgs) 5 | DPackage -a pip3.5 pip3.7 (shows all information) 6 | ''' 7 | from pylib import * 8 | 9 | #read inputs 10 | pv=sys.argv[1:] 11 | 12 | sflag=True 13 | if pv[0]=='-a': 14 | sflag=False 15 | pv=pv[1:] 16 | 17 | #parse all the packages for each python version 18 | S=[]; svars=[] 19 | for pvi in pv: 20 | lines=command_outputs('{} list'.format(pvi)).stdout.split('\n') 21 | Si=dict(); 22 | for line in lines: 23 | sline=line.split() 24 | 25 | #check whether it is a package line 26 | if len(sline)!=2: continue 27 | if sline[1].split('.')[0][0]=='(': 28 | if not 
sline[1].split('.')[0][1].isdigit(): continue 29 | else: 30 | if not sline[1].split('.')[0].isdigit(): continue 31 | 32 | #add package 33 | Si[sline[0]]=sline[1] 34 | 35 | #save 36 | S.append(Si) 37 | svars.extend(Si.keys()) 38 | 39 | #compare different python packages 40 | svars=unique(array(svars)) 41 | sformat='{:25} '+'{:25} '*len(pv) 42 | 43 | print(sformat.format('Package:',*pv)) 44 | for svar in svars: 45 | vi=[]; sflag2=True 46 | for m in arange(len(pv)): 47 | if svar in S[m].keys(): 48 | vi.append(S[m][svar]) 49 | else: 50 | vi.append('') 51 | sflag2=False 52 | if sflag2*sflag: continue 53 | print(sformat.format(svar,*vi)) 54 | -------------------------------------------------------------------------------- /.old/Gen_Hycom_3Dth_old.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | #generate SCHISM 3D and 2D boundary condition based on Hycom data 3 | from pylib import * 4 | 5 | #----inputs-------------- 6 | StartT=datenum(2005,1,1) 7 | EndT=datenum(2005,1,6) 8 | 9 | Var=['surf_el','water_temp','salinity',['water_u','water_v']]; 10 | VarName=['elev','temp','salt',['Ux','Uy']]; 11 | ncname=['elev2D.th.nc','TEM_3D.th.nc','SAL_3D.th.nc','uv3D.th.nc'] 12 | 13 | dir_hycom='HYCOM/Data'; 14 | 15 | #---read grid---- 16 | gd=read_schism_hgrid('../hgrid.ll') 17 | 18 | #index of pts for interpolation 19 | node=gd.iobn[0] #open boundary node index 20 | #node=r_[gd.iobn[0],gd.iobn[1]] #open boundary node index 21 | 22 | #---get z coordinate------ 23 | zcor=read_schism_vgrid('../vgrid.in',gd,node=node,flag=1); 24 | zcor=-zcor; fp=zcor<0; zcor[fp]=0; fp=zcor>5000; zcor[fp]=5000; 25 | nvrt=zcor.shape[1] 26 | 27 | #---interpolation pts---- 28 | dt=1/4; Time=arange(StartT,EndT+dt,dt); 29 | loni=gd.x[node]; lati=gd.y[node]; 30 | bxy=c_[lati,loni] 31 | 32 | lon2i=tile(loni,[nvrt,1]).T; lat2i=tile(lati,[nvrt,1]).T; 33 | bxyz=c_[zcor.reshape(size(zcor)),lat2i.reshape(size(lat2i)),lon2i.reshape(size(lon2i))] 34 | 35 
| #---read Hycom data and interpolate onto boundary nodes------------------ 36 | Dt=[]; 37 | for i in arange(len(Var)): 38 | vari=Var[i]; varnamei=VarName[i]; ncnamei=ncname[i] 39 | #----build npz file for 3Dth or 2Dth---------- 40 | nd=npz_data() 41 | nd.dimname=['nOpenBndNodes', 'nLevels', 'nComponents', 'one', 'time'] 42 | if varnamei=='elev': 43 | nd.dims=[len(node),1,1,1,len(Time)] 44 | elif varnamei=='salt' or varnamei=='temp': 45 | nd.dims=[len(node),nvrt,1,1,len(Time)] 46 | elif isinstance(varnamei,list): 47 | nd.dims=[len(node),nvrt,2,1,len(Time)] 48 | nd.file_format='NETCDF4' 49 | 50 | #--time step, time, and time series---- 51 | nd.vars=['time_step', 'time', 'time_series']; 52 | nd.time_step=npz_data() 53 | nd.time_step.attrs=['long_name'];nd.time_step.long_name='time step in seconds'; 54 | nd.time_step.dimname=('one',); nd.time_step.val=array(dt*86400).astype('float32'); 55 | nd.time=npz_data() 56 | nd.time.attrs=['long_name'];nd.time.long_name='simulation time in seconds'; 57 | nd.time.dimname=('time',); nd.time.val=(Time-Time[0])*86400; 58 | nd.time_series=npz_data() 59 | nd.time_series.attrs=[]; 60 | nd.time_series.dimname=('nComponents','nLevels','nOpenBndNodes','time'); 61 | 62 | t0=time.time(); 63 | T0=[]; Data0=[]; 64 | for ti in arange(StartT,EndT+1): 65 | t1=num2date(ti); t2=num2date(ti+1-1/24/60); 66 | 67 | if isinstance(vari,list): 68 | fname='Hycom_{}_{}_{}.nc'.format(varnamei[0],t1.strftime('%Y%m%dZ%H%M00'),t2.strftime('%Y%m%dZ%H%M00')) 69 | fname2='Hycom_{}_{}_{}.nc'.format(varnamei[1],t1.strftime('%Y%m%dZ%H%M00'),t2.strftime('%Y%m%dZ%H%M00')) 70 | if not os.path.exists(r'{}/{}'.format(dir_hycom,fname)): continue 71 | if not os.path.exists(r'{}/{}'.format(dir_hycom,fname2)): continue 72 | print(fname+'; '+fname2) 73 | C=ReadNC('{}/{}'.format(dir_hycom,fname),2); C2=ReadNC('{}/{}'.format(dir_hycom,fname2),2) 74 | 75 | #get value 76 | exec('val=C.{}.val'.format(vari[0])) 77 | exec('val2=C2.{}.val'.format(vari[1])) 78 | val=array(val); 
fp=val<-29999; val2=array(val2); fp2=val2<-29999; 79 | val[fp]=nan; val2[fp2]=nan; 80 | else: 81 | fname='Hycom_{}_{}_{}.nc'.format(varnamei,t1.strftime('%Y%m%dZ%H%M00'),t2.strftime('%Y%m%dZ%H%M00')) 82 | if not os.path.exists(r'{}/{}'.format(dir_hycom,fname)): continue 83 | print(fname) 84 | C=ReadNC('{}/{}'.format(dir_hycom,fname),2) 85 | 86 | #get value 87 | exec('val=C.{}.val'.format(vari)) 88 | val=array(val); fp=val<-29999; 89 | val[fp]=nan 90 | 91 | 92 | ti=datestr2num(C.time.time_origin)+array(C.time.val)/24 93 | cloni=array(C.lon.val); cloni=mod(cloni,360)-360 94 | clati=array(C.lat.val); 95 | 96 | #------define data region extracted 97 | ind_lon=nonzero((cloni<=max(loni)+0.1)*(cloni>=min(loni)-0.1))[0]; 98 | ind_lat=nonzero((clati<=max(lati)+0.1)*(clati>=min(lati)-0.1))[0]; 99 | i1_lon=ind_lon.min(); i2_lon=i1_lon+len(ind_lon) 100 | i1_lat=ind_lat.min(); i2_lat=i1_lat+len(ind_lat) 101 | 102 | cloni=cloni[i1_lon:i2_lon]; clati=clati[i1_lat:i2_lat] 103 | 104 | if varnamei=='elev': 105 | for m in arange(len(ti)): 106 | valii=squeeze(val[m,i1_lat:i2_lat,i1_lon:i2_lon]) 107 | 108 | #interpolation 109 | fd=sp.interpolate.RegularGridInterpolator((clati,cloni),valii,fill_value=nan) 110 | vi=fd(bxy) 111 | 112 | #remove nan pts 113 | fp=isnan(vi); 114 | if sum(fp)!=0: 115 | vi[fp]=sp.interpolate.griddata(bxy[~fp,:],vi[~fp],bxy[fp,:],'nearest') 116 | 117 | T0.append(ti[m]); Data0.append(vi); 118 | else: 119 | #------define data region extracted for depth 120 | cdepi=array(C.depth.val) 121 | ind_dep=nonzero((cdepi<=zcor.max()+1000)*(cdepi>=zcor.min()-100))[0]; 122 | i1_dep=ind_dep.min(); i2_dep=i1_dep+len(ind_dep) 123 | cdepi=cdepi[i1_dep:i2_dep]; 124 | 125 | for m in arange(len(ti)): 126 | T0.append(ti[m]); 127 | valii=squeeze(val[m,i1_dep:i2_dep,i1_lat:i2_lat,i1_lon:i2_lon]) 128 | 129 | #interpolation 130 | fd=sp.interpolate.RegularGridInterpolator((cdepi,clati,cloni),valii,fill_value=nan) 131 | vi=fd(bxyz) 132 | 133 | #remove nan pts 134 | fp=isnan(vi); 135 | if 
sum(fp)!=0: 136 | vi[fp]=sp.interpolate.griddata(bxyz[~fp,:],vi[~fp],bxyz[fp,:],'nearest') 137 | 138 | vi=vi.reshape(zcor.shape) 139 | 140 | #----if variable is velocity 141 | if isinstance(varnamei,list): 142 | val2ii=squeeze(val2[m,i1_dep:i2_dep,i1_lat:i2_lat,i1_lon:i2_lon]) 143 | 144 | #interpolation 145 | fd=sp.interpolate.RegularGridInterpolator((cdepi,clati,cloni),val2ii,fill_value=nan) 146 | v2i=fd(bxyz) 147 | 148 | #remove nan pts 149 | fp=isnan(v2i); 150 | if sum(fp)!=0: 151 | v2i[fp]=sp.interpolate.griddata(bxyz[~fp,:],v2i[~fp],bxyz[fp,:],'nearest') 152 | 153 | v2i=v2i.reshape(zcor.shape) 154 | Data0.append(r_[expand_dims(vi,0),expand_dims(v2i,0)]) 155 | 156 | else: 157 | Data0.append(vi) 158 | 159 | T0=array(T0); Data0=array(Data0) 160 | 161 | #---check whether there is nan 162 | y0=Data0.reshape([size(Data0)]); fp=isnan(y0) 163 | if sum(fp)!=0: 164 | print('{} has NaN: check'.format(varnamei)) 165 | sys.exit() 166 | 167 | #interpolation in time 168 | ds=Data0.shape; y0=Data0.reshape([ds[0],prod(ds[1:])]); 169 | #lpfilter 170 | if varnamei=='elev' or isinstance(varnamei,list): 171 | y0=lpfilt(y0,dt,0.9) 172 | fd=interpolate.interp1d(T0,y0,axis=0,fill_value='extrapolate'); 173 | Data=reshape(fd(Time),[len(Time),*ds[1:]]); 174 | 175 | #----put data into ncfile 176 | if varnamei=='elev': 177 | Data=Data[:,:,None,None].transpose([3,2,1,0]) 178 | elif varnamei=='salt' or varnamei=='temp': 179 | Data=Data[:,:,:,None].transpose([3,2,1,0]) 180 | elif isinstance(varnamei,list): 181 | Data=Data.transpose([1,3,2,0]) 182 | nd.time_series.val=Data.astype('float32'); 183 | 184 | #--write ncfile---- 185 | if os.path.exists(ncnamei): os.remove(ncnamei) 186 | WriteNC(nd,ncnamei,med=2,order=1) 187 | 188 | Dt.append(time.time()-t0); 189 | 190 | #----print time consumed-------------------- 191 | for i in arange(len(Dt)): 192 | varnamei=VarName[i] 193 | print('reading {}: {}'.format(varnamei,Dt[i])) 194 | 195 | #---dist--------------------------------- 196 | 
#bP=gd.x[gd.iobn[0]]+1j*gd.y[gd.iobn[0]]; 197 | #L=zeros(bP.shape); 198 | #for i in arange(gd.nobn[0]-1): 199 | # L[i+1]=L[i]+abs(bP[i+1]-bP[i]) 200 | 201 | ##----plot boundary grid--------------- 202 | #for i in arange(zcor.shape[0]): 203 | # xi=ones(zcor.shape[1])*L[i] 204 | # zi=zcor[i,:] 205 | # plot(xi,zi,'k-') 206 | # 207 | #for i in arange(zcor.shape[1]): 208 | # xi=L 209 | # zi=zcor[:,i] 210 | # plot(xi,zi,'k-') 211 | -------------------------------------------------------------------------------- /.old/Gen_hotstart_hycom.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from pylib import * 3 | 4 | #----inputs-------------- 5 | dir_hycom='./HYCOM/Data'; 6 | date_hycom=datenum(2005,1,1) #date for hycom file 7 | 8 | Var=['water_temp','salinity']; 9 | VarName=['temp','salt']; 10 | 11 | t0=time.time(); 12 | #---read grid---- 13 | gd=read_schism_hgrid('hgrid.ll') 14 | 15 | #index of pts for interpolation 16 | node=arange(gd.np) 17 | 18 | #---get z coordinate------ 19 | zcor=read_schism_vgrid('vgrid.in',gd,node=node,flag=1); 20 | zcor=-zcor; fp=zcor<0; zcor[fp]=0; fp=zcor>5000; zcor[fp]=5000; 21 | nvrt=zcor.shape[1] 22 | 23 | #---interpolation pts---- 24 | loni=gd.x[node]; lati=gd.y[node]; 25 | bxy=c_[lati,loni] 26 | 27 | lon2i=tile(loni,[nvrt,1]).T; lat2i=tile(lati,[nvrt,1]).T; 28 | bxyz=c_[zcor.reshape(size(zcor)),lat2i.reshape(size(lat2i)),lon2i.reshape(size(lon2i))] 29 | 30 | #--------interpolation -------------------------------------------------------- 31 | #interpolate is done only for salinity and temperature,but could modified to include elevation and velocity 32 | Data=npz_data() 33 | for i in arange(len(Var)): 34 | vari=Var[i]; varnamei=VarName[i]; 35 | t1=num2date(date_hycom); t2=num2date(date_hycom+1-1/24/60) 36 | fname='{}/Hycom_{}_{}_{}.nc'.format(dir_hycom,varnamei,t1.strftime('%Y%m%dZ%H%M00'),t2.strftime('%Y%m%dZ%H%M00')) 37 | 38 | if os.path.exists(fname): 39 | print(fname) 40 | 
41 | C=ReadNC(fname,2) 42 | 43 | #get value 44 | exec('val=C.{}.val'.format(vari)) 45 | val=array(val); fp=val<-29999; 46 | val[fp]=nan 47 | 48 | ti=datestr2num(C.time.time_origin)+array(C.time.val)/24 49 | 50 | cdepi=array(C.depth.val) 51 | clati=array(C.lat.val); 52 | cloni=array(C.lon.val); cloni=mod(cloni,360)-360 53 | 54 | #------define data region extracted---- 55 | ind_dep=nonzero((cdepi<=zcor.max()+1000)*(cdepi>=zcor.min()-100))[0]; 56 | ind_lat=nonzero((clati<=max(lati)+0.1)*(clati>=min(lati)-0.1))[0]; 57 | ind_lon=nonzero((cloni<=max(loni)+0.1)*(cloni>=min(loni)-0.1))[0]; 58 | i1_dep=ind_dep.min(); i2_dep=i1_dep+len(ind_dep) 59 | i1_lat=ind_lat.min(); i2_lat=i1_lat+len(ind_lat) 60 | i1_lon=ind_lon.min(); i2_lon=i1_lon+len(ind_lon) 61 | 62 | cdepi=cdepi[i1_dep:i2_dep]; cloni=cloni[i1_lon:i2_lon]; clati=clati[i1_lat:i2_lat]; 63 | 64 | #----extract hycom data for the defined region 65 | valii=squeeze(val[0,i1_dep:i2_dep,i1_lat:i2_lat,i1_lon:i2_lon]) 66 | 67 | #interpolation 68 | fd=sp.interpolate.RegularGridInterpolator((cdepi,clati,cloni),valii,fill_value=nan) 69 | fdn=sp.interpolate.RegularGridInterpolator((cdepi,clati,cloni),valii,'nearest') 70 | vi=fd(bxyz); vin=fdn(bxyz) 71 | 72 | #remove nan pts 73 | fp=isnan(vi); 74 | if sum(fp)!=0: 75 | #vi[fp]=vin[fp] 76 | vi[fp]=sp.interpolate.griddata(bxyz[~fp,:],vi[~fp],bxyz[fp,:],'nearest') 77 | 78 | fp=isnan(vi); 79 | if sum(fp)!=0: sys.exit() 80 | 81 | vi=vi.reshape(zcor.shape) 82 | exec('Data.{}=vi'.format(varnamei)) 83 | 84 | #---compute tr_nd and tr_el------ 85 | tr_nd=c_[Data.temp[:,:,None],Data.salt[:,:,None]].transpose([2,1,0]) 86 | tr_el=zeros([2,nvrt,gd.ne]) 87 | for i in arange(2): 88 | for k in arange(nvrt): 89 | gd.dp=squeeze(tr_nd[i,k,:]); 90 | gd.compute_ctr(); 91 | tr_el[i,k,:]=gd.dpe; 92 | 93 | #-----build hotstart.nc-------------------------------------------------------- 94 | nd=npz_data() 95 | nd.dimname=['node','elem','side','nVert','ntracers','one'] 96 | nd.dims=[gd.np,gd.ne,gd.ns,nvrt,2,1] 
97 | nd.file_format='NETCDF4' 98 | 99 | #--time step, time, and time series---- 100 | nd.vars=['time','iths','ifile','idry_e','idry_s','idry','eta2','we','tr_el','tr_nd',\ 101 | 'tr_nd0','su2','sv2','q2','xl','dfv','dfh','dfq1','dfq2'] 102 | ndi=npz_data(); ndi.attrs=[]; #template 103 | 104 | nd.time=npz_data(); nd.time.dimname=('one',);nd.time.val=array(0.0) #time 105 | nd.iths=npz_data(); nd.iths.dimname=('one',);nd.iths.val=array(0) #iths 106 | nd.ifile=npz_data(); nd.ifile.dimname=('one',);nd.ifile.val=array(1) #ifile 107 | 108 | nd.idry_e=npz_data(); nd.idry_e.dimname=('elem',);nd.idry_e.val=zeros(gd.ne).astype('int32') #idry_e 109 | nd.idry_s=npz_data(); nd.idry_s.dimname=('side',);nd.idry_s.val=zeros(gd.ns).astype('int32') #idry_s 110 | nd.idry=npz_data(); nd.idry.dimname=('node',);nd.idry.val=zeros(gd.np).astype('int32') #idry 111 | nd.eta2=npz_data(); nd.eta2.dimname=('node',);nd.eta2.val=zeros(gd.np) #eta2 112 | 113 | nd.we=npz_data(); nd.we.dimname=('nVert','elem');nd.we.val=zeros([nvrt,gd.ne]) #eta2 114 | 115 | nd.tr_el=npz_data(); nd.tr_el.dimname=('ntracers','nVert','elem');nd.tr_el.val=tr_el #tr_el 116 | nd.tr_nd=npz_data(); nd.tr_nd.dimname=('ntracers','nVert','node');nd.tr_nd.val=tr_nd #tr_nd 117 | nd.tr_nd0=npz_data(); nd.tr_nd0.dimname=('ntracers','nVert','node');nd.tr_nd0.val=tr_nd #tr_nd0 118 | 119 | nd.su2=npz_data(); nd.su2.dimname=('nVert','side');nd.su2.val=zeros([nvrt,gd.ns]) #su2 120 | nd.sv2=npz_data(); nd.sv2.dimname=('nVert','side');nd.sv2.val=zeros([nvrt,gd.ns]) #sv2 121 | 122 | nd.q2=npz_data(); nd.q2.dimname=('nVert','node');nd.q2.val=zeros([nvrt,gd.np]) #q2 123 | nd.xl=npz_data(); nd.xl.dimname=('nVert','node');nd.xl.val=zeros([nvrt,gd.np]) #xl 124 | nd.dfv=npz_data(); nd.dfv.dimname=('nVert','node');nd.dfv.val=zeros([nvrt,gd.np]) #dfv 125 | nd.dfh=npz_data(); nd.dfh.dimname=('nVert','node');nd.dfh.val=zeros([nvrt,gd.np]) #dfh 126 | nd.dfq1=npz_data(); nd.dfq1.dimname=('nVert','node');nd.dfq1.val=zeros([nvrt,gd.np]) #dfq1 127 | 
nd.dfq2=npz_data(); nd.dfq2.dimname=('nVert','node');nd.dfq2.val=zeros([nvrt,gd.np]) #dfq2 128 | 129 | for vari in nd.vars: 130 | exec('nd.{}.attrs=[]'.format(vari)) 131 | 132 | #--write ncfile---- 133 | ncname='hotstart.nc' 134 | if os.path.exists(ncname): os.remove(ncname) 135 | WriteNC(nd,ncname,med=2,order=1) 136 | 137 | #---time used-------- 138 | print('time consumed for generating hotstart: {} s'.format(time.time()-t0)); 139 | 140 | sys.exit() 141 | #----plot for check------------------------------------------------------------ 142 | for i in arange(12): 143 | subplot(3,4,i+1) 144 | gd.dp=vi[:,i*4] 145 | gd.compute_ctr() 146 | gd.plot_grid(fmt=1,ec=None,clim=[10,35]); 147 | # colorbar(gd.hc) 148 | -------------------------------------------------------------------------------- /.old/convert_matfile_format.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/evn python3 2 | from pylib import * 3 | 4 | def convert_matfile_format(file): 5 | #input for a directory or a matfile 6 | # r'C:\Users\Zhengui\Desktop\Observation2\DWR\SFBay_DWRData_SSI.mat']; 7 | #file=r'D:\OneDrive\Python\tem.mat'; 8 | fname=[]; 9 | if os.path.isdir(file): 10 | sfile=os.listdir(file) 11 | for sfilei in sfile: 12 | ename=sfilei.rstrip().split('.')[-1] 13 | if ename=='mat': 14 | fname.append(file+os.sep+sfilei) 15 | else: 16 | fname=[file]; 17 | 18 | fid=open('log_matfile_convert.txt','w+') 19 | #convert mat format from v7.3 to v7 20 | cdir=os.getcwd(); 21 | os.chdir('D:\OneDrive\Python') 22 | import matlab.engine 23 | eng = matlab.engine.start_matlab() 24 | 25 | for fn in fname: 26 | print('converting matfile: '+fn) 27 | dname=os.path.dirname(fn) 28 | bname=os.path.basename(fn).split('.')[0] 29 | fnv7=dname+os.sep+bname+'_v7' 30 | fnz=dname+os.sep+bname 31 | eflag=eng.convert_matfile_format(fn,fnv7,nargout=1) 32 | if eflag!=0: 33 | print('convert flag is %d: %s\n' % (eflag, fn)); 34 | fid.write('convert flag is %d: %s\n' % (eflag,fn)) 35 | 
continue 36 | convert_matfile(fnz,fnv7) 37 | os.remove(fnv7+'.mat') 38 | fid.close() 39 | os.chdir(cdir) 40 | 41 | #convert mat to npz 42 | def convert_matfile(fnz,fnv7): 43 | fc=np.vectorize(lambda x: x[0]) 44 | C=sp.io.loadmat(fnv7+'.mat') 45 | vn=C.keys(); 46 | 47 | iflag=0;Y={}; 48 | for vni in vn: 49 | if vni[:2]=='__': 50 | continue 51 | Ci=C[vni]; 52 | if issubdtype(Ci.dtype,np.number): 53 | Yi=Ci.copy(); 54 | else: 55 | Yi=fc(Ci) 56 | if vni=='Doy' or vni=='doy': 57 | Yi=Yi-366; 58 | Y[vni]=Yi 59 | savez_compressed(fnz,**Y) 60 | 61 | 62 | if __name__ == "__main__": 63 | pass 64 | # fname=r'C:\Users\Zhengui\Desktop\Observation2\USGS\SFBay_USGSData_MAL.mat' 65 | # fname=r'C:\Users\Zhengui\Desktop\convert_matfile\tem.mat' 66 | # cmat.convert_matfile_format(fname) 67 | -------------------------------------------------------------------------------- /.old/date_proc.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/evn python3 2 | from pylib import * 3 | 4 | def datenum_0(*args): 5 | if len(args)==1: 6 | args=args[0]; 7 | 8 | args=array(args) 9 | args=args.astype('int') 10 | return datetime.datetime(*args) 11 | 12 | 13 | def datenum(*args,doy=0): 14 | args=array(args) 15 | e1=args[0] 16 | 17 | if hasattr(e1, "__len__"): 18 | if not hasattr(e1[0],"__len__"): 19 | f=datenum_0(*e1) 20 | else: 21 | f=apply_along_axis(datenum_0,1,e1) 22 | else: 23 | f=datenum_0(*args) 24 | if doy==0: 25 | return date2num(f) 26 | else: 27 | return f 28 | 29 | 30 | 31 | if __name__=='__main__': 32 | pass 33 | # n1=(2006.,1,1) 34 | # n2=array([2006,2,1]); 35 | # n3=array([[2006,3,2,1,1],[2006,3,3,3,0]]); 36 | # 37 | # f1=datenum(*n1); 38 | # f2=datenum(n2); 39 | # f3=datenum(n3,doy=1); 40 | 41 | -------------------------------------------------------------------------------- /.old/gen_bctides.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | #generate bctides.in's tide 
harmonics 3 | #specify inputs in the bottom 4 | from pylib import * 5 | 6 | def get_tide_nodal(tide_name,StartT,nday): 7 | tdir='tide_fac_improved' 8 | #compile the code 9 | os.system('cd {}; ifort -o tide_fac_improved tf_main.f90 tf_selfe.f90'.format(tdir)) 10 | 11 | #write input 12 | with open('{}/tide.in'.format(tdir),'w+') as fid: 13 | fid.write('{}\n'.format(nday)) 14 | fid.write('{} {} {} {}\n'.format(*flipud(StartT))) 15 | fid.write('0\n'); 16 | 17 | #run the code 18 | os.system('cd {}; ./tide_fac_improved =(StartT-1))*(mti<(EndT+1)); fnames=fnames[fpt]; mti=mti[fpt] 29 | sind=argsort(mti); mti=mti[sind]; fnames=fnames[sind] 30 | 31 | #read hgrid 32 | gd=loadz(grd).hgrid; vd=loadz(grd).vgrid; gd.x,gd.y=gd.lon,gd.lat; nvrt=vd.nvrt 33 | 34 | #for each variables 35 | for n,[sname,svar,mvar] in enumerate(zip(snames,svars,mvars)): 36 | if isinstance(svar,str): svar=[svar]; mvar=[mvar] 37 | 38 | #get nudge xyz 39 | gdn=read_schism_hgrid('{}_nudge.gr3'.format(sname.split('_')[0])); gdn.compute_ctr() 40 | bind=unique(gdn.elnode[gdn.dpe!=0,:].ravel()); bind=bind[bind>=0]; nobn=len(bind) 41 | lxi0=gd.x[bind]%360; lyi0=gd.y[bind]; bxy=c_[lxi0,lyi0] #for 2D 42 | lxi=tile(lxi0,[nvrt,1]).T.ravel(); lyi=tile(lyi0,[nvrt,1]).T.ravel() #for 3D 43 | if vd.ivcor==2: 44 | lzi=abs(compute_zcor(vd.sigma,gd.dp[bind],ivcor=2,vd=vd)).ravel() 45 | else: 46 | lzi=abs(compute_zcor(vd.sigma[bind],gd.dp[bind])).ravel(); 47 | bxyz=c_[lxi,lyi,lzi] 48 | sx0,sy0,sz0=None,None,None 49 | 50 | #interp in space 51 | S=zdata(); S.time=[] 52 | [exec('S.{}=[]'.format(i)) for i in mvar] 53 | for m,fname in enumerate(fnames): 54 | C=ReadNC('{}/{}'.format(dir_hycom,fname),1); print(fname) 55 | ctime=array(C.variables['time'])/24+datenum(2000,1,1); sx=array(C.variables['lon'][:])%360 56 | sy=array(C.variables['lat'][:]); sz=array(C.variables['depth'][:]); nz=len(sz) 57 | fpz=lzi>=sz.max(); lzi[fpz]=sz.max()-1e-6 58 | 59 | if not array_equal(sx,sx0)*array_equal(sy,sy0)*array_equal(sz,sz0): 60 | #get 
interp index for HYCOM data 61 | if ifix==0: 62 | sxi,syi=meshgrid(sx,sy); sxy=c_[sxi.ravel(),syi.ravel()]; 63 | cvs=array(C.variables['water_temp'][0]); sindns=[]; sindps=[] 64 | for ii in arange(nz): 65 | print('computing HYCOM interpation index: level={}/{}'.format(ii,nz)) 66 | cv=cvs[ii]; ds=cv.shape; cv=cv.ravel() 67 | fpn=abs(cv)>1e3; sindn=nonzero(fpn)[0]; sindr=nonzero(~fpn)[0]; sindp=sindr[near_pts(sxy[sindn],sxy[sindr])] 68 | sindns.append(sindn); sindps.append(sindp) 69 | 70 | #get interp index for pts 71 | sx0=sx[:]; sy0=sy[:]; sz0=sz[:]; print('get new interp indices: {}'.format(fname)) 72 | idx=((lxi[:,None]-sx0[None,:])>=0).sum(axis=1)-1; ratx=(lxi-sx0[idx])/(sx0[idx+1]-sx0[idx]) 73 | idy=((lyi[:,None]-sy0[None,:])>=0).sum(axis=1)-1; raty=(lyi-sy0[idy])/(sy0[idy+1]-sy0[idy]) 74 | idz=((lzi[:,None]-sz0[None,:])>=0).sum(axis=1)-1; ratz=(lzi-sz0[idz])/(sz0[idz+1]-sz0[idz]) 75 | 76 | S.time.extend(ctime) 77 | for i, cti in enumerate(ctime): 78 | for k,svari in enumerate(svar): 79 | exec("cv=array(C.variables['{}'][{}])".format(svari,i)); mvari=mvar[k] 80 | 81 | #remove HYCOM nan pts 82 | if ifix==0: 83 | for ii in arange(nz): 84 | sindn,sindp=sindns[ii],sindps[ii] 85 | cvi=cv[ii].ravel(); fpn=(abs(cvi[sindn])>1e3)*(abs(cvi[sindp])<1e3); cvi[sindn]=cvi[sindp]; fpn=abs(cvi)>1e3 #init fix 86 | if sum(fpn)!=0: fni=nonzero(fpn)[0]; fri=nonzero(~fpn)[0]; fpi=fri[near_pts(sxy[fni],sxy[fri])]; cvi[fni]=cvi[fpi] #final fix 87 | #fpn=abs(cv[ii].ravel())>1e3; cv[ii].ravel()[fpn]=sp.interpolate.griddata(sxy[~fpn,:],cv[ii].ravel()[~fpn],sxy[fpn,:],'nearest') #old method 88 | 89 | v0=array([cv[idz,idy,idx],cv[idz,idy,idx+1],cv[idz,idy+1,idx],cv[idz,idy+1,idx+1], 90 | cv[idz+1,idy,idx],cv[idz+1,idy,idx+1],cv[idz+1,idy+1,idx],cv[idz+1,idy+1,idx+1]]) 91 | 92 | #remove nan in parent pts 93 | if ifix==1: 94 | for ii in arange(8): fpn=abs(v0[ii])>1e3; v0[ii,fpn]=sp.interpolate.griddata(bxyz[~fpn,:],v0[ii,~fpn],bxyz[fpn,:],'nearest',rescale=True) 95 | 96 | 
v11=v0[0]*(1-ratx)+v0[1]*ratx; v12=v0[2]*(1-ratx)+v0[3]*ratx; v1=v11*(1-raty)+v12*raty 97 | v21=v0[4]*(1-ratx)+v0[5]*ratx; v22=v0[6]*(1-ratx)+v0[7]*ratx; v2=v21*(1-raty)+v22*raty 98 | vi=v1*(1-ratz)+v2*ratz; vi=vi.astype('float32') 99 | 100 | #save data 101 | exec('S.{}.append(vi)'.format(mvari)) 102 | C.close(); 103 | S.time=array(S.time); [exec('S.{}=array(S.{})'.format(i,i)) for i in mvar] 104 | 105 | #interp in time 106 | for mvari in mvar: 107 | exec('vi=S.{}'.format(mvari)) 108 | #svi=interpolate.interp1d(S.time,vi,axis=0)(mtime).astype('float32') 109 | svi=array([interpolate.interp1d(S.time,vi[:,i])(mtime).astype('float32') for i in arange(vi.shape[1])]).T; vi=None 110 | if iLP==1: svi=lpfilt(svi,dt,fc).astype('float32') #low-pass 111 | exec('S.{}=svi'.format(mvari)) 112 | S.time=mtime 113 | 114 | #reshape the data, and save 115 | [exec('S.{}=S.{}.reshape([{},{},{}])'.format(i,i,nt,nobn,nvrt)) for i in mvar] 116 | exec("vdata=S.{}[...,None].astype('float32')".format(mvar[0])) 117 | 118 | #-------------------------------------------------------------------------- 119 | #create netcdf 120 | #-------------------------------------------------------------------------- 121 | nd=zdata(); nd.file_format='NETCDF4' 122 | 123 | #define dimensions 124 | nd.dimname=['time','node','nLevels','one'] 125 | nd.dims=[nt,nobn,nvrt,1] 126 | 127 | #define variables 128 | nd.vars=['time', 'map_to_global_node', 'tracer_concentration'] 129 | vi=zdata(); vi.dimname=('time',); vi.val=(S.time-S.time[0]); nd.time=vi 130 | vi=zdata(); vi.dimname=('node',); vi.val=bind+1; nd.map_to_global_node=vi 131 | vi=zdata(); vi.dimname=('time','node','nLevels','one'); vi.val=vdata; nd.tracer_concentration=vi 132 | 133 | WriteNC(sname,nd) 134 | -------------------------------------------------------------------------------- /.old/get_schism_param.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import sys 3 | 4 | def 
read_schism_param(fname,*args): 5 | with open(fname,'r') as fid: 6 | lines=fid.readlines() 7 | 8 | param={} 9 | for line in lines: 10 | line=line.strip() 11 | if len(line)==0 or line[0]=='!': continue 12 | ind=line.find('!'); 13 | if(ind!=-1): line=line[0:ind]; 14 | ind=line.find('='); 15 | keyi=line[:ind].strip(); 16 | vali=line[(ind+1):].strip(); 17 | param[keyi]=vali 18 | if((len(args)>0) and (args[0]==1)): 19 | if vali.lstrip('-').replace('.','',1).isdigit(): param[keyi]=float(vali) 20 | #try: 21 | # param[keyi]=float(vali) 22 | #except: 23 | # pass 24 | 25 | return param; 26 | 27 | def write_schism_param(fname,param): 28 | pkeys=sorted(param.keys()) 29 | with open(fname,'w+') as fid: 30 | #[fid.write('{:10}= {:}\n'.format(x,y)) for x,y in zip(param.keys(),param.values())]; 31 | #[fid.write('{:10}= {:}\n'.format(i,param[i])) for i in pkeys]; 32 | for i in range(len(pkeys)): 33 | fid.write('{:10}= {:}\n'.format(pkeys[i],param[pkeys[i]])) 34 | 35 | 36 | if __name__=="__main__": 37 | if len(sys.argv)<2: 38 | print("not enough arguments !!\nExample: get_schism_param param.in ihot dt\n"); 39 | sys.exit() 40 | 41 | fname=sys.argv[1]; 42 | var=sys.argv[2:]; 43 | param=read_schism_param(fname) 44 | 45 | #print params 46 | if len(var)!=0: 47 | for keyi in var: 48 | if param.get(keyi)==None: 49 | print('{} : not exist'.format(keyi)) 50 | else: 51 | print('{} : {}'.format(keyi,param[keyi])) 52 | else: 53 | for keyi in param: 54 | print('{} : {}'.format(keyi,param[keyi])) 55 | -------------------------------------------------------------------------------- /.old/grd2sms.pl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl 2 | #Convert gredit grid file to SMS 2dm file 3 | if (@ARGV !=2) { 4 | die "usage: $0 infile outfile\n"; 5 | } 6 | $file = $ARGV[0]; 7 | 8 | $outfile = $ARGV[1]; 9 | open(IN,$file); 10 | @lines = ; 11 | close(IN); 12 | open(OUT,">$outfile"); 13 | print OUT "MESH2D\n"; 14 | chomp $lines[1]; 15 | 
$lines[1]=~s/^\s+//; 16 | $lines[1]=~s/\s+/ /g; 17 | ($e,$n)=split(" ",$lines[1]); 18 | $starte = $n+2; 19 | print "$lines[1]\n$e $n $starte\n"; 20 | for ($i = $starte; $i<$n+$e+2; $i++){ 21 | chomp $lines[$i]; 22 | $lines[$i]=~s/^\s+//; 23 | $lines[$i]=~s/\s+/ /g; 24 | ($elemn,$elem34,$e1,$e2,$e3,$e4)=split(" ",$lines[$i]); 25 | if ($elem34 == 4) {print OUT "E$elem34"."Q $elemn $e1 $e2 $e3 $e4 1\n";} 26 | elsif ($elem34 == 3) {print OUT "E$elem34"."T $elemn $e1 $e2 $e3 1\n";} 27 | } 28 | for ($i = 2; $i<$starte; $i++){ 29 | chomp $lines[$i]; 30 | $lines[$i]=~s/^\s+//; 31 | $lines[$i]=~s/\s+/ /g; 32 | print OUT "ND $lines[$i]\n"; 33 | } 34 | -------------------------------------------------------------------------------- /.old/loadz.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/evn python3 2 | from pylib import * 3 | 4 | class npz_data(object): 5 | def __init__(self): 6 | pass 7 | 8 | def save_npz(fname,C): 9 | #npz_vars=[ npz_vari.split(':')[0] for npz_vari in C.VINFO ]; 10 | npz_vars=list(C.__dict__.keys()) 11 | if 'VINFO' in npz_vars: npz_vars.remove('VINFO') 12 | 13 | save_str='savez_compressed("{}" '.format(fname); 14 | for vari in npz_vars: 15 | save_str=save_str+',{}=C.{}'.format(vari,vari) 16 | save_str=save_str+')'; 17 | #print(save_str) 18 | exec(save_str) 19 | 20 | 21 | def loadz(fname,med=1): 22 | #med=1: return class format; med=2:return dict format 23 | data0=load(fname) 24 | keys0=data0.keys() 25 | 26 | if med==1: 27 | zdata=npz_data(); 28 | else: 29 | zdata2={} 30 | 31 | VINFO=[] 32 | for keyi in keys0: 33 | datai=data0[keyi]; 34 | if datai.dtype==dtype('O'): datai=datai[()] 35 | if med==1: 36 | exec('zdata.'+keyi+'=datai') 37 | else: 38 | zdata2[keyi]=datai 39 | 40 | #gather information about datai 41 | vinfo=keyi+": "+type(datai).__name__ 42 | if isinstance(datai,list): 43 | vinfo=vinfo+'('+str(len(datai))+'), ' 44 | elif isinstance(datai,np.ndarray): 45 | vinfo=vinfo+str(datai.shape)+', 
dtype='+str(datai.dtype) 46 | VINFO.append(vinfo) 47 | VINFO=array(VINFO) 48 | zdata.VINFO=VINFO 49 | 50 | 51 | return zdata if med==1 else zdata2 52 | 53 | 54 | if __name__=='__main__': 55 | pass 56 | fname='D:\Work\SFBay\Observation\CMON\DWR\SFBay_DWRData_Turb.npz' 57 | T=loadz(fname) 58 | wipe() 59 | 60 | -------------------------------------------------------------------------------- /.old/mfft.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/evn python3 2 | from pylib import * 3 | from scipy.fftpack import fft 4 | 5 | def mfft(xi,dt): 6 | #input 7 | #xi: time series 8 | #dt: interval 9 | # 10 | #output 11 | #perid[period],afx[amplitude],pfx[phase] 12 | N=xi.size; 13 | fx=fft(xi); 14 | afx=abs(fx[1:N//2])*2.0/N; 15 | pfx=angle(fx[1:N//2]); 16 | period=dt*N/arange(1,N//2); 17 | return period,afx,pfx 18 | 19 | if __name__=="__main__": 20 | pass; 21 | 22 | # plt.close('all') 23 | # T=10; dt=0.01; N=T/dt; 24 | # x=linspace(0.0, T, N); 25 | # y=4*cos(2.0*pi*(x-0.3)/0.5)+2*cos(2.0*pi*(x-0.4)/1.0)+4*cos(2.0*pi*(x-0.5)/2.0) 26 | # f,a,p=mfft(y,dt) 27 | # 28 | # subplot(2,1,1) 29 | # plot(x,y,'k-') 30 | # subplot(2,1,2) 31 | # plot(f,a,'k.',ms=20) 32 | # setp(gca(),'xlim',[0,5]) 33 | -------------------------------------------------------------------------------- /.old/mpi_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/evn python3 2 | from pylib import * 3 | 4 | from mpi4py import MPI 5 | 6 | comm = MPI.COMM_WORLD 7 | size = comm.Get_size() 8 | rank = comm.Get_rank() 9 | 10 | data = (rank+1)**2 11 | data = comm.gather(data, root=0) 12 | if rank == 0: 13 | for i in range(size): 14 | assert data[i] == (i+1)**2 15 | else: 16 | assert data is None 17 | -------------------------------------------------------------------------------- /.old/netcdf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/evn python3 2 | from 
pylib import * 3 | 4 | 5 | def ReadNC(fname,med=1,order=0): 6 | #ReadNC(fname,med=1) 7 | #if med=1: return netcdf.Dateset(fname) 8 | #if med=2: reorgnaized Dataset similar to npz_data 9 | #order=1: only works for med=2 10 | #order=0: variable dimension order read not changed for python format 11 | #order=1: variable dimension order read reversed follwoing in matlab/fortran format 12 | C=Dataset(fname); 13 | 14 | if med==1: 15 | return C 16 | else: 17 | ncdims=[i for i in C.dimensions] 18 | ncvars=[i for i in C.variables] 19 | F=npz_data(); 20 | F.file_format=C.file_format 21 | F.dimname=ncdims 22 | F.dims=[C.dimensions[i].size for i in ncdims] 23 | F.vars=ncvars 24 | for i in ncvars: 25 | fi=npz_data(); 26 | dimi=C.variables[i].dimensions; 27 | fi.dimname=dimi 28 | fi.dims=[C.dimensions[j].size for j in dimi] 29 | fi.val=C.variables[i][:] 30 | fi.attrs=C.variables[i].ncattrs() 31 | for j in C.variables[i].ncattrs(): 32 | ncattri=C.variables[i].getncattr(j); 33 | exec('fi.{}=ncattri'.format(j)) 34 | 35 | if order==1: 36 | fi.dimname=list(flipud(fi.dimname)) 37 | fi.dims=list(flipud(fi.dims)) 38 | nm=flipud(arange(ndim(fi.val))); 39 | fi.val=fi.val.transpose(nm) 40 | 41 | exec('F.{}=fi'.format(i)) 42 | 43 | return F 44 | 45 | def WriteNC(C,fname,med=1,order=0): 46 | #WriteNC(C,fname,med=1) 47 | #C is data source 48 | #if med=1, C has netcdf.Dataset format 49 | #if med=2, C has different format 50 | #order=0: variable dimension order written not changed for python format 51 | #order=1: variable dimension order written reversed follwoing in matlab/fortran format 52 | if med==1: 53 | #----write NC files------------- 54 | fid=Dataset(fname,'w',format=C.file_format); #C.file_format 55 | ncdims=[i for i in C.dimensions] 56 | ncvars=[i for i in C.variables] 57 | for dimi in ncdims: 58 | fid.createDimension(dimi,C.dimensions[dimi].size) 59 | if order==0: 60 | for vari in ncvars: 61 | vid=fid.createVariable(vari,C.variables[vari].dtype,C.variables[vari].dimensions) 62 | for 
attri in C.variables[vari].ncattrs(): 63 | vid.setncattr(attri,C.variables[vari].getncattr(attri)) 64 | fid.variables[vari][:]=C.variables[vari][:] 65 | elif order==1: 66 | for vari in ncvars: 67 | vid=fid.createVariable(vari,C.variables[vari].dtype,flipud(C.variables[vari].dimensions)) 68 | for attri in C.variables[vari].ncattrs(): 69 | vid.setncattr(attri,C.variables[vari].getncattr(attri)) 70 | nm=flipud(arange(ndim(C.variables[vari][:]))); 71 | fid.variables[vari][:]=C.variables[vari][:].transpose(nm) 72 | 73 | fid.close() 74 | else: 75 | #----write NC files------------- 76 | fid=Dataset(fname,'w',format=C.file_format); #C.file_format 77 | for i in range(len(C.dims)): 78 | fid.createDimension(C.dimname[i],C.dims[i]) 79 | 80 | if order==0: 81 | for vari in C.vars: 82 | vi=eval('C.{}'.format(vari)); 83 | vid=fid.createVariable(vari,vi.val.dtype,vi.dimname) 84 | for j in vi.attrs: 85 | attri=eval('vi.{}'.format(j)) 86 | vid.setncattr(j,attri) 87 | fid.variables[vari][:]=vi.val 88 | elif order==1: 89 | for vari in C.vars: 90 | vi=eval('C.{}'.format(vari)); 91 | vid=fid.createVariable(vari,vi.val.dtype,flipud(vi.dimname)) 92 | for j in vi.attrs: 93 | attri=eval('vi.{}'.format(j)) 94 | vid.setncattr(j,attri) 95 | if ndim(vi.val)>=2: 96 | nm=flipud(arange(ndim(vi.val))); 97 | fid.variables[vari][:]=vi.val.transpose(nm) 98 | else: 99 | fid.variables[vari][:]=vi.val 100 | 101 | 102 | fid.close() 103 | 104 | if __name__=='__main__': 105 | 106 | F2=ReadNC(r'D:\Work\E3SM\E3SMScript\run4ie\sflux\sflux_air_1.002.nc',2) 107 | WriteNC(F2,'T2.nc',2) 108 | 109 | F=ReadNC('T2.nc') 110 | 111 | # pass 112 | # # read NC files 113 | # C=Dataset('sflux_air_1.002.nc') 114 | # 115 | # ncdims=[i for i in C.dimensions] 116 | # ncvars=[i for i in C.variables] 117 | # 118 | # 119 | # [print("{}".format(i)) for i in ncdims] 120 | # [print("{}".format(i)) for i in ncvars] 121 | # 122 | # #----write NC files------------- 123 | # fid=Dataset('test.nc','w',format='NETCDF3_CLASSIC'); 
#C.file_format 124 | # 125 | # for dimi in ncdims: 126 | # fid.createDimension(dimi,C.dimensions[dimi].size) 127 | # 128 | # for vari in ncvars: 129 | # vid=fid.createVariable(vari,C.variables[vari].dtype,C.variables[vari].dimensions) 130 | # for attri in C.variables[vari].ncattrs(): 131 | # vid.setncattr(attri,C.variables[vari].getncattr(attri)) 132 | # fid.variables[vari][:]=C.variables[vari][:] 133 | # fid.close() 134 | # 135 | # ## check results 136 | # F=Dataset('test.nc'); 137 | -------------------------------------------------------------------------------- /.old/pextract_schism_slab.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | #python script used to extract slabs of schism results 3 | from pylib import * 4 | #import xarray as xr 5 | 6 | #---------inputs------------------------------ 7 | run='run4ia' 8 | stack=[1,73] 9 | svars=['salt',['SED3D_1','SED3D_2','SED3D_3']] 10 | snames=['salt','SED3D'] 11 | depths=[0,3] 12 | qnode='haswell' 13 | nproc=10 14 | 15 | #-------flags--------------------------------- 16 | icmb=0 #0:normal read (parallel); 1: read when model running (serial); 17 | #2:read but not combine (parallel); 3: not read, only combine *npz (skip read) 18 | ifs=0 #0: station depth relative to free surface; 1: fixed station depth 19 | 20 | #------pre-processing------------------------- 21 | bdir=os.path.abspath(os.path.curdir) 22 | if (icmb==1)|(icmb==3): nproc=1 23 | 24 | #-------on front node------------------------- 25 | if os.getenv('param')==None: 26 | args=sys.argv 27 | #link results 28 | if not os.path.exists(run): os.mkdir(run) 29 | os.system('cd {}; ln -sf ../../{}/outputs/schout_?.nc ./;ln -sf ../../{}/outputs/schout_??.nc ./'.format(run,run,run)) 30 | #submit job on node 31 | param=[bdir,args[0]] 32 | os.system('qsub {} -v param="{} {}", -N rd_{} -q {} -e Rd_outputs.e -o Rd_outputs.o -l nodes={}:ppn=1 -l walltime=100:00:00'.format(args[0],*param,run,qnode,nproc)) 33 | 
#os.system('qsub {} -v param="{} {}", -N rd_{} -q {} -e Rd_outputs.e -o Rd_outputs.o -l procs={} -l walltime=100:00:00'.format(args[0],*param,run,qnode,nproc)) 34 | sys.exit(0) 35 | 36 | param=os.getenv('param').split(); 37 | param=[int(i) if i.isdigit() else i for i in param] 38 | bdir=param[0]; fname0=param[1]; 39 | 40 | #submit jobs on each core 41 | if os.getenv('job_on_node')==None: 42 | print("cd {}; mpiexec -np {} --env job_on_node 1 {}>>screen.out".format(bdir,nproc,fname0)) 43 | os.system("cd {}; mpiexec -np {} --env job_on_node 1 {}>>screen.out".format(bdir,nproc,fname0)) 44 | sys.exit() 45 | 46 | #start to work on each core 47 | os.chdir('{}/{}'.format(bdir,run)) 48 | 49 | #----get nproc and myrank-------- 50 | comm=MPI.COMM_WORLD 51 | nproc=comm.Get_size() 52 | myrank=comm.Get_rank() 53 | 54 | t0=time.time() 55 | #----distribute work-------------------------- 56 | stacks=arange(stack[0],stack[1]+1); istack=[]; 57 | #stacks=[1,3,10,71] 58 | for i in arange(len(stacks)): 59 | if i%nproc==myrank: 60 | istack.append(stacks[i]) 61 | istack=array(istack) 62 | if(icmb==3): istack=[] 63 | 64 | #read results 65 | P=npz_data(); 66 | for istacki in istack: 67 | while ((icmb==1)*(os.path.exists('schout_{}.nc'.format(istacki+1)))): sleep(10) 68 | fname='schout_{}.nc'.format(istacki); 69 | S=npz_data(); 70 | 71 | #read nc values 72 | C=Dataset(fname); 73 | mtime=array(C.variables['time'][:])/86400; S.time=mtime.astype('float32'); S.depth=array(depths).astype('float32') 74 | nt,np,nz=C.variables['zcor'].shape 75 | [exec('S.{}=[]'.format(sname)) for sname in snames] 76 | 77 | for i in arange(nt): 78 | #compute matrix for vertical interpolation 79 | zcor=array(C.variables['zcor'][i,:,:]).T; #(nz,np) 80 | #treat invalid depths 81 | fp=isnan(zcor); zcor[fp]=0 82 | fp=abs(zcor)>1e10; zcor[fp]=-zcor[fp] 83 | srat=[] 84 | for m in arange(len(depths)): 85 | zs=-array(depths[m]).astype('float') 86 | if ifs==0: zs=zs+zcor[-1] 87 | rat=zeros([np,nz]) 88 | fp=zs<=zcor[0]; 
rat[fp,0]=1 89 | fp=zs>zcor[-1]; rat[fp,-1]=1 90 | for k in arange(1,nz): 91 | zi0=zcor[k-1]; zi=zcor[k] 92 | fp=(zs>zi0)*(zs<=zi) 93 | rati=(zs[fp]-zi0[fp])/(zi[fp]-zi0[fp]) 94 | rat[fp,k]=rati 95 | rat[fp,k-1]=1-rati 96 | srat.append(rat) 97 | srat=array(srat) 98 | if sum(abs(srat.sum(axis=2)-1.0)>1e-6)!=0: sys.exit('wrong for srat: {},step={}'.format(fname,i)) 99 | 100 | #read slices 101 | for n in arange(len(svars)): 102 | svari=svars[n]; sname=snames[n] 103 | if i==0: print('reading {} (slab): {} '.format(fname,svari)); sys.stdout.flush() 104 | datai=[] 105 | for m in arange(len(depths)): 106 | if (svari=='elev')*(m!=0): continue 107 | #read raw data 108 | if type(svari)==list: 109 | for k in arange(len(svari)): 110 | if k==0: 111 | exec("P.vi=C.variables['{}'][i]".format(svari[k])) 112 | else: 113 | exec("P.vi=P.vi+C.variables['{}'][i]".format(svari[k])) 114 | else: 115 | exec("P.vi=C.variables['{}'][i]".format(svari)) 116 | #extract 117 | if svari=='elev': 118 | dataii=P.vi 119 | elif svari=='hvel': 120 | dataii=(P.vi*tile(srat[m][:,:,None],[1,1,2])).sum(axis=1) 121 | else: 122 | dataii=(P.vi*srat[m]).sum(axis=1) 123 | datai.append(dataii) 124 | datai=array(datai) 125 | exec('S.{}.append(datai)'.format(sname)) 126 | 127 | #save data 128 | for n in arange(len(svars)): 129 | svari=svars[n]; sname=snames[n] 130 | if svari=='elev': 131 | exec("S.{}=squeeze(array(S.{})).astype('float32')".format(sname,sname)) 132 | elif svari=='hvel': 133 | exec("S.{}=array(S.{}).transpose([0,2,3,1]).astype('float32')".format(sname,sname)) 134 | else: 135 | exec("S.{}=array(S.{}).transpose([0,2,1]).astype('float32')".format(sname,sname)) 136 | 137 | #save data 138 | save_npz('{}_slab_{}'.format(run,istacki),S) 139 | 140 | #colloect results 141 | comm.Barrier() 142 | if myrank==0: 143 | #wait all results 144 | while(True): 145 | iflag=len(stacks) 146 | for i in arange(len(stacks)): 147 | if os.path.exists('{}_slab_{}.npz'.format(run,stacks[i])): iflag=iflag-1 148 | if iflag==0: 
break 149 | if iflag!=0: time.sleep(1) 150 | 151 | #read result 152 | if icmb!=2: 153 | S=npz_data(); 154 | for i in arange(len(stacks)): 155 | Si=loadz('{}_slab_{}.npz'.format(run,stacks[i])) 156 | 157 | if i==0: 158 | exec('S.time=Si.time; S.depth=Si.depth'); 159 | for m in arange(len(snames)): 160 | exec('S.{}=Si.{}'.format(snames[m],snames[m])) 161 | else: 162 | exec('S.time=r_[S.time,Si.time]'); 163 | for m in arange(len(snames)): 164 | exec('S.{}=r_[S.{},Si.{}]'.format(snames[m],snames[m],snames[m])) 165 | 166 | #save result 167 | save_npz('{}_slab.npz'.format(run),S) 168 | [os.system("rm {}_slab_{}.npz".format(run,i)) for i in stacks] 169 | 170 | #clean 171 | os.system("rm schout_*.nc ") 172 | dt=time.time()-t0 173 | print('finish reading {}: {}s'.format(run,dt)); sys.stdout.flush() 174 | -------------------------------------------------------------------------------- /.old/pylib0.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/evn python3 2 | from pylab2 import * 3 | import os,sys 4 | import scipy as sp 5 | from scipy import (optimize,interpolate,io,signal) 6 | from scipy.fftpack import fft 7 | from pyproj import Proj, transform 8 | import importlib as imp 9 | from mpi4py import MPI 10 | from netCDF4 import Dataset 11 | from io import StringIO 12 | import skill_metrics as sm 13 | import re 14 | 15 | #my library 16 | from str2num import (str2num,remove_tail) 17 | import convert_matfile_format as cmat 18 | from date_proc import datenum 19 | from loadz import loadz,npz_data,save_npz 20 | from misc import (wipe,reload,smooth,clear_globals,DaytimeLength,move_figure,lpfilt,mdivide,signa, 21 | inside_polygon,command_outputs,near_pts,proj,close_data_loop,get_prj_file, 22 | mfft) 23 | from shpfile import read_shapefile_data,write_shapefile_data 24 | from read_schism_file import (read_schism_hgrid, read_schism_hgrid_ll,read_schism_bpfile,getglob, 25 | 
schism_grid,schism_bpfile,sms2gr3,read_schism_vgrid,read_schism_param,write_schism_param) 26 | from netcdf import ReadNC, WriteNC 27 | 28 | if os.getenv('HOME')!=None: 29 | sys.path.append(os.getenv('HOME')) 30 | -------------------------------------------------------------------------------- /.old/shpfile.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from pylib import * 3 | 4 | def read_shapefile_data(fname): 5 | import shapefile as shp 6 | with shp.Reader(fname) as C: 7 | #----read shapefile---------------- 8 | S=npz_data(); 9 | S.nrec=C.numRecords 10 | S.type=C.shapeTypeName 11 | 12 | #----read pts------------------------------------------------------------------ 13 | #works for pts and polygon, may not work for other geomerty (need update in this case) 14 | S.xy=[]; 15 | for i in arange(S.nrec): 16 | xyi=array(C.shape(i).points); 17 | parti=array(C.shape(i).parts,dtype='int'); 18 | #insert nan for delimiter 19 | #to get original index: ind=nonzero(isnan(xyi[:,0]))[0]-arange(len(parti)); 20 | S.xy.append(insert(xyi,parti,nan,axis=0)) 21 | S.xy=squeeze(array(S.xy)) 22 | 23 | #---read attributes------------------------------------------------------------ 24 | S.attname=array([C.fields[i][0] for i in arange(1,len(C.fields))]); 25 | stype=array([type(C.record()[m]) for m in S.attname]) 26 | svalue=array(C.records(),dtype='O'); 27 | S.attvalue=array(zeros(len(S.attname))).astype('O') 28 | for m in arange(len(S.attname)): 29 | S.attvalue[m]=svalue[:,m].astype(stype[m]) 30 | S.atttype=stype 31 | 32 | #read prj file if exist--- 33 | bdir=os.path.dirname(os.path.abspath(fname)); 34 | bname=os.path.basename(fname).split('.')[0] 35 | prjname='{}/{}.prj'.format(bdir,bname) 36 | if os.path.exists(prjname): 37 | with open(prjname,'r') as fid: 38 | S.prj=fid.readline().strip() 39 | 40 | return S 41 | 42 | def write_shapefile_data(fname,S,float_len=18,float_decimal=8): 43 | import shapefile as shp 44 | 45 | 
#---get nrec----- 46 | if S.type=='POINT': 47 | if S.xy.dtype==dtype('O'): 48 | print('S.xy has a dtype="O" for POINT'); sys.exit() 49 | else: 50 | nrec=S.xy.shape[0]; 51 | elif S.type=='POLYLINE' or S.type=='POLYGON': 52 | if S.xy.dtype==dtype('O'): 53 | nrec=len(S.xy) 54 | else: 55 | nrec=1; 56 | 57 | #---check nrec 58 | if hasattr(S,'nrec'): 59 | if nrec!=S.nrec: 60 | print('nrec inconsistent') 61 | sys.exit() 62 | 63 | #---write shapefile--------- 64 | with shp.Writer(fname) as W: 65 | W.autoBalance=1; 66 | #define attributes 67 | if hasattr(S,'attname'): 68 | if S.attvalue.ndim==1: 69 | stype=[type(S.attvalue[0])] 70 | elif S.attvalue.ndim==2: 71 | stype=[type(S.attvalue[m][0]) for m in arange(len(S.attname))] 72 | for m in arange(len(stype)): 73 | if stype[m] in [np.int,np.int8,np.int16,np.int32,np.int64]: 74 | W.field(S.attname[m],'N') 75 | elif stype[m] in [np.float,np.float16,np.float32,np.float64]: 76 | W.field(S.attname[m],'F',float_len,float_decimal) 77 | elif stype[m] in [np.str0,np.str,np.str_,np.string_]: 78 | W.field(S.attname[m],'C',100) 79 | else: 80 | print('attribute type not included: add here') 81 | sys.exit() 82 | else: 83 | W.field('field','C') 84 | W.record() 85 | 86 | #put values 87 | for i in arange(nrec): 88 | if S.type=='POINT': #point, W.multipoint(S.xy) is multiple pts features 89 | vali=S.xy[i] 90 | W.point(*vali) 91 | elif S.type=='POLYLINE': 92 | if S.xy.dtype==dtype('O'): 93 | vali=S.xy[i] 94 | else: 95 | vali=S.xy 96 | #reorganize the shape of vali 97 | valii=delete_shapefile_nan(vali,0) 98 | W.line(valii) 99 | elif S.type=='POLYGON': 100 | if S.xy.dtype==dtype('O'): 101 | vali=S.xy[i] 102 | else: 103 | vali=S.xy 104 | #reorganize the shape of vali 105 | valii=delete_shapefile_nan(vali,1) 106 | W.poly(valii) 107 | 108 | #add attribute 109 | if hasattr(S,'attname'): 110 | if S.attvalue.ndim==1: 111 | atti=[S.attvalue[i]] 112 | elif S.attvalue.ndim==2: 113 | atti=[S.attvalue[m][i] for m in arange(len(stype))] 114 | W.record(*atti) 
115 | 116 | #----write projection------------ 117 | bname=os.path.basename(fname).split('.')[0] 118 | bdir=os.path.dirname(os.path.abspath(fname)); 119 | if hasattr(S,'prj'): 120 | with open('{}/{}.prj'.format(bdir,bname),'w+') as fid: 121 | fid.write(S.prj) 122 | 123 | def delete_shapefile_nan(xi,iloop=0): 124 | #----delete nan (head and tail), and get ind for the rest 125 | if xi.ndim==1: 126 | i1=0; i2=xi.shape[0] 127 | if isnan(xi[0]): i1=1 128 | if isnan(xi[-1]): i2=i2-1 129 | yi=xi[i1:i2]; ind=nonzero(isnan(yi))[0] 130 | elif xi.ndim==2: 131 | i1=0; i2=xi.shape[0] 132 | if isnan(xi[0,0]): i1=1 133 | if isnan(xi[-1,0]): i2=i2-1 134 | yi=xi[i1:i2]; ind=nonzero(isnan(yi[:,0]))[0] 135 | 136 | #------reorganize----------- 137 | if len(ind)==0: 138 | #close the geomety 139 | if iloop==1: yi=close_data_loop(yi) 140 | 141 | vi=[yi]; 142 | else: 143 | vi=[]; 144 | yii=yi[:ind[0]]; 145 | if iloop==1: yii=close_data_loop(yii) 146 | vi.append(yii) 147 | for m in arange(len(ind)-1): 148 | i1=ind[m]+1; i2=ind[m+1]; 149 | yii=yi[i1:i2]; 150 | if iloop==1: yii=close_data_loop(yii); 151 | vi.append(yii) 152 | yii=yi[(ind[-1]+1):]; 153 | if iloop==1: yii=close_data_loop(yii) 154 | vi.append(yii) 155 | 156 | return vi 157 | 158 | 159 | if __name__=="__main__": 160 | pass 161 | # import shapefile as shp 162 | # 163 | # #---read grid----- 164 | # gd=read_schism_hgrid('./hgrid.gr3') 165 | 166 | # #---grid bnd-------------------- 167 | # S=npz_data() 168 | # S.type='POLYLINE' 169 | # S.nrec=1; 170 | # for i in arange(gd.nob): 171 | # ind=gd.iobn[i] 172 | # xyi=c_[gd.x[ind],gd.y[ind]]; 173 | # xyi=insert(xyi,0,nan,axis=0); 174 | # if i==0: 175 | # xy=xyi 176 | # else: 177 | # xy=r_[xy,xyi] 178 | # for i in arange(gd.nlb): 179 | # ind=gd.ilbn[i] 180 | # xyi=c_[gd.x[ind],gd.y[ind]]; 181 | # if gd.island[i]==1: xyi=close_data_loop(xyi) 182 | # xyi=insert(xyi,0,nan,axis=0) 183 | # xy=r_[xy,xyi] 184 | # S.xy=xy 185 | # S.prj=get_prj_file('epsg:26918') 186 | 187 | # #---grid 
points--------- 188 | # S=npz_data() 189 | # S.type='POINT' 190 | # S.xy=c_[gd.x,gd.y] 191 | # S.prj=get_prj_file('epsg:26918') 192 | # S.attname=['node_number'] 193 | # S.attvalue=arange(gd.np)+1; 194 | # 195 | # 196 | # 197 | # #--grid element---------- 198 | # S=npz_data() 199 | # S.type='POLYGON' 200 | # elnode=gd.elnode; fp=elnode[:,-1]<0; elnode[fp,-1]=elnode[fp,0] 201 | # elnode=fliplr(elnode) 202 | # for i in arange(4): 203 | # xyi=c_[gd.x[elnode[:,i]],gd.y[elnode[:,i]]] 204 | # if i==0: 205 | # xy=xyi[:,:,None] 206 | # else: 207 | # xy=c_[xy,xyi[:,:,None]] 208 | # xy=transpose(xy,[0,2,1]); 209 | # S.xy=zeros(gd.ne).astype('O') 210 | # for i in arange(gd.ne): 211 | # S.xy[i]=xy[i] 212 | # 213 | # S.attname=['element_number'] 214 | # S.attvalue=arange(gd.ne)+1; 215 | # S.prj=get_prj_file('epsg:26918') 216 | # 217 | # write_shapefile_data('test7',S) 218 | # S0=read_shapefile_data('test7') 219 | -------------------------------------------------------------------------------- /.old/str2num.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/evn python3 2 | from pylib import * 3 | 4 | def str2num(line,*args): 5 | num=str2num_process(line,*args) 6 | if isinstance(num[0],float): 7 | num=num.astype('float64') 8 | else: 9 | num=[s.astype('float64') for s in num] 10 | num=array(num) 11 | return num 12 | 13 | @np.vectorize 14 | def str2num_process(line,*args): 15 | if len(args)>0: 16 | if len(args)>1: 17 | for i in range(len(args)-1): 18 | line=line.replace(arg) 19 | line=line.replace(args[0],',') 20 | else: 21 | line=line.replace(';',',').replace(' ',',') 22 | linei=[s for s in line.split(',') if s] 23 | fc=np.vectorize(lambda x: np.float64(x)) 24 | return fc(linei).astype('object') 25 | 26 | 27 | @np.vectorize 28 | def remove_tail(line): 29 | li=line.rstrip(); 30 | ind=li.find('!'); 31 | if ind!=-1: 32 | li=li[:ind] 33 | ind=li.find('='); 34 | if ind!=-1: 35 | li=li[:ind] 36 | return li 37 | 38 | if 
__name__=="__main__": 39 | pass 40 | # x='3.5, 4, 5; 5 6.5, 78'; 41 | # x='3.5 4 5 5 6.5 78' 42 | # xi=str2num(x); 43 | # x=['3 4 5','4 3 6 8'] 44 | # x=['3 4 5','4 3 6'] 45 | # xi=str2num(x) 46 | # print(xi) 47 | 48 | ##-----test files---------- 49 | # fname=r'C:\Users\Zhengui\Desktop\Python\learn\hgrid.gr3' 50 | # with open(fname,'r') as fid: 51 | # lines=fid.readlines() 52 | # line=lines[24535:24545] 53 | # rline=remove_tail(line) 54 | # print(line) 55 | # print(rline) 56 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pylibs 2 | * **Repository of python functions/classes/scripts. This library is designed to handle most of our routine work. Processing SCHISM related work is a significant part of the usgae of this library.** 3 | 4 | * In pylibs, there are mainly two groups of functions:
5 | * generic function: lpfilt, ReadNC, inside_polygon, proj, ...
 6 |   * schism-related function: sms2grd, read_schism_hgrid, read_schism_output, ...<br>
7 | 8 | * Installation
9 | * `pip install pylibs-ocean` (user mode) 10 | * `pip install pylibs-ocean[mpi,shapefile,projection,eof]` (comprehensive user mode) 11 | * `git clone https://github.com/wzhengui/pylibs.git; cd pylibs; pip install -e .` (developer mode) 12 | 13 | * Usage
14 |   * explicit import: `from pylib import zdata, ReadNC, read_schism_hgrid, sms2grd` 15 |   * implicit import: `from pylib import *` (import all) 16 | 17 | * Directories<br>
18 | * Scripts: sample scripts for using pylibs
19 | * Utility: python library functions
20 | * pylib.py: tool for importing all necessary and frequently-used python functions/packages
21 | * mylib.py: defined functions/classes 22 | * schism_file.py: schism related functions/classes 23 | -------------------------------------------------------------------------------- /pylib.py: -------------------------------------------------------------------------------- 1 | #Copyright 2021, Zhengui WANG 2 | #Apache License, Version 2.0; http://www.apache.org/licenses/LICENSE-2.0 3 | #--------------------------------------------------------------------- 4 | #import system lib 5 | #--------------------------------------------------------------------- 6 | import os,sys 7 | from glob import glob 8 | 9 | Libs=['pylib','mylib','schism_file'] 10 | if not set(Libs).issubset(set(sys.modules.keys())): 11 | pversion=sys.version.split(' ')[0] #print(pversion) 12 | 13 | #--------------------------------------------------------------------- 14 | #load pylib libraries of packages 15 | #--------------------------------------------------------------------- 16 | #matplotlib 17 | import matplotlib as mpl 18 | if 'frontera' in [str(os.getenv('HOSTNAME')), str(os.getenv('TACC_SYSTEM'))]: mpl.use('tkagg') 19 | from matplotlib import pyplot as plt 20 | from matplotlib.dates import date2num, datestr2num,num2date 21 | if hasattr(mpl.dates,'set_epoch'): 22 | try: 23 | mpl.dates.set_epoch('0000-12-31') 24 | except: 25 | pass 26 | from matplotlib.pyplot import * 27 | 28 | import platform 29 | if platform.system().lower()=='windows': 30 | try: 31 | if get_ipython().__class__.__name__!='ZMQInteractiveShell': mpl.use('Qt5Agg') 32 | except: 33 | pass 34 | 35 | #numpy 36 | import numpy as np 37 | from numpy import * 38 | from numpy.random import rand,randn 39 | from numpy.linalg import * 40 | #temp fix: try to be compatible with higher numpy version 41 | if ('numpy.core' in sys.modules) and ('numpy._core' not in sys.modules): 42 | sys.modules['numpy._core']=sys.modules['numpy.core'] 43 | nms=[i.split('.')[-1] for i in sys.modules if i.startswith('numpy.core.')] 44 | for i in nms: 
sys.modules['numpy._core.'+i]=sys.modules['numpy.core.'+i] 45 | 46 | import scipy as sp #scipy 47 | from scipy import interpolate 48 | 49 | #------------------------------------------------ 50 | #old import 51 | #------------------------------------------------ 52 | #url download 53 | #try: 54 | # import urllib 55 | # from urllib.request import urlretrieve as urlsave 56 | # import ssl 57 | # try: 58 | # _create_unverified_https_context = ssl._create_unverified_context 59 | # except AttributeError: # Legacy Python that doesn't verify HTTPS certificates by default 60 | # pass 61 | # else: # Handle target environment that doesn't support HTTPS verification 62 | # ssl._create_default_https_context = _create_unverified_https_context 63 | #except: 64 | # pass 65 | #mpi4py 66 | #try: 67 | # from mpi4py import MPI 68 | #except: 69 | # pass 70 | # #from src.mylib import parallel_jobs 71 | # #MPI=parallel_jobs() 72 | #from numpy.random import * 73 | #import numpy.ma as ma 74 | #from matplotlib import cbook, mlab 75 | #from matplotlib.dates import * 76 | #netcdf 77 | #from netCDF4 import Dataset 78 | #misc 79 | #import datetime 80 | #import pandas as pd 81 | #sympy 82 | #try: 83 | # from sympy import init_session as sym_init 84 | #except: 85 | # pass 86 | #pickle 87 | #import pickle 88 | #import copy 89 | #from copy import copy as scopy 90 | #from copy import deepcopy as dcopy 91 | 92 | #--------------------------------------------------------------------- 93 | #libraries of self-defined modules 94 | #--------------------------------------------------------------------- 95 | path_pylib=os.path.dirname(__file__) 96 | if os.path.exists(path_pylib+'/pylibs/src'): 97 | import pylibs.src.mylib as mylib; sys.modules['src']=sys.modules['pylibs.src'] 98 | path_src=path_pylib+'/pylibs/src'; path_scripts=path_pylib+'/pylibs/scripts' 99 | else: 100 | if path_pylib not in sys.path: sys.path.append(path_pylib) 101 | import src.mylib as mylib; path_src=path_pylib+'/src'; 
path_scripts=path_pylib+'/scripts' 102 | sys.modules['mylib'] = mylib 103 | from mylib import (ntype,xtick,get_xtick,close_data_loop,datenum,quickdatenum, #type, 104 | add_basemap,get_INFO,loadz,zdata,savez,find_cs,npz2mat,read_mat,sort_all, 105 | cmean,smooth,doy,daytime_length,move_figure,bpfilt,lpfilt,mdivide,signa,sub_lines,sub_polygons, 106 | inside,inside_polygon,mdist,command_outputs,near_pts,proj,proj_pts,rewrite,rewrite_input, 107 | get_prj_file,mfft,interp_vertical,read_shapefile_data,write_shapefile_data, 108 | ReadNC,WriteNC,harmonic_fit,harmonic_analysis,get_hycom,compute_contour,EOF,REOF, 109 | get_stat,get_subplot_position,get_subplot_position2,load_dem,plot_taylor_diagram, 110 | read_dem,get_hpc_command,least_square_fit,read_yaml,read_excel, write_excel,rtext, 111 | mklink,sindex,pindex,nindex,cindex,resize,savefig,pplot,blit_manager,read,add_xtick, 112 | get_qnode,modify_figure,parallel_jobs,fig_IFNO,ceqstate,subdomain_index,interp, 113 | nargout,pause,isnumber,ncfile,urlsave) 114 | 115 | if os.path.exists(os.path.dirname(__file__)+'/pylibs/src'): 116 | import pylibs.src.schism_file as schism_file 117 | else: 118 | import src.schism_file as schism_file 119 | sys.modules['schism_file'] = schism_file 120 | from schism_file import (read_schism_hgrid, read_schism_bpfile,getglob, 121 | schism_grid,schism_vgrid,schism_bpfile,sms2grd,read_schism_vgrid,save_schism_grid, 122 | compute_zcor,read_schism_param,write_schism_param,read_schism_local_to_global, 123 | create_schism_vgrid,srank,grd2sms,scatter_to_schism_grid,delete_schism_grid_element, 124 | read_schism_prop,read_schism_reg,interp_schism_3d,get_schism_var_info,check_schism_ihot, 125 | read_schism_output,change_schism_param,get_schism_output_info,get_schism_grid_subdomain, 126 | get_schism_output_subset,combine_schism_hotstart,combine_icm_output,read_schism_slab, 127 | convert_schism_source,schism_view,schism_check,zcor_to_schism_grid,compute_schism_volume, 128 | read_schism_grid,schism_transect) 129 
| 130 | if os.getenv('HOME')!=None: 131 | sys.path.append(os.getenv('HOME')) 132 | 133 | #sys.modules['loadz'] = mylib #in case oldmodule name used 134 | #sys.modules['read_schism_file'] = schism_file #in case oldmodule name used 135 | #import mpas_file 136 | #from mpas_file import (read_mpas_grid) 137 | 138 | #old module names 139 | sys.modules['pyUtility']=sys.modules['src'] 140 | 141 | #--------------------------------------------------------------------- 142 | #alias 143 | #--------------------------------------------------------------------- 144 | from os.path import exists as fexist 145 | from numpy import array_equal as eq 146 | from src.mylib import savez as save_npz; mylib.save_npz=savez 147 | from src.mylib import zdata as npz_data; mylib.npz_data=zdata 148 | from src.mylib import least_square_fit as lsq; mylib.least_square_fit=lsq 149 | from src.mylib import move_figure as mvfig 150 | from src.mylib import modify_figure as mfig 151 | from src.mylib import find_cs as find_continuous_sections; mylib.find_continuous_sections=find_cs 152 | from src.mylib import read_mat as mat2npz 153 | from src.mylib import read_mat as convert_matfile 154 | from src.mylib import read_dem as convert_dem_format 155 | from src.mylib import load_dem as load_bathymetry 156 | from src.mylib import read_shapefile_data as read_shp 157 | from src.mylib import write_shapefile_data as write_shp 158 | from src.mylib import harmonic_analysis as HA 159 | from src.mylib import interp_vertical as interpv 160 | from src.mylib import sort_all as asort 161 | from src.schism_file import read_schism_hgrid as read_hgrid 162 | from src.schism_file import read_schism_grid as read_grd 163 | from src.schism_file import read_schism_grid as grd 164 | from src.schism_file import read_schism_bpfile as read_bp 165 | from src.schism_file import change_schism_param as chparam 166 | -------------------------------------------------------------------------------- /pylib_experimental/README.md: 
-------------------------------------------------------------------------------- 1 | Experimental modules not yet incorporated in pylib's core. 2 | 3 | Installation: 4 | pip install git+https://github.com/wzhengui/pylibs.git#subdirectory=pylib_experimental 5 | 6 | -------------------------------------------------------------------------------- /pylib_experimental/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | setup( 4 | name='pylib_experimental', 5 | version='0.0.1', 6 | description='Experimental functions', 7 | license='MIT', 8 | packages=['pylib_experimental'], 9 | package_data={}, 10 | install_requires=[ 11 | 'setuptools', 12 | 'numpy', 13 | 'scipy', 14 | 'pandas', 15 | 'netCDF4', 16 | 'matplotlib>=3.0.0', 17 | 'pyproj>=3.0.0', 18 | ], 19 | extras_require={ 20 | 'mpi': ['mpi4py>=3.0.0'], 21 | 'shapefile': ['pyshp>=2.1.0'], 22 | 'eof': ['eofs>=1.4.0'], 23 | 'cloudpickle': ['cloudpickle'], 24 | } 25 | ) 26 | 27 | -------------------------------------------------------------------------------- /scripts/.run.job: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | ''' 3 | submit a job container 4 | ''' 5 | from pylib import * 6 | import time 7 | import signal 8 | import subprocess 9 | 10 | #----------------------------------------------------------------------------- 11 | #Input 12 | #----------------------------------------------------------------------------- 13 | #resource requst 14 | walltime='12:00:00' 15 | #qnode='x5672'; nnode=1; ppn=4 #hurricane, ppn=8 16 | #qnode='bora'; nnode=30; ppn=20 #bora, ppn=20 17 | #qnode='vortex'; nnode=2; ppn=12 #vortex, ppn=12 18 | qnode='femto'; nnode=6; ppn=32 #femto,ppn=32 19 | #qnode='potomac'; nnode=4; ppn=8 #ches, ppn=12 20 | #qnode='james'; nnode=5; ppn=20 #james, ppn=20 21 | #qnode='frontera'; nnode=1; ppn=56 #frontera, ppn=56 (flex,normal) 22 | #qnode='mistral'; nnode=1; ppn=36 
#mistral, ppn=36 23 | #qnode='stampede2'; nnode=1; ppn=48 #stampede2, ppn=48 (skx-normal,skx-dev,normal,etc) 24 | 25 | #additional information: frontera,mistral,stampede2 26 | qname='flex' #partition name 27 | account='TG-OCE140024' #stampede2: NOAA_CSDL_NWI,TG-OCE140024; mistral: gg0028 28 | 29 | ibatch=1; scrin='job.in'; scrout='job.out' 30 | bdir=os.path.abspath(os.path.curdir); jname=os.path.basename(bdir) 31 | #----------------------------------------------------------------------------- 32 | #on front node: 1). submit jobs first (qsub), 2) running parallel jobs (mpirun) 33 | #----------------------------------------------------------------------------- 34 | if ibatch==0: os.environ['job_on_node']='1'; os.environ['bdir']=bdir #run locally 35 | if os.getenv('param')==None: 36 | #submit job on HPC nodes 37 | fmt=0; bcode=sys.argv[0] 38 | scode=get_hpc_command(bcode,bdir,jname,qnode,nnode,ppn,walltime,scrout,fmt=fmt,qname=qname) 39 | print(scode); os.system(scode); os._exit(0) 40 | else: 41 | fmt=1; bdir,bcode=os.getenv('param').split(); os.chdir(bdir) 42 | 43 | #loop to run pending jobs 44 | hjob,sjob=None,None; tm=0.0; scrin='{}/{}'.format(bdir,scrin) 45 | while True: 46 | if fexist(scrin): 47 | #read command from scrin to be executed, and then comment out 48 | rcode=''; ccode=''; rdir=bdir 49 | if os.path.getmtime(scrin)>tm: 50 | lines0=open(scrin,'r').readlines(); tm=time.time() 51 | slines=[i.strip() for i in lines0 if i.strip()!='' and not i.startswith('#')] 52 | 53 | #get system command 54 | clines=[i[1:] for i in slines if i.startswith('!')] 55 | if len(clines)!=0: ccode=clines[-1] 56 | 57 | #get normal command 58 | rlines=[i for i in slines if not i.startswith('!')] 59 | if len(rlines)!=0: rcode=rlines[-1] 60 | 61 | #terminate the current job 62 | if ccode.lower()=='kill' or rcode!='' : 63 | if hjob is not None: os.killpg(os.getpgid(hjob.pid), signal.SIGTERM); hjob=None 64 | if sjob is not None: os.killpg(os.getpgid(sjob.pid), signal.SIGTERM); sjob=None 
65 | 66 | #run system commend 67 | if ccode!='' and ccode.lower()!='kill': 68 | if ccode.startswith('cd '): #chdir 69 | if ';' in ccode: 70 | sid=ccode.find(';'); rdir=ccode[3:sid]; ccode=ccode[(sid+1):]; os.chdir(rdir); 71 | sjob=subprocess.Popen(ccode, stdout=subprocess.PIPE, shell=True, preexec_fn=os.setsid) 72 | else: 73 | rdir=os.path.abspath(ccode[3:]); os.chdir(rdir) 74 | else: #run system command 75 | sjob=subprocess.Popen(ccode, stdout=subprocess.PIPE, shell=True, preexec_fn=os.setsid) 76 | 77 | #comment out 78 | if len(slines)!=0: fid=open(scrin,'w+'); fid.writelines([i if i.startswith('#') else '#'+i for i in lines0]); fid.close() 79 | 80 | #if there is valid command, run it 81 | if rcode=='': 82 | time.sleep(1) 83 | if hjob is not None: hjob.stdout.flush() 84 | else: 85 | #run job 86 | scode=get_hpc_command(rcode,rdir,jname,qnode,nnode,ppn,walltime,scrout,fmt=fmt,qname=qname) 87 | try: 88 | hjob = subprocess.Popen(scode, stdout=subprocess.PIPE, shell=True, preexec_fn=os.setsid) 89 | except: 90 | pass 91 | 92 | #reset bdir 93 | if ccode.startswith('cd '): os.chdir(bdir) 94 | else: 95 | time.sleep(1) 96 | -------------------------------------------------------------------------------- /scripts/Ddiff: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | ''' 3 | compare directories and show differences 4 | ''' 5 | import os,sys 6 | from numpy import sort,unique,array,zeros 7 | import subprocess 8 | 9 | #get dirnames 10 | if len(sys.argv)<3: 11 | sys.exit('usage: Ddiff.py dir1 dir2 ...') 12 | else: 13 | sdirs=sys.argv[1:] 14 | ndir=len(sdirs); slens=[max(len(i),10) for i in sdirs] 15 | 16 | #get fnames for each directory 17 | fnames=[]; [fnames.extend(os.listdir(i)) for i in sdirs] 18 | fnames=sort(unique(array([i for i in fnames if not i.startswith('.')]))) 19 | 20 | #compare each file 21 | print("Files below are different(0:doesn't exist; same number: files are the same; directory is skipped") 22 | 
fstr="{:10s} "*len(sdirs); print(fstr.format(*sdirs)) 23 | for fname in fnames: 24 | #print(fname) 25 | #get real path of each files 26 | rnames=[] 27 | for n,sdir in enumerate(sdirs): 28 | ffname='{}/{}'.format(sdir,fname) 29 | if os.path.exists(ffname): 30 | rnames.append(os.path.realpath(ffname)) 31 | else: 32 | rnames.append(None) 33 | 34 | #compare each file 35 | snames=[]; fns=zeros(ndir).astype('int') 36 | for n,rname in enumerate(rnames): 37 | if rname is None: continue 38 | if len(snames)==0: #1st existing file 39 | snames.append(rname); fns[n]=len(snames) 40 | else: #find a new file 41 | fn=None 42 | #compare with previouse files 43 | for m,sname in enumerate(snames): 44 | if rname==sname: fn=m+1; break 45 | if os.path.isdir(rname): continue 46 | #use diff to compare two actual files 47 | code='diff -q {} {}'.format(rname,sname) 48 | p=subprocess.Popen(code,stdout=subprocess.PIPE,stderr=subprocess.STDOUT,shell=True) 49 | stdout,stderr=p.communicate() 50 | if stdout!=None: stdout=stdout.decode('utf-8') 51 | if 'differ' not in stdout: fn=m+1; break 52 | 53 | #save file status 54 | if fn is None: 55 | snames.append(rname); fns[n]=len(snames) 56 | else: 57 | fns[n]=fn 58 | 59 | #print status 60 | fns=array(fns); fstr='' 61 | if len(unique(fns))==1: continue 62 | for i,[fn,slen] in enumerate(zip(fns,slens)): 63 | fstr=fstr+' {}{}'.format(fn,' '*(slen-4)) 64 | if i==(ndir-1): fstr=fstr+': {}'.format(fname) 65 | print(fstr) 66 | -------------------------------------------------------------------------------- /scripts/Dparam: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | #show difference between SCHISM param.in files 3 | # usage: Dparam.py param.in_1 param.in_2 4 | # Dparam.py -c param.in_1 param.in_2 (only show common varibles all files have) 5 | # Dparam.py -d param.in_1 param.in_2 (only show different varibles that are missing in some files) 6 | # Dparam.py -e param.in_1 param.in_2 (only show 
different varibles that are missing in some files,but sorted) 7 | 8 | #import os, sys 9 | #from read_schism_file import read_schism_param 10 | from pylib import * 11 | 12 | flagd=0; 13 | if len(sys.argv)==1: 14 | files=['param.nml'] 15 | else: 16 | files=[]; 17 | for vi in sys.argv[1:]: 18 | if vi[0]!='-': 19 | files.append(vi) 20 | elif vi=='-c': 21 | flagd=1 22 | elif vi=='-d': 23 | flagd=2 24 | elif vi=='-e': 25 | flagd=3 26 | elif vi=='-h': 27 | print(''' 28 | 1. Dparam f1.nml f2.nml 29 | 2. Dparam -c f1.nml f2.nml (common variables) 30 | 3. Dparam -d f1.nml f2.nml (missing variables) 31 | 4. Dparam -e f1.nml f2.nml (sorted missing variables) 32 | '''); sys.exit() 33 | 34 | Par=[]; Key=[]; Val=[]; 35 | for i,fname in enumerate(files): 36 | if 'yaml' in fname: 37 | Pi=read_yaml(fname) 38 | else: 39 | Pi=read_schism_param(fname,fmt=1) 40 | Par.append(Pi) 41 | Key.append(Pi.keys()) 42 | Val.append(Pi.values()) 43 | if i==0: 44 | AKey=set(Key[i]) 45 | else: 46 | [AKey.add(ki) for ki in Key[i]] 47 | 48 | print("{:20s}: ".format('Parameters')+', '.join("{:10s}".format(vi) for vi in files)) 49 | AKey=sorted(list(AKey)); lines=[]; vs=[]; slines=[] 50 | for ki in AKey: 51 | v=[p[ki] if (ki in k) else 'N/A' for k, p in zip(Key,Par)] 52 | v=[str(i) if isinstance(i,list) else i for i in v] 53 | if len(unique(v))!=1: 54 | line="{:20s}: ".format(ki)+', '.join("{:10s}".format(str(vi)) for vi in v) 55 | if flagd==1 and ('N/A' in v): continue 56 | if (flagd in [2,3]) and ('N/A' not in v): continue 57 | lines.append(line); vs.append(v) 58 | if flagd==3: [[slines.append(i) for i,v in zip(lines,vs) if v[n]=='N/A'] for n,k in enumerate(files)]; lines=slines #sort comparison 59 | print('\n'.join(lines)) #outputs difference 60 | 61 | -------------------------------------------------------------------------------- /scripts/Harmonic_Analysis/.gitignore: -------------------------------------------------------------------------------- 1 | README 2 | bk/ 3 | Time_Series.txt 4 | 
tidal_analyze 5 | tidal_const.dat.sample 6 | t1.txt 7 | -------------------------------------------------------------------------------- /scripts/Harmonic_Analysis/compile.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from pylib import * 3 | 4 | #compile 5 | os.system('gcc tidal_analysis.c nrutil.c -lm; mv a.out tidal_analyze'); 6 | 7 | #sys.exit() 8 | #generate tidal constituents. 9 | #tide_name=['O1','K1','Q1','P1','M2','S2','K2','N2']; 10 | #C=loadz('tide_fac_const.npz'); 11 | #with open('tidal_const.dat','w+') as fid: 12 | # fid.write('{}\n'.format(len(tide_name))) 13 | # for m in arange(len(tide_name)): 14 | # fp=C.name==tide_name[m]; freqi=squeeze(C.freq[fp]) 15 | # fid.write('{}\n {:e}\n'.format(tide_name[m],freqi)) 16 | # 17 | ##analyze tidal components 18 | #os.system('./tidal_analyze Time_Series.txt tidal_const.dat t1.txt'); 19 | #os.system('./tidal_analyze Time_Series.txt tidal_const.dat.sample t1.txt'); 20 | -------------------------------------------------------------------------------- /scripts/Harmonic_Analysis/nrutil.h: -------------------------------------------------------------------------------- 1 | #ifndef _NR_UTILS_H_ 2 | #define _NR_UTILS_H_ 3 | 4 | static float sqrarg; 5 | #define SQR(a) ((sqrarg=(a)) == 0.0 ? 0.0 : sqrarg*sqrarg) 6 | 7 | static double dsqrarg; 8 | #define DSQR(a) ((dsqrarg=(a)) == 0.0 ?
0.0 : dsqrarg*dsqrarg) 9 | 10 | static double dmaxarg1,dmaxarg2; 11 | #define DMAX(a,b) (dmaxarg1=(a),dmaxarg2=(b),(dmaxarg1) > (dmaxarg2) ?\ 12 | (dmaxarg1) : (dmaxarg2)) 13 | 14 | static double dminarg1,dminarg2; 15 | #define DMIN(a,b) (dminarg1=(a),dminarg2=(b),(dminarg1) < (dminarg2) ?\ 16 | (dminarg1) : (dminarg2)) 17 | 18 | static float maxarg1,maxarg2; 19 | #define FMAX(a,b) (maxarg1=(a),maxarg2=(b),(maxarg1) > (maxarg2) ?\ 20 | (maxarg1) : (maxarg2)) 21 | 22 | static float minarg1,minarg2; 23 | #define FMIN(a,b) (minarg1=(a),minarg2=(b),(minarg1) < (minarg2) ?\ 24 | (minarg1) : (minarg2)) 25 | 26 | static long lmaxarg1,lmaxarg2; 27 | #define LMAX(a,b) (lmaxarg1=(a),lmaxarg2=(b),(lmaxarg1) > (lmaxarg2) ?\ 28 | (lmaxarg1) : (lmaxarg2)) 29 | 30 | static long lminarg1,lminarg2; 31 | #define LMIN(a,b) (lminarg1=(a),lminarg2=(b),(lminarg1) < (lminarg2) ?\ 32 | (lminarg1) : (lminarg2)) 33 | 34 | static int imaxarg1,imaxarg2; 35 | #define IMAX(a,b) (imaxarg1=(a),imaxarg2=(b),(imaxarg1) > (imaxarg2) ?\ 36 | (imaxarg1) : (imaxarg2)) 37 | 38 | static int iminarg1,iminarg2; 39 | #define IMIN(a,b) (iminarg1=(a),iminarg2=(b),(iminarg1) < (iminarg2) ?\ 40 | (iminarg1) : (iminarg2)) 41 | 42 | #define SIGN(a,b) ((b) >= 0.0 ? 
fabs(a) : -fabs(a)) 43 | 44 | #if defined(__STDC__) || defined(ANSI) || defined(NRANSI) /* ANSI */ 45 | 46 | void nrerror(char error_text[]); 47 | float *vector(long nl, long nh); 48 | int *ivector(long nl, long nh); 49 | unsigned char *cvector(long nl, long nh); 50 | unsigned long *lvector(long nl, long nh); 51 | double *dvector(long nl, long nh); 52 | float **matrix(long nrl, long nrh, long ncl, long nch); 53 | double **dmatrix(long nrl, long nrh, long ncl, long nch); 54 | int **imatrix(long nrl, long nrh, long ncl, long nch); 55 | float **submatrix(float **a, long oldrl, long oldrh, long oldcl, long oldch, 56 | long newrl, long newcl); 57 | float **convert_matrix(float *a, long nrl, long nrh, long ncl, long nch); 58 | float ***f3tensor(long nrl, long nrh, long ncl, long nch, long ndl, long ndh); 59 | void free_vector(float *v, long nl, long nh); 60 | void free_ivector(int *v, long nl, long nh); 61 | void free_cvector(unsigned char *v, long nl, long nh); 62 | void free_lvector(unsigned long *v, long nl, long nh); 63 | void free_dvector(double *v, long nl, long nh); 64 | void free_matrix(float **m, long nrl, long nrh, long ncl, long nch); 65 | void free_dmatrix(double **m, long nrl, long nrh, long ncl, long nch); 66 | void free_imatrix(int **m, long nrl, long nrh, long ncl, long nch); 67 | void free_submatrix(float **b, long nrl, long nrh, long ncl, long nch); 68 | void free_convert_matrix(float **b, long nrl, long nrh, long ncl, long nch); 69 | void free_f3tensor(float ***t, long nrl, long nrh, long ncl, long nch, 70 | long ndl, long ndh); 71 | 72 | #else /* ANSI */ 73 | /* traditional - K&R */ 74 | 75 | void nrerror(); 76 | float *vector(); 77 | float **matrix(); 78 | float **submatrix(); 79 | float **convert_matrix(); 80 | float ***f3tensor(); 81 | double *dvector(); 82 | double **dmatrix(); 83 | int *ivector(); 84 | int **imatrix(); 85 | unsigned char *cvector(); 86 | unsigned long *lvector(); 87 | void free_vector(); 88 | void free_dvector(); 89 | void 
free_ivector(); 90 | void free_cvector(); 91 | void free_lvector(); 92 | void free_matrix(); 93 | void free_submatrix(); 94 | void free_convert_matrix(); 95 | void free_dmatrix(); 96 | void free_imatrix(); 97 | void free_f3tensor(); 98 | 99 | #endif /* ANSI */ 100 | 101 | #endif /* _NR_UTILS_H_ */ 102 | -------------------------------------------------------------------------------- /scripts/Harmonic_Analysis/readme.txt: -------------------------------------------------------------------------------- 1 | example is listed in compile.py 2 | 3 | usage: 4 | ./tidal_analyze Time_Series.txt tidal_const.dat result.dat 5 | -------------------------------------------------------------------------------- /scripts/Harmonic_Analysis/tidal_analysis.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include "nrutil.h" 5 | #define SWAP(a,b) {temp=(a);(a)=(b);(b)=temp;} 6 | #define MAX 1000000 7 | 8 | 9 | void gaussj(double **a, int n, double **b, int m) 10 | { 11 | int *indxc, *indxr, *ipiv; 12 | int i, icol, irow, j, k, l, ll; 13 | double big, dum, pivinv, temp; 14 | 15 | indxc = ivector(1,n); 16 | indxr = ivector(1,n); 17 | ipiv = ivector(1,n); 18 | for (j=1;j<=n;j++) ipiv[j] = 0; 19 | for (i=1;i<=n;i++) { 20 | big=0.0; 21 | for (j=1;j<=n;j++) 22 | if (ipiv[j]!=1) 23 | for (k=1;k<=n;k++){ 24 | if (ipiv[k]==0){ 25 | if (fabs(a[j][k]) >= big){ 26 | big = fabs(a[j][k]); 27 | irow = j; 28 | icol = k; 29 | } 30 | } else if (ipiv[k] > 1) nrerror("gaussj: Singular Matrix-1"); 31 | } 32 | ++(ipiv[icol]); 33 | if (irow != icol){ 34 | for (l=1;l<=n;l++) SWAP(a[irow][l],a[icol][l]); 35 | for (l=1;l<=m;l++) SWAP(b[irow][l],b[icol][l]); 36 | } 37 | indxr[i] = irow; 38 | indxc[i] = icol; 39 | if (a[icol][icol] == 0) nrerror("gaussj:Singular Matrix-2"); 40 | pivinv = 1.0/a[icol][icol]; 41 | a[icol][icol] = 1.0; 42 | for (l=1;l<=n;l++) a[icol][l] *= pivinv; 43 | for (l=1;l<=m;l++) b[icol][l] *= pivinv; 44 | for 
(ll=1;ll<=n;ll++) 45 | if (ll != icol){ 46 | dum = a[ll][icol]; 47 | a[ll][icol] = 0.0; 48 | for (l=1;l<=n;l++) a[ll][l] -= a[icol][l]*dum; 49 | for (l=1;l<=m;l++) b[ll][l] -= b[icol][l]*dum; 50 | } 51 | } 52 | for (l=n;l>=1;l--) { 53 | if (indxr[l] != indxc[l]) 54 | for (k=1;k<=n;k++) 55 | SWAP(a[k][indxr[l]],a[k][indxc[l]]); 56 | } 57 | free_ivector(ipiv,1,n); 58 | free_ivector(indxr,1,n); 59 | free_ivector(indxc,1,n); 60 | } 61 | 62 | char *number2string(int N,char *char_number) 63 | { 64 | int decimal; 65 | 66 | if (N<= 9){ 67 | char_number[0] = '0'+N; 68 | char_number[1] = '\0'; 69 | } 70 | else { 71 | decimal = N/10; 72 | char_number[0] = '0'+decimal; 73 | char_number[1] = '0'+N-decimal*10; 74 | char_number[2] = '\0'; 75 | } 76 | return (char_number); 77 | } 78 | 79 | double time[MAX], eta[MAX]; 80 | main(int argc, char *argv[]){ 81 | 82 | struct tide { 83 | char label[3]; 84 | double freq; 85 | double amp; 86 | double phase; 87 | }; 88 | 89 | FILE *fid_input, *fid_output, *fid_const; 90 | char fname[200]; 91 | double **M, **F; 92 | struct tide *values; 93 | double sum1, sum2, sum3, sum4; 94 | int N = 0, i = 0, j = 0, l = 0, N_tides; 95 | 96 | void gaussj(double **, int, double **, int); 97 | 98 | if (argc!=4){ 99 | fprintf(stderr,"Usage:%s \n",argv[0]); 100 | exit(1); 101 | } 102 | 103 | sprintf(fname,"%s",argv[2]); 104 | if ((fid_const = fopen(fname,"r")) == NULL){ 105 | fprintf(stderr,"Could not open file %s\n",fname); 106 | exit(1); 107 | } 108 | fscanf(fid_const,"%d",&N_tides); 109 | 110 | values = (struct tide *)calloc(N_tides,sizeof(struct tide)); 111 | M = dmatrix(1,2*N_tides+1,1,2*N_tides+1); 112 | F = dmatrix(1,2*N_tides+1,1,1); 113 | 114 | for (i=0;i 180: sx=sx-360 # only work for Atlantic ocean when using AVISO 20 | sy=array(C.variables['latitude'][:]); 21 | 22 | #------ Perform interpolation 23 | #get interp index 24 | sxi,syi=meshgrid(sx,sy); sxy=c_[sxi.ravel(),syi.ravel()]; 25 | cvs=array(C.variables[sshname][0]); sindns=[]; sindps=[] 26 | 
print('computing interpation index') 27 | cv=cvs; ds=cv.shape; cv=cv.ravel() 28 | fpn=abs(cv)>1e3; sindn=nonzero(fpn)[0]; sindr=nonzero(~fpn)[0]; sindp=sindr[near_pts(sxy[sindn],sxy[sindr])] 29 | sindns.append(sindn); sindps.append(sindp) 30 | 31 | #get interp index for pts 32 | sx0=sx[:]; sy0=sy[:]; print('get new interp indices') 33 | idx0=((lxi0[:,None]-sx0[None,:])>=0).sum(axis=1)-1; ratx0=(lxi0-sx0[idx0])/(sx0[idx0+1]-sx0[idx0]) 34 | idy0=((lyi0[:,None]-sy0[None,:])>=0).sum(axis=1)-1; raty0=(lyi0-sy0[idy0])/(sy0[idy0+1]-sy0[idy0]) 35 | exec("cv=array(C.variables['{}'][0])".format(sshname)); 36 | sindn,sindp=sindns[0],sindps[0] 37 | cv=cv.ravel(); fpn=(abs(cv[sindn])>1e3)*(abs(cv[sindp])<1e3); cv[sindn]=cv[sindp]; fpn=abs(cv)>1e3 #init fix 38 | if sum(fpn)!=0: fni=nonzero(fpn)[0]; fri=nonzero(~fpn)[0]; fpi=fri[near_pts(sxy[fni],sxy[fri])]; cv[fni]=cv[fpi] #final fix 39 | cv=cv.reshape(ds) 40 | 41 | #find parent pts 42 | v0=array([cv[idy0,idx0],cv[idy0,idx0+1],cv[idy0+1,idx0],cv[idy0+1,idx0+1]]) 43 | 44 | #interp 45 | v1=v0[0]*(1-ratx0)+v0[1]*ratx0; v2=v0[2]*(1-ratx0)+v0[3]*ratx0 46 | vi=v1*(1-raty0)+v2*raty0 47 | vi=array(vi) # interpolated SSH on nodes 48 | 49 | #replace SSH with new SSH from large-scale model that you choose 50 | oldhot.eta2.val[:]=vi 51 | oldhot.cumsum_eta.val[:]=vi 52 | 53 | #save new hostart.nc 54 | WriteNC('{}'.format(sname),oldhot) 55 | 56 | print('--------done--------') 57 | 58 | -------------------------------------------------------------------------------- /scripts/checkrun: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | #check runtime for each run 3 | from pylib import * 4 | import subprocess 5 | import datetime 6 | 7 | #-----input------------------------------------------------------------ 8 | runs=['RUN01d','RUN02p','RUN02q'] 9 | 10 | if len(sys.argv)!=1: runs=sys.argv[1:] 11 | #--compute runtime---------------------------------------------------- 12 | if 
len(sys.argv)!=1: runs=sys.argv[1:] 13 | for run in runs: 14 | fname='{}/mirror.out'.format(run) 15 | if not os.path.exists(fname): fname='{}/outputs/mirror.out'.format(run) 16 | if not os.path.exists(fname): continue 17 | 18 | #find start time 19 | with open(fname) as fid: 20 | line=fid.readline() 21 | 22 | R=re.findall('(\d+), (\d+).(\d+)',line)[0] 23 | yyyy=int(R[0][:4]); mm=int(R[0][4:6]); dd=int(R[0][6:]); 24 | HH=int(R[1][:2]); MM=int(R[1][2:4]); SS=int(R[1][4:]); mSS=int(R[2])*1000 25 | t0=datetime.datetime(yyyy,mm,dd,HH,MM,SS,mSS) 26 | 27 | #get the start time step 28 | fid=open(fname,'r'); nstep0=0 29 | while (True): 30 | line=fid.readline() 31 | if len(line)==0: break 32 | if line.strip().startswith('TIME STEP='): 33 | nstep0=float(line.split(';')[0].split('=')[1]) 34 | break 35 | fid.close() 36 | if nstep0==0: continue 37 | 38 | #get lines in the end 39 | code='tail -n 60 {}'.format(fname) 40 | p=subprocess.Popen(code,stdout=subprocess.PIPE,stderr=subprocess.STDOUT,shell=True) 41 | out,err=p.communicate() 42 | if out!=None: out=out.decode('utf-8') 43 | lines=out.split('\n'); 44 | if lines[-1]=='': lines.pop() 45 | lines.reverse() 46 | 47 | #find end time 48 | line=lines[0] 49 | if 'Run completed successfully' in line: 50 | R=re.findall('(\d+), (\d+).(\d+)',line)[0] 51 | yyyy=int(R[0][:4]); mm=int(R[0][4:6]); dd=int(R[0][6:]); 52 | HH=int(R[1][:2]); MM=int(R[1][2:4]); SS=int(R[1][4:]);mSS=int(R[2])*1000 53 | t1=datetime.datetime(yyyy,mm,dd,HH,MM,SS,mSS) 54 | else: 55 | t1=datetime.datetime.fromtimestamp(os.path.getmtime(fname)) 56 | 57 | #find time step 58 | if os.path.exists('{}/param.in'.format(run)): 59 | P=read_schism_param('{}/param.in'.format(run)); dt=float(P['dt']); rnday=float(P['rnday']) 60 | else: 61 | P=read_schism_param('{}/param.nml'.format(run)); dt=float(P['dt']); rnday=float(P['rnday']) 62 | 63 | #find number of time step completed 64 | nstep1=None 65 | for line in lines: 66 | if re.match('TIME STEP=',line): 67 | 
nstep1=int(re.findall('(\d+); ',line)[0]) 68 | break 69 | if nstep1==None: continue 70 | 71 | #output values 72 | nstep=nstep1-nstep0 73 | ds=(t1-t0).total_seconds() 74 | RTR=(dt*nstep/86400)/(ds/86400) 75 | 76 | nday=rnday; nday0=(dt*nstep0)/86400; nday1=(dt*nstep1)/86400 77 | time_all=nday*24/RTR 78 | time_left=(nday-nday1)*24/RTR 79 | time_365=365*24/RTR 80 | 81 | #print results 82 | if nday0*86400<2*dt: 83 | print('{}: RTR={:.1f}; {:.2f} days finished in {:.1f} hrs ({:.0f} min); ({:.1f}, {:.1f}, {:.1f}) hrs needed for ({:.1f}, {:.1f}, {:.0f}) days'.format \ 84 | (run,RTR,nday1,ds/3600,ds/60, time_left,time_all,time_365,(nday-nday1),nday, 365)) 85 | else: 86 | print('{}: RTR={:.1f}; {:.2f} ({:.1f}) days finished in {:.1f} hrs ({:.0f} min); ({:.1f}, {:.1f}, {:.1f}) hrs needed for ({:.1f}, {:.1f}, {:.0f}) days'.format \ 87 | (run,RTR,(nday1-nday0),nday1,ds/3600,ds/60, time_left,time_all,time_365,(nday-nday1),nday, 365)) 88 | 89 | 90 | sys.exit() 91 | 92 | 93 | -------------------------------------------------------------------------------- /scripts/chmodDir: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | #change dir permission to allow the files public acess 3 | #from pylib import * 4 | import os,sys 5 | import numpy as np 6 | 7 | #directories to be changed in access permission 8 | if len(sys.argv)==1 : 9 | sdirs0=['.'] 10 | else: 11 | sdirs0=sys.argv[1:] 12 | sdirs0=[os.path.abspath(i) for i in sdirs0] 13 | 14 | #get all filenames recursively 15 | fnames=[] 16 | while len(sdirs0)!=0: 17 | sdirs=sdirs0[:]; sdirs0=[] 18 | for sdir in sdirs: 19 | for rname in os.listdir(sdir): 20 | fname=os.path.realpath(sdir+'/'+rname) 21 | if os.path.isdir(fname): 22 | sdirs0.append(fname) 23 | else: 24 | fnames.append(fname) 25 | 26 | #change file permission 27 | for fname in fnames: 28 | try: 29 | mode=os.stat(fname).st_mode 30 | os.chmod(fname,int(mode/64)*64+4*8+4) #keep origin user permission 31 | except: 32 | 
pass 33 | 34 | #change directory permission 35 | snames=np.unique(np.array([os.path.dirname(i) for i in fnames])) 36 | user=os.getlogin() #user id 37 | for sname in snames: 38 | rdir=sdir[:sdir.find(user)] 39 | while len(sname)>len(rdir): 40 | try: 41 | mode=os.stat(sname).st_mode 42 | os.chmod(sname,int(mode/64)*64+5*8+5) #keep origin user permission 43 | except: 44 | pass 45 | sname=os.path.dirname(sname) 46 | -------------------------------------------------------------------------------- /scripts/cmake_schism: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | ''' 3 | auto compile schism model; work for cmake on sciclone/frontera 4 | Usage (examples): 5 | 1). cmake_schism #compile pschism_* 6 | 2). cmake_schism all #compile all Utility; copy all to curdir 7 | 3). cmake_schism all gen_ #compile all Utility; copy gen_* to curdir 8 | 4). cmake_schism combine_hotstart7 #compile combine_hotstart7; copy to curdir 9 | ''' 10 | from pylib import * 11 | 12 | #----------------------------------------------------------------------------------------------- 13 | #Inputs: 14 | #mods: OLDIO,PREC_EVAP,GOTM,HA,MARSH,SED2D,WWW,ICM,MICM,GEN,AGE,ECO,ICM,PH,COSINE,FIB,SED,FABM,ANALYSIS 15 | #----------------------------------------------------------------------------------------------- 16 | modules=['OLDIO', 'ICM', 'PREC_EVAP'] 17 | #modules=['OLDIO','COSINE'] 18 | 19 | #choose TVD LIM 20 | TVD_LIM='VL' # can be: SB, VL, MM or OS for Superbee, Van Leer, Minmod, or Osher 21 | 22 | #directory of schism/fabm code 23 | schism='~/schism'; fabm='~/fabm' 24 | 25 | target=sys.argv[1] if len(sys.argv)>1 else 'pschism' #compile target 26 | cnames=sys.argv[2:] if len(sys.argv)>2 else [target] #executables to be copied to current directory 27 | #----------------------------------------------------------------------------------------------- 28 | #compile the code 29 | 
#----------------------------------------------------------------------------------------------- 30 | #write SCHSI.local.build 31 | schism=schism.replace('~',os.path.expanduser("~")); fabm=fabm.replace('~',os.path.expanduser("~")); 32 | fname='{}/cmake/SCHISM.local.build'.format(schism) 33 | 34 | fid=open(fname,'r'); lines=fid.readlines(); fid.close() #save original file 35 | rewrite(fname,replace=['ON','OFF'],include=['USE_','PREC_EVAP','OLDIO']) 36 | rewrite(fname,replace=['OFF','ON'],include=[i if i in ['OLDIO','PREC_EVAP'] else 'USE_{}'.format(i) for i in modules]) 37 | rewrite(fname,replace='set (TVD_LIM {} CACHE STRING "Flux limiter")'.format(TVD_LIM),include=['Flux limiter']) 38 | rewrite(fname,replace=[],include=['FABM_BASE']) 39 | rewrite(fname,append=['set( FABM_BASE {} CACHE STRING "Path to FABM base")\n'.format(fabm)]) 40 | if 'SED2D' not in modules: rewrite(fname,replace=['ON','OFF'],include=['SED2D']) 41 | 42 | #determine host 43 | host=os.getenv('HOST').split('.')[0] 44 | if host=='hurricane': host='whirlwind' 45 | 46 | try: 47 | #compile 48 | if not fexist('{}/build'.format(schism)): os.mkdir('{}/build'.format(schism)) 49 | os.system('cd {}/build; rm -rf *; cmake -C ../cmake/SCHISM.local.build -C ../cmake/SCHISM.local.{} ../src; make -j8 {}'.format(schism,host,target)) 50 | 51 | #put tag number 52 | if target=='pschism': 53 | sname=os.listdir('{}/build/bin'.format(schism))[0] 54 | irev=command_outputs('cd {}; git log'.format(schism)).stdout.split('\n')[0].split()[1][:8] 55 | os.system('cp {}/build/bin/{} ./{}.{}'.format(schism,sname,sname,irev)) 56 | else: 57 | snames=os.listdir('{}/build/bin'.format(schism)) 58 | for sname in snames: 59 | icopy=array([sname.startswith(i) for i in cnames]).sum() 60 | if target=='all': icopy=1 61 | if icopy!=0: os.system('cp {}/build/bin/{} ./'.format(schism,sname)) 62 | 63 | #write original file 64 | fid=open(fname,'w+'); fid.writelines(lines); fid.close() 65 | print('target {}: compile sucessfully'.format(target)) 
66 | except: 67 | #write original file 68 | fid=open(fname,'w+'); fid.writelines(lines); fid.close() 69 | -------------------------------------------------------------------------------- /scripts/cmb_hotstart: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from pylib import * 3 | close("all") 4 | 5 | #inputs 6 | args=sys.argv[1:]; nv=len(args) 7 | 8 | #help info. 9 | if nv==1 and args[0]=='-h': 10 | sys.exit('usage: 1). cmb_hotstart (last outputs); 2). cmb_hotstart all') 11 | 12 | #check input 13 | fmt=1 if (nv==1 and args[0]=='all') else 0 14 | 15 | #combine hotstart 16 | try: 17 | combine_schism_hotstart(fmt=fmt) 18 | except: 19 | combine_schism_hotstart('outputs',fmt=fmt) 20 | 21 | -------------------------------------------------------------------------------- /scripts/cmb_icm: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from pylib import * 3 | close("all") 4 | 5 | #inputs 6 | args=sys.argv[1:]; nv=len(args) 7 | 8 | #help info. 9 | if nv==1 and args[0]=='-h': 10 | sys.exit('usage: 1). cmb_icm; 2). 
cmb_icm icm.nc') 11 | 12 | #fnames 13 | sname=args[0] if nv==1 else 'icm.nc' 14 | 15 | #combine icm outputs 16 | try: 17 | combine_icm_output(fmt=0,sname=sname) 18 | except: 19 | os.remove('outputs/'+sname); combine_icm_output(fmt=1,sname=sname) 20 | 21 | -------------------------------------------------------------------------------- /scripts/copyrun: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import os,sys 3 | 4 | #inputs 5 | runs=['RUN08aa'] #new runs 6 | brun='RUN08a' #run to be copied 7 | 8 | #copy files 9 | fnames=['*.out',] #files to be cleaned 10 | for run in runs: 11 | if os.path.exists(run): continue 12 | print('copying {} to {}'.format(brun,run)) 13 | os.mkdir(run) 14 | os.system('cd {}; cp -a ../{}/* ./ '.format(run,brun)) 15 | for fname in fnames: os.system('cd {}; rm {}'.format(run,fname)) 16 | 17 | #creat outputs 18 | outdir=os.readlink('{}/outputs'.format(brun)).replace(brun,run) 19 | if not os.path.exists(outdir): os.system('mkdir -p {}'.format(outdir)) 20 | os.system('cd {}; rm outputs; ln -sf {} outputs'.format(run,outdir)) 21 | 22 | 23 | -------------------------------------------------------------------------------- /scripts/download_AVISO.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from ftplib import FTP 3 | from pylib import * 4 | 5 | ############################################### 6 | StartT=datenum('2016-9-1'); EndT=datenum('2016-9-4') # time period to download 7 | host='ftp-access.aviso.altimetry.fr' # domain name 8 | user='kmpark19900616@gmail.com'; passwd = 'Y0PLwL' # user id and password 9 | wdir='/duacs-experimental/dt-phy-grids/multiscale_interpolation_alti_drifters/version_01_00'# target directory to download 10 | #ftp.dir() #list contents in current dir 11 | ############################################### 12 | 13 | # connect to host and go to target directory 14 | ftp = FTP(host) 15 | 
ftp.login(user=user, passwd = passwd) 16 | ftp.cwd(wdir)#cd to target directory 17 | 18 | # organizing and sorting file names 19 | fnames = ftp.nlst(); fnames=array(fnames) 20 | mti=datenum(array([(i.replace('.','_').split('_')[5]) for i in fnames])) 21 | fpt=(mti>=(StartT))*(mti<(EndT)); fnames=fnames[fpt]; mti=mti[fpt] 22 | sind=argsort(mti); mti=mti[sind]; fnames=fnames[sind] 23 | 24 | # download data 25 | for fname in fnames: 26 | print('Downloading {}'.format(fname)) 27 | with open(fname, "wb") as file: 28 | ftp.retrbinary(f"RETR {fname}", file.write) 29 | print('--------------Done-------------') 30 | -------------------------------------------------------------------------------- /scripts/download_CMEMS.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from pylib import * 3 | import datetime 4 | # This script downloads 5 | # Need to install motuclient (Web Server) to use this script (https://github.com/clstoulouse/motu-client-python.git) 6 | # Check user manual for detailed information of CMEMS data 7 | # analysis/forecast: https://catalogue.marine.copernicus.eu/documents/PUM/CMEMS-GLO-PUM-001-024.pdf 8 | # reanalysis: https://catalogue.marine.copernicus.eu/documents/PUM/CMEMS-GLO-PUM-001-030.pdf 9 | 10 | ############################################### 11 | StartT,EndT=datenum(2018,12,31,fmt=1),datenum(2020,1,1,fmt=1) # time 12 | tres='daily' # hourly, daily, monthly.
13 | xl=[-163,-146] # longitude 14 | yl=[16,26] #latitude 15 | 16 | dl=[0, 8727.917] #depth 17 | data='rana' # analysis:'GLOBAL_ANALYSISFORECAST_PHY_001_024-TDS'; reanlysis: GLOBAL_MULTIYEAR_PHY_001_030-TDS 18 | myid='kpark' #user id 19 | pswd='KyungminPark0616' #user password 20 | sdir='./' #location to save 21 | svrs=['so','thetao','uv','zos'] # so: water salinity, thetao: water temperature, uv: water velocities, zos: sea surface height 22 | 23 | ####################################### 24 | #download CMEMS data 25 | if data=='ana': 26 | serv='GLOBAL_ANALYSISFORECAST_PHY_001_024-TDS' 27 | print('Downloading analysis data from CMEMS') 28 | for svr in svrs: 29 | # Variable definition 30 | if svr=='so': prod='-so'; vr='--variable so' 31 | elif svr=='thetao': prod='-thetao'; vr='--variable thetao' 32 | elif svr=='uv': prod='-cur'; vr='--variable uo --variable vo'; 33 | elif svr=='zos': prod=''; vr='--variable zos' 34 | else: print('Unrecognized variable --{}--.'.format(svr)); break 35 | 36 | # Temporal resolution definition 37 | if tres=='hourly': 38 | if svr=='zos': dt=datetime.timedelta(hours=1); pid='PT1H-m' 39 | else: dt=datetime.timedelta(hours=6); pid='PT6H-i' 40 | elif tres=='daily': 41 | dt=datetime.timedelta(hours=24); pid='P1D-m' 42 | elif tres=='monthly': 43 | dt=datetime.timedelta(days=31); pid='P1M-m' 44 | else: print('Unrecognized temporal resolution --{}--.'.format(tres)); break 45 | tlist=drange(StartT,EndT+dt,dt) 46 | dt=tlist[1]-tlist[0] 47 | # Download data 48 | for nn,ti in enumerate(tlist): 49 | if tres=='hourly' or tres=='daily': 50 | st=num2date(ti).strftime('%Y_%m_%d_%H') 51 | elif tres=='monthly': 52 | st=num2date(ti).strftime('%Y_%m_00_00') 53 | if os.path.isfile('cmems_{}_{}.nc'.format(svr,st)): print('Variable --{}-- on {} exsit'.format(svr,st)); continue 54 | furl='motuclient --motu https://nrt.cmems-du.eu/motu-web/Motu --service-id {} --product-id cmems_mod_glo_phy{}_anfc_0.083deg_{} --longitude-min {} --longitude-max {} --latitude-min {} 
--latitude-max {} --date-min "{}" --date-max "{}" --depth-min {} --depth-max {} {} --out-dir {} --out-name cmems_{}_{}.nc --user {} --pwd {}'.format(serv,prod,pid,xl[0],xl[1],yl[0],yl[1],num2date(ti).strftime('%Y-%m-%d %H:%M:%S'),num2date(ti+dt*0.9).strftime('%Y-%m-%d %H:%M:%S'),dl[0],dl[1],vr,sdir,svr,st,myid,pswd) 55 | #print(furl) 56 | os.system(furl) 57 | 58 | elif data=='rana': 59 | serv='GLOBAL_MULTIYEAR_PHY_001_030-TDS' 60 | print('Downloading reanalysis data from CMEMS') 61 | # Temporal resolution definition 62 | if tres=='daily': 63 | dt=datetime.timedelta(hours=24); pid='P1D-m' 64 | elif tres=='monthly': 65 | dt=datetime.timedelta(days=31); pid='P1M-m' 66 | else: print('Unrecognized temporal resolution --{}--.'.format(tres)); 67 | tlist=drange(StartT,EndT+dt,dt) 68 | dt=tlist[1]-tlist[0] 69 | # Download data 70 | for nn,ti in enumerate(tlist): 71 | if tres=='daily': 72 | st=num2date(ti).strftime('%Y_%m_%d_%H') 73 | elif tres=='monthly': 74 | st=num2date(ti).strftime('%Y_%m_00_00') 75 | if os.path.isfile('cmems_{}.nc'.format(st)): print('Data on {} exsit'.format(st)); continue 76 | furl='motuclient --motu https://my.cmems-du.eu/motu-web/Motu --service-id {} --product-id cmems_mod_glo_phy_my_0.083_{} --longitude-min {} --longitude-max {} --latitude-min {} --latitude-max {} --date-min "{}" --date-max "{}" --depth-min {} --depth-max {} --variable so --variable thetao --variable uo --variable vo --variable zos --out-dir {} --out-name cmems_{}.nc --user {} --pwd {}'.format(serv,pid,xl[0],xl[1],yl[0],yl[1],num2date(ti).strftime('%Y-%m-%d %H:%M:%S'),num2date(ti+dt*0.9).strftime('%Y-%m-%d %H:%M:%S'),dl[0],dl[1],sdir,st,myid,pswd) 77 | print(furl) 78 | os.system(furl) 79 | 80 | else: print('Wrong data type. 
Choose ana or rana in data') 81 | 82 | print('---------------done------------') 83 | -------------------------------------------------------------------------------- /scripts/download_HFRadar.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # Dowload HF Radar-information--> https://secoora.org/high-frequency-radar/ 3 | from pylib import * 4 | import pandas as pd 5 | 6 | ############################################### 7 | StartT, EndT= '2015-1-1', '2015-1-2' 8 | sdir='./' #location to save 9 | rname='USEGC'# AKNS: Alaska North Slope, GAK: Gulf of Alaska, USHI: Hawaiian Islands, PRVI: Puerto Rico/Virgin Islands, USWC: U.S. West Coast, USEGC: U.S. East Coast and Gulf of Mexico 10 | url='https://www.ncei.noaa.gov/data/oceans/ndbc/hfradar/rtv' 11 | ####################################### 12 | 13 | tlist=pd.date_range(StartT, EndT, freq='MS').strftime("%Y-%m-%d") 14 | tlist=datenum(tlist) 15 | #download CMEMS data 16 | for ti in tlist: 17 | furl='wget -N -r --no-parent -nH --reject "index.html*" -P {} {}/{}/{}/{}/'.format(sdir,url,num2date(ti).strftime('%Y'),num2date(ti).strftime('%Y%m'),rname) 18 | os.system(furl) 19 | -------------------------------------------------------------------------------- /scripts/download_hycom.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | #automatically download HYCOM data 3 | from pylib import * 4 | close("all") 5 | 6 | #-------------------------------------------------------------------- 7 | #input 8 | #-------------------------------------------------------------------- 9 | StartT,EndT=datenum(2009,12,31),datenum(2020,1,1) 10 | xm=[-123.05,-122.35]; ym=[37.35,38.05] 11 | sdir='Data' 12 | 13 | #database 14 | url_1='http://ncss.hycom.org/thredds/ncss'; url_2='https://ncss.hycom.org/thredds/ncss' 15 | databases=[['GLBv0.08/expt_53.X',datestr2num('Jan-01-1994'),datestr2num('Dec-31-2015')], 16 | 
['GLBv0.08/expt_56.3',datestr2num('Jan-01-2016'),datestr2num('Apr-30-2016')], 17 | ['GLBv0.08/expt_57.2',datestr2num('May-01-2016'),datestr2num('Jan-31-2017')], 18 | ['GLBv0.08/expt_92.8',datestr2num('Feb-01-2017'),datestr2num('May-31-2017')], 19 | ['GLBv0.08/expt_57.7',datestr2num('Jun-01-2017'),datestr2num('Sep-30-2017')], 20 | ['GLBv0.08/expt_92.9',datestr2num('Oct-01-2017'),datestr2num('Dec-31-2017')], 21 | ['GLBv0.08/expt_93.0',datestr2num('Jan-01-2018'),datestr2num('Feb-18-2020')],] 22 | 23 | #download hycom data 24 | if not os.path.exists(sdir): os.mkdir(sdir) 25 | for ti in arange(StartT,EndT): 26 | #choose database 27 | bname=[i for i in databases if (ti>=i[1]) and (ti<=i[2])] 28 | bname=bname[0][0] if len(bname)==1 else sys.exit('database not found: {}'.format(num2date(ti))) 29 | 30 | #get fname 31 | if (ti>=datestr2num('Jan-01-1994'))*(ti<=datestr2num('Dec-31-2015')): 32 | url='{}/{}/data/{}?'.format(url_1,bname,num2date(ti).year) 33 | else: 34 | url='{}/{}?'.format(url_2,bname) 35 | 36 | #add variables 37 | for i in ['surf_el','salinity','water_temp','water_u','water_v']: url=url+'var={}&'.format(i) 38 | 39 | #add domain 40 | for i,k in zip(['south','north','west','east'],[*ym,*xm]): url=url+'{}={}&'.format(i,k) 41 | 42 | #horizontal stride and time 43 | for n in arange(0,24,3): 44 | fname='{}/hycom_{}_{:02}.nc'.format(sdir,num2date(ti).strftime('%Y_%m_%d'),n) 45 | furl=url+'horizStride=1&time={}T{:02}%3A00%3A00Z&vertCoord=&accept=netcdf4'.format(num2date(ti).strftime('%Y-%m-%d'),n) 46 | 47 | #download hycom data 48 | if os.path.exists(fname): continue 49 | try: 50 | urlsave(furl,fname) 51 | print(fname) 52 | except: 53 | pass 54 | -------------------------------------------------------------------------------- /scripts/gen_bctides.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | #generate bctides.in's tide harmonics 3 | from pylib import * 4 | 5 | 
#--------------------------------------------------------------------- 6 | #input 7 | #--------------------------------------------------------------------- 8 | tnames=['O1','K1','Q1','P1','M2','S2','K2','N2'] 9 | StartT=[2010,1,1,0] #year,month,day,hour 10 | nday=365 #number of days 11 | ibnds=[1,] #order of open boundaries (starts from 1) 12 | flags=[[5,5,4,4],] #SCHISM bnd flags for each boundary 13 | Z0=0.0 #add Z0 constant if Z0!=0.0 14 | 15 | grd='../grid.npz' #hgrid.ll (includes bndinfo), or grid.npz (include lon,lat) 16 | bdir=r'/sciclone/data10/wangzg/FES2014' #FES2014 database 17 | 18 | #--------------------------------------------------------------------- 19 | #read bndinfo, amp, freq, nodal factor and tear 20 | #--------------------------------------------------------------------- 21 | #read grid information 22 | if grd.endswith('.npz'): 23 | gd=loadz(grd).hgrid; gd.x=gd.lon; gd.y=gd.lat 24 | else: 25 | gd=read_schism_hgrid(grd) 26 | 27 | #get tidal amplitude and frequency 28 | amp=[]; freq=[]; ts=loadz('{}/tide_fac_const/tide_fac_const.npz'.format(bdir)) 29 | for tname in tnames: 30 | sind=nonzero(ts.name==tname.upper())[0][0] 31 | amp.append(ts.amp[sind]); freq.append(ts.freq[sind]) 32 | 33 | #get nodal factor 34 | tdir='{}/tide_fac_improved'.format(bdir) 35 | fid=open('./tide_fac.in','w+'); fid.write('{}\n{} {} {} {}\n0\n'.format(nday,*StartT[::-1])); fid.close() 36 | os.system('ifort -o tide_fac_improved {}/tf_main.f90 {}/tf_selfe.f90; ./tide_fac_improved 0)[0]; idx[sind]=idx[sind]-1 83 | idy=floor((yi-lat[0])/dy).astype('int'); sind=nonzero((lat[idy]-yi)>0)[0]; idy[sind]=idy[sind]-1 84 | xrat=(xi-lon[idx])/(lon[idx+1]-lon[idx]); yrat=(yi-lat[idy])/(lat[idy+1]-lat[idy]) 85 | if sum((xrat>1)|(xrat<0)|(yrat>1)|(yrat<0))!=0: sys.exit('xrat or yrat >1 or <0') 86 | 87 | #interp for amp,pha 88 | apii=[] 89 | for k in arange(2): 90 | if k==0: v0=c_[amp0[idy,idx],amp0[idy,idx+1],amp0[idy+1,idx],amp0[idy+1,idx+1]].T; vm=100 91 | if k==1: 
v0=c_[pha0[idy,idx],pha0[idy,idx+1],pha0[idy+1,idx],pha0[idy+1,idx+1]].T; vm=370 92 | vmax=v0.max(axis=0); vmin=v0.min(axis=0) 93 | if k==1: #deal with phase jump 94 | for kk in nonzero(abs(vmax-vmin)>180)[0]: 95 | fpn=abs(v0[:,kk]-vmax[kk])>180; v0[fpn,kk]=v0[fpn,kk]+360 96 | v1=v0[0]*(1-xrat)+v0[1]*xrat; v2=v0[2]*(1-xrat)+v0[3]*xrat; apiii=v1*(1-yrat)+v2*yrat 97 | sind=nonzero((vmax>vm)*(vmin<=vm)*(vmin>=0))[0]; apiii[sind]=vmin[sind] 98 | if sum((vmax>vm)*((vmin>vm)|(vmin<0)))!=0: sys.exit('All junks for amp or pha') 99 | apii.append(apiii) 100 | api.append(apii) 101 | ap.append(api) 102 | ap=array(ap).transpose([1,0,3,2]) 103 | 104 | #write tidal amp and pha for elev 105 | if Z0!=0: fid.write('Z0\n'); [fid.write('{} 0.0\n'.format(Z0)) for i in arange(nobn)] 106 | for m,tname in enumerate(tnames): 107 | fid.write('{}\n'.format(tname.lower())) 108 | for k in arange(nobn): 109 | fid.write('{:8.6f} {:.6f}\n'.format(*ap[0,m,k])) 110 | 111 | #write tidal amp and pha for uv 112 | if Z0!=0: fid.write('Z0\n'); [fid.write('0.0 0.0 0.0 0.0\n') for i in arange(nobn)] 113 | for m,tname in enumerate(tnames): 114 | fid.write('{}\n'.format(tname.lower())) 115 | for k in arange(nobn): 116 | fid.write('{:8.6f} {:.6f} {:8.6f} {:.6f}\n'.format(*ap[1,m,k],*ap[2,m,k])) 117 | fid.close() 118 | -------------------------------------------------------------------------------- /scripts/gen_fluxflag.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | #write fluxflag.prop in regions (*.reg) for computing fluxes 3 | #note: *.reg is restrcited to 4 sorted pts with the 1st two pts 4 | # define upstream, and the 2nd two pts defines downstream 5 | from pylib import * 6 | 7 | #-------------------------------------------------------- 8 | #inputs 9 | #-------------------------------------------------------- 10 | grd='../grid.npz' #model grid (*.npz, or *.gr3) 11 | regions=['DSJ.reg','SJR.reg','TSL.reg','SCR.reg'] #regions 12 | 13 | 
#-------------------------------------------------------- 14 | #read grid 15 | #-------------------------------------------------------- 16 | gd=loadz(grd).hgrid if grd.endswith('.npz') else read_schism_hgrid(grd) 17 | #-------------------------------------------------------- 18 | #assign different values in regions 19 | #-------------------------------------------------------- 20 | pvi=-ones(gd.ne).astype('int'); gd.compute_ctr() 21 | for m,region in enumerate(regions): 22 | #read region info 23 | bp=read_schism_bpfile(region,fmt=1) 24 | if bp.nsta!=4: sys.exit(f'{region}''s npt!=4') 25 | x1,x2,x3,x4=bp.x; y1,y2,y3,y4=bp.y 26 | 27 | #middle pts 28 | mx1=(x1+x4)/2; mx2=(x2+x3)/2 29 | my1=(y1+y4)/2; my2=(y2+y3)/2 30 | 31 | #for lower region 32 | px=array([mx1,mx2,x3,x4]); py=array([my1,my2,y3,y4]) 33 | pvi[inside_polygon(c_[gd.xctr,gd.yctr],px,py)==1]=m 34 | 35 | #for upper region 36 | px=array([mx1,x1,x2,mx2]); py=array([my1,y1,y2,my2]) 37 | pvi[inside_polygon(c_[gd.xctr,gd.yctr],px,py)==1]=m+1 38 | gd.write_prop('fluxflag.prop',value=pvi,fmt='{:3d}') 39 | -------------------------------------------------------------------------------- /scripts/gen_fluxth_USGS.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # generate flow.th using USGS river discharge (https://waterdata.usgs.gov/nwis/rt) 3 | 4 | from pylib import * 5 | import pandas as pd 6 | import numpy as np 7 | 8 | usgs=['USGS_02231254_RD.csv'] # USGS files (example --> https://nwis.waterservices.usgs.gov/nwis/iv/?sites=02231254¶meterCd=00060&startDT=2016-09-01T00:00:00.000-04:00&endDT=2016-11-01T23:59:59.999-04:00&siteStatus=all&format=rdb) 9 | st=datenum(2016,9,8) # start time 10 | et=datenum(2016,10,25) # end time 11 | sname='flux.th' # save name 12 | dt=900 # time step of flow.th (second) 13 | pt=1 # check result 1:on 14 | 15 | 16 | #generate flux.th 17 | ntime=arange(0,(et-st)*86400,dt) # new time window 18 | newset=ntime.copy() # new 
matrix to save data 19 | for file in usgs: 20 | df = pd.read_csv(file,skiprows=26) 21 | df = df.drop([0]) 22 | if df['tz_cd'][1]=='EDT': print('{} has EDT'.format(file));df['tz_cd'][1]='Etc/GMT+4' 23 | time=pd.to_datetime(df['datetime']).dt.tz_localize(df['tz_cd'][1]).dt.tz_convert('GMT') 24 | time= datenum(time.values.astype('str')).astype('float') 25 | rd=df[df.columns[4]].values.astype('float')*0.0283168 26 | 27 | #subset of time and data 28 | fpt=(time>=st)*(time<=et); time=time[fpt]; rd=rd[fpt] 29 | 30 | #interpolate the data to new time window and add it into new matrix 31 | time=(time-st)*86400; time,idx=unique(time,return_index=True); rd=rd[idx] 32 | nrd = -interpolate.interp1d(time, rd)(ntime) 33 | newset=column_stack((newset,nrd)) 34 | 35 | 36 | # save result 37 | np.savetxt('{}'.format(sname),newset,fmt='%f') 38 | 39 | #check result 40 | if pt == 1: 41 | fs=loadtxt(sname) 42 | for nn in arange(shape(fs)[1]-1): 43 | plot(fs[:,0],fs[:,nn+1]) 44 | xlabel('time (s)'); ylabel('River discharge (m^3/s)') 45 | show() 46 | -------------------------------------------------------------------------------- /scripts/gen_hycom_hotstart.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | #create hotstart condition based on hycom data 3 | from pylib import * 4 | close("all") 5 | 6 | #------------------------------------------------------------------------------ 7 | #input 8 | #------------------------------------------------------------------------------ 9 | StartT=datenum(2010,1,1) 10 | grd='../grid.npz' 11 | dir_hycom='Data_init' 12 | 13 | #------------------------------------------------------------------------------ 14 | #interpolate hycom data to boundary 15 | #------------------------------------------------------------------------------ 16 | #variables to be interpolated 17 | svars=['water_temp','salinity'] 18 | mvars=['temp','salt'] 19 | 20 | #find hycom file 21 | fnames=array([i for i in 
os.listdir(dir_hycom) if i.endswith('.nc')]) 22 | mti=array([datenum(*array(i.replace('.','_').split('_')[1:5]).astype('int')) for i in fnames]) 23 | fpt=nonzero(abs(mti-StartT)==min(abs(mti-StartT)))[0][0]; fname=fnames[fpt] 24 | 25 | #read hgrid 26 | gd=loadz(grd).hgrid; vd=loadz(grd).vgrid; gd.x,gd.y=gd.lon,gd.lat 27 | ne,np,ns,nvrt=gd.ne,gd.np,gd.ns,vd.nvrt 28 | 29 | #get node xyz 30 | lxi=gd.x%360; lyi=gd.y; lzi0=abs(vd.compute_zcor(gd.dp)).T 31 | 32 | #get hycom time, xyz 33 | C=ReadNC('{}/{}'.format(dir_hycom,fname),1); #print(fname) 34 | ctime=array(C.variables['time'])/24+datenum(2000,1,1); sx=array(C.variables['lon'][:])%360 35 | sy=array(C.variables['lat'][:]); sz=array(C.variables['depth'][:]) 36 | fpz=lzi0>=sz.max(); lzi0[fpz]=sz.max()-1e-6 37 | 38 | #interp for ST 39 | S=zdata(); [exec('S.{}=[]'.format(i)) for i in mvars] 40 | for k in arange(nvrt): 41 | lzi=lzi0[k]; bxyz=c_[lxi,lyi,lzi] 42 | 43 | #get interp index 44 | idx=((lxi[:,None]-sx[None,:])>=0).sum(axis=1)-1; ratx=(lxi-sx[idx])/(sx[idx+1]-sx[idx]) 45 | idy=((lyi[:,None]-sy[None,:])>=0).sum(axis=1)-1; raty=(lyi-sy[idy])/(sy[idy+1]-sy[idy]) 46 | idz=((lzi[:,None]-sz[None,:])>=0).sum(axis=1)-1; ratz=(lzi-sz[idz])/(sz[idz+1]-sz[idz]) 47 | 48 | #for each variable 49 | for m,svar in enumerate(svars): 50 | print(svar,k) 51 | mvar=mvars[m] 52 | exec("cv=array(C.variables['{}'][0])".format(svar)) 53 | v0=array([cv[idz,idy,idx],cv[idz,idy,idx+1],cv[idz,idy+1,idx],cv[idz,idy+1,idx+1], 54 | cv[idz+1,idy,idx],cv[idz+1,idy,idx+1],cv[idz+1,idy+1,idx],cv[idz+1,idy+1,idx+1]]) 55 | 56 | #remove nan pts 57 | for n in arange(8): 58 | fpn=abs(v0[n])>1e3 59 | v0[n,fpn]=sp.interpolate.griddata(bxyz[~fpn,:],v0[n,~fpn],bxyz[fpn,:],'nearest',rescale=True) 60 | 61 | v11=v0[0]*(1-ratx)+v0[1]*ratx; v12=v0[2]*(1-ratx)+v0[3]*ratx; v1=v11*(1-raty)+v12*raty 62 | v21=v0[4]*(1-ratx)+v0[5]*ratx; v22=v0[6]*(1-ratx)+v0[7]*ratx; v2=v21*(1-raty)+v22*raty 63 | vi=v1*(1-ratz)+v2*ratz 64 | 65 | #save 66 | 
exec('S.{}.append(vi)'.format(mvar)) 67 | [exec('S.{}=array(S.{})'.format(i,i)) for i in mvars] 68 | 69 | #------------------------------------------------------------------------------ 70 | #creat netcdf 71 | #------------------------------------------------------------------------------ 72 | tr_nd=r_[S.temp[None,...],S.salt[None,...]].T; tr_el=tr_nd[gd.elnode[:,:3]].mean(axis=1) 73 | 74 | nd=zdata(); nd.file_format='NETCDF4' 75 | nd.dimname=['node','elem','side','nVert','ntracers','one']; nd.dims=[np,ne,ns,nvrt,2,1] 76 | 77 | #--time step, time, and time series---- 78 | nd.vars=['time','iths','ifile','idry_e','idry_s','idry','eta2','we','tr_el', 79 | 'tr_nd','tr_nd0','su2','sv2','q2','xl','dfv','dfh','dfq1','dfq2','nsteps_from_cold','cumsum_eta'] 80 | 81 | vi=zdata(); vi.dimname=('one',); vi.val=array(0.0); nd.time=vi 82 | vi=zdata(); vi.dimname=('one',); vi.val=array(0).astype('int'); nd.iths=vi 83 | vi=zdata(); vi.dimname=('one',); vi.val=array(1).astype('int'); nd.ifile=vi 84 | vi=zdata(); vi.dimname=('one',); vi.val=array(0).astype('int'); nd.nsteps_from_cold=vi 85 | 86 | vi=zdata(); vi.dimname=('elem',); vi.val=zeros(ne).astype('int32'); nd.idry_e=vi #idry_e 87 | vi=zdata(); vi.dimname=('side',); vi.val=zeros(ns).astype('int32'); nd.idry_s=vi #idry_s 88 | vi=zdata(); vi.dimname=('node',); vi.val=zeros(np).astype('int32'); nd.idry=vi #idry 89 | vi=zdata(); vi.dimname=('node',); vi.val=zeros(np); nd.eta2=vi #eta2 90 | vi=zdata(); vi.dimname=('node',); vi.val=zeros(np); nd.cumsum_eta=vi #cumsum_eta 91 | 92 | vi=zdata(); vi.dimname=('elem','nVert'); vi.val=zeros([ne,nvrt]); nd.we=vi #we 93 | vi=zdata(); vi.dimname=('side','nVert'); vi.val=zeros([ns,nvrt]); nd.su2=vi #su2 94 | vi=zdata(); vi.dimname=('side','nVert'); vi.val=zeros([ns,nvrt]); nd.sv2=vi #sv2 95 | vi=zdata(); vi.dimname=('node','nVert'); vi.val=zeros([np,nvrt]); nd.q2=vi #q2 96 | vi=zdata(); vi.dimname=('node','nVert'); vi.val=zeros([np,nvrt]); nd.xl=vi #xl 97 | vi=zdata(); 
vi.dimname=('node','nVert'); vi.val=zeros([np,nvrt]); nd.dfv=vi #dfv 98 | vi=zdata(); vi.dimname=('node','nVert'); vi.val=zeros([np,nvrt]); nd.dfh=vi #dfh 99 | vi=zdata(); vi.dimname=('node','nVert'); vi.val=zeros([np,nvrt]); nd.dfq1=vi #dfq1 100 | vi=zdata(); vi.dimname=('node','nVert'); vi.val=zeros([np,nvrt]); nd.dfq2=vi #dfq2 101 | 102 | vi=zdata(); vi.dimname=('elem','nVert','ntracers'); vi.val=tr_el; nd.tr_el=vi #tr_el 103 | vi=zdata(); vi.dimname=('node','nVert','ntracers'); vi.val=tr_nd; nd.tr_nd=vi #tr_nd 104 | vi=zdata(); vi.dimname=('node','nVert','ntracers'); vi.val=tr_nd; nd.tr_nd0=vi #tr_nd0 105 | 106 | WriteNC('hotstart.nc',nd) 107 | 108 | -------------------------------------------------------------------------------- /scripts/gen_narr_sflux.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from pylib import * 3 | 4 | #------------------------------------------------------------------------------------ 5 | #inputs: creat narr sflux database 6 | #------------------------------------------------------------------------------------ 7 | years=[2021,] 8 | 9 | #ftp info 10 | bdir='ftp://ftp.cdc.noaa.gov/NARR/monolevel' 11 | vars=['air','rad','prc'] 12 | svars=[('uwind','vwind','prmsl','stmp','spfh'),('dlwrf','dswrf'),('prate',)] #sflux variable 13 | nvars=[('uwnd.10m','vwnd.10m','prmsl','air.2m','shum.2m'),('dlwrf','dswrf'),('prate',)] #narr variables 14 | 15 | #download data, and precess data 16 | for year in years: 17 | #create folder 18 | sdir='{}'.format(year); 19 | if not os.path.exists(sdir): os.mkdir(sdir) 20 | 21 | #pre-calculation 22 | S0=loadz('sflux_template.npz'); 23 | # days=arange(datenum(year,1,1),datenum(year+1,1,1)) 24 | 25 | #for each dataset 26 | for m in arange(len(vars)): 27 | vari=vars[m]; svari=svars[m]; nvari=nvars[m] 28 | #download data 29 | for nvarii in nvari: 30 | fname='{}.{}.nc'.format(nvarii,year) 31 | if os.path.exists(fname): continue 32 | 
url='{}/{}'.format(bdir,fname) 33 | print('downloading {}'.format(fname)) 34 | urlsave(url,fname) 35 | 36 | # #read the data 37 | C=zdata() 38 | for svarii,nvarii in zip(svari,nvari): 39 | fname='{}.{}.nc'.format(nvarii,year) 40 | exec('C.{}=ReadNC("{}")'.format(svarii,fname)) 41 | 42 | #processing data and write sflux 43 | exec('S=S0.{}'.format(vari)) 44 | exec('time=datenum(1800,1,1)+array(C.{}.time.val)/24'.format(svari[0])) 45 | exec('xgrid=array(C.{}.x.val); ygrid=array(C.{}.y.val)'.format(svari[0],svari[0])) 46 | exec('lon=array(C.{}.lon.val); lat=array(C.{}.lat.val)'.format(svari[0],svari[0])) 47 | 48 | #get days 49 | days=unique(time.astype('int')) 50 | nt,nx,ny=[int(len(time)/len(days)),len(xgrid),len(ygrid)] 51 | for dayi in days: 52 | ti=num2date(dayi) 53 | fp=(time>=dayi)*(time<(dayi+1)); 54 | 55 | #dims 56 | S.dims=[nx,ny,nt] 57 | #time, lon, lat 58 | S.time.base_date=array([ti.year,ti.month,ti.day,0]); 59 | S.time.units='days since {}'.format(ti.strftime('%Y-%m-%d')) 60 | S.time.dims=[nt]; S.time.val=time[fp]-dayi; 61 | S.lon.dims=[ny,nx]; S.lon.val=lon 62 | S.lat.dims=[ny,nx]; S.lat.val=lat 63 | #variables 64 | for svarii,nvarii in zip(svari,nvari): 65 | exec('S.{}.dims=[nt,ny,nx]'.format(svarii)); 66 | exec('S.{}.val=C.{}.{}.val[fp,:,:]'.format(svarii,svarii,nvarii.split('.')[0])); 67 | 68 | #write narr files 69 | fname='narr_{}.{}.nc'.format(vari,ti.strftime('%Y_%m_%d')) 70 | print('writing {}'.format(fname)) 71 | WriteNC('{}/{}'.format(sdir,fname),S) 72 | 73 | #move files 74 | if sys.platform.startswith('win'): continue 75 | for i in arange(1,13): 76 | subdir='{}_{:02}'.format(year,i); 77 | if not os.path.exists('{}/{}'.format(sdir,subdir)): os.mkdir('{}/{}'.format(sdir,subdir)) 78 | os.system('cd {}; mv *{}*.nc {}'.format(sdir,subdir,subdir)) 79 | os.system("ln -sf {}/{}_* ./ ".format(sdir,year)) 80 | 81 | #------------------------------------------------------------------------------ 82 | ##--prepare template for sflux based on former sflux 
files 83 | #------------------------------------------------------------------------------ 84 | #S=zdata(); 85 | #svars=['air','rad','prc'] 86 | #for svar in svars: 87 | # fname='sflux_{}_1.0001.nc'.format(svar) 88 | # Si=ReadNC(fname,2); 89 | # #clear variables 90 | # for vari in Si.vars: 91 | # exec('Si.{}.val=None'.format(vari)) 92 | # exec('S.{}=Si'.format(svar)); 93 | #S.vars=svars; 94 | #savez('sflux_template',S); 95 | -------------------------------------------------------------------------------- /scripts/gen_shapiro.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | #region files have WGS84 coordinate in this script 3 | #you can find the examples of region files used in script here:/sciclone/data10/kpark07/regfiles 4 | 5 | from pylib import * 6 | 7 | def write_shapiro( 8 | grd, 9 | shapiro_max=0.5, threshold_slope=0.5, 10 | depths=None, shapiro_vals1=None, 11 | s_regions=None, s_shapiro_maxs=None, s_threshold_slopes=None, 12 | regions=None, shapiro_vals2=None, i_set_add_s=None, 13 | fname='shapiro.gr3' 14 | ): 15 | ''' 16 | write shapiro fileter strength value based on depth and specfied regions 17 | 18 | Input: 19 | grd: grid name (*.gr3 or *.npz, where *.npz is python format) 20 | depths: two hgrid depth(m) to distinguish river, land and the transition zone 21 | mvalues: lower and upper limits of shapiro values 22 | 23 | regions: list of region file names. e.g. regions=('GoME_1.reg','GoME_2.reg') 24 | s_regions: list of subregions file names for different parameters of shapiro filter 25 | s_shapiro_maxs: maximum shapiro values for subregions 26 | s_threshold_slopes: slope threshold for subregions 27 | rvalues: list of values for each region: e.g. 
(0.1,0.5) 28 | i_set_add_s: identifier for setting or adding value, 0: set value; 1: add value 29 | ''' 30 | #read hgrid 31 | if grd.endswith('.npz'): 32 | gd=loadz(grd).hgrid 33 | else: 34 | gd=read_schism_hgrid(grd) 35 | 36 | # compute bathymetry gradient on each node 37 | _, _, slope = gd.compute_gradient(fmt=2) 38 | 39 | # compute shapiro coefficients 40 | shapiro=shapiro_max*tanh(2*slope/threshold_slope) 41 | if s_regions is not None: 42 | for s_shapiro_max, s_threshold_slope, s_region in zip(s_shapiro_maxs,s_threshold_slopes,s_regions): 43 | print(f'managing {s_shapiro_max} shapiro with {s_threshold_slope} in {s_region}') 44 | bp=read_schism_bpfile(s_region) 45 | px,py=proj_pts(bp.x,bp.y,'epsg:4326','epsg:26918') 46 | sind=inside_polygon(c_[gd.x,gd.y], px,py).astype('bool') 47 | shapiro[sind]=s_shapiro_max*tanh(2*slope[sind]/s_threshold_slope) 48 | # further tweaks on shallow waters 49 | if len(depths) != len(shapiro_vals1): 50 | raise Exception(f'lengths of depths {len(depths)} and shapiro_vals1 {len(shapiro_vals1)} inconsistent') 51 | fp = gd.dp < depths[-1] 52 | shapiro[fp] = maximum(shapiro[fp], interp(gd.dp[fp], depths, shapiro_vals1)) 53 | 54 | #set or add values in regions 55 | if regions is not None: 56 | for i_set_add, rvalue, region in zip(i_set_add_s, shapiro_vals2, regions): 57 | bp=read_schism_bpfile(region) 58 | px,py=proj_pts(bp.x,bp.y,'epsg:4326','epsg:26918') 59 | sind=inside_polygon(c_[gd.x,gd.y], px,py).astype('bool') 60 | 61 | if i_set_add==0: 62 | print(f'setting {rvalue} shapiro in {region}') 63 | fp=sind 64 | shapiro[fp]=rvalue 65 | else: 66 | print(f'adding {rvalue} shapiro in {region}') 67 | sum(sind) 68 | sind2=(gd.dp>depths[0]) # additional condition: deeper water, dp > -1 m 69 | fp=(sind & sind2) 70 | shapiro[fp]=shapiro[fp]+rvalue 71 | #Edit values that is higher than maximum shaprio value 72 | sind=(shapiro>shapiro_max) 73 | shapiro[sind]=shapiro_max 74 | #save shapiro.gr3 75 | gd.dp=shapiro 76 | gd.write_hgrid(fname) 77 | 78 | 
if __name__=="__main__": 79 | outfilename = './shapiro.gr3' 80 | 81 | if os.path.exists(outfilename): 82 | os.remove(outfilename) 83 | 84 | write_shapiro( 85 | grd='./hgrid_utm.gr3', # grid name (*.gr3 or *.npz, where *.npz is python format) 86 | shapiro_max=0.5, 87 | threshold_slope=0.5, 88 | depths=[-99999, 20, 50], # tweaks in shallow waters 89 | shapiro_vals1=[0.2, 0.2, 0.05], # tweaks in shallow waters 90 | s_regions=['./LD1.bp','./LD2.bp','./LD3.bp','./SD1.bp','./SD2.bp','./NM1.bp','./NM2.bp','./NM3.bp','./INC1.bp','./INC2.bp','./INC3.bp','./INC4.bp','./INC5.bp','./CA1.bp','./CA2.bp','./CA3.bp','./CA4.bp'], # subregions for different shapiro parameters 91 | s_shapiro_maxs=[0.005,0.005,0.005,\ 92 | 0.3,0.3,\ 93 | 0.5,0.5,0.5,\ 94 | 0.5,0.5,0.5,0.5,0.5,\ 95 | 0.5,0.5,0.5,0.5], # maximum shapiro values for subregions 96 | s_threshold_slopes=[0.8,0.8,0.8,\ 97 | 0.7,0.7,\ 98 | 0.5,0.5,0.5,\ 99 | 0.5,0.5,0.5,0.5,0.5,\ 100 | 0.5,0.5,0.5,0.5],# slope threshold for subregions 101 | regions=['./INC1.bp','./INC3.bp','./INC4.bp','./INC5.bp','./CA2.bp','./CA3.bp','./CA4.bp'], # regions for set or add values 102 | shapiro_vals2=[0.15,0.2,0.15,0.15,0.1,0.1,0.1], # tweaks in regions, the order matters 103 | i_set_add_s=[1,0,1,1,1,1,1,1], # 0: set; 1: add 104 | fname=outfilename 105 | ) 106 | 107 | -------------------------------------------------------------------------------- /scripts/gen_vqs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env 2 | from pylib import * 3 | close('all') 4 | 5 | #------------------------------------------------------------------------------ 6 | #input 7 | #------------------------------------------------------------------------------ 8 | grd='./hgrid.gr3' #hgrid name (grid.npz, or hgrid.gr3) 9 | 10 | #hsm: depth for each master grid; nv: number for layers for each depth 11 | hsm=array([0.5,1.0,1.5, *arange(2,11),*[11.5,14,17,21],*arange(25,51,5),*arange(60,81,10),95,125]) 12 | nhm=len(hsm); 
nv=2+arange(nhm) 13 | 14 | #check transect 15 | bname='./transect.bp' #transect bpfile 16 | #------------------------------------------------------------------------------ 17 | #compute master grid 18 | #------------------------------------------------------------------------------ 19 | nvrt=nv[-1]; z_mas=ones([nhm,nvrt])*nan; eta=0.0 20 | for m, [hsmi,nvi] in enumerate(zip(hsm,nv)): 21 | #strethcing funciton 22 | theta_b=0; theta_f=2.5; hc=10.5 23 | hc=min(hsmi,hc) 24 | 25 | for k in arange(nvi): 26 | sigma= k/(1-nvi) #zi=-sigma #original sigma coordiante 27 | 28 | #compute zcoordinate 29 | cs=(1-theta_b)*sinh(theta_f*sigma)/sinh(theta_f)+theta_b*(tanh(theta_f*(sigma+0.5))-tanh(theta_f*0.5))/2/tanh(theta_f*0.5) 30 | z_mas[m,k]=eta*(1+sigma)+hc*sigma+(hsmi-hc)*cs 31 | 32 | #normalize z_mas 33 | z_mas[m]=-(z_mas[m]-z_mas[m,0])*hsmi/(z_mas[m,nvi-1]-z_mas[m,0]) 34 | s_mas=array([z_mas[i]/hsm[i] for i in arange(nhm)]) 35 | 36 | #check master grid 37 | for i in arange(nhm-1): 38 | if min(z_mas[i,:nv[i]]-z_mas[i+1,:nv[i]])<0: print('check: master grid layer={}, hsm={}, nv={}'.format(i+1,hsm[i+1],nv[i+1])) 39 | 40 | #plot master grid 41 | figure(figsize=[10,5]) 42 | for i in arange(nhm): plot(i*ones(nvrt),z_mas[i],'k-',lw=0.3) 43 | for k in arange(nvrt): plot(arange(nhm),z_mas.T[k],'k-',lw=0.3) 44 | setp(gca(),xlim=[-0.5,nhm-0.5],ylim=[-hsm[-1],0.5]) 45 | gcf().tight_layout() 46 | move_figure(gcf(),0,0) 47 | savefig('Master_Grid',dpi=200) 48 | 49 | # sys.exit() 50 | 51 | #------------------------------------------------------------------------------ 52 | #compute vgrid 53 | #------------------------------------------------------------------------------ 54 | #read hgrid 55 | gd=loadz(grd).hgrid if grd.endswith('.npz') else read_schism_hgrid(grd) 56 | fpz=gd.dphsm[m-1])*(gd.dp<=hsm[m]) 67 | ind1[fp]=m-1; ind2[fp]=m 68 | rat[fp]=(gd.dp[fp]-hsm[m-1])/(hsm[m]-hsm[m-1]); nlayer[fp]=nv[m] 69 | 70 | znd=z_mas[ind1]*(1-rat[:,None])+z_mas[ind2]*rat[:,None]; #z coordinate 71 | for i 
#pin the bottom layer exactly at the node depth
for i in arange(gd.np): znd[i,nlayer[i]-1]=-gd.dp[i]
snd=znd/gd.dp[:,None]; #sigma coordinate

#check vgrid: z must strictly decrease downward at every node
for i in arange(gd.np):
    for k in arange(nvrt-1):
        if znd[i,k]<=znd[i,k+1]:
            sys.exit('wrong vertical layers')

#write vgrid.in (LSC2 format: one line per node, sigma listed bottom-to-surface)
fid=open('vgrid.in','w+')
fid.write('1 !average # of layers={:0.2f}\n{}\n'.format(mean(nlayer),nvrt))
for i in arange(gd.np):
    nlayeri=nlayer[i]; si=flipud(snd[i,:nlayeri])
    fstr='{:6} {:2} '+'{:10.6f} '*nlayeri+'\n'
    fid.write(fstr.format(i+1,nvrt-nlayeri+1,*si))
fid.close()
print('Average number of layers is: {:0.2f}'.format(mean(nlayer)))

#------------------------------------------------------------------------------
#plot transect
#------------------------------------------------------------------------------
if os.path.exists(str(bname)):
    bp=read_schism_bpfile(str(bname))

    #compute cumulative along-transect distance
    dist=[0,]
    for i in arange(bp.nsta-1):
        disti=abs((bp.x[i+1]-bp.x[i])+1j*(bp.y[i+1]-bp.y[i]))+dist[i]
        dist.append(disti)
    dist=array(dist)

    #compute zcor at the grid nodes nearest each transect point
    sindp=near_pts(c_[bp.x,bp.y],c_[gd.x,gd.y]); zi=znd[sindp]
    for i in arange(bp.nsta): fpn=isnan(zi[i]); zi[i][fpn]=min(zi[i])


    #plot layer lines and node columns along the transect
    figure(figsize=[10,5])
    for k in arange(nvrt): plot(dist,zi[:,k],'k',lw=0.5)
    for i in arange(bp.nsta): plot(ones(nvrt)*dist[i],zi[i],'k',lw=0.5)
    setp(gca(),ylim=[zi.min()-1,0.5],xlim=[0,dist.max()])
    gcf().tight_layout()
    # move_figure(gcf(),0,0)

#=== scripts/gplot ===
#!/usr/bin/env python3
from pylib import *
close("all")

#inputs
argv=sys.argv
if len(argv)==1:
    #usage note shown when called with no arguments
    note='usage: \n' +\
         ' 1). gp hgrid.gr3 "fmt=0; ec=None; fc=None; lw=0.1; levels=None; ticks=None;\n' +\
         ' xlim=None; ylim=None; clim=None; extend=\'both\'; cb=True;\n' +\
         ' xticks=None; yticks=None; figsize=[8,6]"\n' +\
         ' 2). gp grid.npz "gd.x,gd.y=gd.lon,gd.lat; ec=\'r\'"\n'
    sys.exit(note+'Note: plot settings are optional')

#re-exec under ipython so the figure window stays interactive
if argv[1]!='--argument':
    opts=argv[2] if len(argv)==3 else ''
    os.system('ipython -i {} -- --argument "{}" --argument "{}"'.format(argv[0],argv[1],opts))
    sys.exit()

grd=argv[2]; opts=argv[4]
#read grid
gd=loadz(grd).hgrid if grd.endswith('.npz') else read_schism_hgrid(grd)

#get plot options: defaults first, then user overrides applied via exec
fmt=0; ec=None; fc=None; lw=0.1; levels=None; ticks=None; xlim=None; ylim=None; clim=None; extend='both'; cb=True
xticks=None; yticks=None; figsize=[8,6]
exec(opts)

#plot
hf=figure(figsize=figsize)
gd.plot(fmt=fmt,ec=ec,fc=fc,lw=lw,levels=levels,ticks=ticks,xlim=xlim,ylim=ylim,clim=clim,extend=extend,cb=cb)
if xticks is not None: setp(gca(),xticks=xticks)
if yticks is not None: setp(gca(),yticks=yticks)
gcf().tight_layout()
show(block=False)

#=== scripts/grd2sms ===
#!/usr/bin/env python3
from pylib import *
close("all")

#inputs
args=sys.argv[1:]; nv=len(args)

#help
if (nv==1 and args[0]=='-h') or nv==0 or nv>2:
    sys.exit('usage: 1). grd2sms hgrid.gr3\n 2).
grd2sms hgrid.gr3 new.2dm\n') 11 | 12 | #fnames 13 | if nv==1: grd=args[0]; sms=(grd[:-4] if grd.endswith('.gr3') else grd)+'.2dm' 14 | if nv==2: grd,sms=args 15 | 16 | grd2sms(grd,sms) 17 | -------------------------------------------------------------------------------- /scripts/make_sflux_links.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | #link sflux files 3 | from pylib import * 4 | 5 | #-------------------------------------------------------------------- 6 | #input 7 | #-------------------------------------------------------------------- 8 | StartT=datenum(2009,2,10); EndT=datenum(2011,1,1) 9 | tdir='sflux' #target dir 10 | itag=1 #itag=[1 or 2],for sflux_air_itag.0691.nc 11 | sdir='/sciclone/data10/wangzg/narr' #narr source 12 | 13 | #-------------------------------------------------------------------- 14 | #make links 15 | #-------------------------------------------------------------------- 16 | bdir=os.path.abspath(os.path.curdir); tdir=os.path.abspath(tdir) 17 | if fexist(tdir): os.system('rm -rf {}'.format(tdir)) 18 | os.mkdir(tdir); os.chdir(tdir) 19 | mtime=arange(StartT-2,EndT+2); svars=['air','prc','rad'] 20 | for irec,ti in enumerate(mtime): 21 | #link each file 22 | year=num2date(ti).year; month=num2date(ti).month; day=num2date(ti).day 23 | for m,svar in enumerate(svars): 24 | fname='{}/{}_{:02}/narr_{}.{}_{:02d}_{:02d}.nc'.format(sdir,year,month,svar,year,month,day) 25 | os.symlink(os.path.relpath(fname),'sflux_{}_{}.{:04d}.nc'.format(svar,itag,irec+1)) 26 | if m==1 and day==1: print(' sflux: {:04d}-{:02d}-{:02d}'.format(year,month,day)) 27 | #write sflux_inputs.txt 28 | fid=open('{}/sflux_inputs.txt'.format(tdir),'w+'); fid.write('&sflux_inputs\n \n/'); fid.close() 29 | os.chdir(bdir) 30 | -------------------------------------------------------------------------------- /scripts/make_sflux_subdomain.py: 
--------------------------------------------------------------------------------
#!/usr/bin/env python3
#build sflux directory based on NARR, cropped to a lon/lat subdomain
from pylib import *
close("all")

#---------------------------------------------------------------------------
#inputs
#---------------------------------------------------------------------------
StartT=datenum(2009,12,31); EndT=datenum(2011,1,3)
xm=[-76.4,-76.25]; ym=[37.25,37.35]  #lon/lat bounds of the subdomain

sdir='/sciclone/data10/wangzg/narr' #narr source on sciclone
tdir='sflux' #target dir
itag=1 #itag=[1 or 2],for sflux_air_itag.0691.nc

#---------------------------------------------------------------------------
#make sflux files
#---------------------------------------------------------------------------
if not os.path.exists(tdir): os.mkdir(tdir)
fid=open('{}/sflux_inputs.txt'.format(tdir),'w+'); fid.write('&sflux_inputs\n \n/'); fid.close()
mtime=arange(StartT,EndT+1); svars=['air','prc','rad']; ix1s,ix2s,iy1s,iy2s=[],[],[],[]
for irec,ti in enumerate(mtime):
    pti=num2date(ti); year,month,day=pti.year,pti.month,pti.day

    #link each file
    for m,svar in enumerate(svars):
        #NARR database
        fname='{}/{}_{:02}/narr_{}.{}_{:02d}_{:02d}.nc'.format(sdir,year,month,svar,year,month,day)
        bname='{}/sflux_{}_{}.{:04d}.nc'.format(tdir,svar,itag,irec+1)
        if day==1 and m==0: print('reading: {}'.format(fname))

        #compute indices of subdomain (once per variable, on the first day)
        if irec==0:
            C=ReadNC(fname,1);lon=array(C.variables['lon'][:]); lat=array(C.variables['lat'][:]); C.close();
            ix1,ix2,iy1,iy2=subdomain_index(lon,lat,xm,ym) #save indices
            ix1s.append(ix1); ix2s.append(ix2); iy1s.append(iy1); iy2s.append(iy2)

        #read sflux, and change dimension to the cropped sizes
        C=ReadNC(fname); ix1,ix2,iy1,iy2=ix1s[m],ix2s[m],iy1s[m],iy2s[m]
        for n,dimname in enumerate(C.dimname):
            if dimname=='nx_grid': C.dims[n]=ix2-ix1
            if dimname=='ny_grid': C.dims[n]=iy2-iy1

        #change variables values: lon/lat are 2D (y,x); all others carry a leading time axis
        for mvar in C.vars:
            if mvar in ['time']: continue
            if mvar in ['lon','lat']:
                exec('C.{}.val=C.{}.val[iy1:iy2,ix1:ix2]'.format(mvar,mvar))
            else:
                exec('C.{}.val=C.{}.val[:,iy1:iy2,ix1:ix2]'.format(mvar,mvar))
        WriteNC(bname,C) #write sflux
--------------------------------------------------------------------------------
/scripts/pextract_schism.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
'''
extract time series of points@xyz or transects@xy from SCHISM outputs
'''
from pylib import *
from mpi4py import MPI

#-----------------------------------------------------------------------------
#Input
#hpc: kuro, femto, bora, potomac, james, frontera, levante, stampede2
#ppn: 64, 32, 20, 12, 20, 56, 128, 48
#-----------------------------------------------------------------------------
run='/sciclone/data10/wangzg/CBP/RUN04a'
svars=('elev','salt','hvel','NO3') #variables to be extracted
bpfile='./station.bp' #station file
sname='./icm'

#optional
#itype=1 #0: time series of points @xyz; 1: time series of transects @xy
#ifs=0 #0: refer to free surface; 1: fixed depth
#stacks=[1,3] #outputs stacks to be extracted
#nspool=12 #sub-sampling frequency within each stack (1 means all)
#mdt=1 #time window (day) for averaging output
#rvars=['elev','salt','hvel','NO3'] #rename the variables
#prj=['epsg:26918','epsg:4326'] #projections used to transform coord. in station.bp

#hpc resource request
walltime='00:10:00'; nnode=1; ppn=4

#optional: (frontera,levante,stampede2,etc.)
ibatch     =1    #0: serial mode; 1: parallel mode
qnode      =None #specify node name, or default qnode based on HOST will be used
qname      =None #partition name
account    =None #account name
reservation=None #reservation information

#-----------------------------------------------------------------------------
#on front node: 1). submit jobs first (qsub), 2) running parallel jobs (mpirun)
#-----------------------------------------------------------------------------
brun=os.path.basename(run); jname='Rd_'+brun; scrout='screen_{}.out'.format(brun); bdir=os.path.abspath(os.path.curdir)
if ibatch==0: os.environ['job_on_node']='1'; os.environ['bdir']=bdir #run locally
if os.getenv('job_on_node')==None: #still on front node: (re)submit this script, then exit
    if os.getenv('param')==None: fmt=0; bcode=sys.argv[0]; os.environ['qnode']=get_qnode(qnode)
    if os.getenv('param')!=None: fmt=1; bdir,bcode=os.getenv('param').split(); os.chdir(bdir)
    scode=get_hpc_command(bcode,bdir,jname,qnode,nnode,ppn,walltime,scrout,fmt,'param',qname,account,reservation)
    print(scode); os.system(scode); os._exit(0)

#-----------------------------------------------------------------------------
#on computation node
#-----------------------------------------------------------------------------
bdir=os.getenv('bdir'); os.chdir(bdir) #enter working dir
odir=os.path.dirname(os.path.abspath(sname))
if ibatch==0: nproc=1; myrank=0
if ibatch==1: comm=MPI.COMM_WORLD; nproc=comm.Get_size(); myrank=comm.Get_rank()
if myrank==0: t0=time.time()
if myrank==0 and (not fexist(odir)): os.mkdir(odir)

#-----------------------------------------------------------------------------
#do MPI work on each core
#-----------------------------------------------------------------------------
sdir=run+'/outputs' #output directory
if 'itype' not in locals(): itype=0 #time series or transect
if 'ifs' not in locals(): ifs=0 #refer to free surface
if 'nspool' not in locals(): nspool=1 #subsample
if 'rvars' not in locals(): rvars=svars #rename variables
if 'prj' not in locals(): prj=None #projections
if 'mdt' not in locals(): mdt=None #average
modules, outfmt, dstacks, dvars, dvars_2d = get_schism_output_info(sdir,1) #schism outputs information
stacks=arange(stacks[0],stacks[1]+1) if ('stacks' in locals()) else dstacks #check stacks

#read model grid (prefer the cached grid.npz if present)
fgz=run+'/grid.npz'; fgd=run+'/hgrid.gr3'; fvd=run+'/vgrid.in'
gd=loadz(fgz,'hgrid') if fexist(fgz) else read_schism_hgrid(fgd); gd.compute_bnd()
vd=loadz(fgz,'vgrid') if fexist(fgz) else read_schism_vgrid(fvd); sys.stdout.flush()

#extract results: stacks are distributed round-robin over MPI ranks
irec=0; oname=odir+'/.schout'
for svar in svars:
    ovars=get_schism_var_info(svar,modules,fmt=outfmt)
    if ovars[0][1] not in dvars: continue
    for istack in stacks:
        fname='{}_{}_{}'.format(oname,svar,istack); irec=irec+1; t00=time.time()
        if irec%nproc==myrank:
            try:
                read_schism_output(run,svar,bpfile,istack,ifs,nspool,fname=fname,hgrid=gd,vgrid=vd,fmt=itype,prj=prj,mdt=mdt)
                dt=time.time()-t00; print('finishing reading {}_{}.nc on myrank={}: {:.2f}s'.format(svar,istack,myrank,dt)); sys.stdout.flush()
            except:
                pass #NOTE(review): bare except makes each stack best-effort; failures are silently skipped

#combine results (rank 0 merges the per-stack .npz scratch files and deletes them)
if ibatch==1: comm.Barrier()
if myrank==0:
    S=zdata(); S.time=[]; fnames=[]
    for i,[k,m] in enumerate(zip(svars,rvars)):
        data=[]; mtime=[]
        for istack in stacks:
            fname='{}_{}_{}.npz'.format(oname,k,istack)
            if not fexist(fname): continue
            C=loadz(fname); datai=C.__dict__[k]; fnames.append(fname)
            data.extend(datai.transpose([1,0,*arange(2,datai.ndim)])); mtime.extend(C.time)
        if len(data)>0: S.__dict__[m]=array(data).transpose([1,0,*arange(2,array(data).ndim)])
        if len(mtime)>len(S.time): S.time=array(mtime)
    S.bp=read_schism_bpfile(bpfile)
    for pn in ['param','icm','sediment','cosine','wwminput']:
        if fexist('{}/{}.nml'.format(run,pn)): S.__dict__[pn]=read_schism_param('{}/{}.nml'.format(run,pn),3)
    savez(sname,S)
    for i in fnames: os.remove(i)

#-----------------------------------------------------------------------------
#finish MPI jobs
#-----------------------------------------------------------------------------
if ibatch==1: comm.Barrier()
if myrank==0: dt=time.time()-t0; print('total time used: {} s'.format(dt)); sys.stdout.flush()
sys.exit(0) if qnode in ['bora','levante'] else os._exit(0)
--------------------------------------------------------------------------------
/scripts/pextract_schism_flux.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
'''
Compute fluxes based on SCHISM node information
'''
from pylib import *
from mpi4py import MPI

#-----------------------------------------------------------------------------
#Input
#hpc: kuro, femto, bora, potomac, james, frontera, levante, stampede2
#ppn: 64, 32, 20, 12, 20, 56, 128, 48
#-----------------------------------------------------------------------------
run='/home/g/g260135/work/wangzg/DSP/RUN08a'
svars=['GEN_1','GEN_2','GEN_3',]
txy=[[[630597.229,630752.001], [4257510.76,4257544.77]], #1st transect: [xi,yi]
     'transect_2.bp', #2nd transect; ts2.bp
     [[519390, 522149, 525018],[4186127, 4180971, 4177533]] ] #3rd transect
sname='RUN08a/flux'

#optional
#stacks=[1,9] #output stacks
#nspool=12 #sub-sampling frequency within each stack (1 means all)
#dx=10 #interval of sub-section, used to divide transect
#prj='cpp' #projection that convert lon&lat to local project when ics=2
#rvars=['g1','g2','g3',] #rename the variables

#resource request
walltime='00:10:00'; nnode=1; ppn=4

#optional: (frontera,levante,stampede2,etc.)
ibatch     =1    #0: serial mode; 1: parallel mode
qnode      =None #specify node name, or default qnode based on HOST will be used
qname      =None #partition name
account    =None #account name
reservation=None #reservation information
scrout     ='screen.out' #fname for output and error

#-----------------------------------------------------------------------------
#on front node: 1). submit jobs first (qsub), 2) running parallel jobs (mpirun)
#-----------------------------------------------------------------------------
brun=os.path.basename(run); jname='Rd_'+brun; bdir=os.path.abspath(os.path.curdir)
if ibatch==0: os.environ['job_on_node']='1'; os.environ['bdir']=bdir #run locally
if os.getenv('job_on_node')==None: #still on front node: (re)submit this script, then exit
    if os.getenv('param')==None: fmt=0; bcode=sys.argv[0]; os.environ['qnode']=get_qnode(qnode)
    if os.getenv('param')!=None: fmt=1; bdir,bcode=os.getenv('param').split(); os.chdir(bdir)
    scode=get_hpc_command(bcode,bdir,jname,qnode,nnode,ppn,walltime,scrout,fmt,'param',qname,account,reservation)
    print(scode); os.system(scode); os._exit(0)

#-----------------------------------------------------------------------------
#on computation node
#-----------------------------------------------------------------------------
bdir=os.getenv('bdir'); os.chdir(bdir) #enter working dir
if ibatch==0: nproc=1; myrank=0
if ibatch==1: comm=MPI.COMM_WORLD; nproc=comm.Get_size(); myrank=comm.Get_rank()
if myrank==0: t0=time.time()

#-----------------------------------------------------------------------------
#do MPI work on each core
#-----------------------------------------------------------------------------
if 'nspool' not in locals(): nspool=1 #subsample
if 'rvars' not in locals(): rvars=svars #rename variables
modules, outfmt, dstacks, dvars, dvars_2d=get_schism_output_info(run+'/outputs',1) #schism outputs info
stacks=arange(stacks[0],stacks[1]+1) if ('stacks' in locals()) else dstacks #check stacks

#check format of transects: normalize to a list of [x,y] pairs (bp files are read here)
if isinstance(txy,str) or array(txy[0]).ndim==1: txy=[txy]
rdp=read_schism_bpfile; txy=[[rdp(i).x,rdp(i).y] if isinstance(i,str) else i for i in txy]

#read grid
fgz=run+'/grid.npz'; fgd=run+'/hgrid.gr3'; fvd=run+'/vgrid.in'
gd=loadz(fgz,'hgrid') if fexist(fgz) else read_schism_hgrid(fgd)
vd=loadz(fgz,'vgrid') if fexist(fgz) else read_schism_vgrid(fvd)

#compute transect information
nps,dsa=[],[]; sx,sy,sinds,angles=[],[],[],[]; ns=len(txy); pxy=ones(ns).astype('O'); ipt=0
for m,[x0,y0] in enumerate(txy):
    #compute transect pts
    x0=array(x0); y0=array(y0)
    if 'dx' in locals(): #divide transect evenly
        ds=abs(diff(x0+1j*y0)); s=cumsum([0,*ds]); npt=int(s[-1]/dx)+1; ms=linspace(0,s[-1],npt)
        xi=interpolate.interp1d(s,x0)(ms); yi=interpolate.interp1d(s,y0)(ms)
    else:
        xi,yi=x0,y0;
    npt=len(xi); ds=abs(diff(xi+1j*yi)) #NOTE(review): indentation reconstructed from dump; recomputation is consistent for both branches
    if sum(gd.inside_grid(c_[xi,yi])==0)!=0: sys.exit('pts outside of domain: {}'.format(m))
    if 'prj' in locals(): pxi,pyi=proj_pts(xi,yi,'epsg:4326',prj); ds=abs(diff(pxi+1j*pyi))

    #transect property
    angle=array([arctan2(yi[i+1]-yi[i],xi[i+1]-xi[i]) for i in arange(npt-1)]) #angle for each subsection
    #pie,pip,pacor=gd.compute_acor(c_[xi,yi]); sigma=(vd.sigma[pip]*pacor[...,None]).sum(axis=1) #sigma coord.

    nps.append(npt); pxy[m]=c_[xi,yi].T; dsa.append(ds); sx.extend(xi); sy.extend(yi)
    sinds.append(arange(ipt,ipt+npt)); angles.append(angle); ipt=ipt+npt

#compute flux: stacks distributed over ranks; per stack, vertical profiles on all transect pts
S=zdata(); S.time=[]; S.flux=[[] for i in txy]; S.tflux=[[[] for i in txy] for i in svars]
for istack in stacks:
    if istack%nproc!=myrank: continue
    t00=time.time(); C=read_schism_output(run,['zcor','hvel',*svars],c_[sx,sy],istack,nspool=nspool,hgrid=gd,vgrid=vd,fmt=1) #read profile
    for m,npt in enumerate(nps): #for each transect
        sind=sinds[m]; angle=angles[m][:,None,None]; ds=dsa[m][:,None,None]
        dz=diff(C.zcor[sind],axis=2); dz=(dz[:-1]+dz[1:])/2; dz[dz<0]=0
        U=C.hvel[sind]; U=(U[:-1,:,:-1]+U[:-1,:,1:]+U[1:,:,:-1]+U[1:,:,1:])/4; u,v=U[...,0],U[...,1]

        #volume flux, and tracers flux (normal component times cell area dz*ds)
        flx=((sin(angle)*u-cos(angle)*v)*dz*ds).sum(axis=0).sum(axis=1); S.flux[m].extend(flx)
        for n,svar in enumerate(svars):
            T=C.__dict__[svar][sind]; tr=(T[:-1,:,:-1]+T[:-1,:,1:]+T[1:,:,:-1]+T[1:,:,1:])/4
            flx=((sin(angle)*u-cos(angle)*v)*dz*ds*tr).sum(axis=0).sum(axis=1); S.tflux[n][m].extend(flx)
    S.time.extend(C.time); C=None
    print('reading stack {} on rank {}: {:0.2f}'.format(istack,myrank,time.time()-t00)); sys.stdout.flush()
S.time,S.flux,S.tflux=array(S.time),array(S.flux).T,array(S.tflux).T; S.save('.schism_flux_{}'.format(myrank))

#gather flux for all ranks
if ibatch==1: comm.Barrier()
C=zdata(); C.nps=array(nps); C.xy=pxy; C.xy0=txy; C.time,C.flux=[],[]; tflux=[]
if myrank==0:
    for i in arange(nproc):
        fname='.schism_flux_{}.npz'.format(i); s=read(fname)
        C.time.extend(s.time); C.flux.extend(s.flux); tflux.extend(s.tflux); os.remove(fname)
    it=argsort(C.time); C.time=array(C.time)[it]; C.flux=array(C.flux)[it].T.astype('float32')
    for i,rvar in enumerate(rvars): C.__dict__['flux_'+rvar]=array(tflux)[it][...,i].T.astype('float32')

    #save (NOTE(review): indentation reconstructed from dump; saving on rank 0 only is assumed)
    sdir=os.path.dirname(os.path.abspath(sname))
    if not fexist(sdir): os.system('mkdir -p '+sdir)
    savez(sname,C)

#-----------------------------------------------------------------------------
#finish MPI jobs
#-----------------------------------------------------------------------------
if ibatch==1: comm.Barrier()
if myrank==0: dt=time.time()-t0; print('total time used: {} s'.format(dt)); sys.stdout.flush()
sys.exit(0) if qnode in ['bora'] else os._exit(0)
--------------------------------------------------------------------------------
/scripts/pextract_schism_slab.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
'''
extract SCHISM slab outputs
'''
from pylib import *
from mpi4py import MPI

#-----------------------------------------------------------------------------
#Input
#hpc: kuro, femto, bora, potomac, james, frontera, levante, stampede2
#ppn: 64, 32, 20, 12, 20, 56, 128, 48
#-----------------------------------------------------------------------------
run='/home/g/g260135/work/wangzg/DSP/RUN08a' #run dir containing outputs
svars=('elev','hvel','GEN_1') #variables to be extracted
levels=[1,3,] #schism level indices (1-nvrt: surface-bottom; (>nvrt): kbp level)
sname='RUN08a/slab' #name for saving the results

#optional
#stacks=[1,5] #outputs stacks to be extracted
#nspool=12 #sub-sampling frequency within each stack (1 means all)
#mdt=1 #time window (day) for averaging output
#rvars=['elev','hvel','G1'] #rename the variables
#reg=None #region for subsetting results (*.reg, or *.bp, or gd_subgrid)

#resource request
walltime='00:10:00'; nnode=1; ppn=4

#optional: (frontera,levante,stampede2,etc.)
ibatch     =1    #0: serial mode; 1: parallel mode
qnode      =None #specify node name, or default qnode based on HOST will be used
qname      =None #partition name
account    =None #account name
reservation=None #reservation information

#-----------------------------------------------------------------------------
#on front node: 1). submit jobs first (qsub), 2) running parallel jobs (mpirun)
#-----------------------------------------------------------------------------
brun=os.path.basename(run); jname='Rd_'+brun; scrout='screen_{}.out'.format(brun); bdir=os.path.abspath(os.path.curdir)
if ibatch==0: os.environ['job_on_node']='1'; os.environ['bdir']=bdir #run locally
if os.getenv('job_on_node')==None: #still on front node: (re)submit this script, then exit
    if os.getenv('param')==None: fmt=0; bcode=sys.argv[0]; os.environ['qnode']=get_qnode(qnode)
    if os.getenv('param')!=None: fmt=1; bdir,bcode=os.getenv('param').split(); os.chdir(bdir)
    scode=get_hpc_command(bcode,bdir,jname,qnode,nnode,ppn,walltime,scrout,fmt,'param',qname,account,reservation)
    print(scode); os.system(scode); os._exit(0)

#-----------------------------------------------------------------------------
#on computation node
#-----------------------------------------------------------------------------
bdir=os.getenv('bdir'); os.chdir(bdir) #enter working dir
odir=os.path.dirname(os.path.abspath(sname))
if ibatch==0: nproc=1; myrank=0
if ibatch==1: comm=MPI.COMM_WORLD; nproc=comm.Get_size(); myrank=comm.Get_rank()
if myrank==0: t0=time.time()
if myrank==0 and (not fexist(odir)): os.mkdir(odir)

#-----------------------------------------------------------------------------
#do MPI work on each core
#-----------------------------------------------------------------------------
sdir=run+'/outputs' #output directory
if 'nspool' not in locals(): nspool=1 #subsample
if 'rvars' not in locals(): rvars=svars #rename variables
if 'mdt' not in locals(): mdt=None #averaging
if 'reg' not in locals(): reg=None #region
modules, outfmt, dstacks, dvars, dvars_2d = get_schism_output_info(sdir,1) #schism outputs information
stacks=arange(stacks[0],stacks[1]+1) if ('stacks' in locals()) else dstacks #check stacks

#extract results: stacks distributed round-robin over MPI ranks
irec=0; oname=odir+'/.schout'
for svar in svars:
    ovars=get_schism_var_info(svar,modules,fmt=outfmt)
    if ovars[0][1] not in dvars: continue
    for istack in stacks:
        fname='{}_{}_{}_slab'.format(oname,svar,istack); irec=irec+1; t00=time.time()
        if irec%nproc==myrank:
            try:
                read_schism_slab(run,svar,levels,istack,nspool,mdt,fname=fname,reg=reg)
                dt=time.time()-t00; print('finishing reading {}_{}.nc on myrank={}: {:.2f}s'.format(svar,istack,myrank,dt)); sys.stdout.flush()
            except:
                pass #NOTE(review): bare except makes each stack best-effort; failures are silently skipped

#combine results (rank 0 merges per-stack scratch files and deletes them)
if ibatch==1: comm.Barrier()
if myrank==0:
    S=zdata(); S.time=[]; fnames=[]
    for i,[k,m] in enumerate(zip(svars,rvars)):
        data=[]; mtime=[]
        for istack in stacks:
            fname='{}_{}_{}_slab.npz'.format(oname,k,istack)
            if not fexist(fname): continue
            C=loadz(fname); data.extend(C.__dict__[k]); mtime.extend(C.time); fnames.append(fname)
        if len(data)>0: S.__dict__[m]=array(data)
        if len(mtime)>len(S.time): S.time=array(mtime)
    savez(sname,S)
    for i in fnames: os.remove(i)

#-----------------------------------------------------------------------------
#finish MPI jobs
#-----------------------------------------------------------------------------
if ibatch==1: comm.Barrier()
if myrank==0: dt=time.time()-t0; print('total time used: {} s'.format(dt)); sys.stdout.flush()
sys.exit(0) if qnode in ['bora','levante'] else os._exit(0)
--------------------------------------------------------------------------------
/scripts/pextract_tidal_harmonics.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
'''
perform harmonic analysis for tide in whole domain
'''
from pylib import *
from mpi4py import MPI

#-----------------------------------------------------------------------------
#Input
#hpc: kuro, femto, bora, potomac, james, frontera, levante, stampede2
#ppn: 64, 32, 20, 12, 20, 56, 128, 48
#-----------------------------------------------------------------------------
run='/sciclone/data10/wangzg/CBP/RUN10i' #run dir containing outputs
tidal_names=['O1','K1','Q1','P1','M2','S2','K2','N2']
sname='elev' #name for saving the results
isave_raw=0 #save original time series on each rank (each rank only for subset of nodes)
#tidal_names=['O1','K1','Q1','P1','M2','S2','K2','N2','M3','M4','M6','M7','M8','M10']

#optional
#stacks=[1,5] #outputs stacks to be extracted

#resource request
walltime='12:00:00'; nnode=5; ppn=64

#optional: (frontera,levante,stampede2,etc.)
ibatch     =1    #0: serial mode; 1: parallel mode
qnode      =None #specify node name, or default qnode based on HOST will be used
qname      =None #partition name
account    =None #account name
reservation=None #reservation information

#-----------------------------------------------------------------------------
#on front node: 1). submit jobs first (qsub), 2) running parallel jobs (mpirun)
#-----------------------------------------------------------------------------
brun=os.path.basename(run); jname='Rd_'+brun; scrout='screen_{}.out'.format(brun); bdir=os.path.abspath(os.path.curdir)
if ibatch==0: os.environ['job_on_node']='1'; os.environ['bdir']=bdir #run locally
if os.getenv('job_on_node')==None: #still on front node: (re)submit this script, then exit
    if os.getenv('param')==None: fmt=0; bcode=sys.argv[0]; os.environ['qnode']=get_qnode(qnode)
    if os.getenv('param')!=None: fmt=1; bdir,bcode=os.getenv('param').split(); os.chdir(bdir)
    scode=get_hpc_command(bcode,bdir,jname,qnode,nnode,ppn,walltime,scrout,fmt,'param',qname,account,reservation)
    print(scode); os.system(scode); os._exit(0)

#-----------------------------------------------------------------------------
#on computation node
#-----------------------------------------------------------------------------
bdir=os.getenv('bdir'); os.chdir(bdir) #enter working dir
odir=os.path.dirname(os.path.abspath(sname))
if ibatch==0: nproc=1; myrank=0
if ibatch==1: comm=MPI.COMM_WORLD; nproc=comm.Get_size(); myrank=comm.Get_rank()
if myrank==0: t0=time.time()
if myrank==0 and (not fexist(odir)): os.mkdir(odir)

#-----------------------------------------------------------------------------
#do MPI work on each core
#-----------------------------------------------------------------------------
sdir=run+'/outputs' #output directory
modules, outfmt, dstacks, dvars, dvars_2d = get_schism_output_info(sdir,1) #schism outputs information
stacks=arange(stacks[0],stacks[1]+1) if ('stacks' in locals()) else dstacks #check stacks

#--------------------------------------------------------
#extract elev on each node and do HA
#--------------------------------------------------------
#distribute jobs: each rank owns a contiguous chunk of grid nodes (last rank takes the remainder)
gd=grd(run); npt=int(gd.np/nproc); mtime=[]; elev=[]
sindps=[arange(i*npt,gd.np) if i==(nproc-1) else arange(i*npt,(i+1)*npt) for i in arange(nproc)]; sindp=sindps[myrank]

#read elev on myrank=0, and distribute
for istack in stacks:
    if myrank==0:
        t00=time.time()
        C=read_schism_slab(run,'elev',[1,],istack); es=[C.elev[:,i] for i in sindps]; mtime.extend(C.time)
    else:
        es=None

    #collect data on each rank
    elevi=comm.scatter(es,root=0) if ibatch==1 else es[0]; elev.extend(elevi)
    if myrank==0: dt=time.time()-t00; print('reading stack: {}, dt={:0.3f}'.format(istack,dt)); sys.stdout.flush()
mtime=comm.bcast(array(mtime),root=0) if ibatch==1 else array(mtime)
elev=array(elev).T; dt=mean(diff(mtime)) #elev: (node, time); dt: sampling interval in days

#HA for each pt
C=zdata(); C.time=array(mtime); C.elev=elev; C.sindp=sindp
C.stime=arange(int(C.time.min()),int(C.time.max())).astype('float'); C.amplitude,C.phase,C.selev=[],[],[]
for i,y0 in enumerate(C.elev):
    #per-rank scratch file names for the external HA code
    tn='.tidal_const_{}'.format(myrank); fn='.tidal_series_{}'.format(myrank); sn='.tidal_consit_{}'.format(myrank)
    H=harmonic_analysis(y0,dt,tidal_names=tidal_names,tname=tn,fname=fn,sname=sn)

    #construct tidal signals, and get subtidal signal
    fy=zeros(len(C.time))
    for k,tname in enumerate(H.tidal_name):
        if tname=='Z0': continue
        fy=fy+H.amplitude[k]*cos(H.freq[k]*(C.time-C.time[0])*86400-H.phase[k])
    sy=interp(C.time,y0-fy,C.stime) #NOTE(review): assumes pylib's interp signature, not numpy.interp -- verify
    if ibatch==0 and i%100==0: print('HA on node {}'.format(i))

    #save HA
    C.amplitude.append(H.amplitude); C.phase.append(H.phase); C.selev.append(sy)
C.to_array('amplitude','phase'); C.to_array('selev',dtype='float32'); C.tidal_name=H.tidal_name; C.freq=H.freq
if isave_raw==1: C.save('{}_{}'.format(sname,myrank)) #save raw data

#combine results (gather per-rank HA on rank 0, restore node order, save)
C.delattr('time','elev') #remove raw data
CS=comm.gather(C,root=0) if ibatch==1 else [C]
if myrank==0:
    svars=['amplitude','phase','selev','sindp']
    S=zdata(); sdict=S.__dict__; [S.attr(i,[]) for i in svars]
    for C in CS: [sdict[i].extend(C.attr(i)) for i in svars] #collect data
    sind=argsort(S.sindp); [S.attr(i,array(S.attr(i))[sind].T) for i in svars]
    [S.attr(i,C.attr(i)) for i in ['freq','stime','tidal_name']] #no combine
    S.delattr('sindp'); S.save(sname)

#-----------------------------------------------------------------------------
#finish MPI jobs
#-----------------------------------------------------------------------------
if ibatch==1: comm.Barrier()
if myrank==0: dt=time.time()-t0; print('total time used: {} s'.format(dt)); sys.stdout.flush()
sys.exit(0) if qnode in ['bora','levante'] else os._exit(0)
--------------------------------------------------------------------------------
/scripts/pload_dem.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
'''
load bathymetry for NWM model grid
Remove mpi4py if you only use serial
'''
from pylib import *
from mpi4py import MPI
import time

#-----------------------------------------------------------------------------
#Input
#-----------------------------------------------------------------------------
grd='hgrid.ll' #grid name (.ll or .gr3)
grdout='hgrid.ll.new' #grid name with depth loaded

#DEM information
sdir=r'./DEM' #directory of DEM data
format_dem='tif' #format of DEM data (npz, tif, tiff); positions are not needed for *.npz files
reverse_sign=1 #invert depth sign
datum_shift =0 #change vertical datum (e.g. 
-0.258 for navd to ngvd (msl) in ChesBay) 21 | 22 | #parameter 23 | headers=('Bayonne','New_Arthur','CT_River','NY_TACC','Hudson_River','Long_Island','Raritan_Bay_River','MA_TACC','Toms_River') 24 | positions=(0,0,0,0,0,0,0,0,0) #0: cell center; 1: cell corner for DEM file (Property->AREA_OR_POINT=Point to find out this) 25 | #headers=("etopo1","crm_3arcs","cdem13_","dem_continetalus_southcarolina","North_Carolina_USGS_3m", 26 | # "al_ll","nc_ll","fl_ll","gulf_1_dem_usgs","gulf_3_demcombined_ll","ge_ll","sc_ll", 27 | # "cb_ll","db_ll","new_england_topobathy_dem_3m_dd","Tile3_R3_DEMv2","cb_bay_dem_v3.1_ll") #FOR STOFS3D 28 | #regions=("min_5m_ll.reg","SabinePass.reg","BergenPoint.reg","Washington_3.reg", 29 | # "Elk_river.reg","Hudson_river.reg","James_river.reg","NorthEast_river.reg", 30 | # "Rappahannock_river.reg","Susquehanna_river.reg","York_river.reg", 31 | # "Androscoggin_Kennebec_rivers.reg","Merrimack_river.reg","Patuxent_river.reg", 32 | # "Penobscot_river.reg","Saco_river.reg","StCroix_river.reg") #regions for modifying depth 33 | #rvalues=(5,7,5,15,2,16,14,5,6,10,10,3,3,5,5,3,5) #minimum depth in regions (note: region will be skipped if not exist) 34 | 35 | #resource requst 36 | ibatch=0 #0: serial mode; 1: parallel mode (for serial node, walltime/nnode/ppn are optional) 37 | walltime='00:10:00'; nnode=1; ppn=4 38 | #hpc: femto, hurricane, bora, vortex, potomac, james, frontera, levante, stampede2 39 | #ppn: 32, 8, 8, 12, 12, 20, 56, 128, 48 40 | 41 | #optional: (frontera,levante,stampede2) 42 | qname ='compute' #partition name 43 | account ='TG-OCE140024' #stampede2: NOAA_CSDL_NWI,TG-OCE140024; levante: gg0028 44 | qnode =None #specify node name, or default qnode based on HOST will be used 45 | 46 | jname='load_dem'; scrout='screen.out'; bdir=os.path.abspath(os.path.curdir) 47 | #----------------------------------------------------------------------------- 48 | #on front node: 1). 
submit jobs first (qsub), 2) running parallel jobs (mpirun) 49 | #----------------------------------------------------------------------------- 50 | if ibatch==0: os.environ['job_on_node']='1'; os.environ['bdir']=bdir #run locally 51 | if os.getenv('job_on_node')==None: 52 | if os.getenv('param')==None: fmt=0; bcode=sys.argv[0]; os.environ['qnode']=get_qnode(qnode) 53 | if os.getenv('param')!=None: fmt=1; bdir,bcode=os.getenv('param').split(); os.chdir(bdir) 54 | scode=get_hpc_command(bcode,bdir,jname,qnode,nnode,ppn,walltime,scrout,fmt=fmt,qname=qname,account=account) 55 | print(scode); os.system(scode); os._exit(0) 56 | 57 | #----------------------------------------------------------------------------- 58 | #on computation node 59 | #----------------------------------------------------------------------------- 60 | bdir=os.getenv('bdir'); os.chdir(bdir) #enter working dir 61 | if ibatch==0: nproc=1; myrank=0 62 | if ibatch==1: comm=MPI.COMM_WORLD; nproc=comm.Get_size(); myrank=comm.Get_rank() 63 | if myrank==0: t0=time.time() 64 | 65 | if 'regions' not in locals(): regions=None; rvalues=None 66 | if format_dem!='npz' and len(headers)!=len(positions): sys.exit('different size: headers, positions') 67 | #----------------------------------------------------------------------------- 68 | #do MPI work on each core 69 | #----------------------------------------------------------------------------- 70 | #get all DEM files and distribute jobs 71 | fnames0=array([i for i in os.listdir(sdir) if i.endswith('.'+format_dem)]) 72 | 73 | #filter with headers, and sort by id numbers 74 | fnames_sort=[]; ps0=[] 75 | for header,position in zip(headers,positions): 76 | fnames_sub=array([i for i in fnames0 if i.startswith(header)]); psi='center' if position==0 else 'corner' 77 | if len(fnames_sub)==1: fnames_sort.extend(fnames_sub); ps0.append(psi); continue 78 | 79 | #get id number 80 | if format_dem=='npz': 81 | 
fid=array([i.replace('tif.','').replace('.','_')[len(header):].split('_')[-2] for i in fnames_sub]).astype('int') 82 | elif format_dem=='tif': 83 | fid=[i for i in fnames_sub] #can add order number in the DEM name to sort it 84 | fnames_sort.extend(fnames_sub[argsort(fid)]); ps0.extend(tile(psi,len(fid))) 85 | fnames_sort=array(fnames_sort); ps0=array(ps0) 86 | 87 | #distribute jobs 88 | fnames=[]; inum=[]; ps=[] 89 | for m,[fname,psi] in enumerate(zip(fnames_sort,ps0)): 90 | if m%nproc==myrank: fnames.append(fname); inum.append(m); ps.append(psi) 91 | fnames=array(fnames); ps=array(ps) 92 | 93 | #read hgrid 94 | if grd.endswith('npz'): 95 | gd=loadz(grd).hgrid 96 | elif grd.endswith('gr3') or grd.endswith('ll'): 97 | gd=read_schism_hgrid(grd) 98 | else: 99 | sys.exit('wrong format of grid: {}'.format(grd)); sys.stdout.flush() 100 | 101 | #load bathymetry on each core 102 | S=zdata(); S.dp=dict(); S.sind=dict() 103 | for m,[fname,psi] in enumerate(zip(fnames,ps)): 104 | bname=fname.split('.')[0] 105 | 106 | #interpolate depth 107 | while(True): 108 | try: 109 | dpi,sindi=load_dem(gd.x,gd.y,'{}/{}'.format(sdir,fname),fmt=1,position=psi) 110 | break 111 | except: 112 | time.sleep(15) 113 | 114 | #save results 115 | S.dp[bname]=dpi; S.sind[bname]=sindi 116 | print('finished reading {},: {}, myrank={}'.format(fname,inum[m],myrank)); sys.stdout.flush() 117 | savez('.load_dem_{}'.format(myrank),S) 118 | 119 | #combine results 120 | if ibatch==1: comm.Barrier() 121 | if myrank==0: 122 | #combine 123 | S=zdata(); S.dp=dict(); S.sind=dict() 124 | for i in arange(nproc): 125 | sname='.load_dem_{}.npz'.format(i) 126 | Si=read(sname); os.remove(sname) 127 | S.dp={**S.dp,**Si.dp} 128 | S.sind={**S.sind,**Si.sind} 129 | 130 | #load bathymetry 131 | did=zeros(gd.np,'int'); dname=[] 132 | for i,fname in enumerate(fnames_sort): 133 | bname=fname.split('.')[0] 134 | sind=S.sind[bname]; dp=S.dp[bname] 135 | if reverse_sign==1: dp=-dp #reverse depth sign 136 | 
gd.dp[sind]=dp+datum_shift; did[sind]=i+1 137 | dnamei=[k for k in fnames0 if k.startswith(fname)][0]; dname.append(dnamei) 138 | 139 | #applying minimum depth 140 | if regions is not None: 141 | if len(regions)!=len(rvalues): sys.exit('differet size: regions, rvalues') 142 | for i, region in enumerate(regions): 143 | if not os.path.exists(region): continue 144 | depth_min=rvalues[i] 145 | bp=read_schism_bpfile(region,fmt=1) 146 | sind=inside_polygon(c_[gd.x,gd.y], bp.x,bp.y) 147 | fp=(sind==1)*(gd.dp=2: code=sys.argv[1] 11 | #------------------------------------------------------------------ 12 | #compile combine_hotstart 13 | #------------------------------------------------------------------ 14 | ccode='combine_hotstart7' 15 | if not fexist("./outputs/{}".format(ccode)): command_outputs('cd outputs; cmake_schism {}'.format(ccode)) 16 | bdir=os.path.abspath('.'); t0=time.time() #current dir 17 | scrout='levante.out' 18 | 19 | #------------------------------------------------------------------ 20 | #do hotstart 21 | #------------------------------------------------------------------ 22 | ihs=[-1,] 23 | while (time.time()-t0)2: 10 | sys.exit('usage: 1). sms2grd hgrid.2dm\n 2). 
sms2grd hgrid.2dm new.gr3\n') 11 | 12 | #fnames 13 | if nv==1: sms=args[0]; grd=(sms[:-4] if sms.endswith('.2dm') else sms)+'.gr3' 14 | if nv==2: sms,grd=args 15 | 16 | sms2grd(sms,grd) 17 | -------------------------------------------------------------------------------- /scripts/subset_outputs_parallel.py: -------------------------------------------------------------------------------- 1 | import multiprocessing as mp 2 | import pathlib 3 | 4 | import numpy as np 5 | from netCDF4 import Dataset 6 | 7 | from pylib import inside_polygon, schism_grid 8 | 9 | def subset_on_stack(stack, nidxs, eidxs): 10 | 11 | for fname in ncfiles: 12 | 13 | ds = Dataset(f'outputs/{fname}_{stack}.nc') 14 | 15 | #get dimensions size 16 | nLevels = ds.dimensions['nSCHISM_vgrid_layers'].size 17 | nMax_face_nodes = ds.dimensions['nMaxSCHISM_hgrid_face_nodes'].size 18 | one = 1 19 | two = 2 20 | 21 | fout = Dataset(f'./{path}/{fname}_{stack}.nc', 'w', format='NETCDF3_CLASSIC') 22 | fout.createDimension('nSCHISM_hgrid_node', gd.np) 23 | fout.createDimension('nSCHISM_hgrid_face', gd.ne) 24 | fout.createDimension('nSCHISM_hgrid_edge', gd.ns) 25 | fout.createDimension('nMaxSCHISM_hgrid_face_nodes', nMax_face_nodes) 26 | fout.createDimension('nSCHISM_vgrid_layers', nLevels) 27 | fout.createDimension('one', one) 28 | fout.createDimension('two', two) 29 | fout.createDimension('time', None) 30 | 31 | #time 32 | fout.createVariable('time', 'f', ('time',)) 33 | fout['time'][:] = ds.variables['time'][:] 34 | fout['time'].i23d = 0 35 | 36 | if fname == 'out2d': 37 | fout.createVariable('SCHISM_hgrid_edge_nodes', 'i', ('nSCHISM_hgrid_edge', 'two')) 38 | fout['SCHISM_hgrid_edge_nodes'][:] = isidenode + 1 39 | 40 | fout.createVariable('SCHISM_hgrid_face_nodes', 'i', ('nSCHISM_hgrid_face', 'nMaxSCHISM_hgrid_face_nodes')) 41 | fout['SCHISM_hgrid_face_nodes'][:] = gd.elnode + 1 42 | 43 | for var in ds.variables: 44 | 45 | #dims = ds.variables[var].ndim 46 | 47 | if 'nSCHISM_hgrid_node' in 
ds.variables[var].dimensions: 48 | print(f'Processing on stack {stack}, var {var}.') 49 | dims = ds.variables[var].ndim 50 | if dims == 1: 51 | fout.createVariable(var, ds.variables[var].dtype, ds.variables[var].dimensions) 52 | fout.variables[var][:] = ds.variables[var][nidxs] 53 | #for attr in ds.variables[var].attrs.keys(): 54 | # fout.variables[var].setncattr(attr, ds.variables[var].attrs.get(attr)) 55 | 56 | elif dims == 2: 57 | fout.createVariable(var, ds.variables[var].dtype, ds.variables[var].dimensions) 58 | fout.variables[var][:, :] = ds.variables[var][:][:, nidxs] 59 | 60 | elif dims == 3: 61 | fout.createVariable(var, ds.variables[var].dtype, ds.variables[var].dimensions) 62 | fout.variables[var][:, :, :] = ds.variables[var][:][:, nidxs, :] 63 | 64 | elif 'nSCHISM_hgrid_face' in ds.variables[var].dimensions and 'time' in ds.variables[var].dimensions: 65 | fout.createVariable(var, ds.variables[var].dtype, ds.variables[var].dimensions) 66 | fout.variables[var][:, :] = ds.variables[var][:][:, eidxs] 67 | else: 68 | continue 69 | 70 | for attr in ds.variables[var].ncattrs(): 71 | fout.variables[var].setncattr(attr, ds.variables[var].getncattr(attr)) 72 | ds.close() 73 | fout.close() 74 | 75 | if __name__ == '__main__': 76 | 77 | #input 1: stack 78 | stack_start = 15 79 | stack_end = 42 80 | 81 | #input 2: bbox 82 | lon_min = -92.0 #-82.0 83 | lon_max = -88.0 #-80.0 84 | lat_min = 29 #25.0 85 | lat_max = 31 #27.5 86 | 87 | #input 3: choose which nc files to be subsetted 88 | #ncfiles = ['out2d', 'zCoordinates', 'salinity', 'temperature', 'horizontalVelX', 'horizontalVelY'] 89 | ncfiles = ['out2d', 'zCoordinates', 'horizontalVelX', 'horizontalVelY'] 90 | #ncfiles = ['out2d', 'zCoordinates'] 91 | #ncfiles = ['horizontalVelX', 'horizontalVelY'] 92 | 93 | #input 4: directory to save subsetting results 94 | dirname = 'subset_NC' 95 | path = pathlib.Path(dirname) 96 | if path.exists(): 97 | print('Directory exists!') 98 | else: 99 | print(f'Create a new directory 
{dirname}') 100 | path.mkdir(parents=True, exist_ok=True) 101 | 102 | #input 5: save sub-grid 103 | save_subgrid = True 104 | 105 | #build polygon 106 | px = np.array([lon_min, lon_max, lon_max, lon_min]) 107 | py = np.array([lat_max, lat_max, lat_min, lat_min]) 108 | 109 | #build new hgrid 110 | ds = Dataset(f'./outputs/out2d_1.nc') 111 | 112 | gd = schism_grid() 113 | 114 | gd.elnode = ds.variables['SCHISM_hgrid_face_nodes'][:]-1 115 | gd.x = ds.variables['SCHISM_hgrid_node_x'][:] 116 | gd.y = ds.variables['SCHISM_hgrid_node_y'][:] 117 | gd.dp = ds.variables['depth'][:] 118 | gd.ne = len(gd.elnode) 119 | gd.np = len(gd.x) 120 | gd.i34 = np.ones(gd.ne).astype('int') 121 | fp3 = gd.elnode[:, -1] < 0 122 | gd.i34[fp3] = 3 123 | gd.i34[~fp3] = 4 124 | 125 | gd.compute_ctr() 126 | 127 | #indexes of elements inside the box 128 | eidxs = np.nonzero(inside_polygon(np.c_[gd.xctr, gd.yctr], px, py) == 1)[0] 129 | gd.elnode = gd.elnode[eidxs] 130 | 131 | nidxs = np.unique(gd.elnode) 132 | fpn = nidxs>=0 133 | nidxs = nidxs[fpn] 134 | gd.x = gd.x[nidxs] 135 | gd.y = gd.y[nidxs] 136 | gd.dp = gd.dp[nidxs] 137 | gd.ne = len(eidxs) 138 | gd.np = len(nidxs) 139 | gd.i34 = gd.i34[eidxs] 140 | 141 | #construct new element connectivity 142 | node2node = dict(zip(nidxs, np.arange(gd.np))) 143 | for i in np.arange(gd.elnode.size): 144 | if gd.elnode.ravel()[i]<0: continue 145 | gd.elnode.ravel()[i] = node2node[gd.elnode.ravel()[i]] 146 | 147 | #compute side 148 | gd.ns, isidenode, isdel = gd.compute_side(fmt=1) 149 | 150 | #save grid 151 | if save_subgrid: 152 | gd.save('hgrid_sub.gr3') 153 | 154 | ds.close() 155 | 156 | #create stack list 157 | stacks = [i for i in np.arange(stack_start, stack_end+1)] 158 | npool = len(stacks) if len(stacks) < mp.cpu_count() else mp.cpu_count()-1 159 | #npool = len(stacks) if len(stacks) < mp.cpu_count()/2 else int(mp.cpu_count()/2) 160 | print(f'npool is {npool}') 161 | 162 | pool = mp.Pool(npool) 163 | pool.starmap(subset_on_stack, [(i, nidxs, 
eidxs) for i in stacks]) 164 | #pool.join() 165 | pool.close() 166 | del pool 167 | 168 | -------------------------------------------------------------------------------- /scripts/sync_outputs: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import os,time 3 | 4 | #inputs 5 | sdir='/home/g/g260135/work/wangzg/DSP/RUN08a/outputs' #source dir. 6 | tdir=os.getenv('Hurricane')+'/sciclone/pscr/wangzg/DSP/RUN08a/outputs' #target dir. on sciclone 7 | 8 | walltime=48 #hours, program exits when runtime exceeds walltime 9 | stime=60 #sleep interval for each loop checking the outputs 10 | 11 | #copyfile 12 | snames=[]; stimes=[]; t0=time.time() 13 | while True: 14 | #files to be transfered 15 | fnames=[i for i in os.listdir(sdir) if((not i.startswith('hotstart_0')) and i.endswith('.nc'))] 16 | mtimes=[os.path.getmtime(i) for i in fnames] 17 | 18 | iflag=0 19 | for fname,mtime in zip(fnames,mtimes): 20 | #check file mtime, and transfer file if it is new 21 | print(sdir+"/"+fname) 22 | if (fname in snames) and stimes[snames.index(fname)]>=mtime: continue 23 | os.system('rsync -raP {}/{} {}/'.format(sdir,fname,tdir)); iflag=0 24 | 25 | #save files transfered 26 | if fname in snames: 27 | stimes[snames.index(fname)]=mtime 28 | else: 29 | snames.append(fname); stimes.append(mtime) 30 | 31 | #exit the loop 32 | dt=time.time()-t0 33 | if (dt/3600>walltime) or iflag>5: sys.exit() 34 | time.sleep(60) 35 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | description-file = README.md 3 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | 4 | with open("README.md", "r", encoding="utf-8") as fh: 5 | long_description = 
fh.read() 6 | 7 | setup( 8 | name='pylibs_ocean', 9 | packages=[ 'pylibs/src', 'pylibs/scripts'], 10 | package_dir={'pylibs/src':'src','pylibs/scripts':'scripts'}, 11 | py_modules=['pylib'], 12 | version='1.2.3', # Ideally should be same as your GitHub release tag varsion 13 | package_data={'pylibs/scripts': ['prj.npz','sflux_template.npz','Harmonic_Analysis/*','schismcheck','schismview']}, 14 | description='python tools for ocean reserach', 15 | long_description='python libraries and utilities for data processing including the pre/post-processing about SCHISM models', 16 | author='Zhengui Wang', 17 | author_email='wzhengui@gmail.com', 18 | url='https://github.com/wzhengui/pylibs', 19 | classifiers=[], 20 | install_requires=[ 21 | 'setuptools', 22 | 'pandas', 23 | 'numpy', 24 | 'netCDF4>=1.5.8', 25 | 'matplotlib>=3.0.0', 26 | 'scipy' 27 | ], 28 | extras_require={ 29 | 'mpi': ['mpi4py>=3.0.0'], 30 | 'shapefile': ['pyshp>=2.1.0'], 31 | 'projection': ['pyproj>=3.0.0'], 32 | 'eof': ['eofs>=1.4.0'], 33 | 'cloudpickle': ['cloudpickle==2.2.1'], 34 | 'tiff': ['tifffile==2022.5.4','imagecodecs==2024.6.1'], 35 | 'urllib': ['urllib3==2.2.1'], 36 | 'basemap': ['basemap==1.4.1','basemap-data-hires==1.3.2'], 37 | 'all': ['mpi4py>=3.0.0','pyshp>=2.1.0','pyproj>=3.0.0','eofs>=1.4.0','cloudpickle==2.2.1', 38 | 'tifffile==2022.5.4','imagecodecs==2024.6.1','urllib3==2.2.1','basemap==1.4.1','basemap-data-hires==1.3.2'] 39 | } 40 | ) 41 | -------------------------------------------------------------------------------- /src/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wzhengui/pylibs/38d92c427620537f592490b5b08dbad383643d3f/src/__init__.py --------------------------------------------------------------------------------