├── .gitignore ├── LICENSE ├── README.md ├── adcpy ├── ADCPRdiWorkhorseData.py ├── __init__.py ├── adcpy.py ├── adcpy_plot.py ├── adcpy_recipes.py ├── adcpy_utilities.py ├── pynmea │ ├── DISCLAIMER │ ├── __init__.py │ ├── exceptions.py │ ├── gps.py │ ├── nmea.py │ ├── streamer.py │ └── utils.py ├── rdradcp.py ├── transect_average.py ├── transect_flow_estimator.py ├── transect_preprocessor.py ├── transect_preprocessor_input.py ├── trn_pre_input_GEO20090106.py ├── trn_pre_input_GEO20090117.py └── trn_pre_input_RIO.py ├── doc ├── Makefile ├── make.bat ├── makeapi.bat └── source │ ├── api │ ├── adcpy.pynmea.rst │ ├── adcpy.rst │ └── modules.rst │ ├── conf.py │ ├── configuration.rst │ ├── dwrsmall.gif │ ├── image │ ├── group007_flow_summary.png │ ├── group007_mean_velocity.png │ ├── group007_secondary_circulation.png │ ├── group007_u_avg_n_sd.png │ ├── group007_uvw_velocity.png │ ├── group007_v_avg_n_sd.png │ ├── group007_velocity.csv │ ├── group007_w_avg_n_sd.png │ └── group007_xy_lines.png │ ├── index.rst │ ├── installation.rst │ ├── scripting.rst │ ├── terms.rst │ └── transect_average.rst └── setup.py /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | GEO2009* 3 | 4 | # Backup files 5 | *.~ 6 | 7 | # Byte-compiled / optimized / DLL files 8 | __pycache__/ 9 | *.py[cod] 10 | 11 | # C extensions 12 | *.so 13 | 14 | # Distribution / packaging 15 | bin/ 16 | build/ 17 | develop-eggs/ 18 | dist/ 19 | eggs/ 20 | lib/ 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # Installer logs 31 | pip-log.txt 32 | pip-delete-this-directory.txt 33 | 34 | # Unit test / coverage reports 35 | .tox/ 36 | .coverage 37 | .cache 38 | nosetests.xml 39 | coverage.xml 40 | 41 | # Translations 42 | *.mo 43 | 44 | # Sphinx documentation 45 | docs/_build/ 46 | 47 | -------------------------------------------------------------------------------- /LICENSE: 
-------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2014 esatel 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ADCPy 2 | ===== 3 | 4 | Python tools for ADCP velocity mapping and visualization 5 | -------------------------------------------------------------------------------- /adcpy/ADCPRdiWorkhorseData.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ADCPTransectData subclass for the RDI Workhorse ADCP in transect mode 3 | 4 | Reads RDI raw files from the RDI Workhorse ADCP. Uses rdradcp. 
5 | 6 | This code is open source, and defined by the included MIT Copyright License 7 | 8 | Designed for Python 2.7; NumPy 1.7; SciPy 0.11.0; Matplotlib 1.2.0 9 | 2014-09 - First Release; blsaenz, esatel 10 | """ 11 | 12 | import numpy as np 13 | import re,os 14 | #import netCDF4 15 | import rdradcp 16 | reload(rdradcp) 17 | import pynmea.streamer 18 | import cStringIO 19 | import adcpy_utilities as au 20 | #import scipy.stats.stats as sp 21 | import scipy.stats.morestats as ssm 22 | import adcpy 23 | 24 | 25 | class ADCPRdiWorkhorseData(adcpy.ADCPTransectData): 26 | """ 27 | Subclass of :py:class:AdcpData for reading raw .rNNN RDI ADCP data files, 28 | and optionally accompanying navigational data in .nNNN files. 29 | """ 30 | error_vel = None # [n_ensembles, n_bins] 31 | heading = None # [n_ensembles] or None if no nav data available (degrees from North) 32 | 33 | #: Reference to Station object - necessary for some transformations where 34 | # a cross-channel direction is needed 35 | station = None 36 | 37 | # parameters passed to rdradcp 38 | baseyear = 2000 39 | despike = 'no' 40 | quiet = True 41 | 42 | kwarg_options = ['nav_file', # file path of optional NMEA navigational file 43 | 'num_av', # integer - may be used to average ensembles during reading 44 | 'nens', # number or range of ensembles to read 45 | 'adcp_depth'] # tow depth of ADCP below surface [m] 46 | 47 | def read_raw(self,raw_file,**kwargs): 48 | """ 49 | raw_file: path to a XXXXr.nnn raw file from an RDI ADCP 50 | nav_file: path to NMEA output which matches raw_file. If 'auto', 51 | then look for a nav file based on the raw file. if None, no 52 | handling of n files. 
53 | 54 | num_av: average this many samples together 55 | nens: if None, read all ensembles, 56 | if [start,stop] read those ensembles (ending index is inclusive) 57 | if N, read the first N ensembles 58 | """ 59 | # set some defaults 60 | nav_file=None 61 | num_av=1 62 | nens=None 63 | adcp_depth=None 64 | for kwarg in self.kwarg_options: 65 | if kwarg in kwargs: 66 | exec("%s = kwargs[kwarg]"%kwarg) 67 | 68 | # set parameters passed to rdradcp 69 | self.rdradcp_num_av = num_av 70 | self.rdradcp_nens = nens 71 | self.rdradcp_adcp_depth = adcp_depth 72 | 73 | if not os.path.exists(raw_file): 74 | raise IOError, "Cannot find %s"%raw_file 75 | 76 | self.raw_file = raw_file 77 | 78 | if nav_file == 'auto': 79 | nav_file = None 80 | # NOTE: this will only find lowercase 'n' files: 81 | possible_nav_file = re.sub(r'r(\.\d+)$',r'n\1',raw_file) 82 | if possible_nav_file != raw_file and os.path.exists(possible_nav_file): 83 | nav_file = possible_nav_file 84 | if not self.quiet: 85 | self.msg("found nav file %s"%nav_file) 86 | else: 87 | nav_file = None 88 | 89 | if nav_file and not os.path.exists(nav_file): 90 | raise IOError,"Cannot find %s"%nav_file 91 | 92 | self.nav_file = nav_file 93 | 94 | if self.nav_file: 95 | self.read_nav() 96 | self.valid = self.read_raw_data() 97 | 98 | def read_nav(self): 99 | """ Reads NMEA stream data, prepared for transforn to latlon and 100 | mtime data structures 101 | """ 102 | fp = open(self.nav_file) 103 | 104 | nmea = pynmea.streamer.NMEAStream(fp) 105 | 106 | self.ensemble_gps_indexes = [] # [ensemble #, index into gps_data] 107 | self.gps_data = [] # [ day_fraction, lat, lon] 108 | 109 | while 1: 110 | next_data = nmea.get_objects() 111 | if not next_data: 112 | break 113 | 114 | for sentence in next_data: 115 | try: 116 | if sentence.sen_type == 'GPGGA': # a fix 117 | if sentence.gps_qual < 1: 118 | continue # not a valid fix. 
119 | lat_s = sentence.latitude 120 | lon_s = sentence.longitude 121 | try: 122 | lat = int(lat_s[:2]) + float(lat_s[2:])/60. 123 | lon = int(lon_s[:3]) + float(lon_s[3:])/60. 124 | except ValueError: 125 | # every once in a while the strings are corrupted 126 | continue 127 | if sentence.lat_direction == 'S': 128 | lat *= -1 129 | if sentence.lon_direction == 'W': 130 | lon *= -1 131 | 132 | hours=int(sentence.timestamp[:2]) 133 | minutes= int(sentence.timestamp[2:4]) 134 | seconds= float(sentence.timestamp[4:]) 135 | day_fraction = (hours + (minutes + (seconds/60.))/60.)/24.0 136 | 137 | self.gps_data.append( [day_fraction,lat,lon] ) 138 | 139 | elif sentence.sen_type == 'RDENS': 140 | # assume that this marker goes with the *next* NMEA location 141 | # output. 142 | self.ensemble_gps_indexes.append( [int(sentence.ensemble), 143 | len(self.gps_data)] ) 144 | except AttributeError,exc: 145 | print "While parsing NMEA: " 146 | print exc 147 | print "Ignoring this NMEA sentence" 148 | continue 149 | 150 | self.ensemble_gps_indexes = np.array(self.ensemble_gps_indexes) 151 | self.gps_data = np.array(self.gps_data) 152 | 153 | def read_raw_data(self): 154 | """ read into memory the ADCP data 155 | if no data is found, return false, otherwise return true 156 | """ 157 | if self.rdradcp_nens is None: 158 | nens = -1 # translate to the matlab-ish calling convention 159 | else: 160 | nens = self.rdradcp_nens 161 | 162 | if self.quiet: 163 | log_fp = cStringIO.StringIO() 164 | else: 165 | log_fp = None 166 | 167 | self.raw_adcp = rdradcp.rdradcp(self.raw_file,num_av = self.rdradcp_num_av,nens=nens, 168 | baseyear=self.baseyear,despike=self.despike, 169 | log_fp = log_fp) 170 | if self.quiet: 171 | self.rdradcp_log = log_fp.getvalue() 172 | log_fp.close() 173 | 174 | if self.raw_adcp is None: 175 | return False 176 | 177 | # Rusty need to handle case when there are no valid ensembles 178 | 179 | 180 | # Get the dimensions that were actually returned 181 | self.n_bins = 
self.raw_adcp.east_vel.shape[1] 182 | self.raw_adcp.n_bins = self.n_bins 183 | self.n_ensembles = self.raw_adcp.east_vel.shape[0] 184 | 185 | bin_fields = self.raw_adcp.bin_data.dtype.names 186 | ens_fields = self.raw_adcp.ensemble_data.dtype.names 187 | 188 | # Due to a bug inherited from rdradcp, the last ensemble is not 189 | # valid. In the long run, this should be fixed in rdradcp.py 190 | self.n_ensembles -= 1 191 | Ne = self.n_ensembles 192 | 193 | # velocity: [Ntimes,Nbins,{u,v,w}] 194 | self.velocity = np.array( [self.raw_adcp.east_vel, 195 | self.raw_adcp.north_vel, 196 | self.raw_adcp.vert_vel] ).transpose([1,2,0])[:Ne] 197 | # invert w velocity - is there a way to determine orientation from config? 198 | self.velocity[:,:,2] = self.velocity[:,:,2] * -1.0 199 | 200 | if 'bt_vel' in ens_fields: 201 | self.bt_velocity = self.raw_adcp.bt_vel[:Ne]*1.0e-3 # mm/s -> m/s 202 | # problems w/ big spikes in bt -> not sure why 203 | for j in range(0,1): 204 | bt_vel = self.bt_velocity[:,j] 205 | ii = np.greater(bt_vel,5.0) # identify where depth is > 5 m/s 206 | bt_vel[ii] = np.nan 207 | bt_vel = au.interp_nans_1d(bt_vel) # interpolate over nans 208 | self.bt_velocity[:,j] = bt_vel 209 | 210 | if 'bt_range' in ens_fields: 211 | self.bt_depth = -1.0*np.array([np.mean(self.raw_adcp.bt_range[:Ne],1)]) 212 | 213 | if 'depth' in ens_fields: 214 | self.adcp_depth = self.raw_adcp.depth[:Ne] 215 | if self.rdradcp_adcp_depth is not None: 216 | self.adcp_depth = np.ones(Ne)*self.rdradcp_adcp_depth 217 | 218 | # [Ntimes,Nbins] 219 | self.error_vel = self.raw_adcp.error_vel[:Ne] 220 | self.mtime = self.raw_adcp.mtime[:Ne] 221 | 222 | if self.raw_adcp.longitude is not None and self.raw_adcp.latitude is not None: 223 | self.lonlat = np.array( [self.raw_adcp.longitude, 224 | self.raw_adcp.latitude] ).T[:Ne] 225 | 226 | if 'heading' in ens_fields: 227 | self.heading = self.raw_adcp.heading[:Ne] 228 | 229 | # Extract info about bins - but convert to a z=up, surface=0 230 | # 
coordinate system 231 | self.bin_center_elevation = -1*self.raw_adcp.config.ranges 232 | 233 | if self.n_ensembles < 1: 234 | self.msg("Dropping empty last ensemble lead to empty ADCP") 235 | return False 236 | 237 | # ben adding processing specific to adcpRaw_rdr.m 238 | 239 | # remove boat motion from water vel 240 | vbins = np.shape(self.bin_center_elevation) 241 | btE = np.copy(self.bt_velocity[:,0]) 242 | btN = np.copy(self.bt_velocity[:,1]) 243 | btW = np.copy(self.bt_velocity[:,2]) 244 | vE = np.copy(self.velocity[:,:,0]) - np.ones(vbins)*np.array([btE]).T 245 | vN = np.copy(self.velocity[:,:,1]) - np.ones(vbins)*np.array([btN]).T 246 | vW = np.copy(self.velocity[:,:,2]) - np.ones(vbins)*np.array([btW]).T 247 | 248 | # rotate velocities from ship coordibates 249 | if self.raw_adcp.config.coord_sys is 'ship': 250 | 251 | # convert ship coord to enu 252 | delta = self.heading*np.pi/180 253 | delta2D = np.ones(vbins)*np.array([delta]).T # array of headings 254 | self.velocity[:,:,0] = np.cos(delta2D)*vE + np.sin(delta2D)*vN 255 | self.velocity[:,:,1] = -np.sin(delta2D)*vE + np.cos(delta2D)*vN 256 | self.bt_velocity[:,0] = np.cos(delta)*btE + np.sin(delta)*btN 257 | self.bt_velocity[:,1] = -np.sin(delta)*btE + np.cos(delta)*btN 258 | 259 | else: 260 | 261 | self.velocity[:,:,0] = vE 262 | self.velocity[:,:,1] = vN 263 | 264 | self.velocity[:,:,2] = vW 265 | 266 | # -- previous method of rotation, before correction from Dave Ralton 4/26/2013 267 | # vbins = np.shape(self.bin_center_elevation) 268 | # delta = np.array([self.heading]).T # transpose to vertical 269 | # delta = np.ones(vbins)*delta*np.pi/180 # array of headings 270 | # vE = np.cos(delta)*self.velocity[:,:,0] + np.sin(delta)*self.velocity[:,:,1] 271 | # vN = -np.sin(delta)*self.velocity[:,:,0] + np.cos(delta)*self.velocity[:,:,1] 272 | # 273 | # delta = self.heading*np.pi/180 274 | # btE = np.cos(delta)*self.bt_velocity[:,0] + np.sin(delta)*self.bt_velocity[:,1] 275 | # btN = 
-np.sin(delta)*self.bt_velocity[:,0] + np.cos(delta)*self.bt_velocity[:,1] 276 | # 277 | # # remove boat motion from water vel 278 | # vN=vN-np.ones(vbins)*np.array([btN]).T 279 | # vE=vE-np.ones(vbins)*np.array([btE]).T 280 | # 281 | # # restore corrected velocities 282 | # self.velocity[:,:,0] = vE 283 | # self.velocity[:,:,1] = vN 284 | # self.bt_velocity[:,0] = btE 285 | # self.bt_velocity[:,1] = btN 286 | 287 | #lat=adcp.nav_latitude(nn); 288 | #lon=adcp.nav_longitude(nn); 289 | #% fix bad lon/lats 290 | #ii=abs(lon-mean(lonlat00(:,1)))>5; 291 | #lon(ii)=NaN; 292 | #ii=abs(lat-mean(lonlat00(:,2)))>5; 293 | #lat(ii)=NaN; 294 | #% if lat/lon not recorded every ping, fill in blanks 295 | #lat=interpnan(yd,lat)'; 296 | #lon=interpnan(yd,lon)'; 297 | 298 | read_raw_history = "Constructor RdiWorkhorseAdcpData: Raw RDI file: %s nnum_av=%s ens=%s"%(self.raw_file, 299 | self.rdradcp_num_av, 300 | self.n_ensembles) 301 | 302 | self.history_append(read_raw_history) 303 | 304 | path,fname = os.path.split(self.raw_file) 305 | self.source = fname 306 | 307 | return True 308 | 309 | def write_nc_extra(self,grp,zlib=None): 310 | super(ADCPRdiWorkhorseData,self).write_nc_extra(grp,zlib) 311 | 312 | if self.error_vel is not None: 313 | (e_ens,e_bins) = np.shape(self.error_vel) 314 | grp.createDimension('error_bin',e_bins) 315 | grp.createDimension('error_ens',e_ens) 316 | error_vel_var = grp.createVariable('error_vel','f8', 317 | ('error_ens','error_bin'), 318 | zlib=zlib) 319 | error_vel_var.units = 'm/s' 320 | error_vel_var[...] = self.error_vel 321 | 322 | if self.heading is not None: 323 | heading_var = grp.createVariable('heading','f8', 324 | self.nc_ensemble_dim, 325 | zlib=zlib) 326 | heading_var.units = 'degrees' 327 | heading_var[...] 
= self.heading 328 | 329 | if 'raw_adcp' in self.__dict__: 330 | 331 | raw_adcp_grp = grp.createGroup('raw_adcp') 332 | config = raw_adcp_grp.createGroup('config') 333 | for k in self.raw_adcp.config.__dict__: 334 | v = self.raw_adcp.config.__dict__[k] 335 | try: 336 | setattr(config,k,v) 337 | except Exception,exc: 338 | print exc 339 | print "Skipping config attribute %s"%k 340 | 341 | (raw_n_ens,raw_n_bins) = np.shape(self.raw_adcp.bin_data) 342 | raw_n_ens_dim = raw_adcp_grp.createDimension('raw_n_ensembles',raw_n_ens) 343 | raw_n_bins_dim = raw_adcp_grp.createDimension('raw_n_bins',raw_n_bins) 344 | 345 | ens_data_nc_dtype = raw_adcp_grp.createCompoundType(self.raw_adcp.ensemble_data.dtype,'ens_dtype') 346 | ens_data_var = raw_adcp_grp.createVariable('ensemble_data', 347 | ens_data_nc_dtype, 348 | 'raw_n_ensembles', 349 | zlib=zlib) 350 | ens_data_var[...] = self.raw_adcp.ensemble_data 351 | 352 | bin_data_nc_dtype = raw_adcp_grp.createCompoundType(self.raw_adcp.bin_data.dtype,'bin_dtype') 353 | bin_data_var = raw_adcp_grp.createVariable('bin_data', 354 | bin_data_nc_dtype, 355 | ('raw_n_ensembles','raw_n_bins'), 356 | zlib=zlib) 357 | bin_data_var[...] = self.raw_adcp.bin_data 358 | 359 | 360 | def read_nc_extra(self,grp): 361 | super(ADCPRdiWorkhorseData,self).read_nc_extra(grp) 362 | 363 | print 'Doing read_nc in ADCPRdiWorkhorseData...' 364 | 365 | # read optional base variables 366 | if 'error_vel' in grp.variables: 367 | self.error_vel = grp.variables['error_vel'][...] 368 | if 'heading' in grp.variables: 369 | self.heading = grp.variables['heading'][...] 
370 | 371 | if 'raw_adcp' in grp.groups: 372 | self.raw_adcp = rdradcp.Adcp() 373 | raw_grp = grp.groups['raw_adcp'] 374 | if 'config' in raw_grp.groups: 375 | self.raw_adcp.config = rdradcp.Config() 376 | cfg = raw_grp.groups['config'] 377 | for k in raw_grp.groups['config'].__dict__: 378 | exec("self.raw_adcp.config.%s = cfg.%s"%(k,k)) 379 | 380 | if 'ensemble_data' in raw_grp.variables: 381 | self.raw_adcp.ensemble_data = raw_grp.variables['ensemble_data'][...] 382 | 383 | if 'bin_data' in raw_grp.variables: 384 | self.raw_adcp.bin_data = raw_grp.variables['bin_data'][...] 385 | 386 | 387 | def average_ensembles(self,ens_to_avg): 388 | """ Extra variables must be averaged for this subclass 389 | """ 390 | a = super(ADCPRdiWorkhorseData,self).average_ensembles(ens_to_avg) 391 | n2 = a.n_ensembles 392 | nn = range(n2*ens_to_avg) 393 | if a.heading is not None: 394 | head = a.heading[nn].reshape(n2,ens_to_avg) 395 | a.heading = np.zeros(n2,np.float64) 396 | for i in range(n2): 397 | a.heading[i] = ssm.circmean(head[i,:]*np.pi/180)*180/np.pi 398 | if a.bt_depth is not None: 399 | a.bt_depth = au.average_vector(self.bt_depth[0,nn],(n2,ens_to_avg)) 400 | a.bt_depth = np.array([a.bt_depth]) # reformat into downward vector 401 | if a.adcp_depth is not None: 402 | a.adcp_depth = au.average_vector(self.adcp_depth[nn],(n2,ens_to_avg)) 403 | if a.bt_velocity is not None: 404 | a.bt_velocity = np.zeros((n2,2),np.float64) 405 | for i in range(2): 406 | a.bt_velocity[:,i] = au.average_array(self.bt_velocity[nn,i],(n2,ens_to_avg),axis=0) 407 | a.bt_velocity = au.average_array(self.bt_velocity[nn,:],(n2,ens_to_avg),axis=0) 408 | if a.error_vel is not None: 409 | a.error_vel = au.average_array(self.error_vel[nn,:],(n2,ens_to_avg),axis=0) 410 | 411 | return a 412 | 413 | 414 | 415 | 416 | 417 | -------------------------------------------------------------------------------- /adcpy/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/CADWRDeltaModeling/ADCPy/16e6add146d53ce98dcdc08a39632d2686ce835f/adcpy/__init__.py -------------------------------------------------------------------------------- /adcpy/adcpy_plot.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Tools for visualizing ADCP data that is read and processed by the adcpy module 3 | This module is imported under the main adcpy, and should be available as 4 | adcpy.plot. Some methods can be used to visualize flat arrays, independent of 5 | adcpy, and the plots may be created quickly using the IPanel and QPanel 6 | classes. 7 | 8 | This code is open source, and defined by the included MIT Copyright License 9 | 10 | Designed for Python 2.7; NumPy 1.7; SciPy 0.11.0; Matplotlib 1.2.0 11 | 2014-09 - First Release; blsaenz, esatel 12 | """ 13 | import numpy as np 14 | import matplotlib 15 | matplotlib.use('TkAgg') 16 | import matplotlib.pyplot as plt 17 | import scipy.stats.stats as sp 18 | from matplotlib.dates import num2date#,date2num, 19 | 20 | import adcpy 21 | from adcpy_recipes import calc_transect_flows_from_uniform_velocity_grid 22 | 23 | U_str = 'u' 24 | V_str = 'v' 25 | W_str = 'w' 26 | vel_strs = (U_str,V_str,W_str) 27 | 28 | 29 | # Common formatting for datenums: 30 | def fmt_dnum(dn): 31 | return num2date(dn).strftime('%c') 32 | 33 | 34 | class IPanel(object): 35 | """ 36 | This object stores and plots a 2D velocity map as an image. Any of the data 37 | fields (kwarg_options) may be specificed as kwargs during initialization. 38 | At minimum IPanel requires 'velocity' to be set. 
39 | """ 40 | kwarg_options = ['use_pcolormesh', 41 | 'minv', 42 | 'maxv', 43 | 'velocity', 44 | 'title', 45 | 'units', 46 | 'xlabel', 47 | 'ylabel', 48 | 'x', 49 | 'y', 50 | 'chop_off_nans', 51 | 'x_is_mtime', 52 | 'arrow_color', 53 | 'xy_is_lonlat', 54 | 'interpolation', 55 | 'shading', 56 | 'my_axes'] 57 | 58 | def __init__(self,**kwargs): 59 | 60 | # init everything to None 61 | for kwarg in self.kwarg_options: 62 | exec("self.%s = None"%kwarg) 63 | # set defaults 64 | self.minv = -0.25 65 | self.maxv = 0.25 66 | self.x_is_mtime = False 67 | self.interpolation = 'nearest' 68 | self.use_pcolormesh = False 69 | self.shading = 'flat' 70 | self.xy_is_lonlat = False 71 | self.chop_off_nans = False 72 | 73 | # read/save arguments 74 | for kwarg in self.kwarg_options: 75 | if kwarg in kwargs: 76 | exec("self.%s = kwargs[kwarg]"%kwarg) 77 | 78 | def plot(self,ax=None): 79 | """ 80 | Plots the data in IPanel onto the axis ax, or if ax is None, 81 | onto self.my_axes. 82 | Inputs: 83 | ax = matplotlib axes object, or None 84 | Returns: 85 | Nothing 86 | """ 87 | # set desired axes 88 | if ax is not None: 89 | plt.sca(ax) 90 | elif self.my_axes is not None: 91 | ax = plt.sca(self.my_axes) 92 | else: 93 | ax = plt.gca() 94 | if self.minv is not None: 95 | mnv = ",vmin=self.minv" 96 | else: 97 | mnv = "" 98 | if self.minv is not None: 99 | mxv = ",vmax=self.maxv" 100 | else: 101 | mxv = "" 102 | if self.use_pcolormesh: 103 | vel_masked = np.ma.array(self.velocity,mask=np.isnan(self.velocity)) 104 | if self.x is not None and self.y is not None: 105 | xy = "self.x,self.y," 106 | else: 107 | xy = "" 108 | plot_cmd = "pc=plt.pcolormesh(%svel_masked.T,shading=self.shading%s%s)"%(xy,mnv,mxv) 109 | exec(plot_cmd) 110 | else: 111 | if self.x is not None and self.y is not None: 112 | xy = ",extent=[self.x[0],self.x[-1],self.y[-1],self.y[0]]" 113 | else: 114 | xy = "" 115 | plot_cmd = "pc=plt.imshow(self.velocity.T%s,interpolation=self.interpolation%s%s)"%(xy,mnv,mxv) 116 | 117 | 
exec(plot_cmd) 118 | if self.title is not None: 119 | plt.title(self.title) 120 | plt.axis('tight') 121 | if self.chop_off_nans: 122 | x_test = np.nansum(self.velocity,axis=1) 123 | x_test = ~np.isnan(x_test)*np.arange(np.size(x_test)) 124 | if self.x is None: 125 | plt.xlim([np.nanmin(x_test),np.nanmax(x_test)]) 126 | else: 127 | plt.xlim([self.x[np.nanmin(x_test)],self.x[np.nanmax(x_test)]]) 128 | if self.x[-1] < self.x[0]: 129 | plt.xlim(plt.xlim()[::-1]) 130 | y_test = np.nansum(self.velocity,axis=0) 131 | y_test = ~np.isnan(y_test)*np.arange(np.size(y_test)) 132 | plt.ylim([np.nanmin(y_test),np.nanmax(y_test)]) 133 | if self.y is None: 134 | plt.ylim([np.nanmin(y_test),np.nanmax(y_test)]) 135 | else: 136 | plt.ylim([self.y[np.nanmin(y_test)],self.y[np.nanmax(y_test)]]) 137 | if self.y[-1] < self.y[0]: 138 | plt.ylim(plt.ylim()[::-1]) 139 | if self.x_is_mtime: 140 | ax.xaxis_date() 141 | plt.gcf().autofmt_xdate() 142 | elif self.xy_is_lonlat: 143 | ax.yaxis.set_major_formatter(plt.FormatStrFormatter('%7.4f')) 144 | ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%7.4f')) 145 | plt.ylabel('Latitude [degrees N]') 146 | plt.xlabel('Longitude [degrees E]') 147 | if self.xlabel is not None: 148 | plt.xlabel(self.xlabel) 149 | if self.ylabel is not None: 150 | plt.ylabel(self.ylabel) 151 | plt.colorbar(pc, use_gridspec=True) 152 | 153 | 154 | class QPanel(object): 155 | """ 156 | This object stores and plots a 1D or 2D velocity map as a quiver plot. Any 157 | of the data fields (kwarg_options) may be specificed as kwargs during 158 | initialization. At minimum QPanel requires 'velocity' to be set. 
159 | """ 160 | kwarg_options = ['u_vecs', 161 | 'v_vecs', 162 | 'velocity', 163 | 'title', 164 | 'units', 165 | 'xlabel', 166 | 'ylabel', 167 | 'x', 168 | 'y', 169 | 'v_scale', # make arrow bigger or smaller, relatively speaking, daults to 1 170 | 'xpand', # fractional buffer around xy extent, to capture arrow ends 171 | 'x_is_mtime', 172 | 'arrow_color', 173 | 'xy_is_lonlat', 174 | 'equal_axes', 175 | 'my_axes'] 176 | 177 | def __init__(self,**kwargs): 178 | 179 | # init everything to None 180 | for kwarg in self.kwarg_options: 181 | exec("self.%s = None"%kwarg) 182 | # set defaults 183 | self.u_vecs = 50 184 | self.v_vecs = 50 185 | self.x_is_mtime = False 186 | self.xy_is_lonlat = False 187 | self.arrow_color = 'k' 188 | self.v_scale = 1.0 189 | self.xpand = 0.33 190 | self.equal_axes = False 191 | 192 | # read/save arguments 193 | for kwarg in self.kwarg_options: 194 | if kwarg in kwargs: 195 | exec("self.%s = kwargs[kwarg]"%kwarg) 196 | 197 | def plot(self,ax=None): 198 | """ 199 | Plots the data in QPanel onto the axis ax, or if ax is None, 200 | onto self.my_axes. 201 | Inputs: 202 | ax = matplotlib axes object, or None 203 | Returns: 204 | Nothing 205 | """ 206 | # set desired axes 207 | if ax is not None: 208 | plt.sca(ax) 209 | elif self.my_axes is not None: 210 | ax = plt.sca(self.my_axes) 211 | else: 212 | ax = plt.gca() 213 | dims = np.shape(self.velocity) 214 | u_reduction = max(1,int(dims[0]/self.u_vecs)) 215 | u_indices = np.arange(0,dims[0],u_reduction) 216 | v_mag = np.sqrt(self.velocity[...,0]**2 + self.velocity[...,1]**2) 217 | if len(dims) == 2: 218 | vScale = np.nanmax(v_mag[u_indices]) 219 | local_vel = self.velocity[u_indices,...] 
220 | local_u = local_vel[:,0] 221 | local_v = local_vel[:,1] 222 | local_x = self.x[u_indices] 223 | local_y = self.y[u_indices] 224 | elif len(dims) == 3: 225 | v_reduction = max(1,int(dims[1]/self.v_vecs)) 226 | v_indices = np.arange(0,dims[1],v_reduction) 227 | v_mag = v_mag[u_indices,:] 228 | v_mag = v_mag[:,v_indices] 229 | vScale = np.nanmax(np.nanmax(v_mag)) 230 | local_vel = self.velocity[u_indices,:,:] 231 | local_vel = local_vel[:,v_indices,:] 232 | local_u = local_vel[:,:,0].T 233 | local_v = local_vel[:,:,1].T 234 | local_x,local_y = np.meshgrid(self.x[u_indices],self.y[v_indices]) 235 | vScale = max(vScale,0.126) 236 | qk_value = np.round(vScale*4)/4 237 | 238 | Q = plt.quiver(local_x,local_y, 239 | local_u,local_v, 240 | width=0.0015*self.v_scale, 241 | headlength=10.0, 242 | headwidth=7.0, 243 | scale = 10.0*vScale/self.v_scale, #scale = 0.005, 244 | color = self.arrow_color, 245 | scale_units = 'width') 246 | if self.equal_axes: 247 | ax.set_aspect('equal') 248 | if self.xpand is not None: 249 | xpand = self.xpand 250 | xspan = np.max(self.x) - np.min(self.x) 251 | yspan = np.max(self.y) - np.min(self.y) 252 | 253 | xspan = max(xspan,yspan) 254 | yspan = xspan 255 | 256 | x1 = np.min(self.x) - xpand*xspan; x2 = np.max(self.x) + xpand*xspan 257 | plt.xlim([x1, x2]) 258 | y1 = np.min(self.y) - xpand*yspan; y2 = np.max(self.y) + xpand*yspan 259 | plt.ylim([y1, y2]) 260 | qk = plt.quiverkey(Q, 0.5, 0.08, qk_value, 261 | r'%3.2f '%qk_value + r'$ \frac{m}{s}$', labelpos='W',) 262 | 263 | if self.title is not None: 264 | plt.title(self.title,y=1.06) 265 | if self.x_is_mtime: 266 | ax.xaxis_date() 267 | plt.gcf().autofmt_xdate() 268 | elif self.xy_is_lonlat: 269 | ax.yaxis.set_major_formatter(plt.FormatStrFormatter('%7.4f')) 270 | ax.xaxis.set_major_formatter(plt.FormatStrFormatter('%7.4f')) 271 | plt.ylabel('Latitude [degrees N]') 272 | plt.xlabel('Longitude [degrees E]') 273 | if self.xlabel is not None: 274 | plt.xlabel(self.xlabel) 275 | if self.ylabel 
is not None: 276 | plt.ylabel(self.ylabel) 277 | #plt.autoscale(True) 278 | 279 | 280 | 281 | def get_fig(fig): 282 | """ 283 | Returns a new figure if figure is None, otherwise passes returns fig. 284 | Inputs: 285 | fig = matplotlib figure object, or None 286 | Returns: 287 | fig = either passes, or new matplotlib figure object 288 | """ 289 | if fig is None: 290 | return plt.figure() 291 | else: 292 | return fig 293 | 294 | def plot_vertical_panels(vpanels,fig=None,title=None): 295 | """ 296 | Plots a list of panels in a vertical arangement in in figure window. 297 | Inputs: 298 | fig = matplotlib figure object in which to plot, or None for a new figure 299 | Returns: 300 | nothing 301 | """ 302 | 303 | fig_handle = get_fig(fig) 304 | plt.clf() 305 | n_panels = len(vpanels) 306 | sp_base = 100*n_panels+10 307 | for i in range(n_panels): 308 | plt.subplot(sp_base+i+1) 309 | vpanels[i].plot() 310 | plt.tight_layout() 311 | if title is not None: 312 | plt.title(title) 313 | return fig_handle 314 | 315 | def show(): 316 | """ 317 | Shortcut to matplotlib.pyplot.show() 318 | """ 319 | plt.show() 320 | 321 | def find_array_value_bounds(nparray,resolution): 322 | """ 323 | Find the bounds of the array values, adding up + resolution to make the bounds 324 | a round out of a multiple of resolution. 
325 | Inputs: 326 | nparray = array of numbers for which bounds are needed 327 | resoution = number of which the bounds will be rounded up toward 328 | Returns: 329 | minv = minimum bound value of nparray 330 | maxv = maximum bound value of nparray 331 | """ 332 | inv = 1.0/resolution 333 | mtest = np.floor(nparray*inv) 334 | minv = np.nanmin(np.nanmin(mtest))*resolution 335 | mtest = np.ceil(nparray*inv) 336 | maxv = np.nanmax(np.nanmax(mtest))*resolution 337 | return (minv,maxv) 338 | 339 | 340 | def find_plot_min_max_from_velocity(velocity_2d,res=None,equal_res_about_zero=True): 341 | """ 342 | Finds bounds as in find_array_value_bounds(), then optinoally 343 | equates then +/- from zero. If res is None, returns None 344 | Inputs: 345 | nparray = array of numbers for which bounds are needed [2D numpy array] 346 | res = number of which the bounds will be rounded up toward [number] 347 | equal_res_about_zero = toggle to switch [True/False] 348 | Returns: 349 | minv =- minimum bound value of nparray, or None 350 | maxv = maximum bound value of nparray, or None 351 | """ 352 | if res is not None: 353 | minv, maxv = find_array_value_bounds(velocity_2d,res) 354 | if equal_res_about_zero: 355 | maxv = np.max(np.abs(minv),np.abs(minv)) 356 | minv = -1.0*maxv 357 | else: 358 | minv = None 359 | maxv = None 360 | return (minv,maxv) 361 | 362 | def get_basic_velocity_panel(velocity_2d,res=None,equal_res_about_zero=True): 363 | """ 364 | Returns an IPanel with from a 2D velocity array. 
365 | Inputs: 366 | nparray = array of numbers for which bounds are needed 367 | res = number of which the bounds will be rounded up toward 368 | equal_res_about_zero = toggle to switch [True/False] 369 | Returns: 370 | IPanel onject 371 | """ 372 | minv, maxv = find_plot_min_max_from_velocity(velocity_2d,res, 373 | equal_res_about_zero) 374 | return IPanel(velocity = velocity_2d, 375 | x = None, 376 | y = None, 377 | minv = minv, 378 | maxv = maxv, 379 | units = 'm/s') 380 | 381 | 382 | def plot_uvw_velocity_array(velocity,fig=None,title=None,ures=None,vres=None,wres=None, 383 | equal_res_about_zero=True): 384 | """ 385 | Generates a figure with three panels showing U,V,W velocity from a single 3D 386 | velocity array 387 | Inputs: 388 | velocity = [x,y,3] shape numpy array of 2D velocities 389 | fig = input figure number [integer or None] 390 | ures,vres,wres = numbers by which the velocity bounds will be rounded up toward [number or None] 391 | equal_res_about_zero = toggle to switch [True/False] 392 | Returns: 393 | fig = matplotlib figure object 394 | """ 395 | panels = [] 396 | res = [ures, vres, wres] 397 | for i in range(3): 398 | if i == 0 and title is not None: 399 | title_str = title + " - " 400 | else: 401 | title_str = "" 402 | panels.append(get_basic_velocity_panel(velocity[:,:,i],res=res[i],equal_res_about_zero=False)) 403 | panels[-1].title = "%s%s Velocity [m/s]"%(title_str,vel_strs[i]) 404 | panels[-1].use_pcolormesh = False 405 | fig = plot_vertical_panels(panels) 406 | plt.tight_layout() 407 | return fig 408 | 409 | 410 | def plot_secondary_circulation(adcp,u_vecs,v_vecs,fig=None,title=None): 411 | """ 412 | Generates a with a single panel, plotting U velocity as an IPanel, overlain by 413 | VW vectors from a QPanel. 
    Inputs:
        adcp = ADCPData object
        u_vecs,v_vecs = desired number of horizontal/vertical vectors [integers]
        fig = input figure number [integer or None]
        title = figure title text [string or None]
    Returns:
        fig = matplotlib figure object
    """
    if fig is None:
        fig = plt.figure(fig,figsize=(10,4))
    else:
        # an existing figure was passed in; clear the current figure for reuse
        plt.clf()
    xd,yd,dd,xy_line = adcpy.util.find_projection_distances(adcp.xy)
    # NOTE(review): the docstring says U velocity is plotted, but this uses
    # velocity[:,:,1] (V); compare plot_secondary_circulation_over_streamwise
    # below, which uses [:,:,0] -- confirm which component is intended.
    stream_wise = get_basic_velocity_panel(adcp.velocity[:,:,1],res=0.01)
    stream_wise.x = dd
    stream_wise.y = adcp.bin_center_elevation
    stream_wise.chop_off_nans = True
    # V and W components drawn as arrows on top of the color panel
    secondary = QPanel(velocity = adcp.velocity[:,:,1:],
                       x = dd,
                       y = adcp.bin_center_elevation,
                       xpand = None,
                       v_scale = 1.5,
                       u_vecs = u_vecs,
                       v_vecs = v_vecs,
                       arrow_color = 'k',
                       units = 'm/s')
    stream_wise.plot()
    secondary.plot()
    if title is not None:
        plt.title(title)
    return fig

def plot_secondary_circulation_over_streamwise(adcp,u_vecs,v_vecs,fig=None,title=None):
    """
    Generates a with a single panel, plotting U velocity as an IPanel, overlain by
    VW vectors from a QPanel.
    Inputs:
        adcp = ADCPData object
        u_vecs,v_vecs = desired number of horizontal/vertical vectors [integers]
        fig = input figure number [integer or None]
        title = figure title text [string or None]
    Returns:
        fig = matplotlib figure object
    """
    if fig is None:
        fig = plt.figure(fig,figsize=(10,4))
    else:
        plt.clf()
    xd,yd,dd,xy_line = adcpy.util.find_projection_distances(adcp.xy)
    # streamwise (U) velocity as the background color panel
    stream_wise = get_basic_velocity_panel(adcp.velocity[:,:,0],res=0.01)
    stream_wise.x = dd
    stream_wise.y = adcp.bin_center_elevation
    stream_wise.chop_off_nans = True
    # secondary circulation (V,W) drawn as arrows on top
    secondary = QPanel(velocity = adcp.velocity[:,:,1:],
                       x = dd,
                       y = adcp.bin_center_elevation,
                       xpand = None,
                       v_scale = 1.5,
                       u_vecs = u_vecs,
                       v_vecs = v_vecs,
                       arrow_color = 'k',
                       units = 'm/s')
    stream_wise.plot()
    secondary.plot()
    if title is not None:
        plt.title(title)
    return fig

def plot_ensemble_mean_vectors(adcp,fig=None,title=None,n_vectors=50,return_panel=False):
    """
    Generates a QPanel, plotting mean uv velocity vectors in the x-y plane.
    Inputs:
        adcp = ADCPData object
        fig = input figure number [integer or None]
        title = figure title text [string or None]
        n_vectors = desired number of vectors [integer]
        return_panel = optinally return the QPanel instead of the figure
    Returns:
        fig = matplotlib figure object, or
        vectors = QPanel object
    """
    # depth-mean U and V per ensemble, shape (n_ensembles, 2)
    dude = np.zeros((adcp.n_ensembles,2),np.float64)
    velocity = adcp.get_unrotated_velocity()
    # this doesn't factor in depth, may integrate bad values if the have not been filtered into NaNs somehow
    dude[:,0] = sp.nanmean(velocity[:,:,0],axis=1)
    dude[:,1] = sp.nanmean(velocity[:,:,1],axis=1)
    vectors = QPanel(velocity = dude,
                     u_vecs = n_vectors,
                     arrow_color = 'k',
                     title = title,
                     units = 'm/s')
    # choose the best available horizontal axis: projected xy, then lon/lat,
    # then ensemble time as a last resort
    if adcp.xy is not None:
        vectors.x = adcp.xy[:,0]
        vectors.y = adcp.xy[:,1]
        vectors.xlabel = 'm'
        vectors.ylabel = 'm'
        vectors.equal_axes = True
    elif adcp.lonlat is not None:
        vectors.x = adcp.lonlat[:,0]
        vectors.y = adcp.lonlat[:,1]
        vectors.xy_is_lonlat = True
    else:
        vectors.x = adcp.mtime
        vectors.y = np.zeros(np.size(vectors.x))
        vectors.x_is_mtime = True
    if return_panel:
        return vectors
    else:
        fig = get_fig(fig)
        vectors.plot()
        plt.tight_layout()
        return fig


def plot_obs_group_xy_lines(adcp_obs,fig=None,title=None):
    """
    Produces a quick plot of the adcp ensemble x-y locations, from
    a list of ADCPData objects. x-y tracks lines are colored differently
    for each ADCPData object.
533 | Inputs: 534 | adcp_obs = list ADCPData objects 535 | fig = input figure number [integer or None] 536 | title = figure title text [string or None] 537 | Returns: 538 | fig = matplotlib figure object 539 | """ 540 | 541 | fig = get_fig(fig) 542 | plt.hold(True) 543 | legends = [] 544 | for a in adcp_obs: 545 | if a.mtime is not None: 546 | label = a.source+"; "+fmt_dnum(a.mtime[0]) 547 | else: 548 | label = a.source 549 | plot_xy_line(a,fig,label=label,use_stars_at_xy_locations=False) 550 | plt.legend(prop={'size':10}) 551 | if title is not None: 552 | plt.title(title,y=1.06) 553 | return fig 554 | 555 | def plot_xy_line(adcp,fig=None,title=None,label=None,use_stars_at_xy_locations=True): 556 | """ 557 | Produces a quick plot of the adcp ensemble x-y locations, from an ADCPData 558 | object. 559 | Inputs: 560 | adcp_obs = list ADCPData objects 561 | fig = input figure number [integer or None] 562 | title = figure title text [string or None] 563 | use_stars_at_xy_locations = plots * at actual ensemble locations [True/False] 564 | Returns: 565 | fig = matplotlib figure object 566 | """ 567 | fig = get_fig(fig) 568 | if adcp.xy is not None: 569 | x = adcp.xy[:,0] 570 | y = adcp.xy[:,1] 571 | elif adcp.lonlat is not None: 572 | x = adcp.lonlat[:,0] 573 | y = adcp.lonlat[:,1] 574 | else: 575 | raise Exception,"plot_xy_line(): no position data in ADCPData object" 576 | if use_stars_at_xy_locations: 577 | plt.plot(x,y,marker='*',label=label) 578 | else: 579 | plt.plot(x,y,label=label) 580 | if title is not None: 581 | plt.title(title,y=1.06) 582 | formatter = matplotlib.ticker.ScalarFormatter(useOffset=False) 583 | plt.gca().yaxis.set_major_formatter(formatter) 584 | plt.gca().xaxis.set_major_formatter(formatter) 585 | return fig 586 | 587 | 588 | def plot_uvw_velocity(adcp,uvw='uvw',fig=None,title=None,ures=None,vres=None,wres=None, 589 | equal_res_about_zero=True,return_panels=False): 590 | """ 591 | Produces a quick plot of the adcp ensemble x-y locations, from an 
ADCPData
    object.
    Inputs:
        adcp_obs = list ADCPData objects
        fig = input figure number [integer or None]
        title = figure title text [string or None]
        use_stars_at_xy_locations = plots * at actual ensemble locations [True/False]
    Returns:
        fig = matplotlib figure object
    """
    panels = []
    dx = None
    dt = None
    res = [ures, vres, wres]
    # prefer an along-track distance axis when an xy projection of the right
    # length exists; otherwise fall back to the ensemble time axis
    if adcp.xy is not None:
        if np.size(adcp.xy[:,0]) == adcp.n_ensembles:
            xd,yd,dx,xy_line = adcpy.util.find_projection_distances(adcp.xy)
    if adcp.mtime is not None:
        if np.size(adcp.mtime) == adcp.n_ensembles:
            dt = adcp.mtime
    # map the requested components ('u','v','w') to velocity array indices
    ax = adcpy.util.get_axis_num_from_str(uvw)

    for i in ax:
        # only the first panel carries the figure title prefix
        if i == ax[0] and title is not None:
            title_str = title + " - "
        else:
            title_str = ""
        panels.append(get_basic_velocity_panel(adcp.velocity[:,:,i],res=res[i]))
        panels[-1].title = "%s%s Velocity [m/s]"%(title_str,vel_strs[i])
        if dx is not None:
            # plotting velocity projected along a line
            panels[-1].x = dx
            panels[-1].xlabel = 'm'
            panels[-1].ylabel = 'm'
            panels[-1].y = adcp.bin_center_elevation
        elif dt is not None:
            # plotting velocity ensembles vs time
            panels[-1].x = dt
            panels[-1].x_is_mtime = True
            panels[-1].y = adcp.bin_center_elevation
            panels[-1].ylabel = 'm'
            panels[-1].use_pcolormesh = False
        else:
            # super basic plot
            panels[-1].use_pcolormesh = False

    if return_panels:
        return panels
    else:
        fig = plot_vertical_panels(panels)
        return fig


def plot_flow_summmary(adcp,title=None,fig=None,ures=None,vres=None,use_grid_flows=False):
    """
    Plots projected mean flow vectors, U and V velocity profiles, and
    associated text data on a single plot.
648 | Inputs: 649 | adcp_obs = list ADCPData objects 650 | fig = input figure number [integer or None] 651 | title = figure title text [string or None] 652 | ures,vres = numbers by which the velocity bounds will be rounded up toward [number or None] 653 | use_grid_flows = calculates flows using crossproduct flow (if available) 654 | [True] or by weighted summing of grid cells [False] 655 | Returns: 656 | fig = matplotlib figure object 657 | """ 658 | 659 | if adcp.xy is None: 660 | ValueError('Cannot plot summary without projected data.') 661 | raise 662 | if fig is None: 663 | fig = plt.figure(fig,figsize=(8,10.5)) 664 | else: 665 | plt.clf() 666 | 667 | vectors = plot_ensemble_mean_vectors(adcp,n_vectors=30,return_panel=True) 668 | vectors.x = vectors.x - np.min(vectors.x) 669 | vectors.y = vectors.y - np.min(vectors.y) 670 | u_panel,v_panel = plot_uvw_velocity(adcp,uvw='uv',fig=fig,ures=ures, 671 | vres=vres,return_panels=True) 672 | 673 | u_panel.chop_off_nans = True 674 | u_panel.xlabel = None 675 | v_panel.chop_off_nans = True 676 | 677 | xd,yd,dd,xy_line = adcpy.util.find_projection_distances(adcp.xy) 678 | 679 | plt.subplot(221) 680 | vectors.plot() 681 | plt.subplot(413) 682 | u_panel.plot() 683 | plt.subplot(414) 684 | v_panel.plot() 685 | plt.tight_layout() 686 | 687 | if title is not None: 688 | plt.text(0.55,0.933,title, 689 | horizontalalignment='left', 690 | verticalalignment='center', 691 | fontsize=10, 692 | transform = fig.transFigure) 693 | 694 | if adcp.mtime is not None: 695 | plt.text(0.55,0.9,'Start of Data: %s'%( num2date(adcp.mtime[0]).strftime('%c')), 696 | horizontalalignment='left', 697 | verticalalignment='center', 698 | fontsize=10, 699 | transform = fig.transFigure) 700 | 701 | if adcp.rotation_angle is not None: 702 | if np.size(adcp.rotation_angle) > 1: 703 | rot_str = 'Rozovski' 704 | else: 705 | rot_str = '%5.2f degrees'%(adcp.rotation_angle*180.0/np.pi) 706 | else: 707 | rot_str = 'None' 708 | plt.text(0.55,0.866,'Streawise 
Rotation: %s'%rot_str,
             horizontalalignment='left',
             verticalalignment='center',
             fontsize=10,
             transform=fig.transFigure)


    # plot origin = minimum non-NaN x and y of the projected track
    x1 = min(adcp.xy[:,0][np.nonzero(~np.isnan(adcp.xy[:,0]))])
    y1 = min(adcp.xy[:,1][np.nonzero(~np.isnan(adcp.xy[:,1]))])

    loc_string = 'Plot origin (%s) = (%i,%i)'%(adcp.xy_srs,
                                               int(x1),
                                               int(y1))

    plt.text(0.55,0.833,loc_string,
             horizontalalignment='left',
             verticalalignment='center',
             fontsize=10,
             transform = fig.transFigure)

    # report flows from the cross-product method when available and requested,
    # otherwise from the uniform-grid summation
    if not use_grid_flows and 'calc_crossproduct_flow' in dir(adcp):

        wrums,wru,tsa,tcsa = adcp.calc_crossproduct_flow()

        plt.text(0.55,0.8,'Mean cross-product velocity [m/s]: %3.2f'%wrums,
                 horizontalalignment='left',
                 verticalalignment='center',
                 fontsize=10,
                 transform = fig.transFigure)

        plt.text(0.55,0.766,'Mean cross-product flow [m^3/s]: %12.2f'%wru,
                 horizontalalignment='left',
                 verticalalignment='center',
                 fontsize=10,
                 transform = fig.transFigure)

    else:

        (scalar_mean_vel, depth_averaged_vel, total_flow, total_survey_area) = \
            calc_transect_flows_from_uniform_velocity_grid(adcp,use_grid_only=True)

        plt.text(0.55,0.8,'Mean U velocity [m/s]: %3.2f'%scalar_mean_vel[0],
                 horizontalalignment='left',
                 verticalalignment='center',
                 fontsize=10,
                 transform = fig.transFigure)

        plt.text(0.55,0.766,'Mean V velocity [m/s]: %3.2f'%scalar_mean_vel[1],
                 horizontalalignment='left',
                 verticalalignment='center',
                 fontsize=10,
                 transform = fig.transFigure)

        plt.text(0.55,0.733,'Mean U flow [m^3/s]: %12.2f'%total_flow[0],
                 horizontalalignment='left',
                 verticalalignment='center',
                 fontsize=10,
                 transform = fig.transFigure)

        plt.text(0.55,0.7,'Mean V flow [m^3/s]: %12.2f'%total_flow[1],
                 horizontalalignment='left',
                 verticalalignment='center',
                 fontsize=10,
                 transform = fig.transFigure)

    if adcp.source is not None:
        plt.text(0.55,0.633,'Sources:\n%s'%adcp.source,
                 horizontalalignment='left',
                 verticalalignment='center',
                 fontsize=10,
                 transform = fig.transFigure)

    return fig

--------------------------------------------------------------------------------
/adcpy/adcpy_recipes.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
"""Batch processing and logic for organizing and binning multiple ADCPData objects.
Tools and methods for cateogizing/manipulating/visualizing data in ADCPy/ADCPData
format. This module is dependent upon adcpy.

This code is open source, and defined by the included MIT Copyright License

Designed for Python 2.7; NumPy 1.7; SciPy 0.11.0; Matplotlib 1.2.0
2014-09 - First Release; blsaenz, esatel
"""
import numpy as np # numpy 1.7
import glob
import os
import csv
import scipy.stats as sp
#import scipy.signal as sps
import scipy.stats.morestats as ssm
from matplotlib.dates import num2date#,date2num,
import datetime

import adcpy

def average_transects(transects,dxy,dz,plotline=None,return_adcpy=True,
                      stats=True,plotline_from_flow=False,sd_drop=0):
    """
    This method takes a list of input ADCPy transect objects, and:
    1) Projects and re-grids each transect to either the input plotline, or a best
       fit of available projected xy locations;
    2) Bin-averages the re-gridded U,V, and W velocities of input ADCPTransectData
       objects
    Inputs:
        transects = list of ADCPTransectData objects
        dxy = new grid spacing in the xy (or plotline) direction
        dz = new regular grid spacing in the z direction (downward for transects)
        plotline = optional dsignated line in the xy plane for projecting ensembles onto
        return_adcpy = True: returns an ADCPData object containing averaged velocities
          False: returns a 3D numpy array containing U,V,W gridded veloctiy
    """
    n_transects = len(transects)
    avg = transects[0].copy_minimum_data()
    if n_transects > 1:
        # ugly brute-force method ot find furthest points;
        # ConvexHull-type approach is only available in more recent scipy
        max_dist = 0.0
        centers = [adcpy.util.centroid(a.xy) for a in transects]
        for c1 in centers:
            for c2 in centers:
                max_dist = max(max_dist,adcpy.util.find_line_distance(c1,c2))
        print "max dist:",max_dist
        if max_dist > 30.0:
            print 'WARNING: averaging transects with maximum centroid distance of %f m!'%max_dist

    # gather position data for new grid generation
    xy_data = np.vstack([transects[i].xy for i in range(n_transects)])
    z_data = np.hstack([transects[i].bin_center_elevation for i in range(n_transects)])

    # find common grid
    if plotline is None:
        if plotline_from_flow:
            # orient the projection line perpendicular to the mean flow
            flows = transects[0].calc_ensemble_flow(range_from_velocities=False)
            xy_line = adcpy.util.map_flow_to_line(xy_data,flows[:,0],flows[:,1])
        else:
            # best-fit line through all transect positions
            xy_line = adcpy.util.map_xy_to_line(xy_data)
    else:
        xy_line = plotline
    # NEED logic around determining whether original data was negative down, positive up, etc
    z_mm = np.array([np.max(z_data),np.min(z_data)])
    (dd,xy_new_range,xy_new,z_new) = adcpy.util.new_xy_grid(xy_data,z_mm,dxy,dz,xy_line,True)

    # initialize arrays
    xy_bins = adcpy.util.find_regular_bin_edges_from_centers(xy_new_range)
    z_bins = adcpy.util.find_regular_bin_edges_from_centers(z_new)
    new_shape = [len(xy_new_range),len(z_new),3]
    avg.velocity = np.empty(new_shape)
    if stats:
        avg.velocity_n = np.empty(new_shape)
        avg.velocity_sd = np.empty(new_shape)


    # generate linear xy,z,velocties for bin averaging, perform bin averaging
    # (one pass per velocity component U, V, W)
    for i in range(3):
        bin_ave_inputs = []
        mtimes = []
        for t in transects:
            xx,yy,xy_range,xy_line = adcpy.util.find_projection_distances(t.xy,xy_line)
            bin_ave_inputs.append(adcpy.util.xy_z_linearize_array(xy_range,
                                                                  t.bin_center_elevation,
                                                                  t.velocity[...,i]))
        xy = np.hstack([bin_ave_inputs[j][0] for j in range(n_transects)])
        z = np.hstack([bin_ave_inputs[j][1] for j in range(n_transects)])
        values = np.hstack([bin_ave_inputs[j][2] for j in range(n_transects)])
        bin_ave = adcpy.util.bin_average(xy,xy_bins,values,z,z_bins,return_stats=stats,sd_drop=sd_drop)
        bin_ave = adcpy.util.un_flip_bin_average(xy_new_range,z_new,bin_ave)
        if stats:
            (avg.velocity[...,i],
             avg.velocity_n[...,i],
             avg.velocity_sd[...,i]) = bin_ave
        else:
            avg.velocity[...,i] = bin_ave[0]

    # update adcpData object
    avg.xy = xy_new
    avg.bin_center_elevation = z_new
    avg.n_ensembles = new_shape[0]
    avg.n_bins = new_shape[1]

    # report back
    if return_adcpy:
        avg.xy_srs = transects[0].xy_srs
        sources = [transects[i].source for i in range(n_transects)]
        avg.source = "\n".join(sources)
        # assign the average a single representative time: the mean of the
        # per-transect median times (None entries filtered out)
        mtimes = [sp.nanmedian(transects[i].mtime) for i in range(n_transects)]
        mtimes = np.array(filter(None,mtimes))
        if mtimes.any():
            avg.mtime = np.ones(new_shape[0],np.float64) * sp.nanmean(mtimes)
        if plotline is not None:
            plotlinestr = "[%f,%f],[%f,%f]"%(plotline[0,0],
                                             plotline[0,1],
                                             plotline[1,0],
                                             plotline[1,1])
        else:
            plotlinestr='None'
        avg.history_append('average_transects(dxy=%f,dz=%f,plotline=%s)'%(dxy,dz,plotlinestr))
        return avg
    else:
        return avg.velocity


def write_csv_velocity_array(adcp,csv_filename,no_header=False,un_rotate_velocties=True):
    """
    Writes comma-delimited u,v,w velocties to a text file.
    Inputs:
        ADCP = ADCPData object
        csv_filename = file path of output file
        no_header = boolean, True = don't write header line
    Returns:
        nothing
    """
    # direct dump of numpy array - opps required numpy v1.8
    #np.savetext(csv_filename,adcp.velocity,delimiter=",")
    if un_rotate_velocties:
        v = adcp.get_unrotated_velocity()
    else:
        v = adcp.velocity
    with open(csv_filename, 'wb') as csvfile:
        arraywriter = csv.writer(csvfile, delimiter=',',
                                 quoting=csv.QUOTE_MINIMAL)
        if not no_header:
            # header rows: positions (xy preferred, lon/lat fallback) then
            # bin center elevations
            if adcp.xy is not None:
                arraywriter.writerow(['x']+[adcp.xy_srs])
                arraywriter.writerow(adcp.xy[:,0].tolist())
                arraywriter.writerow(['y'])
                arraywriter.writerow(adcp.xy[:,1].tolist())
            elif adcp.lonlat is not None:
                arraywriter.writerow(['longitude'])
                arraywriter.writerow(adcp.lonlat[:,0].tolist())
                arraywriter.writerow(['latitude'])
                arraywriter.writerow(adcp.lonlat[:,1].tolist())
            arraywriter.writerow(['bin_center_elevation'])
            arraywriter.writerow(adcp.bin_center_elevation.tolist())
        # velocity component blocks, one ensemble per row
        arraywriter.writerow(['U'])
        for i in range(adcp.n_ensembles):
            arraywriter.writerow(v[i,:,0].tolist())
        arraywriter.writerow(['V'])
        for i in range(adcp.n_ensembles):
            arraywriter.writerow(v[i,:,1].tolist())
        arraywriter.writerow(['W'])
        for i in range(adcp.n_ensembles):
            arraywriter.writerow(v[i,:,2].tolist())


def write_csv_velocity_db(adcp,csv_filename,no_header=False,un_rotate_velocties=True):
    """
    Writes comma-delimited ensemble-mean U,V
    Inputs:
        ADCP = ADCPData object
        csv_filename = file path of output file
        no_header = boolean, True = don't write position data, False = write position data
    Returns:
        nothing
    """
    # direct dump of numpy array - opps required numpy v1.8
    #np.savetext(csv_filename,adcp.velocity,delimiter=",")
    if un_rotate_velocties:
        v = adcp.get_unrotated_velocity()
    else:
        v = adcp.velocity
    with open(csv_filename, 'wb') as csvfile:
        arraywriter = csv.writer(csvfile, delimiter=',',
                                 quoting=csv.QUOTE_MINIMAL)
        if not no_header:
            if adcp.xy is not None:
                header = ['x [%s]'%adcp.xy_srs,'y [%s]'%adcp.xy_srs]
            elif adcp.lonlat is not None:
                header = ['longitude [degE]','latitude [degN]']
            else:
                print 'Error, input adcp has no position data - no file written'
                return
            header.extend(['z [m]','datetime','u [m/s]','v [m/s]','w [m/s]',])
            arraywriter.writerow(header)
        # one row per (ensemble, bin) pair
        for i in range(adcp.n_ensembles):
            for j in range(adcp.n_bins):
                if adcp.mtime is None:
                    rec_time = 'None'
                elif adcp.mtime[i] is None or np.isnan(adcp.mtime[i]):
                    rec_time = 'None'
                else:
                    rec_time = num2date(adcp.mtime[i]).strftime('%c')
                if adcp.xy is not None:
                    db_record = [adcp.xy[i,0],adcp.xy[i,1]]
                else:
                    db_record = [adcp.lonlat[i,0], adcp.lonlat[i,1]]
                db_record = db_record + [adcp.bin_center_elevation[j],
                                         rec_time,
                                         v[i,j,0],
                                         v[i,j,1],
                                         v[i,j,2]]
                arraywriter.writerow(db_record)


def write_ensemble_mean_velocity_db(adcp,csv_filename,no_header=False,
                                    un_rotate_velocties=True,elev_line=None,
                                    range_from_velocities=False):
    """
    Writes comma-delimited velocties to a text file, optionally with xy-positions
    or lon-lat positions, and bin_center_elveations. The write order for a 2D
    velocity aray is the first (leftmost) axis is written horizontally.
    Inputs:
        ADCP = ADCPData object
        csv_filename = file path of output file
        no_header = boolean, True = don't write position data, False = write position data
    Returns:
        nothing
    """
    # direct dump of numpy array - opps required numpy v1.8
    #np.savetext(csv_filename,adcp.velocity,delimiter=",")
    if un_rotate_velocties and adcp.rotation_angle is not None:
        # temporarily clear the rotation, take the mean, then restore it
        r_axis = adcp.rotation_axes
        r_angle = adcp.rotation_angle
        adcp.set_rotation(None)
        UVW = adcp.ensemble_mean_velocity(elev_line=elev_line,
                                          range_from_velocities=range_from_velocities)
        adcp.set_rotation(r_angle,r_axis)
    else:
        UVW = adcp.ensemble_mean_velocity(elev_line=elev_line,
                                          range_from_velocities=range_from_velocities)
    with open(csv_filename, 'wb') as csvfile:
        arraywriter = csv.writer(csvfile, delimiter=',',
                                 quoting=csv.QUOTE_MINIMAL)
        if not no_header:
            if adcp.xy is not None:
                header = ['x [%s]'%adcp.xy_srs,'y [%s]'%adcp.xy_srs]
            elif adcp.lonlat is not None:
                header = ['longitude [degE]','latitude [degN]']
            else:
                print 'Error, input adcp has no position data - no file written'
                return
            header.extend(['datetime','U [m/s]','V [m/s]'])
            arraywriter.writerow(header)
        # one row per ensemble: position, time, depth-mean U and V
        for i in range(adcp.n_ensembles):
            if adcp.mtime is None:
                rec_time = 'None'
            elif adcp.mtime[i] is None or np.isnan(adcp.mtime[i]):
                rec_time = 'None'
            else:
                rec_time = num2date(adcp.mtime[i]).strftime('%c')
            if adcp.xy is not None:
                db_record = [adcp.xy[i,0],adcp.xy[i,1]]
            else:
                db_record = [adcp.lonlat[i,0], adcp.lonlat[i,1]]

            db_record = db_record + [rec_time,
                                     UVW[i,0],
                                     UVW[i,1]]
            arraywriter.writerow(db_record)



#def split_repeat_survey_into_transects(adcp_survey):
#
#    if adcp_survey.xy is None:
#        raise Exception,'ADCP data must have an XY projection for
trasect detection and splitting'
#    velocity_change = np.abs(adcp_survey.xy[1:-1,0]-adcp_survey.xy[0:-2,0]) + \
#                      np.abs(adcp_survey.xy[1:-1,1]-adcp_survey.xy[0:-2,1])
#    sps.find_peaks_cwt(velocity_change, np.arange(1,10), wavelet=None, max_distances=None, gap_thresh=None, min_length=None, min_snr=1, noise_perc=10)

def group_adcp_obs_by_spacetime(adcp_obs,max_gap_m=30.0,
                                max_gap_minutes=20.0,max_group_size=6):
    """
    Sorts ADCPData objects first into groups by closeness in terms of location,
    and then further sorts location groups by time. Groups of ADCPData objects
    must first be within max_gap_m from each other, and then be within max_gap_minutes
    of each other.
    Inputs:
        adcp_obs = list of ADCPTransectData objects
        max_gap_m = maximum distance allowed between ADCP observations when grouping
        max_gap_minutes = maximum time allowed between ADCP observations when grouping
        max_group_size = maximum number of ADCPData objects per group
    Returns:
        List of lists that contain groups of input ADCPData objects
    """
    # first pass: cluster by location
    space_groups = group_adcp_obs_within_space(adcp_obs,max_gap_m)
    for i in range(len(space_groups)):
        print 'space group',i,'- ',len(space_groups[i]), 'observations'
    # second pass: split each location cluster by time gaps
    spacetime_groups = []
    for grp in space_groups:
        (sub_groups, gaps) = group_adcp_obs_within_time(grp,
                                                        max_gap_minutes,
                                                        max_group_size)
        spacetime_groups.extend(sub_groups)
    for i in range(len(spacetime_groups)):
        print 'spacetime group',i,'- ',len(spacetime_groups[i]), 'observations'
    return spacetime_groups


def group_adcp_obs_within_space(adcp_obs,max_gap_m=30.0):
    """
    Sorts ADCPData objects into groups by closeness in space, in an
    ordered-walk/brute force manner. Distances between all ADCPData observation
    centroids are found, and then starting with the first ADCPData, the remaining
    ADCPData objects are evaluated for distance to the first. If within 'max_gap_m'
    they are grouped and marked as 'picked' so they will not be assigned to a group
    more than once.
    Inputs:
        adcp_obs = list of ADCPTransectData objects
        max_gap_m = maximum distance allowed between ADCP observations when grouping
    Returns:
        List of lists that contain groups of input ADCPData objects
    """

    n_obs = len(adcp_obs)
    (centers,distances) = find_centroid_distance_matrix(adcp_obs)
    # picked[i] == 1 once observation i has been assigned to a group
    picked = np.zeros(n_obs,np.int)
    groups = []
    for i in range(n_obs):
        # skip already-grouped observations and those with NaN centroids
        if not picked[i] and ~np.isnan(centers[i][0,0]):
            sub_group = [adcp_obs[i],]
            picked[i] = 1
            my_dist = distances[i,:]
            # visit neighbors in order of increasing distance from obs i
            nn = np.argsort(my_dist)
            for n in nn:
                if not picked[n] and ~np.isnan(centers[n][0,0]):
                    if my_dist[n] < max_gap_m:
                        sub_group.append(adcp_obs[n])
                        picked[n] = 1
            groups.append(sub_group)
    return groups


def group_adcp_obs_within_time(adcp_obs,max_gap_minutes=20.0,max_group_size=6):
    """
    Sorts ADCPData objects into groups by closeness in time, with groups being
    separated by more than 'max_gap_minutes'. This method first sorts the group by
    start time, and then splits the observations where they are more than
    'max_gap_minutes' apart.
356 | Inputs: 357 | adcp_obs = list of ADCPTransectData objects 358 | max_gap_minutes = maximum Time allowed between ADCP observations when grouping 359 | max_group_size = maximum number of ADCPData objects per group 360 | Returns: 361 | List of lists that contain groups of input ADCPData objects 362 | """ 363 | if len(adcp_obs) == 1: 364 | return ([adcp_obs,], [None,]) 365 | else: 366 | start_times = list() 367 | for a in adcp_obs: 368 | if a.mtime is not None: 369 | start_times.append(a.mtime[0]) 370 | else: 371 | start_times.append(None) 372 | 373 | if start_times: 374 | gaps, nn, nnan = find_start_time_gaps(start_times) 375 | adcp_obs_sorted = [ adcp_obs[i] for i in nn ] 376 | # convert nnan boolean list to integer index 377 | nnan_i = nnan * range(len(nnan)) 378 | adcp_obs_sorted = [ adcp_obs_sorted[i] for i in nnan_i ] 379 | return group_according_to_gap(adcp_obs_sorted,gaps,max_gap_minutes,max_group_size=6) 380 | else: 381 | raise Exception,"find_transects_within_minimum_time_gap(): No valid times found in input files!" 382 | 383 | 384 | 385 | def find_adcp_files_within_period(working_directory,max_gap=20.0,max_group_size=6): 386 | """ 387 | Sorts a directory of ADCPRdiWorkHorseData raw files into groups by 388 | closeness in time, with groups being separated by more than 389 | 'max_gap_minutes'. This method first sorts the files by start time, and 390 | then splits the observations where they are more than 391 | 'max_gap_minutes' apart. 
    Inputs:
        working_directory = directory path containing ADCP raw or netcdf files
        max_gap = maximum time allowed between ADCP observations when grouping (minutes)
        max_group_size = maximum number of ADCPData objects per group
    Returns:
        List of lists that contain groups of input ADCPData objects
    """
    if os.path.exists(working_directory):
        # RDI raw files (*r.000/*R.000) plus any netcdf files
        data_files = glob.glob(os.path.join(working_directory,'*[rR].000'))
        data_files.extend(glob.glob(os.path.join(working_directory,'*.nc')))
    else:
        print "Path (%s) not found - exiting."%working_directory
        exit()

    start_times = list()
    for data_file in data_files:
        try:
            a = adcpy.open_adcp(data_file,
                                file_type="ADCPRdiWorkhorseData",
                                num_av=1)
            start_times.append(a.mtime[0])
        except:
            # NOTE(review): bare except -- any unreadable file is silently
            # treated as having no start time
            start_times.append(None)

    if start_times:
        gaps, nn, nnan = find_start_time_gaps(start_times)
        data_files_sorted = [ data_files[i] for i in nn ]
        # convert nnan boolean list to integer index
        nnan_i = nnan * range(len(nnan))
        data_files_sorted = [ data_files_sorted[i] for i in nnan_i ]
        return group_according_to_gap(data_files_sorted,gaps,max_gap,max_group_size)


def find_start_time_gaps(start_times_list):
    """
    Find the time difference in minutes between datenum elements in a list
    Sorts, removes nans, and turns remaining datenum values in 'start_times_list'
    into datetime objects, finds the timedelta objects between them, and
    converts to minutes.
    Inputs:
        start_times_list = numpy 1D array of matplotlib datenum values
    Returns:
        time_gaps_minutes = gaps between sorted times in start_times_list {minutes}
        nn = sort index for start_times_list
        nnan = boolean index of start_times_list[nn] where True is is non-nan
    """

    # sort, remove unknowns, convert to datetime object
    # (None entries become NaN via the float64 conversion)
    start_times = np.array(start_times_list, dtype=np.float64)
    nn = np.argsort(start_times)
    start_times_sorted = start_times[nn]
    nnan = ~np.isnan(start_times_sorted)
    start_times_sorted = num2date(start_times_sorted[nnan])
    # returns datetime.timedelta objects
    time_gaps_minutes = np.zeros(len(start_times_sorted)-1,np.float64)
    for i in range(len(start_times_sorted)-1):
        t_delta = start_times_sorted[i+1]-start_times_sorted[i]
        # timedelta objects only have days/seconds
        time_gaps_minutes[i] = t_delta.total_seconds()/60.0
    return (time_gaps_minutes, nn, nnan)


def group_according_to_gap(flat_list,gaps,max_gap,max_group_size):
    """
    Splits a python list into groups by their gaps in time, using a list of
    gaps between them.
    Inputs:
        flat_list = python list, shape [n]
        gaps = numeric list, shape [n-1], descibing gaps between elements of flat_list
        max_gap = maximum gap allowed between list elements
        max_group_size = maximum number of list elements per group
    Returns:
        List of lists that contain groups of input list elements
    """
    within_gap = gaps <= max_gap
    groups = list()
    group_gaps = list()
    sub_group = list()
    sub_gaps = list()
    sub_group.append(flat_list[0])
    for i in range(len(gaps)):
        # start a new group when the gap is too large or the group is full
        if ~within_gap[i] or len(sub_group) >= max_group_size:
            groups.append(sub_group)
            if not sub_gaps:
                sub_gaps.append((None,))
            group_gaps.append(sub_gaps)
            sub_group = []
            sub_gaps = []
        else:
            sub_gaps.append(gaps[i])
        sub_group.append(flat_list[i+1])
    # flush the final group
    groups.append(sub_group)
    if not sub_gaps:
        sub_gaps.append((None,))
    group_gaps.append(sub_gaps)
    # returning (list of file lists, list of gap time lists)
    return (groups, group_gaps)


def calc_transect_flows_from_uniform_velocity_grid(adcp,depths=None,use_grid_only=False):
    """
    Calculates the cross-sectional area of the ADCP profiles from projection
    data, and multiplies it by the velocities to calculate flows
    and mean velocities.
    Inputs:
        adpc = ADCPData object.
projected to an xy regualr grid projection 498 | depths = optional 1D array of depths that correspond the ensemble 499 | dimension of velocity in adcp 500 | use_grid_only = True: use each grid cell to calc flows/mean velocities 501 | False: first find depth-average velocties, then use depths to find 502 | flows/mean velocties 503 | Returns: 504 | scalar_mean_vel = mean veolocity of total flow shape [3] {m/s} 505 | depth_averaged_vel = depth averaged velocity, shape [n,3] {m/s} 506 | total_flow = total U,V, and W discharge [3] {m^3/s} 507 | total_survey_area = total area used for flow calculations {m^3} 508 | """ 509 | # check to see if adcp is child of ADCPTransectData ?? 510 | if adcp.xy is None: 511 | ValueError("xy projection required") 512 | raise 513 | 514 | if adcp.rotation_angle is None: 515 | print 'Warning - No alignment axis set: Calculating flows according to U=East and V=North' 516 | rfv = False 517 | if not "bt_depth" in dir(adcp): 518 | rfv = True 519 | elif adcp.bt_depth is None: 520 | rfv = True 521 | xd,yd,dd,xy_line = adcpy.util.find_projection_distances(adcp.xy) 522 | dxy = abs(dd[0]-dd[1]) 523 | dz = abs(adcp.bin_center_elevation[0]-adcp.bin_center_elevation[1]) 524 | (depths, velocity_mask) = adcp.get_velocity_mask(range_from_velocities=rfv,nan_mask=True) 525 | 526 | if use_grid_only: 527 | area_grid = velocity_mask*dxy*dz 528 | total_survey_area = np.nansum(np.nansum(area_grid)) 529 | scalar_mean_vel = np.zeros(3) 530 | total_flow = np.zeros(3) 531 | depth_averaged_vel = np.zeros((adcp.n_ensembles,3)) 532 | for i in range(3): 533 | total_flow[i] = np.nansum(np.nansum(adcp.velocity[:,:,i]*area_grid)) 534 | masked_vel = adcp.velocity[:,:,i]*velocity_mask 535 | depth_averaged_vel[:,i] = sp.nanmean(masked_vel,axis=1) 536 | scalar_mean_vel[i] = sp.nanmean(masked_vel.ravel()) 537 | else: 538 | if rfv: 539 | print 'Warning - No bottom depth set: Calculating flows according valid velocity bins only' 540 | total_survey_area = np.nansum(dxy*depths) 541 
def find_centroid_distance_matrix(adcp_obs):
    """
    Calculates the full matrix of distances between the ensemble-location
    centroids of a list of ADCPData objects (each pair is computed
    twice ... inefficient).
    Inputs:
        adcp_obs = list of ADCPData objects, shape [n]
    Returns:
        centers = list of centroids of ensemble locations of input
            ADCPData objects, shape [n]; [nan, nan] for objects with no
            xy projection
        distances = xy distance between centers, shape [n,n]
    """
    n_obs = len(adcp_obs)
    distances = np.empty((n_obs, n_obs), np.float64)
    centers = []
    for a in adcp_obs:
        if a.xy is not None:
            centers.append(adcpy.util.centroid(a.xy))
        else:
            # no projection available - mark the centroid as unknown
            centers.append(np.array([np.nan, np.nan]))
    # BUG FIX: a leftover list comprehension previously overwrote the
    # nan-safe 'centers' built above, defeating the xy=None handling
    for i in range(n_obs):
        for j in range(n_obs):
            distances[i, j] = adcpy.util.find_line_distance(centers[i], centers[j])
    return (centers, distances)
def transect_rotate(adcp_transect, rotation, xy_line=None):
    """
    Rotates ADCPTransectData U and V velocities.
    Inputs:
        adcp_transect = ADCPTransectData object
        rotation = one of:
            None - no rotation of averaged velocity profiles
            'normal' - rotation based upon the normal to the plotline
                (default rotation type)
            'principal flow' - uses the 1st principal component of
                variability in uv flow direction
            'Rozovski' - individual rotation of each vertical velocity
                profile to maximize U
            'no transverse flow' - rotation by the net flow vector is used
                to minimize V
            a number - fixed rotation angle applied directly
        xy_line = numpy array of line defined by 2 points:
            [[x1,y1],[x2,y2]], or None
    Returns:
        adcp_transect = ADCPTransectData object with rotated uv velocities
    Raises:
        ValueError when 'normal' rotation is requested without an xy
        projection or xy_line, or when the rotation string is unknown.
    """
    # NOTE: a stray docstring copy-pasted from find_centroid_distance_matrix()
    # previously shadowed this function's real docstring; removed.
    if rotation == "normal":
        # find angle of line:
        if xy_line is None:
            if adcp_transect.xy is None:
                raise ValueError("transect_rotate() error: ADCPData must be xy projected, or input xy_line must be supplied for normal rotation")
            xy_line = adcpy.util.map_xy_to_line(adcp_transect.xy)
        theta = adcpy.util.calc_normal_rotation(xy_line)
    elif rotation == "no transverse flow":
        flows = adcp_transect.calc_ensemble_flow(range_from_velocities=True)
        theta = adcpy.util.calc_net_flow_rotation(flows[:, 0], flows[:, 1])
    elif rotation == "Rozovski":
        flows = adcp_transect.calc_ensemble_flow(range_from_velocities=True)
        theta = adcpy.util.calc_Rozovski_rotation(flows[:, 0], flows[:, 1])
    elif rotation == "principal flow":
        flows = adcp_transect.calc_ensemble_flow(range_from_velocities=True)
        theta = adcpy.util.principal_axis(flows[:, 0], flows[:, 1], calc_type='EOF')
    elif isinstance(rotation, str):
        raise ValueError("In transect_rotate(): input 'rotation' string not understood: %s" % rotation)
    else:
        # assume a numeric rotation angle was supplied directly
        theta = rotation

    adcp_transect.set_rotation(theta, 'uv')

    return adcp_transect
def find_uv_dispersion(adcp):
    """
    Calculates dispersion coefficients of velocities in adcp according to
    Fischer et al. 1979.
    Inputs:
        adcp = ADCPTransectData object
    Returns:
        ustbar = first value returned by adcpy.util.calcKxKy (the original
            docstring left this blank; see that function - TODO confirm)
        Kx_3i = horizontal dispersion coefficients
        Ky_3i = lateral dispersion coefficients
    Raises:
        ValueError when adcp has no xy projection.
    """
    # should check to see if it is regular grid - required for dispersion calc
    if adcp.xy is None:
        # BUG FIX: the ValueError was previously constructed but never
        # raised (followed by a bare 'raise' with no active exception)
        raise ValueError("adcp.xy (xy projection) must exist for dispersion calculation")
    # BUG FIX: the original tested 'adcp.bt_depth in adcp.__dict__', i.e.
    # membership of the attribute's *value* among the dict keys (and
    # raised AttributeError when bt_depth did not exist at all); we want
    # to test for the attribute name
    if 'bt_depth' in adcp.__dict__:
        depth = adcp.bt_depth
    else:
        (depth, velocity_mask) = adcp.get_velocity_mask(range_from_velocities=True,
                                                        nan_mask=True)
    xd, yd, dd, xy_line = adcpy.util.find_projection_distances(adcp.xy)
    return adcpy.util.calcKxKy(adcp.velocity[:, :, 0],
                               adcp.velocity[:, :, 1],
                               dd,
                               adcp.bin_center_elevation,
                               depth)
class NoDataGivenError(Exception):
    """Raised when an NMEAStream read is attempted with no data source.

    The message is forwarded to the Exception base class so str(err) and
    logging display it; the historical .message attribute is preserved
    for backward compatibility.
    """
    def __init__(self, message):
        # BUG FIX: Exception.__init__ was never called, so str(err) was
        # empty and the message was invisible in tracebacks/logs
        super(NoDataGivenError, self).__init__(message)
        self.message = message
14 | """ 15 | self.stream = stream_obj 16 | self.head = '' 17 | 18 | def get_strings(self, data=None, size=1024): 19 | """ Read and return sentences as strings 20 | """ 21 | return self._read(data=data, size=size) 22 | 23 | def get_objects(self, data=None, size=1024): 24 | """ Get sentences but return list of NMEA objects 25 | """ 26 | str_data = self._read(data=data, size=size) 27 | nmea_objects = [] 28 | for nmea_str in str_data: 29 | try: 30 | nmea_ob = self._get_type(nmea_str)() 31 | except TypeError: 32 | # NMEA sentence was not recognised 33 | continue 34 | nmea_ob.parse(nmea_str) 35 | nmea_objects.append(nmea_ob) 36 | 37 | return nmea_objects 38 | 39 | 40 | def _read(self, data=None, size=1024): 41 | """ Read size bytes of data. Always strip off the last record and 42 | append to the start of the data stream on the next call. 43 | This ensures that only full sentences are returned. 44 | """ 45 | if not data and not self.stream and not self.head: 46 | # If there's no data and no stream, raise an error 47 | raise NoDataGivenError('No data was provided') 48 | 49 | if not data and self.stream: 50 | read_data = self.stream.read(size) 51 | else: 52 | read_data = data 53 | 54 | data = self.head + read_data 55 | # DBG: 56 | print "Joined head and read_data to get" 57 | print "-"*20 58 | print data 59 | print "-"*20 60 | raw_sentences = self._split(data) 61 | if not read_data: 62 | self.head = '' 63 | return raw_sentences 64 | self.head = raw_sentences[-1] 65 | full_sentences = raw_sentences[:-1] 66 | return full_sentences 67 | 68 | def _get_type(self, sentence): 69 | """ Get the NMEA type and return the appropriate object. Returns 70 | None if no such object was found. 71 | 72 | TODO: raise error instead of None. Failing silently is a Bad Thing. 73 | We can always catch the error later if the user wishes to supress 74 | errors. 
75 | """ 76 | sen_type = sentence.split(',')[0].lstrip('$') 77 | sen_mod = __import__('pynmea.nmea', fromlist=[sen_type]) 78 | sen_obj = getattr(sen_mod, sen_type, None) 79 | return sen_obj 80 | 81 | def _split(self, data, separator=None): 82 | """ Take some data and split up based on the notion that a sentence 83 | looks something like: 84 | $x,y,z or $x,y,z*ab 85 | 86 | separator is for cases where there is something strange or 87 | non-standard as a separator between sentences. 88 | Without this, there is no real way to tell whether: 89 | $x,y,zSTUFF 90 | is legal or if STUFF should be stripped. 91 | """ 92 | sentences = data.split('$') 93 | clean_sentences = [] 94 | for item in sentences: 95 | cleaned_item = item.rstrip() 96 | if separator: 97 | cleaned_item = cleaned_item.rstrip(separator) 98 | if '*' in cleaned_item.split(',')[-1]: 99 | # There must be a checksum. Remove any trailing fluff: 100 | try: 101 | first, checksum = cleaned_item.split('*') 102 | except ValueError: 103 | # Some GPS data recorders have been shown to output 104 | # run-together sentences (no leading $). 105 | # In this case, ignore error and continue, discarding the 106 | # erroneous data. 107 | # TODO: try and fix the data. 108 | continue 109 | cleaned_item = '*'.join([first, checksum[:2]]) 110 | if cleaned_item: 111 | clean_sentences.append(cleaned_item) 112 | 113 | return clean_sentences 114 | -------------------------------------------------------------------------------- /adcpy/pynmea/utils.py: -------------------------------------------------------------------------------- 1 | """ Functions that get used by multiple classes go in here 2 | """ 3 | 4 | 5 | def checksum_calc(nmea_str): 6 | """ Loop through all of the given characters and xor the current to the 7 | previous (cumulatively). 
8 | """ 9 | chksum_val = 0 10 | nmea_str = nmea_str.replace('$', '') 11 | nmea_str = nmea_str.split('*')[0] 12 | for next_char in nmea_str: 13 | chksum_val ^= ord(next_char) 14 | 15 | return "%02X" % chksum_val 16 | 17 | -------------------------------------------------------------------------------- /adcpy/transect_average.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Production example script that averages repeated tranects, resolving and visualizing secondary circulation 3 | Driver script that is designed to average repeated ADCP transect observations 4 | in an effort of reduce measurement error and better resolve non-steamwise and 5 | velocities and secondary circulation features. A summary of script functions: 6 | 1) Assess an input directory for ADCP observations (raw files) that match 7 | in space and time. 8 | 2) Group matchcing ADCP observations into groups of a maxium number for 9 | averaging. 10 | 3) Pre-process raw ADCP observations before averaging as appropriate. 11 | 4) Bin-average pre-processed ADCP observation velcotities 12 | 5) Generate netcdf and/or CSV output files of bin-average velocities 13 | 6) Generate various plots of streamwise, depth averaged, 3D velocities 14 | and secondary circulation features. 15 | 16 | The script options are listed and described immediatly below this comment 17 | block. 
18 | 19 | This code is open source, and defined by the included MIT Copyright License 20 | 21 | Designed for Python 2.7; NumPy 1.7; SciPy 0.11.0; Matplotlib 1.2.0 22 | 2014-09 - First Release; blsaenz, esatel 23 | """ 24 | ## START script options ####################################################### 25 | 26 | # bin-averaging paramterters 27 | avg_dxy = 2.0 # Horizontal resolution of averaging bins {m} 28 | avg_dz = 0.25 # Vertical resolution of averaging bins {m} 29 | avg_max_gap_m = 30.0 # Maximum distance allowed between ADCP observations when averaging {m} 30 | avg_max_gap_minutes = 20.0 # Maximum time allowed between ADCP observations when averaging {m} 31 | avg_max_group_size = 6 # Maximum number of ADCP observations to average {m} 32 | avg_bin_sd_drop = 3 # Maximum number of ADCP observations to average {m} 33 | avg_normal_to_flow = False 34 | 35 | # post-average options 36 | avg_rotation = 'Rozovski' # One of ['Rozovski','no transverse flow','principal flow','normal',None] 37 | avg_std_drop = 3.0 # Standard deviation of velocity, above which samples are dropped from analysis {0.0=no dropping, 2.0-3.0 typically [number of standard deviations]} 38 | avg_std_interp = True # Perform interpolation of holes in velocity profiles left by high standard deviation removal {typically True with std_drop > 0.0} 39 | avg_smooth_kernel = 3 # Smooth velocity data using a square kernel box-filter, with side dimension = avg_smooth_kernel. 
def transect_average(pre_process_input_file=None):
    """
    Receives a list of ADCPTransectData objects from transect_preprocessor.py,
    and then groups and bin-averages the transects.  Group-averaged ADCPData
    objects, velocities, and plots are optionally output to the outpath
    supplied by transect_preprocessor().
    Inputs:
        pre_process_input_file = path to a transect_preprocessor input file,
          or None to use the default file.
    """

    (transects, outpath) = transect_preprocessor.transect_preprocessor(pre_process_input_file)
    print('total transects loaded: %i' % len(transects))
    grps_to_average = group_adcp_obs_by_spacetime(transects,
                                                  max_gap_m=avg_max_gap_m,
                                                  max_gap_minutes=avg_max_gap_minutes,
                                                  max_group_size=avg_max_group_size)
    print('total groups to average: %i' % len(grps_to_average))

    track_fig = 101
    for grp_num, grp in enumerate(grps_to_average):
        if avg_plot_xy:
            adcpy.plot.plot_obs_group_xy_lines(grp, fig=track_fig,
                                               title='Group%03i Source Observations' % grp_num)

        avg = average_transects(grp,
                                dxy=avg_dxy,
                                dz=avg_dz,
                                return_adcpy=True,
                                plotline_from_flow=avg_normal_to_flow,
                                sd_drop=avg_bin_sd_drop)

        if avg_plot_xy:
            adcpy.plot.get_fig(fig=track_fig)
            plt.plot(avg.xy[:, 0], avg.xy[:, 1], label='average projection')
            plt.legend(prop={'size': 10})
            if avg_save_plots:
                plt.savefig(os.path.join(outpath, "group%03i_xy_lines.png" % grp_num))

        if avg_rotation is not None:
            avg = transect_rotate(avg, avg_rotation)
        if avg_std_drop > 0:
            # BUG FIX: the configured avg_std_drop threshold was previously
            # ignored (sd=3.0 was hard-coded in both calls)
            avg.sd_drop(sd=avg_std_drop,
                        sd_axis='elevation',
                        interp_holes=avg_std_interp)
            avg.sd_drop(sd=avg_std_drop,
                        sd_axis='ensemble',
                        interp_holes=avg_std_interp)
        if avg_smooth_kernel > 2:
            # BUG FIX: the configured kernel size was previously ignored
            # (kernel_size=3 was hard-coded)
            avg.kernel_smooth(kernel_size=avg_smooth_kernel)

        if avg_save_csv:
            write_csv_velocity_array(avg, os.path.join(outpath, 'group%03i_velocity.csv' % grp_num), no_header=True)
            write_csv_velocity_db(avg, os.path.join(outpath, 'group%03i_velocity_db.csv' % grp_num), no_header=False)
            write_ensemble_mean_velocity_db(avg, os.path.join(outpath, 'group%03i_velocity_depth_means.csv' % grp_num),
                                            no_header=False, range_from_velocities=True)

        if avg_save_netcdf:
            avg.write_nc(os.path.join(outpath, 'group%03i.nc' % grp_num), zlib=True)

        if avg_plot_avg_n_sd:
            uvw = 'uvw'
            for i in range(3):
                plot_avg_n_sd(avg, i, 0.05)
                if avg_save_plots:
                    plt.savefig(os.path.join(outpath, "group%03i_%s_avg_n_sd.png" % (grp_num, uvw[i])))

        if avg_plot_mean_vectors:
            adcpy.plot.plot_ensemble_mean_vectors(avg, title='Group%03i Mean Velocity [m/s]' % grp_num)
            if avg_save_plots:
                plt.savefig(os.path.join(outpath, "group%03i_mean_velocity.png" % grp_num))

        if avg_plot_secondary_circulation:
            adcpy.plot.plot_secondary_circulation(avg, u_vecs=30, v_vecs=30,
                                                  title='Group%03i Cross-Stream Velocity [m/s] and Secondary Circulation Vectors' % grp_num)
            if avg_save_plots:
                plt.savefig(os.path.join(outpath, "group%03i_secondary_circulation.png" % grp_num))

        if avg_plot_uvw_velocity_array:
            adcpy.plot.plot_uvw_velocity_array(avg.velocity,
                                               title='Group%03i Velocity [m/s]' % grp_num,
                                               ures=0.1, vres=0.1, wres=0.05)
            if avg_save_plots:
                plt.savefig(os.path.join(outpath, "group%03i_uvw_velocity.png" % grp_num))

        if avg_plot_flow_summmary:
            adcpy.plot.plot_flow_summmary(avg, title='Group%03i Streamwise Summary' % grp_num,
                                          ures=0.1, vres=0.1, use_grid_flows=True)
            if avg_save_plots:
                plt.savefig(os.path.join(outpath, "group%03i_flow_summary.png" % grp_num))

        if avg_show_plots:
            plt.show()
        plt.close('all')

    print('ADCP processing complete!')
def plot_avg_n_sd(avg, uvw, resolution=0.1):
    """
    Generates a vertical three-panel plot, showing images of a bin-averaged
    velocity, the number of velocity measurements in each bin, and the bin
    standard deviation velocity.  Designed to be used in conjunction with
    transect_average() output.
    Inputs:
        avg = ADCPData object, with extra velocity_n and velocity_sd data
          fields produced by transect_average()
        uvw = integer velocity-component index: 0 (U), 1 (V) or 2 (W).
          (The docstring previously claimed a 'U'/'V'/'W' string was
          expected, but every caller passes the integer index.)
        resolution = optional value to round the plot velocity scales up toward
    Returns:
        matplotlib figure containing the three vertical panels
    """

    if uvw == 0:
        v_str = 'U'
    elif uvw == 1:
        v_str = 'V'
    else:
        v_str = 'W'

    inv = 1 / resolution
    xx, yy, dd, pline = adcpy.util.find_projection_distances(avg.xy)
    # round the velocity color scale outward to the nearest 'resolution'
    minv = np.nanmin(np.nanmin(np.floor(avg.velocity[..., uvw] * inv))) * resolution
    maxv = np.nanmax(np.nanmax(np.ceil(avg.velocity[..., uvw] * inv))) * resolution
    avg_panel = adcpy.plot.IPanel(velocity=avg.velocity[:, :, uvw],
                                  x=dd,
                                  y=avg.bin_center_elevation,
                                  minv=minv,
                                  maxv=maxv,
                                  xlabel='m',
                                  ylabel='m',
                                  units='m/s',
                                  use_pcolormesh=True,
                                  title='%s Averaged Velocity [m/s]' % v_str)
    maxv = np.nanmax(np.nanmax(avg.velocity_n[..., uvw]))
    n_panel = adcpy.plot.IPanel(velocity=avg.velocity_n[:, :, uvw],
                                x=dd,
                                y=avg.bin_center_elevation,
                                minv=0,
                                maxv=maxv,
                                xlabel='m',
                                ylabel='m',
                                units='number',
                                use_pcolormesh=True,
                                title='n Samples')
    maxv = np.nanmax(np.nanmax(np.ceil(avg.velocity_sd[..., uvw] * inv))) * resolution
    sd_panel = adcpy.plot.IPanel(velocity=avg.velocity_sd[:, :, uvw],
                                 x=dd,
                                 y=avg.bin_center_elevation,
                                 minv=0,
                                 maxv=maxv,
                                 xlabel='m',
                                 ylabel='m',
                                 units='m/s',
                                 use_pcolormesh=True,
                                 title='Standard Deviation [m/s]')
    fig = adcpy.plot.plot_vertical_panels((avg_panel, n_panel, sd_panel))
    return fig


def main():
    """Command-line entry point: run transect_average on sys.argv[1]."""
    import sys
    prepro_input = sys.argv[1]
    transect_average(prepro_input)


# run myself
if __name__ == "__main__":
    main()
18 | 19 | This code is open source, and defined by the included MIT Copyright License 20 | 21 | Designed for Python 2.7; NumPy 1.7; SciPy 0.11.0; Matplotlib 1.2.0 22 | 2014-09 - First Release; blsaenz, esatel 23 | """ 24 | ## START script options ####################################################### 25 | 26 | # processing paramterters 27 | flow_regrid = False 28 | flow_regrid_dxy = 25.0 # Horizontal resolution of averaging bins {m} 29 | flow_regrid_dz = 0.25 # Vertical resolution of averaging bins {m} 30 | flow_regrid_bin_sd_drop = 3 # Maximum number of ADCP observations to average {m} 31 | flow_regrid_normal_to_flow = False 32 | flow_regrid_normal_to_flow = False 33 | flow_crossprod_ens_num_ave = 15 34 | flow_sd_drop = 3 # Maximum number of ADCP observations to average {m} 35 | flow_sd_drop_interp = True # Maximum number of ADCP observations to average {m} 36 | flow_smooth_kernel = 3 # Smooth velocity data using a square kernel box-filter, with side dimension = avg_smooth_kernel. 0 = no kernel smoothing 37 | flow_extrapolate_boundaries = True # Maximum number of ADCP observations to average {m} 38 | flow_rotation = "no transverse flow" 39 | flow_p1_lon_lat = None 40 | flow_p2_lon_lat = None 41 | # post-average options 42 | flow_save_processed_netcdf = True 43 | flow_netcdf_data_compression = True 44 | flow_plot_timeseries_values = True 45 | flow_write_timeseries_flows_to_csv = True 46 | flow_plot_mean_vectors = True # Generate an arrow plot of bin-averaged U-V mean velocities in the x-y plane 47 | flow_plot_flow_summmary = True # Generate a summary plot showing image plots of U,V bin-averaged velocities, an arrow plot of bin-averaged U-V mean velocities, and flow/discharge calculations 48 | flow_save_plots = True # Save the plots to disk 49 | flow_show_plots = False # Print plots to screen (pauses execution until plots are manually closed) 50 | 51 | 52 | ## END script options ######################################################### 53 | 54 | import os 55 | 
import numpy as np 56 | import csv 57 | import scipy.stats.stats as sp 58 | import matplotlib.pyplot as plt 59 | import transect_preprocessor 60 | reload(transect_preprocessor) 61 | import adcpy 62 | reload(adcpy) 63 | import adcpy_recipes 64 | 65 | def transect_flow_estimator(pre_process_input_file=None): 66 | """ 67 | Receives a list of ADCPTransectData objects from transect_preprocessor.py, 68 | and processes to find average velocities acording to options, and finally 69 | outputs flows. Flows can be written to CSV or and plotted across time, and 70 | invidual plot produced for each transect. Output fils=es are save to 71 | the outpath supplied by transect_preprocessor(). 72 | Inputs: 73 | pre_process_input_file = path to a transect_preprocessor input file, 74 | or None to use the default file. 75 | """ 76 | 77 | 78 | # initialize collector lists for time series plots 79 | mtimes = [] 80 | mean_u = [] 81 | mean_v = [] 82 | align_angle = [] 83 | total_flow = [] 84 | ustars = [] 85 | kxs = [] 86 | kys = [] 87 | data_file_num = -1 88 | 89 | # setup compression option 90 | if flow_netcdf_data_compression: 91 | zlib = True 92 | else: 93 | zlib = None 94 | 95 | # prepare plot line, if any 96 | if flow_p1_lon_lat is None or flow_p2_lon_lat is None: 97 | ll_pline = None 98 | else: 99 | ll_pline = np.array([flow_p1_lon_lat,flow_p2_lon_lat]) 100 | 101 | (transects,outpath) = transect_preprocessor.transect_preprocessor(pre_process_input_file) 102 | 103 | if flow_write_timeseries_flows_to_csv: 104 | logfname = os.path.join(outpath,'transect_flow_log.csv') 105 | lf = open(logfname,'wb') 106 | logfile = csv.writer(lf) 107 | logfile.writerow(('filename','start_date','end_date','water_mode', 108 | 'bottom_mode' 109 | 'mean_velocity_U [m/s]', 110 | 'mean_velocity V [m/s]', 111 | 'flow_volume_U [m^3/s]','flow_volume_V [m^3/s]', 112 | 'sampling_area [m^2]', 113 | 'alignment_angle [degree]', 114 | 'notes')) 115 | 116 | for t in transects: 117 | 118 | fname, ext = 
os.path.splitext(t.source) 119 | outname = os.path.join(outpath,fname) 120 | 121 | if flow_regrid: 122 | 123 | if ll_pline is not None: 124 | if flow_regrid_normal_to_flow: 125 | print "Warning! Regridding plotline given, but options also ask for a flow-based plotline." 126 | print "Ignoring flow-based plotline option..." 127 | if t.xy_srs is not None: 128 | ll_srs = t.lonlat_srs 129 | if ll_srs is None: 130 | ll_srs = t.default_lonlat_srs 131 | pline = adcpy.util.coordinate_transform(ll_pline,ll_srs,t.xy_srs) 132 | else: 133 | print "ADCPData must be projected to use transect_flow_estimator" 134 | exit() 135 | else: 136 | if flow_regrid_normal_to_flow: 137 | flows = t.calc_ensemble_flows(range_from_velocities=True) 138 | pline = adcpy.util.map_flow_to_line(t.xy,flows[:,0],flows[:,1]) 139 | else: 140 | pline = adcpy.util.map_xy_to_line(t.xy) 141 | t.xy_regrid(dxy=2.0,dz=0.25, 142 | pline=pline, 143 | sd_drop=flow_regrid_bin_sd_drop, 144 | mtime_regrid=True) 145 | 146 | else: 147 | t = t.average_ensembles(flow_crossprod_ens_num_ave) 148 | 149 | if flow_sd_drop > 0: 150 | t.sd_drop(sd=flow_sd_drop, 151 | sd_axis='elevation', 152 | interp_holes=flow_sd_drop_interp) 153 | t.sd_drop(sd=flow_sd_drop, 154 | sd_axis='ensemble', 155 | interp_holes=flow_sd_drop_interp) 156 | if flow_smooth_kernel > 2: 157 | t.kernel_smooth(kernel_size = flow_smooth_kernel) 158 | if flow_extrapolate_boundaries: 159 | t.extrapolate_boundaries() 160 | if flow_rotation is not None: 161 | t = adcpy_recipes.transect_rotate(t,flow_rotation) 162 | 163 | if flow_regrid: 164 | UVW,UVWens,flow,survey_area = \ 165 | adcpy_recipes.calc_transect_flows_from_uniform_velocity_grid(t,use_grid_only=False) 166 | Uflow = flow[0] 167 | Vflow = flow[1] 168 | U = UVW[0] 169 | V = UVW[1] 170 | else: 171 | U,Uflow,total_area,survey_area = t.calc_crossproduct_flow() 172 | V, Vflow = (np.nan,np.nan) 173 | 174 | if flow_plot_mean_vectors: 175 | fig3 = adcpy.plot.plot_ensemble_mean_vectors(t,title='Mean Velocity [m/s]') 
176 | if flow_save_plots: 177 | adcpy.plot.plt.savefig("%s_mean_velocity.png"%outname) 178 | 179 | if flow_plot_flow_summmary: 180 | fig6 = adcpy.plot.plot_flow_summmary(t,title='Streamwise Summary', 181 | ures=0.1,vres=0.1, 182 | use_grid_flows=flow_regrid) 183 | if flow_save_plots: 184 | adcpy.plot.plt.savefig("%s_flow_summary.png"%outname) 185 | 186 | if flow_show_plots: 187 | adcpy.plot.show() 188 | adcpy.plot.plt.close('all') 189 | 190 | if (flow_save_processed_netcdf): 191 | fname = outname + '.flow_processed.nc' 192 | t.write_nc(fname,zlib=zlib) 193 | 194 | if flow_plot_timeseries_values or flow_write_timeseries_flows_to_csv: 195 | 196 | data_file_num += 1 197 | 198 | # must fit to line to calc dispersion 199 | xy_line = adcpy.util.map_xy_to_line(t.xy) 200 | xd,yd,dd,xy_line = adcpy.util.find_projection_distances(t.xy,xy_line) 201 | ustar, kx, ky = adcpy.util.calcKxKy(t.velocity[:,:,0],t.velocity[:,:,1], 202 | dd,t.bin_center_elevation,t.bt_depth) 203 | 204 | if t.rotation_angle is not None: 205 | r_angle = t.rotation_angle*180.0/np.pi 206 | else: 207 | r_angle = 0.0 208 | 209 | if flow_write_timeseries_flows_to_csv: 210 | times = t.date_time_str(filter_missing=True) 211 | try: 212 | w_mode_str = '%s'%t.raw_adcp.config.prof_mode 213 | bm = t.raw_adcp.bt_mode.tolist() 214 | b_mode = []; 215 | for bm1 in bm: 216 | bmi = int(bm1) 217 | if (bmi not in b_mode): 218 | b_mode.append(bmi) 219 | b_mode_str = '' 220 | for bm1 in b_mode: 221 | if (b_mode_str == ''): 222 | b_mode_str = '%i'%bm1 223 | else: 224 | b_mode_str = '%s/%i'%(b_mode_str,bm1) 225 | except: 226 | w_mode_str = 'Unknown' 227 | b_mode_str = 'Unknown' 228 | logfile.writerow((t.source, 229 | times[0], 230 | times[-1], 231 | w_mode_str, 232 | b_mode_str, 233 | '%7.4f'%U, 234 | '%7.4f'%V, 235 | '%10.2f'%Uflow, 236 | '%10.2f'%Vflow, 237 | '%10.2f'%survey_area, 238 | '%5.2f'%r_angle, 239 | t.history)) 240 | 241 | if flow_plot_timeseries_values: 242 | mtimes.append(sp.nanmedian(t.mtime)) 243 | 
mean_u.append(U) 244 | mean_v.append(V) 245 | total_flow.append(Uflow) 246 | align_angle.append(r_angle) 247 | ustars.append(ustar) 248 | kxs.append(kx) 249 | kys.append(ky) 250 | 251 | # plot timeseries data after all files have been processed 252 | if flow_plot_timeseries_values and data_file_num>0: 253 | 254 | # plot timeseries figures 255 | fig_handle = plt.figure() 256 | plt.subplot(311) 257 | align_angle= np.array(align_angle) 258 | mtimes = np.array(mtimes) 259 | mean_u = np.array(mean_u) 260 | mean_v = np.array(mean_v) 261 | total_flow = np.array(total_flow) 262 | aa = -align_angle*np.pi/180.0 263 | uq = np.cos(aa)*mean_u + np.sin(aa)*mean_v 264 | vq = -np.sin(aa)*mean_u + np.cos(aa)*mean_v 265 | v_mag = np.sqrt(uq**2 + vq**2) 266 | vScale = np.max(v_mag) 267 | vScale = max(vScale,0.126) 268 | qk_value = np.round(vScale*4)/4 269 | Q = plt.quiver(mtimes,np.zeros(len(mtimes)),uq,vq, 270 | width=0.003, 271 | headlength=10, 272 | headwidth=7, 273 | scale = 10*vScale, #scale = 0.005, 274 | scale_units = 'width' 275 | ) 276 | qk = plt.quiverkey(Q, 0.5, 0.85, qk_value, 277 | r'%3.2f '%qk_value + r'$ \frac{m}{s}$', labelpos='W', 278 | ) 279 | plt.title('Time series data: %s'%outpath) 280 | ax = plt.gca() 281 | ax.xaxis_date() 282 | plt.gcf().autofmt_xdate() 283 | ax.yaxis.set_visible(False) 284 | ax.set_xticklabels([]) 285 | plt.subplot(312) 286 | plt.plot(mtimes,total_flow) 287 | plt.ylabel('m^3/s') 288 | ax = plt.gca() 289 | ax.xaxis_date() 290 | plt.gcf().autofmt_xdate() 291 | ax.set_xticklabels([]) 292 | plt.subplot(313) 293 | plt.plot(mtimes,align_angle,'bo') 294 | plt.ylabel('rotation angle') 295 | ax = plt.gca() 296 | ax.xaxis_date() 297 | plt.gcf().autofmt_xdate() 298 | ts_plot = os.path.join(outpath,'time_series_plots.png') 299 | fig_handle.savefig(ts_plot) 300 | 301 | fig_handle = plt.figure(1111) 302 | plt.subplot(311) 303 | plt.plot(mtimes,ustars) 304 | plt.ylabel('u* m/s') 305 | ax = plt.gca() 306 | ax.xaxis_date() 307 | #plt.gcf().autofmt_xdate() 308 
| ax.set_xticklabels([]) 309 | plt.title('Dispersion Coefficients: %s'%outpath) 310 | plt.subplot(312) 311 | plt.plot(mtimes,kxs) 312 | plt.ylabel('Kx m^2/s') 313 | ax = plt.gca() 314 | ax.xaxis_date() 315 | #plt.gcf().autofmt_xdate() 316 | ax.set_xticklabels([]) 317 | plt.subplot(313) 318 | plt.plot(mtimes,kys,'b') 319 | plt.ylabel('Ky m^2/s') 320 | ax = plt.gca() 321 | ax.xaxis_date() 322 | plt.gcf().autofmt_xdate() 323 | ts_plot = os.path.join(outpath,'time_series_dispersion.png') 324 | fig_handle.savefig(ts_plot) 325 | plt.close('all') 326 | 327 | if flow_write_timeseries_flows_to_csv: 328 | lf.close() 329 | 330 | print 'transect_flow_estimator completed!' 331 | 332 | 333 | def main(): 334 | import sys 335 | prepro_input = sys.argv[1] 336 | transect_flow_estimator(prepro_input) 337 | 338 | 339 | # run myself 340 | if __name__ == "__main__": 341 | 342 | #transect_flow_estimator('trn_pre_input_RIO.py') 343 | main() 344 | 345 | -------------------------------------------------------------------------------- /adcpy/transect_preprocessor.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Example preprocessor ADCP files for ADCPRdiWorkhorseData compatible raw/netcdf files 3 | 4 | Driver script that is designed find and load raw ADCP observations from a 5 | designated directory, and perform certain processing task on them, optionally 6 | saving the reuslts to ADCPData netcdf format, and/or passing a list of 7 | ADCPData python objects to a parent script. 8 | 9 | This script requires an options file. If no path to a valid options file is 10 | passed as an argument to transect_preprocessor(), it look for the default 11 | file 'transect_preprocessor_input.py' in the local run directory. 
This code is open source, and defined by the included MIT Copyright License

Designed for Python 2.7; NumPy 1.7; SciPy 0.11.0; Matplotlib 1.2.0
2014-09 - First Release; blsaenz, esatel
"""
import glob
import os
import numpy as np
#import scipy.stats.stats as sp
import scipy.stats.morestats as ssm

import transect_preprocessor_input
reload(transect_preprocessor_input)
from transect_preprocessor_input import *
import shutil
import adcpy
reload(adcpy)
from matplotlib.dates import num2date#,date2num,

# Common formatting for datenums:
def fmt_dnum(dn):
    # matplotlib datenum -> locale date/time string, for progress printing
    return num2date(dn).strftime('%c')



default_option_file = r"transect_preprocessor_input.py"

def transect_preprocessor(option_file=None):
    """
    The method finds, loads, pre-processes, and returns ADCPData objects
    for ADCPRdiWorkhorseData compatible raw/netcdf files.  It returns
    a list of ADCPData objects.

    See the default options file 'transect_preprocessor_input.py' for the
    input options.

    Inputs:
        option_file = path to an options .py file, or None to use
          'transect_preprocessor_input.py' in the run directory.
    Returns:
        (adcp_preprocessed, outpath) = list of pre-processed ADCPData
          objects, and the output directory where results were written.
    """
    np.seterr(all='ignore')

    data_files = None
    data_files_nc = None

    if option_file is None:
        option_file = default_option_file
    # Dynamically (re)import the options module and pull its assignments into
    # this function's global namespace via star-import; the option names
    # (working_directory, file_list, adcp_depth, ...) used below come from it.
    # NOTE(review): the shipped transect_preprocessor_input.py does not appear
    # to define 'file_list', which is read below -- confirm against the
    # intended options template.
    try:
        options, fileExtension = os.path.splitext(option_file)
        exec('import %s'%options)
        exec('reload(%s)'%options)
        exec("from %s import *"%options)
    except:
        print "Could not load options file: %s"%option_file
        raise

    # Build the list of input files: either the explicit file_list from the
    # options (validated by extension), or a glob of *r.000/*R.000 and *.nc
    # in the working directory.
    if os.path.exists(working_directory):
        if (os.path.isdir(working_directory)):
            if file_list is not None:
                data_files = []
                for f in file_list:
                    fileName, fileExtension = os.path.splitext(f)
                    if (('R.000' in f and fileExtension == '.000') or
                        ('r.000' in f and fileExtension == '.000')):
                        data_files.append(os.path.join(working_directory,f))
                    elif fileExtension == '.nc':
                        data_files.append(os.path.join(working_directory,f))
                    else:
                        print "Filename '%s' does not appear to be a valid raw (*r.000) or netcdf (*.nc) file - skipping."%f
            else:
                data_files = glob.glob(os.path.join(working_directory,'*[rR].000'))
                data_files += glob.glob(os.path.join(working_directory,'*.nc'))
            data_path = working_directory
        else:
            print "Could not open working_directory '%s' - exiting."%working_directory
            exit()
    else:
        print "working_directory '%s' not found - exiting."%working_directory
        exit()

    # results are written to an 'adcpy' subdirectory of the data directory
    outpath = os.path.join(data_path,'adcpy')
    if not os.path.exists(outpath):
        os.makedirs(outpath)

    # copy processing options
    shutil.copyfile(option_file, os.path.join(outpath,option_file))

    hc_count = 0
    if head_correct_spanning and (len(data_files) > 1):
        # Spanning heading correction: pool heading/bottom-track/position data
        # from all input files and fit a single correction to the whole set.
        # bin data rquired for head_correct form input files
        mtime_a = None
        mtime_b = None
        reference_heading = None
        is_a = list()
        print 'Gathering data from data_files for head_correct spanning...'
        for data_file in data_files:
            path, fname = os.path.split(data_file)

            # try:
            a = adcpy.open_adcp(data_file,
                              file_type="ADCPRdiWorkhorseData",
                              num_av=1,
                              adcp_depth=adcp_depth)
            m1,h1,bt1,xy1 = a.copy_head_correct_vars(xy_srs=xy_projection)

            if reference_heading is None:
                reference_heading = ssm.circmean(h1*np.pi/180.)*180./np.pi

            current_heading = ssm.circmean(h1*np.pi/180.)*180./np.pi

            # accumulate heading-correction inputs across files
            if mtime_a is None:
                mtime_a = m1
                heading_a = h1
                bt_vel_a = bt1
                xy_a = xy1
            else:
                mtime_a = np.concatenate((mtime_a,m1))
                heading_a = np.concatenate((heading_a,h1))
                bt_vel_a = np.row_stack((bt_vel_a,bt1))
                xy_a = np.row_stack((xy_a,xy1))

            print '+',fname,fmt_dnum(a.mtime[0])


            if debug_stop_after_n_transects:
                hc_count += 1
                if hc_count >= debug_stop_after_n_transects:
                    break
            # except:

            # print 'Failure reading %s for head_correct spanning!'%fname
            # is_a.append('True')

        print 'Number direction a headings:',np.shape(mtime_a)

        # this method is independent of self/a
        #try:
        heading_correction_a = adcpy.util.fit_head_correct(mtime_in=mtime_a,
                                      hdg_in=heading_a,
                                      bt_vel_in=bt_vel_a,
                                      xy_in=xy_a,
                                      u_min_bt=u_min_bt,
                                      hdg_bin_size=hdg_bin_size,
                                      hdg_bin_min_samples=hdg_bin_min_samples)
        # except:
        #     print 'Failure fitting head_correct spanning!'
        #     if mag_declination is not None:
        #         print 'Using simple magnetic declination correction instead'
        #         heading_correction_a = None
        #     else:
        #         print 'No magnetic declination value found - head_correct failure.'
        #         print 'exiting'
        #         exit()

    # setup compression option (zlib=None disables compression in write_nc)
    if use_netcdf_data_compression:
        zlib = True
    else:
        zlib = None

    # begin cycling/processing input files
    adcp_preprocessed = []
    for data_file in data_files:
        # try:
        a = adcpy.open_adcp(data_file,
                            file_type='ADCPRdiWorkhorseData',
                            num_av=1,
                            adcp_depth=adcp_depth)
        path, fname = os.path.split(data_file)
        fname, ext = os.path.splitext(fname)
        outname = os.path.join(outpath,fname)

        print 'Processing data_file:', outname

        if save_raw_data_to_netcdf:
            fname = outname + '.nc'
            a.write_nc(fname,zlib=zlib)

        # setup for heading correction: use the spanning fit when computed
        # above, otherwise fall back to per-file/mag-declination correction
        if head_correct_spanning and (len(data_files) > 1):
            heading_cf = heading_correction_a
        else:
            heading_cf = None

        a.lonlat_to_xy(xy_srs=xy_projection)
        a.heading_correct(cf=heading_cf,
                          u_min_bt=u_min_bt,
                          hdg_bin_size=hdg_bin_size,
                          hdg_bin_min_samples=hdg_bin_min_samples,
                          mag_dec=mag_declination)
        # pre-processing chain, each step gated by its option value
        if sidelobe_drop != 0:
            a.remove_sidelobes(fsidelobe=sidelobe_drop)
        if std_drop > 0:
            a.sd_drop(sd=std_drop,
                      sd_axis='elevation',
                      interp_holes=True)
            a.sd_drop(sd=std_drop,
                      sd_axis='ensemble',
                      interp_holes=True)
        if average_ens > 1:
            a = a.average_ensembles(ens_to_avg=average_ens)
        if regrid_horiz_m is not None:
            a.xy_regrid(dxy=regrid_horiz_m,
                        dz=regrid_vert_m,
                        xy_srs=xy_projection,
                        pline=None,
                        sort=False)
        if smooth_kernel > 2:
            a.kernel_smooth(kernel_size = smooth_kernel)
        if extrap_boundaries:
            a.extrapolate_boundaries()

        if (save_preprocessed_data_to_netcdf):
            fname = outname + '.preprocessed.nc'
            a.write_nc(fname,zlib=zlib)

        adcp_preprocessed.append(a)

        # optional early exit for debugging large directories
        if debug_stop_after_n_transects:
            if len(adcp_preprocessed) >= debug_stop_after_n_transects:
                return (adcp_preprocessed,outpath)

    return (adcp_preprocessed,outpath)

# run myself
if __name__ == "__main__":
    transect_preprocessor()

--------------------------------------------------------------------------------
/adcpy/transect_preprocessor_input.py:
--------------------------------------------------------------------------------
"""
transect_preprocessor.py Input File
Set options in this file in order to process adcp transects.
For further information on setting these options, see the ADCPy Documentation.

IMPORTANT: This options file uses Python 2.X code conventions, meaning:
 1) there may not be any tab characters in the file;
 2) option lines may have no leading spaces;
 3) strings (text in quotes) should be preceded by an 'r' for maximum compatibility.

This code is open source, and defined by the included MIT Copyright License

Designed for Python 2.7; NumPy 1.7; SciPy 0.11.0; Matplotlib 1.2.0
2014-09 - First Release; blsaenz, esatel

"""

# ADCP Data File location(s)
# -----------------------------------------------------------------------------
working_directory = r'Y:\temp\adcp_anaylsis_stations\RIO20100309_avg_test' # or None for current directory

# Processing Options
# -----------------------------------------------------------------------------
xy_projection = r'EPSG:26910' # The text-based EPSG code describing the map projection (in Northern CA, UTM Zone 10N = 'EPSG:26910')
do_head_correct = False # Switch for using/not using heading correction due to magnetic compass declination and errors.
{True or False}
head_correct_spanning = False # perform heading correction on all data files binned together {True or False}
mag_declination = 15 # magnetic compass declination - this value will be used to correct compass heading if head_correcting is not used {degrees E of true North, or None}
u_min_bt=0.3 # minimum bottom track velocity for head_correct {typically 0-0.3 [m/s] or None}
hdg_bin_size=5 # bin size of heading correction {typically 5,10 [degrees]}
hdg_bin_min_samples=10 # minimum number of sample headings in a heading bin for consideration in heading correction {typically 10-50, more is safer}
sidelobe_drop=0.1 # fraction of vertical profile to drop due to sidelobe/bottom interaction {typically 0.05-0.15 [fraction]}
std_drop=3.0 # standard deviation of velocity, above which samples are dropped from analysis {0.0=no dropping, 2.0-3.0 typically [number of standard deviations]}
std_interp=True # perform interpolation of holes in velocity profiles left by high standard deviation removal {typically True with std_drop > 0.0}
smooth_kernel=0 # smooth velocity data using a square kernel box-filter, with side dimension = smooth_kernel {0 = no smoothing, otherwise > 2}
extrap_boundaries=False # extrapolate velocity profiles upward toward surface, and downward to the sounder-detected bottom {True or False}
average_ens = 1 # average adjacent (in time) velocity profiles {typically 0-15 [number of adjacent velocity profiles(ensembles)]}
regrid_horiz_m = None # horizontal grid resolution used when regridding results {reasonable fraction of transect width, or None for default(2m) [m]}
regrid_vert_m = None # vertical grid resolution used when regridding results {reasonable fraction of transect depth, or None for default(0.1) [m]}
adcp_depth = 0.244 # depth of the adcp face under the surface {[m] or None}
p1lat = 38.1619 # latitude of origin of optional transect plot line [degrees N] or None
p1lon = -121.6843 # longitude of origin of optional transect plot line [degrees E] or None
p2lat = 38.1578 # latitude of end of optional transect plot line [degrees N] or None
p2lon = -121.6781 # longitude of end of optional transect plot line [degrees E] or None

# Data Output Options
# -----------------------------------------------------------------------------
save_raw_data_to_netcdf = True # Switch to output raw data to netCDF-CF format. {True or False}
save_preprocessed_data_to_netcdf = True # Switch to output results to netCDF-CF format. {True or False}
use_netcdf_data_compression = True # Switch to use NetCDF 4 data compression to save disk space in data and results files. {True or False}

# Debug options
debug_stop_after_n_transects = False # False, or number to limit return to
--------------------------------------------------------------------------------
/adcpy/trn_pre_input_GEO20090106.py:
--------------------------------------------------------------------------------
# INPUT FILE for adcp_transect_preprocessor
# Set options in this file in order to process adcp transects.
# For further information on setting these options, see the ADCP Python Documentation.
#
# IMPORTANT: This options file uses Python 2.X code conventions, meaning:
# 1) there may not be any tab characters in the file;
# 2) option lines may have no leading spaces;
# 3) strings (text in quotes) should be preceded by an 'r' for maximum compatibility.
9 | 10 | 11 | # ADCP Data File location(s) 12 | # ----------------------------------------------------------------------------- 13 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\WGB20090721' 14 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\WGA20090722' # or None for current directory 15 | 16 | working_directory = r'Y:\temp\ADCP_2008\NDSOS_DLADCP.VelocityData\4thRelease\GEO4thRelease\GEO20090106' # or None for current directory 17 | #working_directory = r'/Volumes/Aux/temp/adcp_anaylsis_stations/RIO20100309' # or None for current directory 18 | 19 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\TMS20090513' 20 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\JPT20080618' 21 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\MRU022510' # can't headcorrect, not enough bins 22 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\MRU012810' 23 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\MRU060408' # no nav 24 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\MRU112707' # no nav 25 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\WCI042011' # done 26 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\WCI102009' # done 27 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\WCI050608' 28 | 29 | #file_list = ['GEO4thRelease641r.000', 30 | # 'GEO4thRelease642r.000'] 31 | file_list = None 32 | 33 | # Processing Options 34 | # ----------------------------------------------------------------------------- 35 | xy_projection = r'EPSG:26910' # The text-based EPSG code describing the map projection (in Northern CA, UTM Zone 10N = 'EPSG:26910') 36 | do_head_correct = False # Switch for using/not using heading correction due to magnetic compass declination and errors. 
{True or False} 37 | head_correct_spanning = False # perform heading correction on all data files binned together {True or False} 38 | mag_declination = 14.7 # magnetic compass declination - this value will be used to correct compass heading if head_correcting is not used {degrees E of true North, or None} 39 | u_min_bt=0.3 # minimum bottom track velocity for head_correct {typically 0-0.3 [m/s] or None} 40 | hdg_bin_size=5 # bin size of heading correction {typically 5,10 [degrees]} 41 | hdg_bin_min_samples=10 # minimum number of sample headings in a heading bin for consideration in heading correction {typically 10-50, more is safer} 42 | sidelobe_drop=0.1 # fraction of vertical profile to drop due to sidelobe/bottom interaction {typically 0.5-0.15 [fraction]} 43 | std_drop=3.0 # standard deviation of velocity, above which samples are dropped from analysis {0.0=no dropping, 2.0-3.0 typically [number of standard deviations]} 44 | std_interp=True # perform interpolation of holes in velocity profiles left by high standard deviation removal {typically True with std_drop > 0.0} 45 | smooth_kernel=0 # smooth velocity data using a square kernel box-filter, with side dimension = 46 | extrap_boundaries=False # extrapolate velocity profiles upward toward surface, and downward to the sounder-detected bottom {True or False} 47 | average_ens = 1 # average adjacent (in time) velocity profiles {typically 0-15 [number of adjacent velocity profiles(ensembles)]} 48 | regrid_horiz_m = None # horizontal grid resolution used when regridding results {resonable fraction of transect width, or None for default(2m) [m]} 49 | regrid_vert_m = None # vertical grid resolution used when regridding results {resonable fraction of transect depth, or None for default(0.1) [m]} 50 | adcp_depth = 0.100579092 # depth of the adcp face under the surface {[m] or None} 51 | #p1lat = 38.0527 # latitude of origin of optional transect plot line [degrees E] or None 52 | #p1lon = -121.6943 # longitude of origin 
of optional transect plot line [degrees N] or None 53 | #p2lat = 38.0505 # latitude of end of optional transect plot line [degrees E] or None 54 | #p2lon = -121.6900 # longitude of end of optional transect plot line [degrees N] or None 55 | p1lat = 38.1619 # latitude of origin of optional transect plot line [degrees E] or None 56 | p1lon = -121.6843 # longitude of origin of optional transect plot line [degrees N] or None 57 | p2lat = 38.1578 # latitude of end of optional transect plot line [degrees E] or None 58 | p2lon = -121.6781 # longitude of end of optional transect plot line [degrees N] or None 59 | 60 | # Data Output Options 61 | # ----------------------------------------------------------------------------- 62 | save_raw_data_to_netcdf = True # Switch to output raw data to netCDF-CF format. {True or False} 63 | save_preprocessed_data_to_netcdf = True # Switch to output results to netCDF-CF format. {True or False} 64 | use_netcdf_data_compression = True # Switch to use NetCDF 4 data compression to save disk space in data and results files. {True or False} 65 | 66 | # Debug options 67 | debug_stop_after_n_transects = 10 # False, or number to limit return to -------------------------------------------------------------------------------- /adcpy/trn_pre_input_GEO20090117.py: -------------------------------------------------------------------------------- 1 | # INPUT FILE for adco_transect_preprocessor 2 | # Set options in this file in order to use process adcp transects using the 3 | # For further information on setting these options, the ADCP Python Documentation 4 | # 5 | # IMPORTANT: This options file is uses Python 2.X code convenctions, meaning: 6 | # 1) there may not be any tab characters in the file; 7 | # 2) option lines may have no leadng spaces; 8 | # 3) strings (test in quotes) should be preceded by an 'r' for maximum compatibility. 
9 | 10 | 11 | # ADCP Data File location(s) 12 | # ----------------------------------------------------------------------------- 13 | 14 | #working_directory = r'Y:\temp\ADCP_2008\NDSOS_DLADCP.VelocityData\5thRelease\GEO5thRelease\GEO20090116' 15 | 16 | working_directory = r'C:\Delta\ADCPy\GEO20090116' 17 | # or None for current directory 18 | #working_directory = r'/Volumes/Aux/temp/adcp_anaylsis_stations/RIO20100309' # or None for current directory 19 | 20 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\WGB20090721' 21 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\WGA20090722' # or None for current directory 22 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\TMS20090513' 23 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\JPT20080618' 24 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\MRU022510' # can't head_correct, not enough bins 25 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\MRU012810' 26 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\MRU060408' # no nav 27 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\MRU112707' # no nav 28 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\WCI042011' # done 29 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\WCI102009' # done 30 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\WCI050608' 31 | file_list = None 32 | 33 | # Processing Options 34 | # ----------------------------------------------------------------------------- 35 | xy_projection = r'EPSG:26910' # The text-based EPSG code describing the map projection (in Northern CA, UTM Zone 10N = 'EPSG:26910') 36 | do_head_correct = False # Switch for using/not using heading correction due to magnetic compass declination and errors. 
{True or False} 37 | head_correct_spanning = False # perform heading correction on all data files binned together {True or False} 38 | mag_declination = 14.7 # magnetic compass declination - this value will be used to correct compass heading if head_correcting is not used {degrees E of true North, or None} 39 | u_min_bt=0.3 # minimum bottom track velocity for head_correct {typically 0-0.3 [m/s] or None} 40 | hdg_bin_size=5 # bin size of heading correction {typically 5,10 [degrees]} 41 | hdg_bin_min_samples=10 # minimum number of sample headings in a heading bin for consideration in heading correction {typically 10-50, more is safer} 42 | sidelobe_drop=0.1 # fraction of vertical profile to drop due to sidelobe/bottom interaction {typically 0.5-0.15 [fraction]} 43 | std_drop=3.0 # standard deviation of velocity, above which samples are dropped from analysis {0.0=no dropping, 2.0-3.0 typically [number of standard deviations]} 44 | std_interp=True # perform interpolation of holes in velocity profiles left by high standard deviation removal {typically True with std_drop > 0.0} 45 | smooth_kernel=0 # smooth velocity data using a square kernel box-filter, with side dimension = 46 | extrap_boundaries=False # extrapolate velocity profiles upward toward surface, and downward to the sounder-detected bottom {True or False} 47 | average_ens = 1 # average adjacent (in time) velocity profiles {typically 0-15 [number of adjacent velocity profiles(ensembles)]} 48 | regrid_horiz_m = None # horizontal grid resolution used when regridding results {resonable fraction of transect width, or None for default(2m) [m]} 49 | regrid_vert_m = None # vertical grid resolution used when regridding results {resonable fraction of transect depth, or None for default(0.1) [m]} 50 | adcp_depth = 0.100579092 # depth of the adcp face under the surface {[m] or None} 51 | #p1lat = 38.0527 # latitude of origin of optional transect plot line [degrees E] or None 52 | #p1lon = -121.6943 # longitude of origin 
of optional transect plot line [degrees N] or None 53 | #p2lat = 38.0505 # latitude of end of optional transect plot line [degrees E] or None 54 | #p2lon = -121.6900 # longitude of end of optional transect plot line [degrees N] or None 55 | p1lat = 38.1619 # latitude of origin of optional transect plot line [degrees E] or None 56 | p1lon = -121.6843 # longitude of origin of optional transect plot line [degrees N] or None 57 | p2lat = 38.1578 # latitude of end of optional transect plot line [degrees E] or None 58 | p2lon = -121.6781 # longitude of end of optional transect plot line [degrees N] or None 59 | 60 | # Data Output Options 61 | # ----------------------------------------------------------------------------- 62 | save_raw_data_to_netcdf = True # Switch to output raw data to netCDF-CF format. {True or False} 63 | save_preprocessed_data_to_netcdf = True # Switch to output results to netCDF-CF format. {True or False} 64 | use_netcdf_data_compression = True # Switch to use NetCDF 4 data compression to save disk space in data and results files. {True or False} 65 | 66 | # Debug options 67 | debug_stop_after_n_transects = 16 # False, or number to limit return to -------------------------------------------------------------------------------- /adcpy/trn_pre_input_RIO.py: -------------------------------------------------------------------------------- 1 | # INPUT FILE for adco_transect_preprocessor 2 | # Set options in this file in order to use process adcp transects using the 3 | # For further information on setting these options, the ADCP Python Documentation 4 | # 5 | # IMPORTANT: This options file is uses Python 2.X code convenctions, meaning: 6 | # 1) there may not be any tab characters in the file; 7 | # 2) option lines may have no leadng spaces; 8 | # 3) strings (test in quotes) should be preceded by an 'r' for maximum compatibility. 
9 | 10 | 11 | # ADCP Data File location(s) 12 | # ----------------------------------------------------------------------------- 13 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\WGB20090721' 14 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\WGA20090722' # or None for current directory 15 | 16 | working_directory = r'Y:\temp\adcp_anaylsis_stations\RIO20100309' # or None for current directory 17 | #working_directory = r'/Volumes/Aux/temp/adcp_anaylsis_stations/RIO20100309' # or None for current directory 18 | 19 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\TMS20090513' 20 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\JPT20080618' 21 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\MRU022510' # can't headcorrect, not enough bins 22 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\MRU012810' 23 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\MRU060408' # no nav 24 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\MRU112707' # no nav 25 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\WCI042011' # done 26 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\WCI102009' # done 27 | #working_directory = r'Z:\temp\adcp_anaylsis_stations\WCI050608' 28 | 29 | #file_list = ['GEO4thRelease641r.000', 30 | # 'GEO4thRelease642r.000'] 31 | file_list = None 32 | 33 | # Processing Options 34 | # ----------------------------------------------------------------------------- 35 | xy_projection = r'EPSG:26910' # The text-based EPSG code describing the map projection (in Northern CA, UTM Zone 10N = 'EPSG:26910') 36 | do_head_correct = False # Switch for using/not using heading correction due to magnetic compass declination and errors. 
{True or False} 37 | head_correct_spanning = False # perform heading correction on all data files binned together {True or False} 38 | mag_declination = 14.7 # magnetic compass declination - this value will be used to correct compass heading if head_correcting is not used {degrees E of true North, or None} 39 | u_min_bt=0.3 # minimum bottom track velocity for head_correct {typically 0-0.3 [m/s] or None} 40 | hdg_bin_size=5 # bin size of heading correction {typically 5,10 [degrees]} 41 | hdg_bin_min_samples=10 # minimum number of sample headings in a heading bin for consideration in heading correction {typically 10-50, more is safer} 42 | sidelobe_drop=0.1 # fraction of vertical profile to drop due to sidelobe/bottom interaction {typically 0.05-0.15 [fraction]} 43 | std_drop=3.0 # standard deviation of velocity, above which samples are dropped from analysis {0.0=no dropping, 2.0-3.0 typically [number of standard deviations]} 44 | std_interp=True # perform interpolation of holes in velocity profiles left by high standard deviation removal {typically True with std_drop > 0.0} 45 | smooth_kernel=3 # smooth velocity data using a square kernel box-filter, with side dimension = smooth_kernel 46 | extrap_boundaries=False # extrapolate velocity profiles upward toward surface, and downward to the sounder-detected bottom {True or False} 47 | average_ens = 1 # average adjacent (in time) velocity profiles {typically 0-15 [number of adjacent velocity profiles(ensembles)]} 48 | regrid_horiz_m = None # horizontal grid resolution used when regridding results {reasonable fraction of transect width, or None for default(2m) [m]} 49 | regrid_vert_m = None # vertical grid resolution used when regridding results {reasonable fraction of transect depth, or None for default(0.1) [m]} 50 | adcp_depth = 0.244 # depth of the adcp face under the surface {[m] or None} 51 | p1lat = 38.0527 # latitude of origin of optional transect plot line [degrees N] or None 52 | p1lon = -121.6943 # longitude of origin of 
optional transect plot line [degrees E] or None 53 | p2lat = 38.0505 # latitude of end of optional transect plot line [degrees N] or None 54 | p2lon = -121.6900 # longitude of end of optional transect plot line [degrees E] or None 55 | 56 | # Data Output Options 57 | # ----------------------------------------------------------------------------- 58 | save_raw_data_to_netcdf = True # Switch to output raw data to netCDF-CF format. {True or False} 59 | save_preprocessed_data_to_netcdf = False # Switch to output results to netCDF-CF format. {True or False} 60 | use_netcdf_data_compression = True # Switch to use NetCDF 4 data compression to save disk space in data and results files. {True or False} 61 | 62 | # Debug options 63 | debug_stop_after_n_transects = 3 # False, or number to limit return to -------------------------------------------------------------------------------- /doc/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 
16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | 
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/ADCpy.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/ADCpy.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/ADCpy" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/ADCpy" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 
105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 
152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /doc/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source 10 | set I18NSPHINXOPTS=%SPHINXOPTS% source 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. 
htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | call makeapi 50 | 51 | %SPHINXBUILD% 2> nul 52 | if errorlevel 9009 ( 53 | echo. 54 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 55 | echo.installed, then set the SPHINXBUILD environment variable to point 56 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 57 | echo.may add the Sphinx directory to PATH. 58 | echo. 59 | echo.If you don't have Sphinx installed, grab it from 60 | echo.http://sphinx-doc.org/ 61 | exit /b 1 62 | ) 63 | 64 | if "%1" == "html" ( 65 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 66 | if errorlevel 1 exit /b 1 67 | echo. 68 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 69 | goto end 70 | ) 71 | 72 | if "%1" == "dirhtml" ( 73 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 74 | if errorlevel 1 exit /b 1 75 | echo. 76 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 
77 | goto end 78 | ) 79 | 80 | if "%1" == "singlehtml" ( 81 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 82 | if errorlevel 1 exit /b 1 83 | echo. 84 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 85 | goto end 86 | ) 87 | 88 | if "%1" == "pickle" ( 89 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 90 | if errorlevel 1 exit /b 1 91 | echo. 92 | echo.Build finished; now you can process the pickle files. 93 | goto end 94 | ) 95 | 96 | if "%1" == "json" ( 97 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 98 | if errorlevel 1 exit /b 1 99 | echo. 100 | echo.Build finished; now you can process the JSON files. 101 | goto end 102 | ) 103 | 104 | if "%1" == "htmlhelp" ( 105 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 106 | if errorlevel 1 exit /b 1 107 | echo. 108 | echo.Build finished; now you can run HTML Help Workshop with the ^ 109 | .hhp project file in %BUILDDIR%/htmlhelp. 110 | goto end 111 | ) 112 | 113 | if "%1" == "qthelp" ( 114 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 115 | if errorlevel 1 exit /b 1 116 | echo. 117 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 118 | .qhcp project file in %BUILDDIR%/qthelp, like this: 119 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\ADCPy.qhcp 120 | echo.To view the help file: 121 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\ADCPy.ghc 122 | goto end 123 | ) 124 | 125 | if "%1" == "devhelp" ( 126 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 127 | if errorlevel 1 exit /b 1 128 | echo. 129 | echo.Build finished. 130 | goto end 131 | ) 132 | 133 | if "%1" == "epub" ( 134 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 135 | if errorlevel 1 exit /b 1 136 | echo. 137 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 
138 | goto end 139 | ) 140 | 141 | if "%1" == "latex" ( 142 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 143 | if errorlevel 1 exit /b 1 144 | echo. 145 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 146 | goto end 147 | ) 148 | 149 | if "%1" == "latexpdf" ( 150 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 151 | cd %BUILDDIR%/latex 152 | make all-pdf 153 | cd %BUILDDIR%/.. 154 | echo. 155 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 156 | goto end 157 | ) 158 | 159 | if "%1" == "latexpdfja" ( 160 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 161 | cd %BUILDDIR%/latex 162 | make all-pdf-ja 163 | cd %BUILDDIR%/.. 164 | echo. 165 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 166 | goto end 167 | ) 168 | 169 | if "%1" == "text" ( 170 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 171 | if errorlevel 1 exit /b 1 172 | echo. 173 | echo.Build finished. The text files are in %BUILDDIR%/text. 174 | goto end 175 | ) 176 | 177 | if "%1" == "man" ( 178 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 179 | if errorlevel 1 exit /b 1 180 | echo. 181 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 182 | goto end 183 | ) 184 | 185 | if "%1" == "texinfo" ( 186 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 187 | if errorlevel 1 exit /b 1 188 | echo. 189 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 190 | goto end 191 | ) 192 | 193 | if "%1" == "gettext" ( 194 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 195 | if errorlevel 1 exit /b 1 196 | echo. 197 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 198 | goto end 199 | ) 200 | 201 | if "%1" == "changes" ( 202 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 203 | if errorlevel 1 exit /b 1 204 | echo. 205 | echo.The overview file is in %BUILDDIR%/changes. 
206 | goto end 207 | ) 208 | 209 | if "%1" == "linkcheck" ( 210 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 211 | if errorlevel 1 exit /b 1 212 | echo. 213 | echo.Link check complete; look for any errors in the above output ^ 214 | or in %BUILDDIR%/linkcheck/output.txt. 215 | goto end 216 | ) 217 | 218 | if "%1" == "doctest" ( 219 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 220 | if errorlevel 1 exit /b 1 221 | echo. 222 | echo.Testing of doctests in the sources finished, look at the ^ 223 | results in %BUILDDIR%/doctest/output.txt. 224 | goto end 225 | ) 226 | 227 | if "%1" == "xml" ( 228 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 229 | if errorlevel 1 exit /b 1 230 | echo. 231 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 232 | goto end 233 | ) 234 | 235 | if "%1" == "pseudoxml" ( 236 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 237 | if errorlevel 1 exit /b 1 238 | echo. 239 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 240 | goto end 241 | ) 242 | 243 | :end 244 | -------------------------------------------------------------------------------- /doc/makeapi.bat: -------------------------------------------------------------------------------- 1 | sphinx-apidoc --force -o source\api ..\adcpy build ../adcpy/pynmea ../adcpy/pynmea/exceptions.py ../adcpy/pynmea/gps.py ../adcpy/pynmea/streamer.py ../adcpy/pynmea/utils.py -------------------------------------------------------------------------------- /doc/source/api/adcpy.pynmea.rst: -------------------------------------------------------------------------------- 1 | pynmea Package 2 | ============== 3 | 4 | :mod:`pynmea` Package 5 | --------------------- 6 | 7 | .. automodule:: adcpy.pynmea 8 | :members: 9 | :undoc-members: 10 | :show-inheritance: 11 | 12 | :mod:`exceptions` Module 13 | ------------------------ 14 | 15 | .. 
automodule:: adcpy.pynmea.exceptions 16 | :members: 17 | :undoc-members: 18 | :show-inheritance: 19 | 20 | :mod:`gps` Module 21 | ----------------- 22 | 23 | .. automodule:: adcpy.pynmea.gps 24 | :members: 25 | :undoc-members: 26 | :show-inheritance: 27 | 28 | :mod:`nmea` Module 29 | ------------------ 30 | 31 | .. automodule:: adcpy.pynmea.nmea 32 | :members: 33 | :undoc-members: 34 | :show-inheritance: 35 | 36 | :mod:`streamer` Module 37 | ---------------------- 38 | 39 | .. automodule:: adcpy.pynmea.streamer 40 | :members: 41 | :undoc-members: 42 | :show-inheritance: 43 | 44 | :mod:`utils` Module 45 | ------------------- 46 | 47 | .. automodule:: adcpy.pynmea.utils 48 | :members: 49 | :undoc-members: 50 | :show-inheritance: 51 | 52 | -------------------------------------------------------------------------------- /doc/source/api/adcpy.rst: -------------------------------------------------------------------------------- 1 | adcpy Package 2 | ============= 3 | 4 | :mod:`ADCPRdiWorkhorseData` Module 5 | ---------------------------------- 6 | 7 | .. automodule:: adcpy.ADCPRdiWorkhorseData 8 | :members: 9 | :undoc-members: 10 | :show-inheritance: 11 | 12 | :mod:`adcpy` Module 13 | ------------------- 14 | 15 | .. automodule:: adcpy.adcpy 16 | :members: 17 | :undoc-members: 18 | :show-inheritance: 19 | 20 | :mod:`adcpy_plot` Module 21 | ------------------------ 22 | 23 | .. automodule:: adcpy.adcpy_plot 24 | :members: 25 | :undoc-members: 26 | :show-inheritance: 27 | 28 | :mod:`adcpy_recipes` Module 29 | --------------------------- 30 | 31 | .. automodule:: adcpy.adcpy_recipes 32 | :members: 33 | :undoc-members: 34 | :show-inheritance: 35 | 36 | :mod:`adcpy_utilities` Module 37 | ----------------------------- 38 | 39 | .. automodule:: adcpy.adcpy_utilities 40 | :members: 41 | :undoc-members: 42 | :show-inheritance: 43 | 44 | :mod:`rdradcp` Module 45 | --------------------- 46 | 47 | .. 
automodule:: adcpy.rdradcp 48 | :members: 49 | :undoc-members: 50 | :show-inheritance: 51 | 52 | :mod:`transect_average` Module 53 | ------------------------------ 54 | 55 | .. automodule:: adcpy.transect_average 56 | :members: 57 | :undoc-members: 58 | :show-inheritance: 59 | 60 | :mod:`transect_preprocessor` Module 61 | ----------------------------------- 62 | 63 | .. automodule:: adcpy.transect_preprocessor 64 | :members: 65 | :undoc-members: 66 | :show-inheritance: 67 | 68 | :mod:`transect_preprocessor_input` Module 69 | ----------------------------------------- 70 | 71 | .. automodule:: adcpy.transect_preprocessor_input 72 | :members: 73 | :undoc-members: 74 | :show-inheritance: 75 | 76 | :mod:`trn_pre_input_GEO20090106` Module 77 | --------------------------------------- 78 | 79 | .. automodule:: adcpy.trn_pre_input_GEO20090106 80 | :members: 81 | :undoc-members: 82 | :show-inheritance: 83 | 84 | :mod:`trn_pre_input_GEO20090117` Module 85 | --------------------------------------- 86 | 87 | .. automodule:: adcpy.trn_pre_input_GEO20090117 88 | :members: 89 | :undoc-members: 90 | :show-inheritance: 91 | 92 | Subpackages 93 | ----------- 94 | 95 | .. toctree:: 96 | 97 | adcpy.pynmea 98 | 99 | -------------------------------------------------------------------------------- /doc/source/api/modules.rst: -------------------------------------------------------------------------------- 1 | adcpy 2 | ===== 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | adcpy 8 | -------------------------------------------------------------------------------- /doc/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # ADCpy documentation build configuration file, created by 4 | # sphinx-quickstart on Tue Oct 07 11:54:34 2014. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 
8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | import sys 16 | import os 17 | 18 | # If extensions (or modules to document with autodoc) are in another directory, 19 | # add these directories to sys.path here. If the directory is relative to the 20 | # documentation root, use os.path.abspath to make it absolute, like shown here. 21 | #sys.path.insert(0, os.path.abspath('.')) 22 | 23 | # -- General configuration ------------------------------------------------ 24 | 25 | # If your documentation needs a minimal Sphinx version, state it here. 26 | #needs_sphinx = '1.0' 27 | 28 | # Add any Sphinx extension module names here, as strings. They can be 29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 30 | # ones. 31 | extensions = [ 32 | 'matplotlib.sphinxext.mathmpl', 33 | 'matplotlib.sphinxext.plot_directive', 34 | 'sphinx.ext.intersphinx', 35 | 'sphinx.ext.autodoc', 36 | 'sphinx.ext.doctest','numpydoc', 37 | 'sphinx.ext.autosummary'] 38 | #'numpydoc'] 39 | #'ipython_console_highlighting', 40 | #'inheritance_diagram', 41 | #'numpydoc'] 42 | 43 | autodoc_member_order = 'alphabetical' 44 | 45 | # Add any paths that contain templates here, relative to this directory. 46 | templates_path = ['_templates'] 47 | 48 | # The suffix of source filenames. 49 | source_suffix = '.rst' 50 | 51 | # The encoding of source files. 52 | #source_encoding = 'utf-8-sig' 53 | 54 | # The master toctree document. 55 | master_doc = 'index' 56 | 57 | # General information about the project. 58 | project = u'ADCPy' 59 | copyright = u'2014, California Department of Water Resources' 60 | 61 | # The version info for the project you're documenting, acts as replacement for 62 | # |version| and |release|, also used in various other places throughout the 63 | # built documents. 
64 | # 65 | # The short X.Y version. 66 | version = '0.1' 67 | # The full version, including alpha/beta/rc tags. 68 | release = '0.1' 69 | 70 | # The language for content autogenerated by Sphinx. Refer to documentation 71 | # for a list of supported languages. 72 | #language = None 73 | 74 | # There are two options for replacing |today|: either, you set today to some 75 | # non-false value, then it is used: 76 | #today = '' 77 | # Else, today_fmt is used as the format for a strftime call. 78 | #today_fmt = '%B %d, %Y' 79 | 80 | # List of patterns, relative to source directory, that match files and 81 | # directories to ignore when looking for source files. 82 | exclude_patterns = [] 83 | 84 | # The reST default role (used for this markup: `text`) to use for all 85 | # documents. 86 | #default_role = None 87 | 88 | # If true, '()' will be appended to :func: etc. cross-reference text. 89 | #add_function_parentheses = True 90 | 91 | # If true, the current module name will be prepended to all description 92 | # unit titles (such as .. function::). 93 | #add_module_names = True 94 | 95 | # If true, sectionauthor and moduleauthor directives will be shown in the 96 | # output. They are ignored by default. 97 | #show_authors = False 98 | 99 | # The name of the Pygments (syntax highlighting) style to use. 100 | pygments_style = 'sphinx' 101 | 102 | # A list of ignored prefixes for module index sorting. 103 | #modindex_common_prefix = [] 104 | 105 | # If true, keep warnings as "system message" paragraphs in the built documents. 106 | #keep_warnings = False 107 | 108 | 109 | # -- Options for HTML output ---------------------------------------------- 110 | 111 | # The theme to use for HTML and HTML Help pages. See the documentation for 112 | # a list of builtin themes. 113 | html_theme = 'sphinxdoc' 114 | 115 | # Theme options are theme-specific and customize the look and feel of a theme 116 | # further. 
For a list of options available for each theme, see the 117 | # documentation. 118 | #html_theme_options = {} 119 | 120 | # Add any paths that contain custom themes here, relative to this directory. 121 | #html_theme_path = [] 122 | 123 | # The name for this set of Sphinx documents. If None, it defaults to 124 | # " v documentation". 125 | #html_title = None 126 | 127 | # A shorter title for the navigation bar. Default is the same as html_title. 128 | #html_short_title = None 129 | 130 | # The name of an image file (relative to this directory) to place at the top 131 | # of the sidebar. 132 | html_logo = 'dwrsmall.gif' 133 | 134 | # The name of an image file (within the static path) to use as favicon of the 135 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 136 | # pixels large. 137 | #html_favicon = None 138 | 139 | # Add any paths that contain custom static files (such as style sheets) here, 140 | # relative to this directory. They are copied after the builtin static files, 141 | # so a file named "default.css" will overwrite the builtin "default.css". 142 | html_static_path = ['_static'] 143 | 144 | # Add any extra paths that contain custom files (such as robots.txt or 145 | # .htaccess) here, relative to this directory. These files are copied 146 | # directly to the root of the documentation. 147 | #html_extra_path = [] 148 | 149 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 150 | # using the given strftime format. 151 | #html_last_updated_fmt = '%b %d, %Y' 152 | 153 | # If true, SmartyPants will be used to convert quotes and dashes to 154 | # typographically correct entities. 155 | #html_use_smartypants = True 156 | 157 | # Custom sidebar templates, maps document names to template names. 158 | #html_sidebars = {} 159 | 160 | # Additional templates that should be rendered to pages, maps page names to 161 | # template names. 
162 | #html_additional_pages = {} 163 | 164 | # If false, no module index is generated. 165 | # This prevents the weird 2-index result if you use numpydoc 166 | html_domain_indices = ['py-modindex'] 167 | 168 | # If false, no index is generated. 169 | #html_use_index = True 170 | 171 | # If true, the index is split into individual pages for each letter. 172 | #html_split_index = False 173 | 174 | # If true, links to the reST sources are added to the pages. 175 | #html_show_sourcelink = True 176 | 177 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 178 | #html_show_sphinx = True 179 | 180 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 181 | #html_show_copyright = True 182 | 183 | # If true, an OpenSearch description file will be output, and all pages will 184 | # contain a tag referring to it. The value of this option must be the 185 | # base URL from which the finished HTML is served. 186 | #html_use_opensearch = '' 187 | 188 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 189 | #html_file_suffix = None 190 | 191 | # Output file base name for HTML help builder. 192 | htmlhelp_basename = 'ADCPydoc' 193 | 194 | 195 | # -- Options for LaTeX output --------------------------------------------- 196 | 197 | latex_elements = { 198 | # The paper size ('letterpaper' or 'a4paper'). 199 | #'papersize': 'letterpaper', 200 | 201 | # The font size ('10pt', '11pt' or '12pt'). 202 | #'pointsize': '10pt', 203 | 204 | # Additional stuff for the LaTeX preamble. 205 | #'preamble': '', 206 | } 207 | 208 | # Grouping the document tree into LaTeX files. List of tuples 209 | # (source start file, target name, title, 210 | # author, documentclass [howto, manual, or own class]). 
211 | latex_documents = [ 212 | ('index', 'ADCPy.tex', u'ADCPy Documentation', 213 | u'Benjamin Saenz, David Ralston, Rusty Holleman,\nEd Gross, Eli Ateljevich', 'manual'), 214 | ] 215 | 216 | # The name of an image file (relative to this directory) to place at the top of 217 | # the title page. 218 | #latex_logo = None 219 | 220 | # For "manual" documents, if this is true, then toplevel headings are parts, 221 | # not chapters. 222 | latex_use_parts = False 223 | 224 | # If true, show page references after internal links. 225 | #latex_show_pagerefs = False 226 | 227 | # If true, show URL addresses after external links. 228 | #latex_show_urls = False 229 | 230 | # Documents to append as an appendix to all manuals. 231 | #latex_appendices = [] 232 | 233 | # If false, no module index is generated. 234 | latex_domain_indices = ['py-modindex'] 235 | 236 | 237 | # -- Options for manual page output --------------------------------------- 238 | 239 | # One entry per manual page. List of tuples 240 | # (source start file, name, description, authors, manual section). 241 | man_pages = [ 242 | ('index', 'adcpy', u'ADCpy Documentation', 243 | [u'Benjamin Saenz, David Ralston, Rusty Holleman, Ed Gross, Eli Ateljevich'], 1) 244 | ] 245 | 246 | # If true, show URL addresses after external links. 247 | #man_show_urls = False 248 | 249 | 250 | # -- Options for Texinfo output ------------------------------------------- 251 | 252 | # Grouping the document tree into Texinfo files. List of tuples 253 | # (source start file, target name, title, author, 254 | # dir menu entry, description, category) 255 | texinfo_documents = [ 256 | ('index', 'ADCpy', u'ADCpy Documentation', 257 | u'Benjamin Saenz, David Ralston, Rusty Holleman, Ed Gross, Eli Ateljevich', 'ADCPy', 'Tools for ADCP analysis and visualization.', 258 | 'Miscellaneous'), 259 | ] 260 | 261 | # Documents to append as an appendix to all manuals. 
262 | #texinfo_appendices = [] 263 | 264 | # If false, no module index is generated. 265 | #texinfo_domain_indices = True 266 | 267 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 268 | #texinfo_show_urls = 'footnote' 269 | 270 | # If true, do not generate a @detailmenu in the "Top" node's menu. 271 | #texinfo_no_detailmenu = False 272 | -------------------------------------------------------------------------------- /doc/source/configuration.rst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CADWRDeltaModeling/ADCPy/16e6add146d53ce98dcdc08a39632d2686ce835f/doc/source/configuration.rst -------------------------------------------------------------------------------- /doc/source/dwrsmall.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CADWRDeltaModeling/ADCPy/16e6add146d53ce98dcdc08a39632d2686ce835f/doc/source/dwrsmall.gif -------------------------------------------------------------------------------- /doc/source/image/group007_flow_summary.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CADWRDeltaModeling/ADCPy/16e6add146d53ce98dcdc08a39632d2686ce835f/doc/source/image/group007_flow_summary.png -------------------------------------------------------------------------------- /doc/source/image/group007_mean_velocity.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CADWRDeltaModeling/ADCPy/16e6add146d53ce98dcdc08a39632d2686ce835f/doc/source/image/group007_mean_velocity.png -------------------------------------------------------------------------------- /doc/source/image/group007_secondary_circulation.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/CADWRDeltaModeling/ADCPy/16e6add146d53ce98dcdc08a39632d2686ce835f/doc/source/image/group007_secondary_circulation.png -------------------------------------------------------------------------------- /doc/source/image/group007_u_avg_n_sd.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CADWRDeltaModeling/ADCPy/16e6add146d53ce98dcdc08a39632d2686ce835f/doc/source/image/group007_u_avg_n_sd.png -------------------------------------------------------------------------------- /doc/source/image/group007_uvw_velocity.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CADWRDeltaModeling/ADCPy/16e6add146d53ce98dcdc08a39632d2686ce835f/doc/source/image/group007_uvw_velocity.png -------------------------------------------------------------------------------- /doc/source/image/group007_v_avg_n_sd.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CADWRDeltaModeling/ADCPy/16e6add146d53ce98dcdc08a39632d2686ce835f/doc/source/image/group007_v_avg_n_sd.png -------------------------------------------------------------------------------- /doc/source/image/group007_w_avg_n_sd.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CADWRDeltaModeling/ADCPy/16e6add146d53ce98dcdc08a39632d2686ce835f/doc/source/image/group007_w_avg_n_sd.png -------------------------------------------------------------------------------- /doc/source/image/group007_xy_lines.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CADWRDeltaModeling/ADCPy/16e6add146d53ce98dcdc08a39632d2686ce835f/doc/source/image/group007_xy_lines.png -------------------------------------------------------------------------------- /doc/source/index.rst: 
-------------------------------------------------------------------------------- 1 | .. ADCPy documentation master file, created by 2 | sphinx-quickstart on Tue Oct 07 11:54:34 2014. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | ***** 7 | ADCPy 8 | ***** 9 | 10 | Introduction 11 | ================= 12 | 13 | ADCPy is a package much like a scripting version of the Velocity Mapping Toolkit in Matlab. 14 | The tools aid in the analysis of data from Acoustic Doppler Current Profilers. 15 | 16 | ADCPy allows the user to read raw (unprocessed) data from ADCP instruments, perform a suite of processing functions and data transformations, and output summary data and related plots. By providing access to the raw ADCP velocities, ADCPy allows exacting user control over the diagnosis of water movement. Numerous built-in data transformation tools and associated utilities allow full scripting of ADCP processing, from raw data to final output plots and flow volumes. Raw ADCP data is stored as a python object class, which may be exported and imported to disk in the Network Common Data Format (Climate and Forecast Metadata Convention; NetCDF-CF). 17 | An abbreviated list of ADCPy functions: 18 | * Read native ADCP instrument binary file formats 19 | * Georeference/re-project ADCP measurements onto fitted or user-defined grids 20 | * ADCP Data processing methods: 21 | 22 | * Correct measured current velocities to account for platform/vessel speed 23 | * Correct measured velocities for instrument compass errors 24 | * Drop outliers, remove side lobe contamination, kernel smoothing 25 | * Combine/average ADCP ensembles from different sources 26 | * Extrapolation of boundaries (i.e. 
towards channel/sea bed, and towards ADCP face) 27 | 28 | * Calculate dispersion 29 | * Archive and read to NetCDF-CF files 30 | * Export velocities to comma-separated values (CSV file) for easy porting 31 | * Generate various surface and arrow (quiver) plots showing 2D velocity profiles, mean flow vectors, and survey geometries 32 | * A processing history is automatically generated and updated for ADCPy data classes, allowing tracking of processing methods 33 | 34 | 35 | 36 | The California Department of Water Resources (DWR) commissioned the development of a tool to provide methods for ADCP transect analysis that are more customizable to different tasks and processing parameters than currently-available closed-source solutions. These Python-based tools are designed to facilitate quality control and projection/extrapolation of ADCP surveys, conversion of ADCP transects to streamwise coordinates, re-gridding of ADCP transect profiles from vessel tracks onto a uniform grid, estimation and output of streamwise flows and velocities (NetCDF-CF format, see http://cfconventions.org/), and calculation of lateral and longitudinal dispersion coefficients (Fischer et al., 1979). 37 | 38 | Using ADCPy 39 | =========== 40 | 41 | .. toctree:: 42 | :maxdepth: 2 43 | 44 | installation 45 | scripting 46 | terms 47 | api/modules 48 | 49 | Key modules 50 | =========== 51 | 52 | .. currentmodule:: adcpy 53 | 54 | .. 
autosummary:: 55 | adcpy 56 | adcpy_plot 57 | adcpy_utilities 58 | adcpy_recipes 59 | transect_average 60 | transect_preprocessor 61 | ADCPRdiWorkhorseData 62 | rdradcp 63 | 64 | 65 | 66 | Indices and tables 67 | ================== 68 | 69 | * :ref:`genindex` 70 | * :ref:`modindex` 71 | * :ref:`search` 72 | 73 | -------------------------------------------------------------------------------- /doc/source/installation.rst: -------------------------------------------------------------------------------- 1 | 2 | Installation 3 | ================ 4 | 5 | Python Prerequisites 6 | -------------------- 7 | 8 | External dependencies for running the ADCPy include: 9 | 1. A working Python 2.5-2.7 distribution (Python version 3 is not compatible) 10 | 2. Specific Python packages that might not be included with the base Python distribution: 11 | a. Numpy version 1.7 12 | b. Scipy version 0.11.0 13 | c. Matplotlib version 1.2.0 14 | d. Pynmea (currently prepackaged inside ADCPy) 15 | e. Gdal 16 | f. Netcdf4 17 | 18 | The ADCP Python code is developed and tested on Windows XP (32-bit) using the Python(x,y) python distribution (http://code.google.com/p/pythonxy/), and on Windows 7 (64-bit) using 64-bit Python 2.7 and associated libraries compiled and available at the time of writing at: http://www.lfd.uci.edu/~gohlke/pythonlibs/. It has also been tested to a lesser extent with the Anaconda 32 and 64 bit Python distributinos. Running the Spyder Python development environment is very helpful, but not necessary. 19 | 20 | IMPORTANT: Python version 3 is not backwards-compatible with versions 2.5-2.7. ADCP Python tools have not been tested under Python 3 and likely don’t work. 21 | 22 | ADCP Python does not use any platform-dependent code, and will likely run on any other system capable of supporting the above Python distribution and associated packages. This has not been tested however. 
23 | 24 | 25 | Installation 26 | ------------ 27 | 28 | 29 | -------------------------------------------------------------------------------- /doc/source/scripting.rst: -------------------------------------------------------------------------------- 1 | ============================= 2 | Scripting Examples with ADCPy 3 | ============================= 4 | 5 | Open, processing and visualizing a raw file 6 | ------------------------------------------- 7 | ADCPy is constructed to allow a high level of control and tuning over processing of large and varied datasets, however processing can be accomplished easily. For example, as a first pass to visualize data, a short sequence of commands can open, pre-process, and visualize a raw ADCP data file: 8 | 9 | 1. Import ADCPy to your python session, and open a raw file named ‘example.r000’:: 10 | 11 | import ADCPy 12 | adcp = ADCPy.open_file(‘example.r000’,file_type=’ADCP_RdiWorkhorse_Data’) 13 | 14 | 15 | 2. Perform some data processing steps including dropping outliers based on standard deviation, smoothing using a boxcar filter, and re-projecting to a regular 2m horizontal by 0.3 m vertical grid:: 16 | 17 | adcp.sd_drop(sd=3) # drop velocities showing a high standard deviation 18 | adcp.kernel_smooth(kernel_size=3) # boxcar filter to smooth velocities 19 | adcp.xy_regrid(dxy=2.0, dz=0.3, xy_srs='EPSG:26910') 20 | 21 | 3. Rotate and plot a U,V and W velocities as images:: 22 | 23 | import ADCPy_recipes 24 | adcp = ADCPy_recipes.transect_rotate(adcp,’principal flow’) 25 | fig = ADCPy.plot.plot_uvw_velocity_array(adcp.velocity) 26 | 27 | 4. Save data to disk:: 28 | 29 | adcp.write_nc(‘example_processed.nc’,zlib=True) 30 | 31 | For further descriptions of ADCPy functionality, see the ADCPy Function Reference. 32 | 33 | 34 | 35 | Batch processing and secondary circulation 36 | ------------------------------------------ 37 | 38 | Our first "industrial strength" example is the script transect_average.py. 
This script is actually used in our production work to 39 | sort through ADCP transects, find ones that are appropriate for averaging, projecting, rotating and visualizing them. 40 | 41 | Basic Usage 42 | ^^^^^^^^^^^ 43 | 44 | 1. Set the options in the transect_average.py script by opening it with a text editor, and changing the parameters listed near the top of the file (See transect_average.py options below for reference). 45 | 46 | 2. Edit the transect_preprocessor_input.py file in a text editor, paying attention to the file requirements (i.e. Python conventions: only spaces [no tabs], and no white space in front the parameter lines). 47 | 48 | a. Assign the working_directory to the path containing your raw (*.r.000, *.nc) files 49 | b. Review the processing options section of the file, and change if needed 50 | c. Enable/disable netcdf file output as desired 51 | d. Save changes to adcp_processor_input.py 52 | 53 | 3. Run the transect_average.py script file using your installed Python distribution. There are many ways to run the python script – here are three useful ones: 54 | 55 | a. Open a command window (cmd.exe on Windows); change to the adcp_python directory (cd ); type “python transect_average.py”. 56 | b. In an already –running python session, at the prompt type “execfile(‘/transect_average.py’)”. 57 | c. Begin a Spyder session (if you have elected to install Spyder). Choose “File:Open…” from the program menus, and open adcp_processor.py. Then Choose “Run“ from the program menu, or press function key F5. 58 | 59 | 4. The script will output various text progress messages, warnings, figures, or errors during operation. Script output will be located inside a folder named “ADCPy” that will be created inside the working_directory specified in the transect_preprocessor_input.py file . 
60 | 61 | 62 | Averaging 0ptions 63 | ^^^^^^^^^^^^^^^^ 64 | At present, some options pertaining to averaging are written directly at the top of the adcpy.transect_average.py script and some options pertaining to preprocessing of raw transects are stored in a file called transect_preprocessor_input.py. Here are the averaging options: 65 | 66 | ============================== ================== 67 | Option Description 68 | ============================== ================== 69 | avg_dxy Horizontal resolution of averaging bins {m}. 70 | avg_dz Vertical resolution of averaging bins [m]. 71 | avg_max_gap_m Maximum distance allowed between ADCP observations when averaging [m ]. 72 | avg_max_gap_minutes Maximum time allowed between ADCP observations when averaging[minutes]. 73 | avg_max_group_size The maximum number of ADCP observations to average. 74 | avg_rotation One of {'Rozovski','no transverse flow', 'principal flow', 'normal', None} or alternatively can be a scalar in radians to specify a rotation angle . 75 | avg_std_drop The number of calculated standard deviations away from the mean velocity, above beyond which velocity samples are dropped from analysis. {0.0=no dropping, 2.0-3.0 typically [number of standard deviations ]}. 76 | avg_std_interp Switch to perform interpolation of holes in velocity profiles left by high standard deviation removal {typically True with std_drop > 0.0}. 77 | avg_smooth_kernel Remove noise from ADCP velocities through smoothing data using a square-kernel boxcar-filter. The square filter average neighboring velocities in a square pattern (kernel), with the sidelength of the square = smooth_kernel (i.e. smooth_kernel=3 specifies a 3x3 square, effectively averaging the 9 neighboring velocities). {0 for no smoothing, or odd integer between 3-9 }. 78 | avg_save_netcdf True = Save bin-averaged velocities as an ADCP_Data netcdf file. 79 | avg_save_csv True = Save bin-averaged velocities as a CSV text file. 
80 | avg_plot_xy True = Generate a composite plot of survey location(s) of original ADCP ensembles. 81 | avg_plot_avg_n_sd True = Generate pcolor plots of bin-averaged uv,w velocities, and the number and standard deviation of bin averages. 82 | avg_plot_mean_vectors True = Generate an arrow plot of bin-averaged U-V mean velocities in the x-y plane. 83 | avg_plot_secondary_circulation True = Generate an image plot of 2D bin-averaged streamwise (u) velocities, overlain by an arrow plot showing secondary circulation in the y-z plane. 84 | avg_plot_uvw_velocity_array True = Generate a 3-panel image plot showing bin-averaged U,V,W velocities in the y-z plane. 85 | avg_plot_flow_summmary True = Generate a summary plot showing image plots of U,V bin-averaged velocities, an arrow plot of bin-averaged U-V mean velocities, and flow/discharge calculations. 86 | avg_save_plots True = Save generated plots to disk as .PNG files. 87 | avg_show_plots True = Print plots to screen (pauses execution until plots are manually closed). 88 | ============================== ================== 89 | 90 | Preprocessing options 91 | ^^^^^^^^^^^^^^^^^^^^^ 92 | 93 | Here are the preprocessing options, as would normally be kept in 94 | transect_preprocessor_input.py. 95 | 96 | ================================ ============== 97 | Name Description 98 | ================================ ============== 99 | working_directory Path to the directory containing data files (WinRiver raw or ADCP Python NetCDF) for processing. Note required ‘r’ character before first text quote. 100 | xy_projection The text-based EPSG code describing the map projection (in Northern CA, UTM Zone 10N = ‘EPSG:26910’) to use for projecting ADCP profile locations onto an regular grid. 101 | do_head_correct Switch for heading correction due to magnetic compass declination and errors. {True or False}. 
102 | Headhead_correct_spanning Switch to perform heading correction on all data files binned together (True), or on each file individually (False). This option should be set to true whenever possible; errors in processing can occur if the sample size of headings used for correction is small. {True or False}. 103 | mag_declination Magnetic compass declination; this value will be used to correct compass heading if do_head_correct is False {degrees E of true North, or None}. 104 | u_min_bt Minimum bottom track velocity for use in heading correcting. If the survey platform (boat) is moving too slowly, the GPS-based navigation heading may be invalid. {typically 0-0.3 [m/s] or None}. 105 | hdg_bin_size The size of the heading bin size of heading correction. A value of 5 means headings will be grouped and averaged in over a range of 5 degrees. Experimentation with this value may be required to produce a valid heading correction. {typically 5,10 [degrees]}. 106 | hdg_bin_min_samples Minimum number of sample headings in a heading bin for consideration in heading correction. It is wise to use a larger number here if there is a large amount of data/number of data files, however using too large a number may exclude important data used for fitting with less data. Experimentation with this value may be required. {typically 10-50 }. 107 | sidelobe_drop The fraction of vertical profile to drop due from analysis due to sidelobe/bottom interaction. {typically 0.05-0.15 [fraction]}. 108 | std_drop The calculated standard deviation from the mean velocity, above which velocity samples are dropped from analysis. {0.0=no dropping, 2.0-3.0 typically [number of standard deviations]}. 109 | std_interp Switch to perform interpolation of holes in velocity profiles left by high standard deviation removal {typically True with std_drop > 0.0}. 110 | smooth_kernel Remove noise from ADCP velocities through smoothing data using a square-kernel boxcar-filter. 
The square filter average neighboring velocities in a square pattern (kernel), with the sidelength of the square = smooth_kernel (i.e. smooth_kernel=3 specifies a 3x3 square, effectively averaging the 9 neighboring velocities). {0 for no smoothing, or odd integer between 3-9 }. 111 | extrap_boundaries Switch to extrapolate velocity profiles upward toward surface, and downward to the sounder-detected bottom. {True or False} 112 | average_ens Specifies how many adjacent (in time) velocity profiles should be averaged together to reduce noise. {typically 0-15 [number of adjacent velocity profiles(ensembles)]}. 113 | regrid_horiz_m Horizontal resolution of averaging bins {m, or None for no regridding}. 114 | regrid_vert_m Vertical resolution of averaging bins {m, or None for no regridding}. 115 | adcp_depth Scalar value indicating at the depth of the ADCP face underwater (positive downward from zero at surface) {m}. 116 | p1lat, p1lon, p2lat, p2lon Latitude/Longitude coordinates of points p1 and p2 which designate a plotline for projection and regridding. 117 | save_raw_data_to_netcdf Switch to output raw data to netCDF-CF format. {True or False}. 118 | save_preprocessed_data_to_netcdf Switch to output results to netCDF-CF format. {True or False}. 119 | use_netcdf_data_compression Switch to use NetCDF 4 data compression to save disk space in data and results files. {True or False}. 120 | debug_stop_after_n_transects Limits the number of ADCP_Data objects returned to this scalar integer value. {True or False}. 121 | ================================ ============== 122 | 123 | 124 | A tutorial example 125 | ^^^^^^^^^^^^^^^^^^ 126 | 127 | As an example analysis we use the transect_average.py script to average multiple ADCP transects from different locations. 
In order to increase the signal-to-noise ratio in the data, we will instruct the transect_average.py to average transects close in space and time, such that final gridded velocity profiles have a better chance at resolving secondary circulation features. Below we describe inline the steps required to produce this analysis. 128 | 129 | The USGS ADCP data used for this analysis comes from repeated transect sampling at Walnut Grove on the Sacramento/San Joaquin delta, on 16 January, 2009. In this series of observations, the ADCP traversed the width of the channel three times. The total survey consisted of 54 raw RDI WinRiver data files from several locations, with gaps ranging from 2 – 35 minutes. 130 | 131 | Step 1: 132 | """"""" 133 | Locate the path to the data files, and set working_directory parameter in the pre-processor input file (transect_preprocessor_input.py) to this path. In our case, the link from the input file looks like this:: 134 | 135 | working_directory = r'C:\adcp_anaylsis_stations\GEO20090116' 136 | 137 | The small ‘r’ in front of the single quote is required – this ‘r’ sets the interpretation of the path as literal (i.e. no special characters [like \t = tab] are inferred). 
138 | 139 | Step 2: 140 | """"""" 141 | Set the processing parameters to these suggested default settings in the input file (adcp_processor_input.py):: 142 | 143 | xy_projection = 'EPSG:26910' 144 | do_headCorrect = False 145 | headCorrect_spanning = False 146 | mag_declination = 14.7 147 | u_min_bt = 0.3 148 | hdg_bin_size = 5 149 | hdg_bin_min_samples = 10 150 | sidelobe_drop = 0.1 151 | std_drop = 3.0 152 | std_interp = True 153 | smooth_kernel = 0 154 | smooth_interp = True 155 | average_ens = 1 156 | regrid_horiz_m = None 157 | regrid_vert_m = None 158 | adcp_depth = 0.10 159 | average_ens = 10 160 | p1lat = None 161 | p1lon = None 162 | p2lat = None 163 | p2lon = None 164 | 165 | 166 | In this case, we instruct transect_preprocessor.py to perform a minimum of processing, so that most of the raw ADCP velocities can be used in bin-averaging. We have used sidelobe_drop = 0.1 and std_drop = 3.0 to remove bottom interference and outliers before averaging. transect_preprocessor.py will return a python list of ADCP_Data objects containing the ADCP observation data in the working_directory. 167 | 168 | 169 | Step 3: 170 | """"""" 171 | Check the options in the transect_average.py file. We have set the options as follows:: 172 | 173 | avg_dxy = 1.0 174 | avg_dz = 0.25 175 | avg_max_gap_m = 30.0 176 | avg_max_gap_minutes = 60.0 177 | avg_max_group_size = 6 178 | avg_rotation = 'Rozovski' 179 | avg_std_drop = 3.0 180 | avg_std_interp = True 181 | avg_smooth_kernel = 3 182 | avg_save_netcdf = True 183 | avg_save_csv = True 184 | avg_plot_xy = True 185 | avg_plot_avg_n_sd = True 186 | avg_plot_mean_vectors = True 187 | avg_plot_secondary_circulation = True 188 | avg_plot_uvw_velocity_array = True 189 | avg_plot_flow_summmary = True 190 | avg_save_plots = True 191 | avg_show_plots = False 192 | 193 | 194 | These options will average velocities from ADCP_Data objects that are within 30 m and 60 minutes of each other. 
The resulting velocities are projected onto a 1.0 x 0.25 m regular grid, whose x-y orientation is fitted from the locations of input ensembles. The velocity data will be further cleaned (std_drop) and smoothed (kernel_smooth), rotated (Rozovski method), and the full array of output data formats and plots for each grouping of transects will be written to disk. 195 | 196 | Step 4: 197 | """"""" 198 | Run the transect_average.py script. 199 | E.g. type ‘python transect_average.py’ from an appropriate directory in the command windows to start the processing. 200 | 201 | During preprocessing, each individual raw file will be read into an ADCP_Data object. Output from this activity shows the full path of the raw files:: 202 | 203 | Processing data_file: Y:\temp\ADCP_2008\NDSOS_DLADCP.VelocityData\5thRelease\GEO5thRelease\GEO20090116\ADCPy\GEO5thRelease055r 204 | No fitted heading correction found - performing single magnetic declination correction 205 | Processing data_file: Y:\temp\ADCP_2008\NDSOS_DLADCP.VelocityData\5thRelease\GEO5thRelease\GEO20090116\ADCPy\GEO5thRelease056r 206 | No fitted heading correction found - performing single magnetic declination correction 207 | Processing data_file: Y:\temp\ADCP_2008\NDSOS_DLADCP.VelocityData\5thRelease\GEO5thRelease\GEO20090116\ADCPy\GEO5thRelease057r 208 | … 209 | 210 | After the ADCP_Data objects are generated, preprocessed, and passed back to the main transect_average.py script, the groupings of the ADCP_Data objects are determined. ADCP_Data objects are first grouped by space (space groups), then these groups are further split by time (spacetime groups). In general we observe from the displayed messages that most often we arrive at just two ADCP_Data objects per group for averaging. 
Recall that for this survey, three complete transects are contained in each observation, so in reality we will be averaging six transects across the channel:: 211 | 212 | space group 0 - 1 observations 213 | space group 1 - 4 observations 214 | space group 2 - 4 observations 215 | space group 3 - 3 observations 216 | spacetime group 0 - 1 observations 217 | spacetime group 1 - 2 observations 218 | spacetime group 2 - 2 observations 219 | spacetime group 3 - 2 observations 220 | spacetime group 4 - 2 observations 221 | spacetime group 5 - 2 observations 222 | 223 | Finally each group is averaged, cleaned, and outputs are saved. 224 | 225 | Outputs 226 | """"""" 227 | 228 | .. image:: image/group007_flow_summary.png 229 | 230 | .. image:: image/group007_xy_lines.png 231 | 232 | .. image:: image/group007_mean_velocity.png 233 | 234 | .. image:: image/group007_secondary_circulation.png 235 | 236 | .. image:: image/group007_u_avg_n_sd.png 237 | 238 | .. image:: image/group007_v_avg_n_sd.png 239 | 240 | .. image:: image/group007_w_avg_n_sd.png 241 | 242 | 243 | 244 | 245 | -------------------------------------------------------------------------------- /doc/source/terms.rst: -------------------------------------------------------------------------------- 1 | 2 | .. _glossary: 3 | 4 | ADCPy Glossary 5 | ============== 6 | 7 | .. glossary:: 8 | 9 | Elevation 10 | The distance from the transducer face to the point of observation, starting at zero and decreasing. 11 | 12 | Ensemble 13 | A group of velocity measurements processed together in the elevation dimension. Ensembles may be the result of a single ADCP ping, or may be the result of combinations of pings or extrapolation between adjacent ensembles. 14 | 15 | Observation 16 | An instance of the ADCP_Data class resulting from the reading of a single raw instrument file (before binning or averaging of data from multiple input files).
17 | 18 | Transect 19 | An ADCP observation where the instrument points downward from the surface, and moves horizontally to generate measures of velocity from different locations in the body of water. 20 | -------------------------------------------------------------------------------- /doc/source/transect_average.rst: -------------------------------------------------------------------------------- 1 | 2 | Installation 3 | ================ 4 | 5 | Supported versions 6 | ------------------ 7 | 8 | Prerequisites 9 | ------------- 10 | Here are the prerequisites. 11 | 12 | Installation 13 | ------------ 14 | 15 | Configuration considerations 16 | ---------------------------- 17 | 18 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages # Always prefer setuptools over distutils 2 | from codecs import open # To use a consistent encoding 3 | from os import path 4 | 5 | here = path.abspath(path.dirname(__file__)) 6 | 7 | # Get the long description from the relevant file 8 | #with open(path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f: 9 | # long_description = f.read() 10 | 11 | setup( 12 | name='ADCPy', 13 | 14 | # Versions should comply with PEP440. For a discussion on single-sourcing 15 | # the version across setup.py and the project code, see 16 | # http://packaging.python.org/en/latest/tutorial.html#version 17 | version='0.1.1', 18 | 19 | description='ADCPy: Tools for ADCP processing', 20 | #long_description=long_description, 21 | 22 | # The project's main homepage.
23 | url='https://github.com/esatel/ADCPy', 24 | 25 | # Author details 26 | author='California Department of Water Resources', 27 | author_email='Eli.Ateljevich@water.ca.gov', 28 | 29 | # Choose your license 30 | license='MIT', 31 | 32 | # See https://pypi.python.org/pypi?%3Aaction=list_classifiers 33 | classifiers=[ 34 | # How mature is this project? Common values are 35 | # 3 - Alpha 36 | # 4 - Beta 37 | # 5 - Production/Stable 38 | 'Development Status :: 3 - Alpha', 39 | 40 | # Indicate who your project is intended for 41 | 'Intended Audience :: Oceanographers, Field technicians', 42 | 'Topic :: Velocity measurement and analysis, Hydrodynamics', 43 | 44 | # Pick your license as you wish (should match "license" above) 45 | 'License :: OSI Approved :: MIT License', 46 | 47 | # Specify the Python versions you support here. In particular, ensure 48 | # that you indicate whether you support Python 2, Python 3 or both. 49 | 'Programming Language :: Python :: 2.7', 50 | ], 51 | 52 | # What does your project relate to? 53 | keywords='Acoustic Doppler Current Profiler, Hydrodynamics', 54 | 55 | # You can just specify the packages manually here if your project is 56 | # simple. Or you can use find_packages(). 57 | packages=['adcpy'], 58 | #find_packages(exclude=['contrib', 'docs', 'tests*']), 59 | 60 | # List run-time dependencies here. These will be installed by pip when your 61 | # project is installed. For an analysis of "install_requires" vs pip's 62 | # requirements files see: 63 | # https://packaging.python.org/en/latest/technical.html#install-requires-vs-requirements-files 64 | install_requires=['numpy>1.7', 'scipy>=0.11.0', 'matplotlib>=1.2', 'gdal', 'netcdf4'], 65 | 66 | # List additional groups of dependencies here (e.g. development dependencies). 
67 | # You can install these using the following syntax, for example: 68 | # $ pip install -e .[dev,test] 69 | #extras_require = { 70 | # 'dev': ['check-manifest'], 71 | # 'test': ['coverage'], 72 | #}, 73 | 74 | # If there are data files included in your packages that need to be 75 | # installed, specify them here. If using Python 2.6 or less, then these 76 | # have to be included in MANIFEST.in as well. 77 | #package_data={ 78 | # 'sample': ['package_data.dat'], 79 | #}, 80 | 81 | # Although 'package_data' is the preferred approach, in some case you may 82 | # need to place data files outside of your packages. 83 | # see http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files 84 | # In this case, 'data_file' will be installed into '/my_data' 85 | #data_files=[('my_data', ['data/data_file'])], 86 | 87 | # To provide executable scripts, use entry points in preference to the 88 | # "scripts" keyword. Entry points provide cross-platform support and allow 89 | # pip to create the appropriate form of executable for the target platform. 90 | entry_points={ 91 | 'console_scripts': [ 92 | 'transect_average=transect_average:main', 93 | ], 94 | }, 95 | ) 96 | --------------------------------------------------------------------------------