├── .gitignore ├── D800_res.py ├── README.md ├── agisoft_all.py ├── bh_plot.py ├── cam_comparison_planning.py ├── dji_alt_adjust.py ├── dji_relalt2hae.sh ├── emlid_survey_update.py ├── exif2gpslog.sh ├── exif_msl2hae.sh ├── flytrex2gpslog.sh ├── flytrex_batch.sh ├── fmt ├── gpx.fmt └── kml.fmt ├── gps_msl2hae_csv.py ├── gpslog2exif.sh ├── mavic_proc.sh ├── movie_preview.sh ├── nex5_nadir.py ├── old ├── import_D800.csh ├── import_timelapse.sh ├── process_D800.sh └── timelapse.sh ├── px4_dflog_gps2utc.py ├── px4_get_sdlog.sh ├── px4_tlog_gps2utc.sh ├── rtklib_pos_stats.py ├── sdlog2_dump.py ├── sdlog2_dump_solo.py ├── sfm_shp2exif.csh └── solo_getlogs.sh /.gitignore: -------------------------------------------------------------------------------- 1 | *.py[cod] 2 | 3 | # C extensions 4 | *.so 5 | 6 | # Packages 7 | *.egg 8 | *.egg-info 9 | dist 10 | build 11 | eggs 12 | parts 13 | bin 14 | var 15 | sdist 16 | develop-eggs 17 | .installed.cfg 18 | lib 19 | lib64 20 | 21 | # Installer logs 22 | pip-log.txt 23 | 24 | # Unit test / coverage reports 25 | .coverage 26 | .tox 27 | nosetests.xml 28 | 29 | # Translations 30 | *.mo 31 | 32 | # Mr Developer 33 | .mr.developer.cfg 34 | .project 35 | .pydevproject 36 | -------------------------------------------------------------------------------- /D800_res.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | 3 | #David Shean 4 | #dshean@gmail.com 5 | #3/1/13 6 | 7 | #Script to compute resolution and fov for Nikon D800 for variable focal length and range 8 | 9 | import sys 10 | import numpy 11 | import matplotlib.pyplot as plt 12 | 13 | x_mm = 35.9 14 | y_mm = 24 15 | x_px = 7360 16 | y_px = 4912 17 | 18 | diag_mm = numpy.sqrt(x_mm**2 + y_mm**2) 19 | diag_px = numpy.sqrt(x_px**2 + y_px**2) 20 | 21 | def calcfov(f): 22 | return 2*numpy.arctan2(diag_mm, (2*f)) 23 | 24 | def calcres(alt, fov): 25 | #Convert feet to m 26 | res = 2*alt*numpy.tan(fov/2)/diag_px 27 | return res/numpy.cos(numpy.radians(offnadir)) 28 | 29 | def plotgen(alt_list, f_list): 30 | alt_range = numpy.arange(alt_list[0], alt_list[-1], 100) 31 | for f in f_list: 32 | fov = calcfov(f) 33 | res = calcres(alt_range, fov) 34 | x_gd = numpy.array(res)*x_px 35 | plt.figure(1) 36 | plt.plot(alt_range/0.3048, res*100, label='%i mm' % f) 37 | plt.figure(2) 38 | plt.plot(alt_range/0.3048, x_gd, label='%i mm' % f) 39 | 40 | plt.figure(1) 41 | plt.xlabel('Distance (ft)') 42 | plt.ylabel('Resolution (cm)') 43 | plt.legend(loc=2) 44 | 45 | plt.figure(2) 46 | plt.xlabel('Distance (ft)') 47 | plt.ylabel('X field of view (m)') 48 | plt.legend(loc=2) 49 | 50 | plt.show() 51 | 52 | offnadir = 0 53 | alt_list = numpy.array([500, 1500, 3000, 6000, 12000, 24000]) 54 | alt_list *= 0.3048 55 | f_list = numpy.array([16.0, 28.0, 50.0, 85.0, 300.0]) 56 | 57 | plotgen(alt_list, f_list) 58 | 59 | sys.exit() 60 | 61 | print offnadir, "degrees off-nadir" 62 | for alt in alt_list: 63 | for f in f_list: 64 | fov = calcfov(f) 65 | res = calcres(alt, fov) 66 | x_gd = res*x_px 67 | print "alt: %s', fl: %0i mm, fov: %0.1f deg, res: %0.1f cm, x_dist: %0.1f m" % (alt, f, numpy.degrees(fov), res*100, x_gd) 68 | #print alt_m, x_gd 69 | #print "alt: %s' fl: %0i mm res: %0.1f cm" % (alt, f, res*100) 70 | print 71 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # sfm_tools 2 | A collection of tools to geotag and process photos for Structure 
from Motion (SfM)
3 | 
4 | ## Most useful/mature tools:
5 | * `cam_comparison_planning.py` - comparison of different cameras at different altitudes (FOV, pixel ground resolution, etc.)
6 | * `agisoft_all.py` - automated Agisoft PhotoScanPro workflow using the Agisoft Python API (written for an older version, not tested with recent releases)
7 | * `exif2gpslog.sh` - read EXIF data for a directory of photos and generate a GPS log (csv, shp, or gpx)
8 | * `exif_msl2hae.sh` and `gps_msl2hae_csv.py` - convert the default EXIF height above geoid (MSL, mean sea level) to height above ellipsoid (WGS84)
9 | * `gpslog2exif.sh` - update EXIF positions based on an external GPS log (e.g., log from Pixhawk4/Flytrex)
10 | * Tools to pull GPS positions from Pixhawk4/Flytrex/Solo data logs, convert GPS week/seconds to UTC, etc.
11 | 
12 | ## Dependencies:
13 | * Several scripts rely on Phil Harvey's excellent [exiftool](http://www.sno.phy.queensu.ca/~phil/exiftool/)
14 | * GPX output currently uses [gpsbabel](http://www.gpsbabel.org/)
15 | * Some shell scripts require [GDAL/OGR](http://www.gdal.org/) command line tools
16 | * Some Python tools require numpy, matplotlib, and other Python libraries (e.g., [pygeotools](https://github.com/dshean/pygeotools))
17 | 
18 | ## Disclaimer:
19 | Most of these were written for one-time projects with my UAV/camera hardware, which has evolved since 2013. Many scripts still include hardcoded paths. There are likely cleaner, more elegant ways to do much of this by now. These days, many commercial UAV options/apps (e.g. DJI platforms, SenseFly platforms, 3DR SiteScan) will do much of this automatically.
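
## Example: MSL to HAE conversion

The geoid tools above all reduce to a single site-dependent offset: height above ellipsoid (HAE) = height above geoid (MSL) + geoid offset. A minimal sketch (the -23.75 m value is the approximate geoid offset near Seattle noted in `dji_alt_adjust.py`; look up the value for your own site, e.g. via the NGS geoid service referenced in that script):

```python
#Minimal sketch of the MSL <-> HAE conversion these tools perform
#geoid_offset = h_ellipsoid - H_geoid (negative where the geoid sits below the ellipsoid)
geoid_offset = -23.75

def msl2hae(h_msl, geoid_offset=geoid_offset):
    return h_msl + geoid_offset

def hae2msl(h_hae, geoid_offset=geoid_offset):
    return h_hae - geoid_offset

print(msl2hae(100.0))  #76.25 m HAE for a 100 m MSL reading near Seattle
```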
--------------------------------------------------------------------------------
/agisoft_all.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | 
3 | #David Shean
4 | #dshean@gmail.com
5 | #8/22/14
6 | 
7 | #Script for Agisoft PhotoScanPro workflow
8 | #Based on API v1.0.0, Python 3.3
9 | #Comments include notes for more advanced functionality
10 | 
11 | #See following forum discussions:
12 | #http://www.agisoft.ru/forum/index.php?topic=2263.0
13 | #http://www.agisoft.ru/forum/index.php?topic=1881.0
14 | 
15 | import os
16 | import glob
17 | import PhotoScan
18 | import sys
19 | 
20 | #Need to set the following appropriately
21 | 
22 | #Path to photos
23 | #photo_fn_path = "/tmp/export"
24 | photo_fn_path = "/Volumes/SHEAN_PHOTO/photo/20140825_MammothTerraces_SfM/export_orig"
25 | photo_fn_ext = "*.jpg"
26 | #Path to ground control file, can contain orientation
27 | gc_fn = "/tmp/gcp.txt"
28 | #Path to calibration file
29 | cal_fn = "/tmp/D800_cal.xml"
30 | #This is the base fn for output files
31 | #out_fn = "/tmp/test"
32 | out_fn = os.path.join(photo_fn_path, "test")
33 | 
34 | #Define input coordinate system as WGS84
35 | in_crs = PhotoScan.CoordinateSystem()
36 | in_crs.init("EPSG::4326")
37 | #Define output coordinate system as projected UTM, WGS84
38 | out_crs = PhotoScan.CoordinateSystem()
39 | #UTM 10N
40 | #out_crs.init("EPSG::32610")
41 | #UTM 12N (Yellowstone)
42 | out_crs.init("EPSG::32612")
43 | 
44 | #Add timestamp
45 | print("Started")
46 | 
47 | #Create project file
48 | doc = PhotoScan.app.document
49 | 
50 | #***
51 | #NOTE: the following (loading photos, ground control, calibration, etc.) can be done manually
52 | #***
53 | 
54 | #Load photos
55 | new_chunk = PhotoScan.Chunk()
56 | new_chunk.label = "chunk1"
57 | 
58 | photo_fn_list = glob.glob(os.path.join(photo_fn_path, photo_fn_ext))
59 | for photo_fn in photo_fn_list:
60 |     new_chunk.cameras.add(photo_fn)
61 | 
62 | #Import ground control
63 | gc = new_chunk.ground_control
64 | gc.projection = in_crs
65 | #Load EXIF data from photos
66 | gc.loadExif()
67 | #Alternatively, load csv containing file names and coordinates for photos
68 | #gc.load(gc_fn, "csv")
69 | #Set accuracy of camera positions in meters
70 | #GeoXH
71 | gc.accuracy_cameras = 0.5
72 | #Nikon GP-1
73 | #gc.accuracy_cameras = 5.0
74 | gc.apply()
75 | 
76 | #Import calibration
77 | #cal = PhotoScan.Calibration(cal_fn)
78 | #new_chunk.calibration_mode('fixed')
79 | 
80 | #This adds the chunk to the project
81 | doc.chunks.add(new_chunk)
82 | 
83 | #Update the GUI
84 | PhotoScan.app.update()
85 | doc.save(out_fn + "_init.psz")
86 | 
87 | #***
88 | #NOTE: end of section with steps that can be accomplished manually
89 | #***
90 | 
91 | #Grab the active chunk
92 | chunk = doc.activeChunk
93 | 
94 | #Align photos
95 | print("Aligning photos")
96 | chunk.matchPhotos(accuracy="high", preselection="disabled")
97 | #Use ground control if appropriate for input photos
98 | #chunk.matchPhotos(accuracy="high", preselection="ground control")
99 | chunk.alignPhotos()
100 | PhotoScan.app.update()
101 | doc.save(out_fn + "_sparse.psz")
102 | 
103 | #NOTE: Adjust bounding box here
104 | #NOTE: markers should be manually identified here
105 | 
106 | #Build Dense Cloud
107 | print("Building dense cloud")
108 | ncpu = 24
109 | #Not sure about gpu_mask value here, if both cards will be enabled
110 | #Says value 5 enables device number 0 and 2
111 | chunk.buildDenseCloud(quality="medium", filter="mild", gpu_mask=3, cpu_cores_inactive=ncpu)
112 | PhotoScan.app.update()
113 | doc.save(out_fn + "_dense.psz")
114 | 
115 | #NOTE: Edit dense cloud
116 | 
117 | #Build Mesh
118 | #NOTE: want to do this both with and without interpolation, export DEM for both
119 | print("Building mesh")
120 | chunk.buildModel(object="arbitrary", source="dense", interpolation="disabled", faces="high")
121 | PhotoScan.app.update()
122 | doc.save(out_fn + "_mesh_nointerp.psz")
123 | 
124 | #Want to test this smoothing - could help with TIN mesh artifacts
125 | #chunk.smoothModel()
126 | 
127 | #Build Texture
128 | #chunk.buildTexture(mapping="generic", blending="average", width=2048, height=2048)
129 | 
130 | #Export DEM
131 | #Should automatically compute appropriate resolution/extent
132 | print("Exporting DEM")
133 | dem_fn = out_fn + "_dem.tif"
134 | chunk.exportDem(dem_fn, format="tif", projection=out_crs)
135 | 
136 | #Export ortho
137 | print("Exporting orthomosaic")
138 | ortho_fn = out_fn + "_ortho.tif"
139 | chunk.exportOrthophoto(ortho_fn, format="tif", blending="average", projection=out_crs)
140 | 
141 | #Export point cloud
142 | print("Exporting point cloud")
143 | #Export las or xyz format
144 | pc_type = "las"
145 | #pc_type = "xyz"
146 | pc_fn = out_fn + "_dense_wgs84." + pc_type
147 | #Export WGS84 point cloud
148 | chunk.exportPoints(pc_fn, dense=True, precision=7, format=pc_type, projection=in_crs)
149 | #Export projected point cloud
150 | #For coord in meters, default precision of 6 is overkill
151 | pc_fn = out_fn + "_dense_proj." + pc_type
152 | chunk.exportPoints(pc_fn, dense=True, precision=3, format=pc_type, projection=out_crs)
153 | 
154 | #Add timestamp
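#(Added note) The "Add timestamp" comments here and at the top of the script are
#unimplemented placeholders; a minimal sketch using only the standard library
#would be:
#
#    import datetime
#    print("Finished: %s" % datetime.datetime.now().isoformat())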
155 | print("Finished")
156 | 
--------------------------------------------------------------------------------
/bh_plot.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | 
3 | import numpy as np
4 | import matplotlib.pyplot as plt
5 | 
6 | def conv2bh(conv):
7 |     return 2*np.tan(np.radians(conv/2.))
8 | 
9 | def bh2conv(bh):
10 |     #Inverse of conv2bh: convergence angle (deg) from base-to-height ratio
11 |     return 2*np.degrees(np.arctan(bh/2.0))
12 | 
13 | bh_range = np.arange(0.5, 2.0, 0.25)
14 | hb_range = 1.0/bh_range
15 | conv_range = bh2conv(bh_range)
16 | iconv_range = np.arange(10, 70, 10)
17 | ibh_range = conv2bh(iconv_range)
18 | 
19 | b_error = np.array([0.01, 0.1, 1.0, 10.0, 100.0])
20 | #h_error = b_error * hb_range
21 | 
22 | plt.figure()
23 | plt.plot(bh_range, conv_range)
24 | plt.xlabel("Base to height ratio")
25 | plt.ylabel("Convergence angle (deg)")
26 | plt.figure()
27 | plt.plot(iconv_range, ibh_range)
28 | plt.ylabel("Base to height ratio")
29 | plt.xlabel("Convergence angle (deg)")
30 | plt.figure()
31 | plt.ylabel("Vertical error (m)")
32 | plt.xlabel("Horizontal offset (m)")
33 | for bh in bh_range:
34 |     plt.plot(b_error, b_error/bh, label='B/H:%0.2f (%0.1f deg)' % (bh, bh2conv(bh)))
35 | plt.gca().set_aspect('equal')
36 | plt.legend()
37 | plt.show()
--------------------------------------------------------------------------------
/cam_comparison_planning.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | 
3 | #David Shean
4 | #dshean@gmail.com
5 | #3/1/13
6 | 
7 | #Script to compare resolution and fov for several cameras over a range of focal lengths and altitudes
8 | 
9 | #Add weight
10 | #Add MicaSense RedEdge
11 | #Add diag fov plot
12 | 
13 | import sys
14 | import numpy as np
15 | import matplotlib.pyplot as plt
16 | from itertools import cycle
17 | 
18 | def calcfov(cam, f):
19 |     fov = 2*np.arctan2(cam['diag_mm'], (2*f))
20 |     #print np.degrees(fov)
21 |     return fov
22 | 
23 | def calcres(cam, alt, fov, offnadir=0):
24 |     res = 2*alt*np.tan(fov/2)/cam['diag_px']
25 |     #return res/np.cos(np.radians(offnadir))
26 |     return res/np.cos(offnadir)
27 | 
28 | def plotgen(cam, alt_range):
29 |     c = next(colorcycler)
30 |     linecycler = cycle(lines)
31 |     if alt_units == 'ft':
32 |         alt_scale = 0.3048
33 |     else:
34 |         alt_scale = 1.0
35 |     for f in cam['f_list']:
36 |         fov = calcfov(cam, f)
37 |         res_center = calcres(cam, alt_range, fov, offnadir=0)
38 |         res_corner = calcres(cam, alt_range, fov, offnadir=fov/2.)
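#(Added note) calcres returns the ground sample distance at nadir from similar
#triangles: GSD = 2*alt*tan(fov/2)/diag_px. Passing offnadir=fov/2 applies a
#first-order 1/cos(offnadir) slant-range correction, approximating the coarser
#GSD at the image corner; offnadir must be in radians here (see the commented
#np.radians line in calcres).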
39 | x_gd = np.array(res_center)*cam['x_px'] 40 | y_gd = np.array(res_center)*cam['y_px'] 41 | #diag_gd = np.array(res_center)*cam['diag_px'] 42 | diag_gd = np.sqrt(x_gd**2 + y_gd**2) 43 | gd_area = x_gd * y_gd 44 | #ls = next(linecycler) 45 | ls = '-' 46 | plt.figure(1) 47 | plt.plot(alt_range/alt_scale, res_center*100, color=c, ls=ls, label='%s, %0.1f mm (%0.1f$^\circ$, %0.1f mm eq) Center' % (cam['name'],f,np.degrees(fov),f*cam['crop_factor'])) 48 | #plt.plot(alt_range/alt_scale, res_corner*100, color=c, ls='--', label='%s, %0.1f mm (%0.1f$^\circ$, %0.1f mm eq) Corner' % (cam['name'],f,np.degrees(fov),f*cam['crop_factor'])) 49 | plt.figure(2) 50 | plt.plot(alt_range/alt_scale, x_gd, color=c, ls=ls, label='%s, %0.1f mm (%0.1f$^\circ$, %0.1f mm eq)' % (cam['name'],f,np.degrees(fov),f*cam['crop_factor'])) 51 | plt.figure(3) 52 | plt.plot(alt_range/alt_scale, y_gd, color=c, ls=ls, label='%s, %0.1f mm (%0.1f$^\circ$, %0.1f mm eq)' % (cam['name'],f,np.degrees(fov),f*cam['crop_factor'])) 53 | plt.figure(4) 54 | plt.plot(alt_range/alt_scale, diag_gd, color=c, ls=ls, label='%s, %0.1f mm (%0.1f$^\circ$, %0.1f mm eq)' % (cam['name'],f,np.degrees(fov),f*cam['crop_factor'])) 55 | plt.figure(5) 56 | plt.plot(alt_range/alt_scale, gd_area, color=c, ls=ls, label='%s, %0.1f mm (%0.1f$^\circ$, %0.1f mm eq)' % (cam['name'],f,np.degrees(fov),f*cam['crop_factor'])) 57 | 58 | offnadir = 0 59 | #Altitude range in feet 60 | #alt_units = 'ft' 61 | #alt_list = np.arange(0, 401, 10) 62 | #if alt_units == 'ft': 63 | # alt_list *= 0.3048 64 | #Altitude range in meters 65 | alt_units = 'm' 66 | alt_list = np.arange(0, 122, 4) 67 | 68 | d800 = {'name':'D800', 'weight':1193, 'x_mm':35.9, 'y_mm':24.0, 'x_px':7360, 'y_px':4912, 'f_list': (50.0,) } 69 | #NEX5 = {'name':'NEX-5', 'x_mm':23.4, 'y_mm':15.6, 'x_px':4912, 'y_px':3264, 'f_list': (16.0, 20.0) } 70 | NEX5 = {'name':'NEX-5', 'weight':338, 'x_mm':23.4, 'y_mm':15.6, 'x_px':4912, 'y_px':3264, 'f_list': (20.0,) } 71 | #a5000 = {'name':'a5000', 'weight':338, 'x_mm':23.4, 'y_mm':15.6, 'x_px':5456, 'y_px':3632, 'f_list': (16.0, 20.0) } 72 | a5000 = {'name':'a5000', 'weight':338, 'x_mm':23.4, 'y_mm':15.6, 'x_px':5456, 'y_px':3632, 'f_list': (20.0,) } 73 | #rx100 = {'name':'rx100_III', 'x_mm':13.2, 'y_mm':8.8, 'x_px':5472, 'y_px':3648, 'f_list': (8.8, 25.7) } 74 | rx100 = {'name':'RX100_III', 'weight':287, 'x_mm':13.2, 'y_mm':8.8, 'x_px':5472, 'y_px':3648, 'f_list': (8.8,) } 75 | #gx1 = {'name':'GX1', 'weight':418, 'x_mm':17.3, 'y_mm':13.0, 'x_px':4592, 'y_px':3448, 'f_list': (14.0, 20.0) } 76 | gx1 = {'name':'GX1', 'weight':418, 'x_mm':17.3, 'y_mm':13.0, 'x_px':4592, 'y_px':3448, 'f_list': (20.0,) } 77 | s100 = {'name':'s100', 'weight':173, 'x_mm':7.6, 'y_mm':5.7, 'x_px':4000, 'y_px':3000, 'f_list': (5.2,) } 78 | gopro12 = {'name':'gopro_12MP', 'weight':74, 'x_mm':6.17, 'y_mm':4.55, 'x_px':4000, 'y_px':3000, 'f_list': (2.77,) } 79 | gopro7 = {'name':'gopro_7MP', 'weight':74, 'x_mm':4.6275, 'y_mm':3.4125, 'x_px':3000, 'y_px':2250, 'f_list': (2.77,) } 80 | #the 'x_mm' tag was inverted from 8 cm/px resolution at 120 m altitude 81 | #alt=120 82 | #x_px=1280 83 | #x_gd=0.08 84 | #f=4.2 85 | #x_mm=2*f*np.tan(np.arctan(0.5*(x_px*x_gd)/alt)) 86 | #y_mm=2*f*np.tan(np.arctan(0.5*(y_px*x_gd)/alt)) 87 | micasense = {'name':'rededge', 'weight':150, 'x_mm':3.584, 'y_mm':2.688, 'x_px':1280, 'y_px':960, 'f_list':(4.2,)} 88 | 89 | 90 | cams = [micasense, d800, NEX5, a5000, gx1, rx100, s100, gopro12, gopro7] 91 | #cams = [d800, NEX5, s100, gopro12, gopro7] 92 | cams = cams[::-1] 93 | 94 | 
lines = ["-","--","-.",":"] 95 | colors = ['r','b','g','orange','y','c','m','k','0.5'] 96 | colors = colors[::-1] 97 | colorcycler = cycle(colors) 98 | diag_px_35mm = 43.3 99 | 100 | for cam in cams: 101 | cam['diag_mm'] = np.sqrt(cam['x_mm']**2 + cam['y_mm']**2) 102 | cam['diag_px'] = np.sqrt(cam['x_px']**2 + cam['y_px']**2) 103 | cam['pitch'] = cam['x_mm']/cam['x_px'] 104 | cam['crop_factor'] = diag_px_35mm/cam['diag_mm'] 105 | 106 | plt.figure(0) 107 | plt.title('Camera Pixel Pitch (sensor pixel size)') 108 | plt.scatter(range(0,len(cams)), [(cam['pitch']*1000.0)**2 for cam in cams], color=colors) 109 | plt.xticks(range(0,len(cams)), [cam['name'] for cam in cams]) 110 | plt.ylabel(r'Pixel Area $(\mu m^2)$') 111 | plt.gca().tick_params(axis='x', labelsize=8) 112 | plt.savefig('pixel_pitch.pdf') 113 | 114 | for cam in cams: 115 | plotgen(cam, alt_list) 116 | 117 | plt.figure(1) 118 | plt.grid(b=True, which='major', color='k', linestyle=':', linewidth=0.2) 119 | #plt.grid(b=True, which='minor', color='k', linestyle=':', linewidth=0.2) 120 | plt.gca().minorticks_on() 121 | plt.title('Camera Altitude vs. Ground Sample Distance (best possible pixel res)') 122 | plt.xlabel('Altitude (%s)' % alt_units) 123 | plt.ylabel('GSD (cm)') 124 | plt.legend(loc=2,prop={'size':8}) 125 | 126 | plt.figure(2) 127 | plt.grid(b=True, which='major', color='k', linestyle=':', linewidth=0.2) 128 | #plt.grid(b=True, which='minor', color='k', linestyle=':', linewidth=0.2) 129 | plt.gca().minorticks_on() 130 | plt.title('Camera Altitude vs. Image Width (on ground)') 131 | plt.xlabel('Altitude (%s)' % alt_units) 132 | plt.ylabel('X field of view (m)') 133 | plt.legend(loc=2,prop={'size':8}) 134 | 135 | plt.figure(3) 136 | plt.grid(b=True, which='major', color='k', linestyle=':', linewidth=0.2) 137 | #plt.grid(b=True, which='minor', color='k', linestyle=':', linewidth=0.2) 138 | plt.gca().minorticks_on() 139 | plt.title('Camera Altitude vs. Image Height (on ground)') 140 | plt.xlabel('Altitude (%s)' % alt_units) 141 | plt.ylabel('Y field of view (m)') 142 | plt.legend(loc=2,prop={'size':8}) 143 | 144 | plt.figure(4) 145 | plt.grid(b=True, which='major', color='k', linestyle=':', linewidth=0.2) 146 | #plt.grid(b=True, which='minor', color='k', linestyle=':', linewidth=0.2) 147 | plt.gca().minorticks_on() 148 | plt.title('Camera Altitude vs. Image Diag (on ground)') 149 | plt.xlabel('Altitude (%s)' % alt_units) 150 | plt.ylabel('Diag field of view (m)') 151 | plt.legend(loc=2,prop={'size':8}) 152 | 153 | plt.figure(5) 154 | plt.grid(b=True, which='major', color='k', linestyle=':', linewidth=0.2) 155 | #plt.grid(b=True, which='minor', color='k', linestyle=':', linewidth=0.2) 156 | plt.gca().minorticks_on() 157 | plt.title('Camera Altitude vs. Image Area (on ground)') 158 | plt.xlabel('Altitude (%s)' % alt_units) 159 | plt.ylabel('Image area (m^2)') 160 | plt.legend(loc=2,prop={'size':8}) 161 | 162 | plt.figure(6) 163 | plt.title('Camera Weight (inc. lens+battery+card)') 164 | plt.scatter(range(0,len(cams)), [cam['weight'] for cam in cams], color=colors) 165 | plt.xticks(range(0,len(cams)), [cam['name'] for cam in cams]) 166 | plt.ylabel('Mass (g)') 167 | plt.gca().tick_params(axis='x', labelsize=8) 168 | plt.savefig('cam_weight.pdf') 169 | 170 | plt.figure(7) 171 | plt.title('Camera Pixel Area vs. 
Weight') 172 | plt.scatter(range(0,len(cams)), [(cam['pitch']*1000.0)**2/cam['weight'] for cam in cams], color=colors) 173 | plt.xticks(range(0,len(cams)), [cam['name'] for cam in cams]) 174 | plt.ylabel(r'Pixel Area $(\mu m^2)$ per g') 175 | plt.gca().tick_params(axis='x', labelsize=8) 176 | plt.savefig('pitch_weight_ratio.pdf') 177 | 178 | plt.figure(1) 179 | plt.savefig('alt_vs_gsd.pdf') 180 | plt.figure(2) 181 | plt.savefig('alt_vs_xfov.pdf') 182 | plt.figure(3) 183 | plt.savefig('alt_vs_yfov.pdf') 184 | plt.figure(4) 185 | plt.savefig('alt_vs_dfov.pdf') 186 | plt.figure(5) 187 | plt.savefig('alt_vs_area.pdf') 188 | 189 | #plt.show() 190 | sys.exit() 191 | -------------------------------------------------------------------------------- /dji_alt_adjust.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | 3 | """ 4 | David Shean 5 | dshean@gmail.com 6 | 7 | This script will fix issues with EXIF Altitude geotags for images acquired with DJI Phantom 8 | Necessary for standard SfM workflows with software that reads EXIF data (Agisoft PhotoScanPro, Pix4D) 9 | 10 | The EXIF AbsoluteAltitude and GPSAltitude tags should be relative to mean sea level (MSL, using EGM96 geoid). 11 | 12 | For whatever reason, with DJI platforms, these values are way off, in some cases >100 m off 13 | The error is way too large for typical GNSS vertical error, and is likely a DJI bug (sigh) 14 | Forums suggest this is also an issue for DJI Inspire. 15 | This was not issue with Mavic Pro during tests in 2018, but is an issue in 2019. 16 | 17 | The RelativeAltitude (using barometer) is a much more precise altitude relative to the home point (where RelativeAltitude is 0.0) 18 | If we have a known absolute altitude for the home point (from GCPs or accurate AbsoluteAltitude), we can then use the RelativeAltitude values to update the AbsoluteAltitude for each image 19 | This script creates a copy of original images ("modified" subdirectory), and updates the GPSAltitude tags 20 | 21 | Currently requires PyExifTool: 22 | pip install ocrd-pyexiftool 23 | 24 | which in turn, requires that exiftool is installed and available on your $PATH 25 | 26 | Should migrate to exifread instead of exiftool 27 | """ 28 | 29 | import os 30 | import sys 31 | import glob 32 | import shutil 33 | import argparse 34 | 35 | import requests 36 | 37 | import exiftool 38 | 39 | #Start subprocess with exiftool 40 | et = exiftool.ExifTool() 41 | et.start() 42 | 43 | #Returns dictionary with relevant EXIF tags 44 | def get_metadata(fn, et=et): 45 | tags = ['EXIF:GPSLatitude', 'EXIF:GPSLongitude', 'EXIF:GPSLatitudeRef', 'EXIF:GPSLongitudeRef', \ 46 | 'EXIF:GPSAltitude', 'EXIF:GPSAltitudeRef', 'XMP:AbsoluteAltitude', 'XMP:RelativeAltitude'] 47 | metadata = et.get_tags(tags, fn) 48 | #Convert to positive east longitude 49 | if metadata['EXIF:GPSLongitudeRef'] == "W": 50 | metadata['EXIF:GPSLongitude'] *= -1 51 | if metadata['EXIF:GPSLatitudeRef'] == "S": 52 | metadata['EXIF:GPSLatitude'] *= -1 53 | print(metadata) 54 | return metadata 55 | 56 | #Get approximate elevation MSL from 10-m NED 57 | def get_NED(lon, lat): 58 | url = 'https://nationalmap.gov/epqs/pqs.php?x=%.8f&y=%.8f&units=Meters&output=json' % (lon, lat) 59 | r = requests.get(url) 60 | out = None 61 | if r.status_code == 200: 62 | out = r.json()['USGS_Elevation_Point_Query_Service']['Elevation_Query']['Elevation'] 63 | print("USGS elevation MSL: %0.2f" % out) 64 | return out 65 | 66 | #Get approximate elevation MSL from Open 
Elevation API 67 | def get_OpenElevation(lon, lat): 68 | url = 'https://api.open-elevation.com/api/v1/lookup?locations=%0.8f,%0.8f' % (lat, lon) 69 | r = requests.get(url) 70 | out = None 71 | if r.status_code == 200: 72 | out = r.json()['results'][0]['elevation'] 73 | print("Open Elevation MSL: %0.2f" % out) 74 | return out 75 | 76 | def get_MSL(lon,lat): 77 | out = get_NED(lon, lat) 78 | if out is None: 79 | out = get_OpenElevation(lon, lat) 80 | return out 81 | 82 | #https://www.ngs.noaa.gov/web_services/geoid.shtml 83 | def get_GeoidOffset(lon, lat): 84 | #Can specify model, 13 = GEOID12B 85 | url = 'https://geodesy.noaa.gov/api/geoid/ght?lat=%0.8f&lon=%0.8f' % (lat, lon) 86 | r = requests.get(url) 87 | out = None 88 | if r.status_code == 200: 89 | out = r.json()['geoidHeight'] 90 | print("NGS geoid offset: %0.2f" % out) 91 | return out 92 | 93 | #Can also query UNAVCO geoid offset calculator 94 | 95 | def update_gps_altitude(fn, home_elev): 96 | #tags = ['XMP:RelativeAltitude'] 97 | #metadata = et.get_tags(tags, fn) 98 | metadata = get_metadata(fn) 99 | 100 | relAlt = float(metadata['XMP:RelativeAltitude']) 101 | adjAlt = home_elev + relAlt 102 | 103 | #Update metadata 104 | etArg = ["-GPSAltitude=" + str(adjAlt),] 105 | etArg.append("-AbsoluteAltitude=" + str(adjAlt)) 106 | 107 | #Set altitude reference 108 | #1 is 'Below Sea Level'; 0 is 'Above Sea Level' 109 | if adjAlt >= 0.0: 110 | etArg.append("-GPSAltitudeRef=0") 111 | else: 112 | etArg.append("-GPSAltitudeRef=1") 113 | 114 | #Since we're modifying our own copy of originl, we don't need the default exiftool _original copy 115 | etArg.append("-overwrite_original") 116 | print(etArg) 117 | 118 | #pyexiftool execution requires binary string 119 | etArg_b = [str.encode(a) for a in etArg] 120 | f_b = str.encode(fn) 121 | etArg_b.append(f_b) 122 | et.execute(*etArg_b) 123 | 124 | #Check updated 125 | metadata = get_metadata(fn) 126 | 127 | def getparser(): 128 | cwd = os.getcwd() 129 | parser = argparse.ArgumentParser(description="Update incorrect GPS altitude for images acquired with DJI platforms", \ 130 | formatter_class=argparse.ArgumentDefaultsHelpFormatter) 131 | #Could make this optional with cwd, but good to have check 132 | parser.add_argument('img_dir', type=str, default=cwd, help='Directory containing images') 133 | parser.add_argument('-out_dir', type=str, default=None, help='Output directory (default is "modified" subdirectory)') 134 | parser.add_argument('-ext_list', type=str, nargs='+', default=['JPG',], help='Process files with these extensions') 135 | parser.add_argument('-out_elev_ref', type=str, default='MSL', choices=['MSL', 'HAE'], help='Output elevation reference') 136 | parser.add_argument('-home_HAE', type=float, default=None, \ 137 | help='Known home point elevation, meters height above ellipsoid') 138 | parser.add_argument('-home_MSL', type=float, default=None, \ 139 | help='Known home point elevation, meters height above geoid (mean sea level)') 140 | parser.add_argument('-geoid_offset', type=float, default=None, \ 141 | help='Known offset of geoid relative to ellipsoid (meters)') 142 | return parser 143 | 144 | #Known altitude of home point, meters above WGS84 ellipsoid 145 | #This comes from GCP near home point 146 | #Montlake Triangle 147 | #home_HAE = -1.0 148 | #IMA fields 149 | #home_HAE = -14.2 150 | #Beach at sea level 151 | #home_HAE = -22.7 152 | #Nooksack River Site 153 | #home_HAE = 72 154 | #Baker 155 | #home_HAE = 1642 156 | 157 | #Approx geoid offset for Benchmark #533 on UW Campus in 
Seattle, relative to ellipsoid 158 | #Note: geoid is below ellipsoid at this location 159 | #geoid_offset = -23.75 160 | 161 | def main(argv=None): 162 | parser = getparser() 163 | args = parser.parse_args() 164 | 165 | #Input directory containing images 166 | image_dir = args.img_dir 167 | 168 | if args.out_dir is not None: 169 | image_dir_mod = args.out_dir 170 | else: 171 | image_dir_mod = os.path.join(image_dir, 'modified') 172 | 173 | if not os.path.exists(image_dir_mod): 174 | os.makedirs(image_dir_mod) 175 | 176 | #This is the final reference elevation to use 177 | home_elev = None 178 | 179 | #Extract home point information - only need to do this once 180 | #Assume that first image, sorted alphanumerically, is near home point 181 | fn_list_orig = sorted(glob.glob(os.path.join(image_dir, '*.%s' % args.ext_list[0]))) 182 | print('\nGetting metadata for first image (assumed to be near home point)') 183 | home_metadata = get_metadata(fn_list_orig[0]) 184 | home_lon = home_metadata['EXIF:GPSLongitude'] 185 | home_lat = home_metadata['EXIF:GPSLatitude'] 186 | 187 | if args.out_elev_ref == 'MSL': 188 | #If input HAE is provided 189 | if args.home_HAE is not None: 190 | if args.geoid_offset is None: 191 | args.geoid_offset = get_GeoidOffset(home_lon, home_lat) 192 | args.home_MSL = args.home_HAE - args.geoid_offset 193 | else: 194 | if args.home_MSL is None: 195 | args.home_MSL = get_MSL(home_lon, home_lat) 196 | home_elev = args.home_MSL 197 | 198 | elif args.out_elev_ref == 'HAE': 199 | if args.home_HAE is None: 200 | if args.home_MSL is None: 201 | args.home_MSL = get_MSL(home_lon, home_lat) 202 | if args.geoid_offset is None: 203 | args.geoid_offset = get_GeoidOffset(home_lon, home_lat) 204 | args.home_HAE = args.home_MSL + args.geoid_offset 205 | home_elev = args.home_HAE 206 | 207 | print("Setting home point elevation to %0.2f m %s" % (home_elev, args.out_elev_ref)) 208 | 209 | for ext in args.ext_list: 210 | fn_list_orig = sorted(glob.glob(os.path.join(image_dir, '*.%s' % ext))) 211 | print("\nProcessing %s files" % ext) 212 | print("Creating copy of all files") 213 | for fn in fn_list_orig: 214 | if (os.path.isfile(fn)): 215 | if not os.path.exists(os.path.join(image_dir_mod, fn)): 216 | print(fn) 217 | shutil.copy2(fn, image_dir_mod) 218 | 219 | #Get list of files to modify 220 | fn_list_mod = sorted(glob.glob(os.path.join(image_dir_mod, '*.%s' % ext))) 221 | #Update EXIF:GPSAltitude to be the value of (XMP:RelativeAltitude + homePointAltitude) 222 | for fn in fn_list_mod: 223 | update_gps_altitude(fn, home_elev) 224 | et.terminate() 225 | 226 | if __name__ == "__main__": 227 | main() 228 | -------------------------------------------------------------------------------- /dji_relalt2hae.sh: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash 2 | 3 | exiftool *DNG | grep -i altitude 4 | 5 | #DJI images contain the following tags: 6 | #Absolute Altitude - elevation above msl 7 | #Relative Altitude - altitude about home point 8 | #GPS Altitude - altitude above home point 9 | #GPS Altitude Ref - "Above Sea Level" 10 | 11 | #Note that Relative Altitude and GPS altitude don't necessariliy agree 12 | #Relative Altitude is likely barometer/sonic 13 | 14 | #Need to compute elevation for each image above ellipsoid and replace the appropriate tags (read by Aigsoft/Pix4D) 15 | #Use Absolute Altitude 16 | #Or calculate home point absolute altitude (where relative altitude is 0), then add to all Relative Altitude tags 17 | #Convert to HAE 18 | -------------------------------------------------------------------------------- /emlid_survey_update.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | 3 | """ 4 | Update Emlid Reach Survey points with PPK position output from RTKLIB 5 | David Shean 6 | dshean@gmail.com 7 | Edited to fix Pandas datetime/Timestamp tz issues, and a few key changes likely based on Emlid updates 8 | """ 9 | 10 | import os 11 | import argparse 12 | import numpy as np 13 | import pandas as pd 14 | 15 | #Hack to update solution status 16 | def get_solution_status(Q): 17 | Q = np.round(Q) 18 | out = None 19 | if Q == 1.0: 20 | out = 'FIX' 21 | elif Q == 2.0: 22 | out = 'FLOAT' 23 | elif Q == 5.0: 24 | out = 'SINGLE' 25 | return out 26 | 27 | def getparser(): 28 | parser = argparse.ArgumentParser(description='Update Emlid Reach Survey points with \ 29 | PPK positions from RTKLIB') 30 | parser.add_argument('survey_pts_csv_fn', type=str, help='Survey point csv filename') 31 | parser.add_argument('ppk_pos_fn', type=str, help='PPK pos filename') 32 | return parser 33 | 34 | def main(): 35 | parser = getparser() 36 | args = parser.parse_args() 37 | 38 | survey_pts_csv_fn = args.survey_pts_csv_fn 39 | ppk_pos_fn = args.ppk_pos_fn 40 | #survey_pts_csv_fn = 'Lab 2.csv' 41 | #ppk_pos_fn = 'raw_201804061828_RINEX-2_11/test_20180412/ppk_ssho_cont/raw_201804061828.pos' 42 | 43 | print('Loading: %s' % survey_pts_csv_fn) 44 | survey_pts = pd.read_csv(survey_pts_csv_fn, parse_dates=[5,6], index_col=0) 45 | header = 'Date UTC latitude(deg) longitude(deg) height(m) Q ns sdn(m) sde(m) sdu(m) sdne(m) sdeu(m) sdun(m) age(s) ratio' 46 | print('Loading: %s' % ppk_pos_fn) 47 | ppk_pos = pd.read_csv(ppk_pos_fn, comment='%', delim_whitespace=True, names=header.split(), parse_dates=[[0,1]]) 48 | 49 | out_pt = [] 50 | print('Processing %i input points' % survey_pts.shape[0]) 51 | for index, pt in survey_pts.iterrows(): 52 | #Extract start/stop times for the point 53 | start = pt['Averaging start'] 54 | end = pt['Averaging end'] 55 | start = pd.to_datetime(start).tz_localize(None).tz_localize('US/Pacific').tz_convert('UTC').tz_localize(None) 56 | end = pd.to_datetime(end).tz_localize(None).tz_localize('US/Pacific').tz_convert('UTC').tz_localize(None) 57 | #Determine indices in ppk pos file for corresponding datetime 58 | ppk_pos_idx = (ppk_pos['Date_UTC'] >= start) & (ppk_pos['Date_UTC'] < end) 59 | #Pull out corresponding ppk positions 60 | pt_ppk_pos = ppk_pos[ppk_pos_idx] 61 | #Should check that pt['sample count'] == pt_ppk_pos.count()[0] 62 | 63 | #Compute statistics for pos 64 | pt_ppk_pos_mean = pt_ppk_pos.mean() 65 | pt_ppk_pos_std = pt_ppk_pos.std() 66 | #pt_ppk_pos_med = pt_ppk_pos.median() 67 | 68 | #Update fields for point 69 | pt['longitude'] = 
pt_ppk_pos_mean['longitude(deg)']
70 |         pt['latitude'] = pt_ppk_pos_mean['latitude(deg)']
71 |         pt['height'] = pt_ppk_pos_mean['height(m)']
72 | 
73 |         #Assume antenna height is not included in PPK solution pos output
74 |         pt['height'] -= pt['Antenna height']
75 | 
76 |         #Calculate mean solution status
77 |         #Could add a Q = 1 filter
78 |         pt['solution status'] = get_solution_status(pt_ppk_pos_mean['Q'])
79 | 
80 |         #Compute standard deviation of all positions at this point
81 |         #Should really convert to a local coord system, but this estimate works for now
82 |         lat_m = 111000
83 |         lon_m = np.cos(np.radians(pt['latitude']))*lat_m
84 |         pt['sde_samples'] = pt_ppk_pos_std['longitude(deg)'] * lon_m
85 |         pt['sdn_samples'] = pt_ppk_pos_std['latitude(deg)'] * lat_m
86 |         pt['sdu_samples'] = pt_ppk_pos_std['height(m)']
87 | 
88 |         #Compute mean of std values from input positions
89 |         pt['sde_mean'] = pt_ppk_pos_mean['sde(m)']
90 |         pt['sdn_mean'] = pt_ppk_pos_mean['sdn(m)']
91 |         pt['sdu_mean'] = pt_ppk_pos_mean['sdu(m)']
92 | 
93 |         out_pt.append(pt)
94 | 
95 |     out_df = pd.DataFrame(out_pt)
96 |     #Write out new file
97 |     out_fn = os.path.splitext(survey_pts_csv_fn)[0]+'_ppk_pos.csv'
98 |     print("Writing out: %s" % out_fn)
99 |     out_df.to_csv(out_fn)
100 | 
101 | if __name__ == "__main__":
102 |     main()
--------------------------------------------------------------------------------
/exif2gpslog.sh:
--------------------------------------------------------------------------------
1 | #! /bin/bash
2 | 
3 | #David Shean
4 | #dshean@gmail.com
5 | #11/4/13
6 | 
7 | #This will generate a shapefile using EXIF gps tags for photos in input directory
8 | #Requires ExifTool and GDAL/OGR
9 | 
10 | #!!!
11 | #NOTE: Nikon GP-1 GPS altitude is MSL, not WGS84 ellipsoid
12 | #Need to write HAE altitude back to original images
13 | #run exif_msl2hae.sh dir first
14 | #!!!
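#(Added note) A hedged example of the intended order of operations, per the
#NOTE above and the usage check below (hypothetical paths):
#   exif_msl2hae.sh photodir        #rewrite GPSAltitude tags from MSL to HAE
#   exif2gpslog.sh photodir gpslog  #then dump EXIF GPS tags to gpslog.csv/.shp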
15 | 
16 | #Nikon GP-1 accuracy is 10 m RMS (as stated in manual), assumed to be horizontal
17 | #Print UTC and local time, can use $Timezone tag
18 | 
19 | #See http://www.sno.phy.queensu.ca/~phil/exiftool/geotag.html
20 | 
21 | echo
22 | if [ $# -ne 2 ] ; then
23 |     echo "Usage is $0 photodir outname"
24 |     echo
25 |     exit 1
26 | fi
27 | 
28 | dir=$1
29 | out=${2%.*}
30 | 
31 | #Correct GPSAltitude from MSL to HAE
32 | msl2hae=false
33 | if $msl2hae ; then
34 |     echo "Correcting GPSAltitude from MSL to HAE"
35 |     #Note: this script contains a check to see if tags have already been corrected
36 |     exif_msl2hae.sh $dir
37 |     echo
38 | fi
39 | 
40 | #fmt=/Users/dshean/src/sfm/gpx.fmt
41 | #fmt=/Users/dshean/src/sfm/kml.fmt
42 | #exiftool -r -if '$gpsdatetime' -fileOrder gpsdatetime -p $fmt -d %Y-%m-%dT%H:%M:%SZ $dir/*.JPG > out.gpx
43 | 
44 | #-m ignores minor warnings and prints empty values to csv field
45 | #-r is recursive
46 | #-c is coordinate format, + for signed, decimal degrees
47 | #-n will force numerical output for all tags (could be used instead of -c above)
48 | #The # after a tag forces numerical output
49 | #-if conditionally processes input
50 | 
51 | #Define exiftool tags to be extracted
52 | #For some reason, the GPSAltitude tag is integer in output
53 | fmt_str='$FileName,$DateTimeOriginal,$SubSecDateTimeOriginal,$GPSDateTime,$GPSLatitude#,$GPSLongitude#,$GPSAltitude#,$GPSMapDatum,$LensID,$FocalLength#,$ShutterSpeed,$Aperture,$ISO,$FOV'
54 | 
55 | #Extract EXIF data to csv file
56 | #exiftool -progress -m -r -c '%.6f' -p "$fmt_str" $dir/*.JPG
57 | echo "Extracting GPS info for shp"
58 | echo $fmt_str | sed -e 's/\$//g' -e 's/\#//g' > $out.csv
59 | #Limit to specific extension (useful when raw and jpg in same directory)
60 | #exiftool -progress -if '$GPSDateTime' -fileOrder GPSDateTime -m -r -ext $ext -p "$fmt_str" $dir >> $out.csv
61 | echo exiftool -progress -if '$GPSDateTime' -fileOrder DateTimeOriginal -m -p "$fmt_str" $dir
62 | exiftool -progress -if '$GPSDateTime' -fileOrder DateTimeOriginal -m -p "$fmt_str" $dir >> $out.csv
63 | 
64 | #Write out vrt for csv
65 | echo -n > $out.vrt
66 | echo '<OGRVRTDataSource>' >> $out.vrt
67 | echo "    <OGRVRTLayer name=\"$out\">" >> $out.vrt
68 | echo "        <SrcDataSource>$out.csv</SrcDataSource>" >> $out.vrt
69 | echo '        <GeometryType>wkbPoint25D</GeometryType>' >> $out.vrt
70 | echo '        <LayerSRS>EPSG:4326</LayerSRS>' >> $out.vrt
71 | echo '        <GeometryField encoding="PointFromColumns" x="GPSLongitude" y="GPSLatitude" z="GPSAltitude"/>' >> $out.vrt
72 | echo '    </OGRVRTLayer>' >> $out.vrt
73 | echo '</OGRVRTDataSource>' >> $out.vrt
74 | 
75 | #Convert to ESRI Shapefile
76 | echo
77 | echo "Creating shp from vrt"
78 | echo ogr2ogr -overwrite -nln $out $out.shp $out.vrt
79 | ogr2ogr -overwrite -nln $out $out.shp $out.vrt
80 | 
81 | #ogr2ogr -f GPX -dsco GPX_USE_EXTENSIONS=YES out.gpx out.vrt
--------------------------------------------------------------------------------
/exif_msl2hae.sh:
--------------------------------------------------------------------------------
1 | #! 
/bin/bash 2 | 3 | #David Shean 4 | #dshean@gmail.com 5 | #8/13/14 6 | 7 | #This will replace the default MSL GPSAltitude with HAE GPSAltitude for all photos in input directory 8 | #Requires ExifTool and GDAL/OGR 9 | 10 | echo 11 | if [ $# -eq 0 ] ; then 12 | echo "No input directory specified" 13 | exit 1 14 | fi 15 | 16 | dir=$1 17 | 18 | #-m ignores minor warnings and prints empty values to csv field 19 | #-r is recursive 20 | #The # after a tag forces numerical output 21 | #-if conditionally processes input 22 | 23 | #Define exiftool tags to be extracted 24 | fmt_str='$FileName,$GPSLatitude#,$GPSLongitude#,$GPSAltitude#,$GPSMapDatum' 25 | 26 | #Extract EXIF data to csv file 27 | echo "Generating csv of existing MSL GPSAltitude tags" 28 | out=temp.csv 29 | echo $fmt_str | sed -e 's/\$//g' -e 's/\#//g' -e 's/FileName/SourceFile/' > $out 30 | #exiftool -progress -if '$GPSAltitude' -m -r -p "$fmt_str" $dir >> $out 31 | #-if here checks to see if datum has already been updated with 'WGS 84 HAE' tag 32 | echo exiftool -progress -fileOrder DateTimeOriginal -if '$GPSMapDatum ne "WGS 84 HAE"' -m -p "$fmt_str" $dir 33 | exiftool -progress -fileOrder DateTimeOriginal -if '$GPSMapDatum ne "WGS 84 HAE"' -m -p "$fmt_str" $dir >> $out 34 | 35 | if [ "$(cat $out | wc -l)" -gt "1" ] ; then 36 | out=exif_gps_orig.csv 37 | mv temp.csv $out 38 | echo 39 | echo "Generated new csv with HAE GPSAltitude tags" 40 | gps_msl2hae_csv.py $out 41 | out_hae=${out%.*}_hae.csv 42 | #Isolate only tags to be updated (offers speedup?) 43 | cat $out_hae | awk 'BEGIN {FS=","; OFS=","} {print $1,$4,$5}' > temp.csv 44 | mv temp.csv $out_hae 45 | #Note: this creates a backup copy of the file, which takes forever for ~40-70MB D800 NEF 46 | echo 47 | echo "Updating original files with HAE GPSAltitude tags (creates backup copies = slow)" 48 | exiftool -progress -fileOrder DateTimeOriginal -csv=$out_hae -m $dir 49 | else 50 | rm $out 51 | fi 52 | -------------------------------------------------------------------------------- /flytrex2gpslog.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | #David Shean 4 | #10/8/14 5 | #dshean@gmail.com 6 | 7 | #Utility to clean up FlyTrex GPS log in preparation for geotagging photos 8 | 9 | fn=$1 10 | 11 | #latitude,longitude,altitude(feet),ascent(feet),speed(mph),distance(feet),max_altitude(feet),max_ascent(feet),max_speed(mph),max_distance(feet),time(millisecond),datetime(utc),datetime(local),satellites,pressure(Pa),temperature(F) 12 | 13 | out_fn=${fn%.*}_clean.csv 14 | 15 | #utc,lat,lon,alt,nsat 16 | #Note: cut does not respect field order 17 | #cut -d',' -f 12,1,2,3,14 $fn > $out_fn 18 | awk 'BEGIN {FS=","; OFS=","} {print $12,$1,$2,$3,$14}' $fn | sed 's/ /T/' > $out_fn 19 | 20 | #Scrub field names containing parenthesis 21 | head -1 $out_fn | sed -e 's/(/_/g' -e 's/)//g' > temp 22 | sed '1d' $out_fn >> temp 23 | mv temp $out_fn 24 | 25 | #Filter by number of satellites 26 | nsat_fltr=true 27 | min_nsat=5 28 | if $nsat_fltr ; then 29 | #head -1 $out_fn > temp 30 | awk 'BEGIN {FS=","; OFS=","} {if( $5 >= '$min_nsat' ) print}' $out_fn >> temp 31 | mv $out_fn ${out_fn%.*}_orig.csv 32 | mv temp $out_fn 33 | fi 34 | -------------------------------------------------------------------------------- /flytrex_batch.sh: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash
2 | 
3 | fn_list=$(ls *[0-9].csv)
4 | 
5 | for i in $fn_list
6 | do
7 |     flytrex2gpslog.sh $i
8 |     gps_msl2hae_csv.py ${i%.*}_clean.csv
9 | done
10 | 
11 | #Merge
12 | ltype=clean_hae
13 | merge_fn=merge_${ltype}
14 | csv_list=($(ls *${ltype}.csv))
15 | cat ${csv_list[@]} | sort -n | sed "1,$((${#csv_list[@]} - 1))d" > ${merge_fn}.csv
16 | csv2vrt.py ${merge_fn}.csv
17 | ogr2ogr -overwrite -nln $merge_fn ${merge_fn}.shp ${merge_fn}.vrt
18 | gpx_list=($(ls *${ltype}.gpx))
19 | str=''
20 | for i in ${gpx_list[@]}
21 | do
22 |     str+="-f $i "
23 | done
24 | gpsbabel -t -i gpx $str -x track,merge,discard -o gpx -F ${merge_fn}.gpx
25 | 
--------------------------------------------------------------------------------
/fmt/gpx.fmt:
--------------------------------------------------------------------------------
1 | #------------------------------------------------------------------------------
2 | # File:         gpx.fmt
3 | #
4 | # Description:  Example ExifTool print format file for generating GPX track log
5 | #
6 | # Usage:        exiftool -p gpx.fmt -d %Y-%m-%dT%H:%M:%SZ FILE [...] > out.gpx
7 | #
8 | # Revisions:    2010/02/05 - P. Harvey created
9 | #
10 | # Notes:     1) All input files must contain GPSLatitude and GPSLongitude.
11 | #            2) The -fileOrder option may be used to control the order of the
12 | #               generated track points.
13 | #------------------------------------------------------------------------------
14 | #[HEAD]<?xml version="1.0" encoding="utf-8"?>
15 | #[HEAD]<gpx version="1.0"
16 | #[HEAD] creator="ExifTool $ExifToolVersion"
17 | #[HEAD] xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
18 | #[HEAD] xmlns="http://www.topografix.com/GPX/1/0"
19 | #[HEAD] xsi:schemaLocation="http://www.topografix.com/GPX/1/0 http://www.topografix.com/GPX/1/0/gpx.xsd">
20 | #[HEAD]<trk>
21 | #[HEAD]<number>1</number>
22 | #[HEAD]<trkseg>
23 | #[BODY]<trkpt lat="$gpslatitude#" lon="$gpslongitude#">
24 | #[BODY]  <ele>$gpsaltitude#</ele>
25 | #[BODY]  <time>$gpsdatetime</time>
26 | #[BODY]</trkpt>
27 | #[TAIL]</trkseg>
28 | #[TAIL]</trk>
29 | #[TAIL]</gpx>
30 | 
--------------------------------------------------------------------------------
/fmt/kml.fmt:
--------------------------------------------------------------------------------
1 | #------------------------------------------------------------------------------
2 | # File:         kml.fmt
3 | #
4 | # Description:  Example ExifTool print format file for generating a
5 | #               Google Earth KML file from a collection of geotagged images
6 | #
7 | # Usage:        exiftool -p kml.fmt FILE [...] > out.kml
8 | #
9 | # Revisions:    2010/02/05 - P. Harvey created
10 | #               2013/02/05 - PH Fixed camera icon to work with new Google Earth
11 | #
12 | # Notes:     1) All input files must contain GPSLatitude and GPSLongitude.
13 | #            2) For Google Earth to be able to find the images, the input
14 | #               images must be specified using relative paths, and "out.kml"
15 | #               must stay in the same directory as where the command was run.
16 | #            3) Google Earth is picky about the case of the image file extension,
17 | #               and may not be able to display the image if an upper-case
18 | #               extension is used.
19 | #            4) The -fileOrder option may be used to control the order of the
20 | #               generated placemarks.
21 | #------------------------------------------------------------------------------
22 | #[HEAD]<?xml version="1.0" encoding="UTF-8"?>
23 | #[HEAD]<kml xmlns="http://www.opengis.net/kml/2.2">
24 | #[HEAD]  <Document>
25 | #[HEAD]    <name>My Photos</name>
26 | #[HEAD]    <open>1</open>
27 | #[HEAD]    <Style id="Photo">
28 | #[HEAD]      <IconStyle>
29 | #[HEAD]        <scale>0.75</scale>
30 | #[HEAD]        <Icon>
31 | #[HEAD]          <href>http://maps.google.com/mapfiles/kml/shapes/camera.png</href>
32 | #[HEAD]        </Icon>
33 | #[HEAD]      </IconStyle>
34 | #[HEAD]    </Style>
35 | #[HEAD]    <Folder>
36 | #[HEAD]      <name>Waypoints</name>
37 | #[HEAD]      <open>0</open>
38 | #[BODY]      <Placemark>
39 | #[BODY]        <description>
40 | #[BODY]          <![CDATA[<img src='$directory/$filename'
41 | #[BODY]          width='400'/>
42 | #[BODY]          ]]></description>
43 | #[BODY]        <Snippet/>
44 | #[BODY]        <name>$filename</name>
45 | #[BODY]        <styleUrl>#Photo</styleUrl>
46 | #[BODY]        <Point>
47 | #[BODY]          <altitudeMode>clampedToGround</altitudeMode>
48 | #[BODY]          <coordinates>$gpslongitude#,$gpslatitude#,0</coordinates>
49 | #[BODY]        </Point>
50 | #[BODY]      </Placemark>
51 | #[TAIL]    </Folder>
52 | #[TAIL]  </Document>
53 | #[TAIL]</kml>
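#(Added note) Both templates above are applied with exiftool's -p option, per
#the Usage lines in their headers, e.g.:
#   exiftool -p gpx.fmt -d %Y-%m-%dT%H:%M:%SZ FILE [...] > out.gpx
#exif2gpslog.sh shows the same pattern with -fileOrder gpsdatetime to order the
#track points by time.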
54 | -------------------------------------------------------------------------------- /gps_msl2hae_csv.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | 3 | #David Shean 4 | #dshean@gmail.com 5 | 6 | #This tool reads a csv dump from exiftool and replaces the default MSL GPSAltitude with HAE using PROJ4 7 | #Requires functions present in demtools/geolib 8 | 9 | #Proj4 requires the following to be present 10 | #cd /usr/local/share/proj 11 | #wget http://download.osgeo.org/proj/vdatum/egm96_15/egm96_15.gtx 12 | #wget http://download.osgeo.org/proj/vdatum/egm08_25/egm08_25.gtx 13 | 14 | import sys 15 | import os 16 | 17 | import numpy as np 18 | 19 | from pygeotools.lib import geolib 20 | from pygeotools.lib import iolib 21 | 22 | #Load csv 23 | fn = sys.argv[1] 24 | outfn = os.path.splitext(fn)[0]+'_hae.csv' 25 | 26 | #Import csv file to numpy recarray 27 | #The comments hack is for handling files with # in the field names 28 | #ra = np.genfromtxt(fn, delimiter=',', comments='ASDFASDF', names=True, dtype=None, filling_values=' ') 29 | #lat, lon, z = ra['$GPSLatitude#'], ra['$GPSLongitude#'], ra['$GPSAltitude#'] 30 | #Had to hardcode these dtype lengths to prevent clipping with dtype=None 31 | 32 | #Load into object array - should preserve everything as str 33 | ra = np.genfromtxt(fn, delimiter=',', names=True, dtype='O') 34 | 35 | #Determine type of input file 36 | if 'SourceFile' in ra.dtype.names: 37 | intype = 'exif' 38 | elif 'GPS_TimeMS' in ra.dtype.names: 39 | intype = 'px4_gps' 40 | #Note: CAM altitude is relative! 41 | elif 'CAM_GPSTime' in ra.dtype.names: 42 | intype = 'px4_cam' 43 | elif 'DateTime' in ra.dtype.names: 44 | intype = 'px4_utc' 45 | elif 'altitude_feet' in ra.dtype.names: 46 | intype = 'flytrex' 47 | else: 48 | sys.exit('Input format not recognized') 49 | 50 | #Default altitude factor, assuming meters 51 | alt_factor = 1.0 52 | 53 | if intype is 'exif': 54 | #dtype=[('SourceFile', 'S255'), ('GPSLatitude', ' ${out_fn}.log 65 | fi 66 | #Dump merged GPS/ATT information 67 | if [ ! -e ${out_fn}_GPS_ATT.csv ] ; then 68 | echo "Dumping GPS/ATT to csv" 69 | #Want to delete first GPS line - always 0,0 70 | #Throw out entries with bad PDOP, usually 99.99 71 | #$dump $log -m GPS -m ATT -t GPS | sed '2d' | awk 'BEGIN {FS=","; OFS=","} {if($5 < 12.0) print}' > ${out_fn}_GPS_ATT.csv 72 | $dump $log -m GPS -m ATT -t GPS > ${out_fn}_GPS_ATT.csv 73 | fi 74 | 75 | #GPS log processing 76 | 77 | pdop_fltr=true 78 | max_pdop=12.0 79 | 80 | #Dump only GPS data 81 | if [ ! -e ${out_fn}_GPS.csv ] ; then 82 | echo "Dumping GPS to csv" 83 | $dump $log -m GPS > ${out_fn}_GPS.csv 84 | #Throw out entries with bad PDOP, usually 99.99 85 | if $pdop_fltr ; then 86 | echo "Applying PDOP filter (max $max_pdop)" 87 | head -1 ${out_fn}_GPS.csv > temp 88 | awk 'BEGIN {FS=","; OFS=","} {if( $5 < '$max_pdop' ) print}' ${out_fn}_GPS.csv >> temp 89 | mv ${out_fn}_GPS.csv ${out_fn}_GPS_orig.csv 90 | mv temp ${out_fn}_GPS.csv 91 | echo "Input GPS records: $(cat ${out_fn}_GPS_orig.csv | wc -l)" 92 | echo "Output GPS records: $(cat ${out_fn}_GPS.csv | wc -l)" 93 | fi 94 | fi 95 | 96 | if [ "$(cat ${out_fn}_GPS.csv | wc -l)" -gt "1" ] ; then 97 | #Convert GPS altitude from MSL to HAE 98 | hae_fn=${out_fn}_GPS_hae.csv 99 | if [ ! -e $hae_fn ] ; then 100 | echo "Converting GPS altitude from MSL to HAE" 101 | gps_msl2hae_csv.py ${out_fn}_GPS.csv 102 | fi 103 | #Convert GPS week/sec to UTC 104 | utc_fn=${hae_fn%.*}_utc.csv 105 | if [ ! 
-e $utc_fn ] ; then 106 | echo "Converting GPS week/sec to UTC" 107 | px4_dflog_gps2utc.py $hae_fn 108 | fi 109 | #Convert to GPX 110 | gpx_fn=${utc_fn%.*}.gpx 111 | if [ ! -e $gpx_fn ] ; then 112 | echo "Converting csv to gpx" 113 | gpsbabel -t -i unicsv,utc=0 -f $utc_fn -x track,merge,discard -o GPX -F $gpx_fn 114 | fi 115 | else 116 | echo "No valid GPS records in input log" 117 | fi 118 | 119 | #CAM log processing 120 | 121 | #Dump only CAM data 122 | if [ ! -e ${out_fn}_CAM.csv ] ; then 123 | echo "Dumping CAM to csv" 124 | $dump $log -m CAM > ${out_fn}_CAM.csv 125 | fi 126 | #Convert GPS altitude from MSL to HAE 127 | hae_fn=${out_fn}_CAM_hae.csv 128 | if [ ! -e $hae_fn ] ; then 129 | echo "Converting GPS altitude from MSL to HAE" 130 | gps_msl2hae_csv.py ${out_fn}_CAM.csv 131 | fi 132 | #Convert GPS week/sec to UTC 133 | utc_fn=${hae_fn%.*}_utc.csv 134 | if [ ! -e $utc_fn ] ; then 135 | echo "Converting GPS week/sec to UTC" 136 | px4_dflog_gps2utc.py $hae_fn 137 | fi 138 | #Convert to GPX 139 | gpx_fn=${utc_fn%.*}.gpx 140 | if [ ! -e $gpx_fn ] ; then 141 | echo "Converting csv to gpx" 142 | gpsbabel -t -i unicsv,utc=0 -f $utc_fn -x track,merge,discard -o GPX -F $gpx_fn 143 | fi 144 | echo 145 | done 146 | 147 | #Merge all gpx log files 148 | ltype=GPS_hae_utc 149 | if $merge ; then 150 | echo "Merging files" 151 | echo 152 | cd $out_logdir 153 | merge_fn=merge_${ltype} 154 | 155 | csv_list=($(ls *${ltype}.csv)) 156 | #concatenate, sort by time, then remove extraneous headers 157 | cat ${csv_list[@]} | sort -n | sed "1,$((${#csv_list[@]} - 1))d" > ${merge_fn}.csv 158 | 159 | #Create a shp for viewing 160 | csv2vrt.py ${merge_fn}.csv 161 | ogr2ogr -overwrite -nln $merge_fn ${merge_fn}.shp ${merge_fn}.vrt 162 | 163 | #Should be more careful with this 164 | gpx_list=($(ls *${ltype}.gpx)) 165 | str='' 166 | for i in ${gpx_list[@]} 167 | do 168 | str+="-f $i " 169 | done 170 | #merge sort by time, discard missing timestamps 171 | gpsbabel -t -i gpx $str -x track,merge,discard -o gpx -F ${merge_fn}.gpx 172 | fi 173 | 174 | exit 175 | 176 | -------------------------------------------------------------------------------- /px4_tlog_gps2utc.sh: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash
2 | 
3 | #David Shean
4 | #dshean@gmail.com
5 | #8/10/14
6 | 
7 | #This formats the time and gps info from a PX4 telemetry log (tlog)
8 | 
9 | #tlog
10 | #2014-08-08T20:46:55.860,FE,1E,A4, 1, 1,18,mavlink_gps_raw_int_t,time_usec,114150000,lat,487368470,lon,-1218404915,alt,1753700,eph,439,epv,65535,vel,2,cog,0,fix_type,3,satellites_visible,6,,Len,38
11 | #fn=2014-08-08_13-46-36.csv
12 | 
13 | fn=$1
14 | outfn=${fn%.*}_utc.csv
15 | gpxfn=${fn%.*}_utc.gpx
16 | 
17 | echo 'DateTime,Lat,Lon,Elev' > $outfn
18 | 
19 | #Values are int in tlog
20 | #Want to throw out anomalous 0,0,0 points - this is a hack
21 | cat $fn | grep mavlink_gps_raw_int_t | 
22 | awk 'BEGIN {FS=","; OFS=","} {printf "%s,%0.7f,%0.7f,%0.2f\n",$1, $12/1E7, $14/1E7, $16/1E3}' | 
23 | grep -v '0.0000000,0.0000000,0.00' >> $outfn
24 | 
25 | gpsbabel -t -i unicsv,utc=0 -f $outfn -x track,merge,discard -o GPX -F $gpxfn
26 | 
27 | #Note: want to set exiftool to limit GeoMaxExtSecs in ~/.ExifTool_config
28 | #This prevents photos taken before/after the GPS tracklog from being assigned positions
29 | #exiftool -progress -Geotag $gpxfn -Geosync=-1.0 "-Geotime<\${DateTimeOriginal}-00:00" export_orig
--------------------------------------------------------------------------------
/rtklib_pos_stats.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | 
3 | """
4 | Compute final base position and statistics
5 | Input is pos file from RTKLIB
6 | 
7 | David Shean
8 | dshean@gmail.com
9 | """
10 | 
11 | import os
12 | import argparse
13 | import numpy as np
14 | import pandas as pd
15 | from pygeotools.lib import geolib
16 | 
17 | #Hack to update solution status
18 | def get_solution_status(Q):
19 |     Q = np.round(Q)
20 |     out = None
21 |     if Q == 1.0:
22 |         out = 'FIX'
23 |     elif Q == 2.0:
24 |         out = 'FLOAT'
25 |     elif Q == 5.0:
26 |         out = 'SINGLE'
27 |     return out
28 | 
29 | def getparser():
30 |     parser = argparse.ArgumentParser(description='Compute base position from PPK position output from RTKLIB')
31 |     parser.add_argument('ppk_pos_fn', type=str, help='PPK pos filename')
32 |     return parser
33 | 
34 | def main():
35 |     parser = getparser()
36 |     args = parser.parse_args()
37 | 
38 |     ppk_pos_fn = args.ppk_pos_fn
39 | 
40 |     header = 'Date UTC latitude(deg) longitude(deg) height(m) Q ns sdn(m) sde(m) sdu(m) sdne(m) sdeu(m) sdun(m) age(s) ratio'
41 |     print('Loading: %s' % ppk_pos_fn)
42 |     ppk_pos = pd.read_csv(ppk_pos_fn, comment='%', delim_whitespace=True, names=header.split(), parse_dates=[[0,1]])
43 | 
44 |     #Add filter to include only fix positions
45 | 
46 |     #Compute statistics for pos
47 |     ppk_pos_mean = ppk_pos.mean()
48 |     ppk_pos_std = ppk_pos.std()
49 |     ppk_pos_med = ppk_pos.median()
50 |     ppk_pos_nmad = (abs(ppk_pos.drop('Date_UTC', axis=1) - ppk_pos_med)).median()
51 |     ppk_pos_itrf = geolib.ll2itrf(ppk_pos_med['longitude(deg)'], ppk_pos_med['latitude(deg)'], ppk_pos_med['height(m)'])
52 | 
53 |     #Should format output to be mean +/- std in meters
54 |     print("\nMean")
55 |     print(ppk_pos_mean)
56 |     print("\nStd")
57 |     print(ppk_pos_std)
58 |     print("\nMedian")
59 |     print(ppk_pos_med)
60 |     print("\nNMAD")
61 |     print(ppk_pos_nmad)
62 |     print("\nITRF")
63 |     print(ppk_pos_itrf)
64 | 
65 | if __name__ == "__main__":
66 |     main()
--------------------------------------------------------------------------------
/sdlog2_dump.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | 
3 | #This was copied from the 
PX4/Firmware/Tools/sdlog2/sdlog2_dump.py on 8/10/14 4 | 5 | from __future__ import print_function 6 | 7 | """Dump binary log generated by PX4's sdlog2 or APM as CSV 8 | 9 | Usage: python sdlog2_dump.py [-v] [-e] [-d delimiter] [-n null] [-m MSG[.field1,field2,...]] 10 | 11 | -v Use plain debug output instead of CSV. 12 | 13 | -e Recover from errors. 14 | 15 | -d Use "delimiter" in CSV. Default is ",". 16 | 17 | -n Use "null" as placeholder for empty values in CSV. Default is empty. 18 | 19 | -m MSG[.field1,field2,...] 20 | Dump only messages of specified type, and only specified fields. 21 | Multiple -m options allowed.""" 22 | 23 | __author__ = "Anton Babushkin" 24 | __version__ = "1.2" 25 | 26 | import struct, sys 27 | 28 | if sys.hexversion >= 0x030000F0: 29 | runningPython3 = True 30 | def _parseCString(cstr): 31 | return str(cstr, 'ascii').split('\0')[0] 32 | else: 33 | runningPython3 = False 34 | def _parseCString(cstr): 35 | return str(cstr).split('\0')[0] 36 | 37 | class SDLog2Parser: 38 | BLOCK_SIZE = 8192 39 | MSG_HEADER_LEN = 3 40 | MSG_HEAD1 = 0xA3 41 | MSG_HEAD2 = 0x95 42 | MSG_FORMAT_PACKET_LEN = 89 43 | MSG_FORMAT_STRUCT = "BB4s16s64s" 44 | MSG_TYPE_FORMAT = 0x80 45 | FORMAT_TO_STRUCT = { 46 | "b": ("b", None), 47 | "B": ("B", None), 48 | "h": ("h", None), 49 | "H": ("H", None), 50 | "i": ("i", None), 51 | "I": ("I", None), 52 | "f": ("f", None), 53 | "n": ("4s", None), 54 | "N": ("16s", None), 55 | "Z": ("64s", None), 56 | "c": ("h", 0.01), 57 | "C": ("H", 0.01), 58 | "e": ("i", 0.01), 59 | "E": ("I", 0.01), 60 | "L": ("i", 0.0000001), 61 | "M": ("b", None), 62 | "q": ("q", None), 63 | "Q": ("Q", None), 64 | } 65 | __csv_delim = "," 66 | __csv_null = "" 67 | __msg_filter = [] 68 | __time_msg = None 69 | __debug_out = False 70 | __correct_errors = False 71 | __file_name = None 72 | __file = None 73 | 74 | def __init__(self): 75 | return 76 | 77 | def reset(self): 78 | self.__msg_descrs = {} # message descriptions by message type map 79 | self.__msg_labels = {} # message labels by message name map 80 | self.__msg_names = [] # message names in the same order as FORMAT messages 81 | self.__buffer = bytearray() # buffer for input binary data 82 | self.__ptr = 0 # read pointer in buffer 83 | self.__csv_columns = [] # CSV file columns in correct order in format "MSG.label" 84 | self.__csv_data = {} # current values for all columns 85 | self.__csv_updated = False 86 | self.__msg_filter_map = {} # filter in form of map, with '*" expanded to full list of fields 87 | 88 | def setCSVDelimiter(self, csv_delim): 89 | self.__csv_delim = csv_delim 90 | 91 | def setCSVNull(self, csv_null): 92 | self.__csv_null = csv_null 93 | 94 | def setMsgFilter(self, msg_filter): 95 | self.__msg_filter = msg_filter 96 | 97 | def setTimeMsg(self, time_msg): 98 | self.__time_msg = time_msg 99 | 100 | def setDebugOut(self, debug_out): 101 | self.__debug_out = debug_out 102 | 103 | def setCorrectErrors(self, correct_errors): 104 | self.__correct_errors = correct_errors 105 | 106 | def setFileName(self, file_name): 107 | self.__file_name = file_name 108 | if file_name != None: 109 | self.__file = open(file_name, 'w+') 110 | else: 111 | self.__file = None 112 | 113 | 114 | def process(self, fn): 115 | self.reset() 116 | if self.__debug_out: 117 | # init __msg_filter_map 118 | for msg_name, show_fields in self.__msg_filter: 119 | self.__msg_filter_map[msg_name] = show_fields 120 | first_data_msg = True 121 | f = open(fn, "rb") 122 | bytes_read = 0 123 | while True: 124 | chunk = f.read(self.BLOCK_SIZE) 125 | if 
len(chunk) == 0: 126 | break 127 | self.__buffer = self.__buffer[self.__ptr:] + chunk 128 | self.__ptr = 0 129 | while self.__bytesLeft() >= self.MSG_HEADER_LEN: 130 | head1 = self.__buffer[self.__ptr] 131 | head2 = self.__buffer[self.__ptr+1] 132 | if (head1 != self.MSG_HEAD1 or head2 != self.MSG_HEAD2): 133 | if self.__correct_errors: 134 | self.__ptr += 1 135 | continue 136 | else: 137 | raise Exception("Invalid header at %i (0x%X): %02X %02X, must be %02X %02X" % (bytes_read + self.__ptr, bytes_read + self.__ptr, head1, head2, self.MSG_HEAD1, self.MSG_HEAD2)) 138 | msg_type = self.__buffer[self.__ptr+2] 139 | if msg_type == self.MSG_TYPE_FORMAT: 140 | # parse FORMAT message 141 | if self.__bytesLeft() < self.MSG_FORMAT_PACKET_LEN: 142 | break 143 | self.__parseMsgDescr() 144 | else: 145 | # parse data message 146 | msg_descr = self.__msg_descrs.get(msg_type) # use .get() so an unknown type triggers the error below instead of a bare KeyError 147 | if msg_descr == None: 148 | raise Exception("Unknown msg type: %i" % msg_type) 149 | msg_length = msg_descr[0] 150 | if self.__bytesLeft() < msg_length: 151 | break 152 | if first_data_msg: 153 | # build CSV columns and init data map 154 | if not self.__debug_out: 155 | self.__initCSV() 156 | first_data_msg = False 157 | self.__parseMsg(msg_descr) 158 | bytes_read += self.__ptr 159 | if not self.__debug_out and self.__time_msg != None and self.__csv_updated: 160 | self.__printCSVRow() 161 | f.close() 162 | 163 | def __bytesLeft(self): 164 | return len(self.__buffer) - self.__ptr 165 | 166 | def __filterMsg(self, msg_name): 167 | show_fields = "*" 168 | if len(self.__msg_filter_map) > 0: 169 | show_fields = self.__msg_filter_map.get(msg_name) 170 | return show_fields 171 | 172 | def __initCSV(self): 173 | if len(self.__msg_filter) == 0: 174 | for msg_name in self.__msg_names: 175 | self.__msg_filter.append((msg_name, "*")) 176 | for msg_name, show_fields in self.__msg_filter: 177 | if show_fields == "*": 178 | show_fields = self.__msg_labels.get(msg_name, []) 179 | self.__msg_filter_map[msg_name] = show_fields 180 | for field in show_fields: 181 | full_label = msg_name + "_" + field 182 | self.__csv_columns.append(full_label) 183 | self.__csv_data[full_label] = None 184 | if self.__file != None: 185 | print(self.__csv_delim.join(self.__csv_columns), file=self.__file) 186 | else: 187 | print(self.__csv_delim.join(self.__csv_columns)) 188 | 189 | def __printCSVRow(self): 190 | s = [] 191 | for full_label in self.__csv_columns: 192 | v = self.__csv_data[full_label] 193 | if v == None: 194 | v = self.__csv_null 195 | else: 196 | v = str(v) 197 | s.append(v) 198 | 199 | if self.__file != None: 200 | print(self.__csv_delim.join(s), file=self.__file) 201 | else: 202 | print(self.__csv_delim.join(s)) 203 | 204 | def __parseMsgDescr(self): 205 | if runningPython3: 206 | data = struct.unpack(self.MSG_FORMAT_STRUCT, self.__buffer[self.__ptr + 3 : self.__ptr + self.MSG_FORMAT_PACKET_LEN]) 207 | else: 208 | data = struct.unpack(self.MSG_FORMAT_STRUCT, str(self.__buffer[self.__ptr + 3 : self.__ptr + self.MSG_FORMAT_PACKET_LEN])) 209 | msg_type = data[0] 210 | if msg_type != self.MSG_TYPE_FORMAT: 211 | msg_length = data[1] 212 | msg_name = _parseCString(data[2]) 213 | msg_format = _parseCString(data[3]) 214 | msg_labels = _parseCString(data[4]).split(",") 215 | # Convert msg_format to struct.unpack format string 216 | msg_struct = "" 217 | msg_mults = [] 218 | for c in msg_format: 219 | try: 220 | f = self.FORMAT_TO_STRUCT[c] 221 | msg_struct += f[0] 222 | msg_mults.append(f[1]) 223 | except KeyError as e: 224 | raise Exception("Unsupported 
format char: %s in message %s (%i)" % (c, msg_name, msg_type)) 225 | msg_struct = "<" + msg_struct # force little-endian 226 | self.__msg_descrs[msg_type] = (msg_length, msg_name, msg_format, msg_labels, msg_struct, msg_mults) 227 | self.__msg_labels[msg_name] = msg_labels 228 | self.__msg_names.append(msg_name) 229 | if self.__debug_out: 230 | if self.__filterMsg(msg_name) != None: 231 | print("MSG FORMAT: type = %i, length = %i, name = %s, format = %s, labels = %s, struct = %s, mults = %s" % ( 232 | msg_type, msg_length, msg_name, msg_format, str(msg_labels), msg_struct, msg_mults)) 233 | self.__ptr += self.MSG_FORMAT_PACKET_LEN 234 | 235 | def __parseMsg(self, msg_descr): 236 | msg_length, msg_name, msg_format, msg_labels, msg_struct, msg_mults = msg_descr 237 | if not self.__debug_out and self.__time_msg != None and msg_name == self.__time_msg and self.__csv_updated: 238 | self.__printCSVRow() 239 | self.__csv_updated = False 240 | show_fields = self.__filterMsg(msg_name) 241 | if (show_fields != None): 242 | if runningPython3: 243 | data = list(struct.unpack(msg_struct, self.__buffer[self.__ptr+self.MSG_HEADER_LEN:self.__ptr+msg_length])) 244 | else: 245 | data = list(struct.unpack(msg_struct, str(self.__buffer[self.__ptr+self.MSG_HEADER_LEN:self.__ptr+msg_length]))) 246 | for i in range(len(data)): 247 | if type(data[i]) is str: 248 | data[i] = _parseCString(data[i]) 249 | m = msg_mults[i] 250 | if m != None: 251 | data[i] = data[i] * m 252 | if self.__debug_out: 253 | s = [] 254 | for i in range(len(data)): 255 | label = msg_labels[i] 256 | if show_fields == "*" or label in show_fields: 257 | s.append(label + "=" + str(data[i])) 258 | print("MSG %s: %s" % (msg_name, ", ".join(s))) 259 | else: 260 | # update CSV data buffer 261 | for i in range(len(data)): 262 | label = msg_labels[i] 263 | if label in show_fields: 264 | self.__csv_data[msg_name + "_" + label] = data[i] 265 | if self.__time_msg != None and msg_name != self.__time_msg: 266 | self.__csv_updated = True 267 | if self.__time_msg == None: 268 | self.__printCSVRow() 269 | self.__ptr += msg_length 270 | 271 | def _main(): 272 | if len(sys.argv) < 2: 273 | print("Usage: python sdlog2_dump.py [-v] [-e] [-d delimiter] [-n null] [-m MSG[.field1,field2,...]] [-t TIME_MSG_NAME]\n") 274 | print("\t-v\tUse plain debug output instead of CSV.\n") 275 | print("\t-e\tRecover from errors.\n") 276 | print("\t-d\tUse \"delimiter\" in CSV. Default is \",\".\n") 277 | print("\t-n\tUse \"null\" as placeholder for empty values in CSV. 
Default is empty.\n") 278 | print("\t-m MSG[.field1,field2,...]\n\t\tDump only messages of specified type, and only specified fields.\n\t\tMultiple -m options allowed.") 279 | print("\t-t\tSpecify TIME message name to group data messages by time and significantly reduce duplicate output.\n") 280 | print("\t-f\tPrint to file instead of stdout") 281 | return 282 | fn = sys.argv[1] 283 | debug_out = False 284 | correct_errors = False 285 | msg_filter = [] 286 | csv_null = "" 287 | csv_delim = "," 288 | #time_msg = "TIME" 289 | time_msg = None 290 | file_name = None 291 | opt = None 292 | for arg in sys.argv[2:]: 293 | if opt != None: 294 | if opt == "d": 295 | csv_delim = arg 296 | elif opt == "n": 297 | csv_null = arg 298 | elif opt == "t": 299 | time_msg = arg 300 | elif opt == "f": 301 | file_name = arg 302 | elif opt == "m": 303 | show_fields = "*" 304 | a = arg.replace(".", "_", 1).split("_", 1) # accept the documented "MSG.field1,field2" form as well as "MSG_field1,field2"; split once so the field list stays intact 305 | if len(a) > 1: 306 | show_fields = a[1].split(",") 307 | msg_filter.append((a[0], show_fields)) 308 | opt = None 309 | else: 310 | if arg == "-v": 311 | debug_out = True 312 | elif arg == "-e": 313 | correct_errors = True 314 | elif arg == "-d": 315 | opt = "d" 316 | elif arg == "-n": 317 | opt = "n" 318 | elif arg == "-m": 319 | opt = "m" 320 | elif arg == "-t": 321 | opt = "t" 322 | elif arg == "-f": 323 | opt = "f" 324 | 325 | if csv_delim == "\\t": 326 | csv_delim = "\t" 327 | parser = SDLog2Parser() 328 | parser.setCSVDelimiter(csv_delim) 329 | parser.setCSVNull(csv_null) 330 | parser.setMsgFilter(msg_filter) 331 | parser.setTimeMsg(time_msg) 332 | parser.setFileName(file_name) 333 | parser.setDebugOut(debug_out) 334 | parser.setCorrectErrors(correct_errors) 335 | parser.process(fn) 336 | 337 | if __name__ == "__main__": 338 | _main() 339 | 
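340 | #Example usage (untested sketch; log/CSV filenames and GPS field names below are placeholders - valid MSG/field names depend on the log contents): 341 | #  python sdlog2_dump.py log001.bin -e -t TIME -f log001.csv 342 | #  python sdlog2_dump.py log001.bin -m GPS.Lat,Lon,Alt -t TIME -f gps.csv 343 | -------------------------------------------------------------------------------- /sdlog2_dump_solo.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from __future__ import print_function 4 | 5 | """Dump binary log generated by PX4's sdlog2 or APM as CSV 6 | 7 | Usage: python sdlog2_dump.py [-v] [-e] [-d delimiter] [-n null] [-m MSG[.field1,field2,...]] 8 | 9 | -v Use plain debug output instead of CSV. 10 | 11 | -e Recover from errors. 12 | 13 | -d Use "delimiter" in CSV. Default is ",". 14 | 15 | -n Use "null" as placeholder for empty values in CSV. Default is empty. 16 | 17 | -m MSG[.field1,field2,...] 18 | Dump only messages of specified type, and only specified fields. 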
19 | Multiple -m options allowed.""" 20 | 21 | __author__ = "Anton Babushkin" 22 | __version__ = "1.2" 23 | 24 | import struct, sys 25 | 26 | if sys.hexversion >= 0x030000F0: 27 | runningPython3 = True 28 | def _parseCString(cstr): 29 | return str(cstr, 'ascii').split('\0')[0] 30 | else: 31 | runningPython3 = False 32 | def _parseCString(cstr): 33 | return str(cstr).split('\0')[0] 34 | 35 | class SDLog2Parser: 36 | BLOCK_SIZE = 8192 37 | MSG_HEADER_LEN = 3 38 | MSG_HEAD1 = 0xA3 39 | MSG_HEAD2 = 0x95 40 | MSG_FORMAT_PACKET_LEN = 89 41 | MSG_FORMAT_STRUCT = "BB4s16s64s" 42 | MSG_TYPE_FORMAT = 0x80 43 | FORMAT_TO_STRUCT = { 44 | "b": ("b", None), 45 | "B": ("B", None), 46 | "h": ("h", None), 47 | "H": ("H", None), 48 | "i": ("i", None), 49 | "I": ("I", None), 50 | "f": ("f", None), 51 | "d": ("d", None), 52 | "n": ("4s", None), 53 | "N": ("16s", None), 54 | "Z": ("64s", None), 55 | "c": ("h", 0.01), 56 | "C": ("H", 0.01), 57 | "e": ("i", 0.01), 58 | "E": ("I", 0.01), 59 | "L": ("i", 0.0000001), 60 | "M": ("b", None), 61 | "q": ("q", None), 62 | "Q": ("Q", None), 63 | } 64 | __csv_delim = "," 65 | __csv_null = "" 66 | __msg_filter = [] 67 | __time_msg = None 68 | __debug_out = False 69 | __correct_errors = False 70 | __file_name = None 71 | __file = None 72 | 73 | def __init__(self): 74 | return 75 | 76 | def reset(self): 77 | self.__msg_descrs = {} # message descriptions by message type map 78 | self.__msg_labels = {} # message labels by message name map 79 | self.__msg_names = [] # message names in the same order as FORMAT messages 80 | self.__buffer = bytearray() # buffer for input binary data 81 | self.__ptr = 0 # read pointer in buffer 82 | self.__csv_columns = [] # CSV file columns in correct order in format "MSG.label" 83 | self.__csv_data = {} # current values for all columns 84 | self.__csv_updated = False 85 | self.__msg_filter_map = {} # filter in form of map, with '*" expanded to full list of fields 86 | 87 | def setCSVDelimiter(self, csv_delim): 88 | self.__csv_delim = csv_delim 89 | 90 | def setCSVNull(self, csv_null): 91 | self.__csv_null = csv_null 92 | 93 | def setMsgFilter(self, msg_filter): 94 | self.__msg_filter = msg_filter 95 | 96 | def setTimeMsg(self, time_msg): 97 | self.__time_msg = time_msg 98 | 99 | def setDebugOut(self, debug_out): 100 | self.__debug_out = debug_out 101 | 102 | def setCorrectErrors(self, correct_errors): 103 | self.__correct_errors = correct_errors 104 | 105 | def setFileName(self, file_name): 106 | self.__file_name = file_name 107 | if file_name != None: 108 | self.__file = open(file_name, 'w+') 109 | else: 110 | self.__file = None 111 | 112 | 113 | def process(self, fn): 114 | self.reset() 115 | if self.__debug_out: 116 | # init __msg_filter_map 117 | for msg_name, show_fields in self.__msg_filter: 118 | self.__msg_filter_map[msg_name] = show_fields 119 | first_data_msg = True 120 | f = open(fn, "rb") 121 | bytes_read = 0 122 | while True: 123 | chunk = f.read(self.BLOCK_SIZE) 124 | if len(chunk) == 0: 125 | break 126 | self.__buffer = self.__buffer[self.__ptr:] + chunk 127 | self.__ptr = 0 128 | while self.__bytesLeft() >= self.MSG_HEADER_LEN: 129 | head1 = self.__buffer[self.__ptr] 130 | head2 = self.__buffer[self.__ptr+1] 131 | if (head1 != self.MSG_HEAD1 or head2 != self.MSG_HEAD2): 132 | if self.__correct_errors: 133 | self.__ptr += 1 134 | continue 135 | else: 136 | raise Exception("Invalid header at %i (0x%X): %02X %02X, must be %02X %02X" % (bytes_read + self.__ptr, bytes_read + self.__ptr, head1, head2, self.MSG_HEAD1, self.MSG_HEAD2)) 
137 | msg_type = self.__buffer[self.__ptr+2] 138 | if msg_type == self.MSG_TYPE_FORMAT: 139 | # parse FORMAT message 140 | if self.__bytesLeft() < self.MSG_FORMAT_PACKET_LEN: 141 | break 142 | self.__parseMsgDescr() 143 | else: 144 | # parse data message 145 | msg_descr = self.__msg_descrs.get(msg_type) # use .get() so an unknown type triggers the error below instead of a bare KeyError 146 | if msg_descr == None: 147 | raise Exception("Unknown msg type: %i" % msg_type) 148 | msg_length = msg_descr[0] 149 | if self.__bytesLeft() < msg_length: 150 | break 151 | if first_data_msg: 152 | # build CSV columns and init data map 153 | if not self.__debug_out: 154 | self.__initCSV() 155 | first_data_msg = False 156 | self.__parseMsg(msg_descr) 157 | bytes_read += self.__ptr 158 | if not self.__debug_out and self.__time_msg != None and self.__csv_updated: 159 | self.__printCSVRow() 160 | f.close() 161 | 162 | def __bytesLeft(self): 163 | return len(self.__buffer) - self.__ptr 164 | 165 | def __filterMsg(self, msg_name): 166 | show_fields = "*" 167 | if len(self.__msg_filter_map) > 0: 168 | show_fields = self.__msg_filter_map.get(msg_name) 169 | return show_fields 170 | 171 | def __initCSV(self): 172 | if len(self.__msg_filter) == 0: 173 | for msg_name in self.__msg_names: 174 | self.__msg_filter.append((msg_name, "*")) 175 | for msg_name, show_fields in self.__msg_filter: 176 | if show_fields == "*": 177 | show_fields = self.__msg_labels.get(msg_name, []) 178 | self.__msg_filter_map[msg_name] = show_fields 179 | for field in show_fields: 180 | full_label = msg_name + "_" + field 181 | self.__csv_columns.append(full_label) 182 | self.__csv_data[full_label] = None 183 | if self.__file != None: 184 | print(self.__csv_delim.join(self.__csv_columns), file=self.__file) 185 | else: 186 | print(self.__csv_delim.join(self.__csv_columns)) 187 | 188 | def __printCSVRow(self): 189 | s = [] 190 | for full_label in self.__csv_columns: 191 | v = self.__csv_data[full_label] 192 | if v == None: 193 | v = self.__csv_null 194 | else: 195 | v = str(v) 196 | s.append(v) 197 | 198 | if self.__file != None: 199 | print(self.__csv_delim.join(s), file=self.__file) 200 | else: 201 | print(self.__csv_delim.join(s)) 202 | 203 | def __parseMsgDescr(self): 204 | if runningPython3: 205 | data = struct.unpack(self.MSG_FORMAT_STRUCT, self.__buffer[self.__ptr + 3 : self.__ptr + self.MSG_FORMAT_PACKET_LEN]) 206 | else: 207 | data = struct.unpack(self.MSG_FORMAT_STRUCT, str(self.__buffer[self.__ptr + 3 : self.__ptr + self.MSG_FORMAT_PACKET_LEN])) 208 | msg_type = data[0] 209 | if msg_type != self.MSG_TYPE_FORMAT: 210 | msg_length = data[1] 211 | msg_name = _parseCString(data[2]) 212 | msg_format = _parseCString(data[3]) 213 | msg_labels = _parseCString(data[4]).split(",") 214 | # Convert msg_format to struct.unpack format string 215 | msg_struct = "" 216 | msg_mults = [] 217 | for c in msg_format: 218 | try: 219 | f = self.FORMAT_TO_STRUCT[c] 220 | msg_struct += f[0] 221 | msg_mults.append(f[1]) 222 | except KeyError as e: 223 | raise Exception("Unsupported format char: %s in message %s (%i)" % (c, msg_name, msg_type)) 224 | msg_struct = "<" + msg_struct # force little-endian 225 | self.__msg_descrs[msg_type] = (msg_length, msg_name, msg_format, msg_labels, msg_struct, msg_mults) 226 | self.__msg_labels[msg_name] = msg_labels 227 | self.__msg_names.append(msg_name) 228 | if self.__debug_out: 229 | if self.__filterMsg(msg_name) != None: 230 | print("MSG FORMAT: type = %i, length = %i, name = %s, format = %s, labels = %s, struct = %s, mults = %s" % ( 231 | msg_type, msg_length, msg_name, msg_format, str(msg_labels), 
msg_struct, msg_mults)) 232 | self.__ptr += self.MSG_FORMAT_PACKET_LEN 233 | 234 | def __parseMsg(self, msg_descr): 235 | msg_length, msg_name, msg_format, msg_labels, msg_struct, msg_mults = msg_descr 236 | if not self.__debug_out and self.__time_msg != None and msg_name == self.__time_msg and self.__csv_updated: 237 | self.__printCSVRow() 238 | self.__csv_updated = False 239 | show_fields = self.__filterMsg(msg_name) 240 | if (show_fields != None): 241 | if runningPython3: 242 | data = list(struct.unpack(msg_struct, self.__buffer[self.__ptr+self.MSG_HEADER_LEN:self.__ptr+msg_length])) 243 | else: 244 | data = list(struct.unpack(msg_struct, str(self.__buffer[self.__ptr+self.MSG_HEADER_LEN:self.__ptr+msg_length]))) 245 | for i in range(len(data)): 246 | if type(data[i]) is str: 247 | data[i] = _parseCString(data[i]) 248 | m = msg_mults[i] 249 | if m != None: 250 | data[i] = data[i] * m 251 | if self.__debug_out: 252 | s = [] 253 | for i in range(len(data)): 254 | label = msg_labels[i] 255 | if show_fields == "*" or label in show_fields: 256 | s.append(label + "=" + str(data[i])) 257 | print("MSG %s: %s" % (msg_name, ", ".join(s))) 258 | else: 259 | # update CSV data buffer 260 | for i in range(len(data)): 261 | label = msg_labels[i] 262 | if label in show_fields: 263 | self.__csv_data[msg_name + "_" + label] = data[i] 264 | if self.__time_msg != None and msg_name != self.__time_msg: 265 | self.__csv_updated = True 266 | if self.__time_msg == None: 267 | self.__printCSVRow() 268 | self.__ptr += msg_length 269 | 270 | def _main(): 271 | if len(sys.argv) < 2: 272 | print("Usage: python sdlog2_dump.py [-v] [-e] [-d delimiter] [-n null] [-m MSG[.field1,field2,...]] [-t TIME_MSG_NAME]\n") 273 | print("\t-v\tUse plain debug output instead of CSV.\n") 274 | print("\t-e\tRecover from errors.\n") 275 | print("\t-d\tUse \"delimiter\" in CSV. Default is \",\".\n") 276 | print("\t-n\tUse \"null\" as placeholder for empty values in CSV. 
Default is empty.\n") 277 | print("\t-m MSG[.field1,field2,...]\n\t\tDump only messages of specified type, and only specified fields.\n\t\tMultiple -m options allowed.") 278 | print("\t-t\tSpecify TIME message name to group data messages by time and significantly reduce duplicate output.\n") 279 | print("\t-f\tPrint to file instead of stdout") 280 | return 281 | fn = sys.argv[1] 282 | debug_out = False 283 | correct_errors = False 284 | msg_filter = [] 285 | csv_null = "" 286 | csv_delim = "," 287 | time_msg = "TIME" 288 | file_name = None 289 | opt = None 290 | for arg in sys.argv[2:]: 291 | if opt != None: 292 | if opt == "d": 293 | csv_delim = arg 294 | elif opt == "n": 295 | csv_null = arg 296 | elif opt == "t": 297 | time_msg = arg 298 | elif opt == "f": 299 | file_name = arg 300 | elif opt == "m": 301 | show_fields = "*" 302 | a = arg.replace(".", "_", 1).split("_", 1) # accept the documented "MSG.field1,field2" form as well as "MSG_field1,field2"; split once so the field list stays intact 303 | if len(a) > 1: 304 | show_fields = a[1].split(",") 305 | msg_filter.append((a[0], show_fields)) 306 | opt = None 307 | else: 308 | if arg == "-v": 309 | debug_out = True 310 | elif arg == "-e": 311 | correct_errors = True 312 | elif arg == "-d": 313 | opt = "d" 314 | elif arg == "-n": 315 | opt = "n" 316 | elif arg == "-m": 317 | opt = "m" 318 | elif arg == "-t": 319 | opt = "t" 320 | elif arg == "-f": 321 | opt = "f" 322 | 323 | if csv_delim == "\\t": 324 | csv_delim = "\t" 325 | parser = SDLog2Parser() 326 | parser.setCSVDelimiter(csv_delim) 327 | parser.setCSVNull(csv_null) 328 | parser.setMsgFilter(msg_filter) 329 | parser.setTimeMsg(time_msg) 330 | parser.setFileName(file_name) 331 | parser.setDebugOut(debug_out) 332 | parser.setCorrectErrors(correct_errors) 333 | parser.process(fn) 334 | 335 | if __name__ == "__main__": 336 | _main() 337 | 
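338 | 339 | #Example usage (untested sketch; filenames are placeholders): this variant defaults to -t TIME, so CSV rows are grouped by the TIME message unless -t overrides it 340 | #  python sdlog2_dump_solo.py solo_log.bin -e -m GPS -f solo_gps.csv 341 | -------------------------------------------------------------------------------- /sfm_shp2exif.csh: -------------------------------------------------------------------------------- 1 | #! 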
/bin/tcsh -f 2 | 3 | #NOTE 7/27/13 4 | #See http://www.sno.phy.queensu.ca/~phil/exiftool/geotag.html 5 | #Good information about tagging photos w/ GPS tracklog 6 | 7 | #This script will assign lat/lon/elev from a shapefile to exif data of photos 8 | #It is assumed that the points are in the same order as the sfm site IDs. 9 | 10 | #Should really do this in python 11 | 12 | #For future SfM surveys, fully-automated pipeline 13 | #With automatic geotagging from GPS log, can use spatial info to break into sites, or just lump everything together 14 | #Can automatically process files based on time, break into sites, extract focal length 15 | 16 | #exiftool can automatically geotag photos given an input GPS log - still need some way to export a tracklog from the GeoXH 17 | 18 | #set photodir = ~/Documents/UW/MtRainier/2012_Field_Data/Paradise/SfM/Nisqually_vista 19 | #set shpfile = ~/Documents/UW/MtRainier/2012_Field_Data/Paradise/shp/20120706_shean_rainier_nisqually.shp 20 | 21 | #set echo 22 | 23 | set photodir = $1 24 | set shpfile = $2 25 | 26 | #set exiftool = '/Volumes/dshean/sw/Image-ExifTool-8.97/exiftool' 27 | set exiftool = '~/sw/Image-ExifTool-8.97/exiftool' 28 | 29 | cd $photodir 30 | 31 | set photodirlist = (`ls -d * | grep 'sfm' `) 32 | 33 | echo 34 | foreach i ($photodirlist) 35 | echo $i 36 | set sitenum = `echo $i | cut -c 14-15` 37 | #Point features are 0-relative 38 | @ sitenum-- 39 | #set sitenum = `printf '%02i' $sitenum` 40 | 41 | #Need to deal with features that have multiple attributes 42 | 43 | #Sunrise 44 | #grep statement must account for comment field 45 | #set xyz = (`ogrinfo -al $shpfile | grep -A 2 ":$sitenum" | sed -e '1,2d' -e 's/[()]//g' -e 's/ POINT //' -e 's/^ //'`) 46 | 47 | #Nisqually - no comment 48 | set xyz = (`ogrinfo -al $shpfile | grep -A 1 ":${sitenum}"$ | grep POINT | sed -e 's/[()]//g' -e 's/ POINT //' -e 's/^ //'`) 49 | echo $xyz 50 | 51 | set latref = 'N' 52 | if (`echo "a=($xyz[2] < 0); a" | bc -l`) then 53 | set latref = 'S' 54 | endif 55 | 56 | set lonref = 'E' 57 | if (`echo "a=($xyz[1] < 0); a" | bc -l`) then 58 | set lonref = 'W' 59 | set xyz[1] = `echo $xyz[1] | sed 's/-//'` 60 | endif 61 | 62 | set altref = 0 63 | if (`echo "a=($xyz[3] < 0); a" | bc -l`) then 64 | set altref = 1 65 | endif 66 | 67 | ls $photodir/$i 68 | set imglist = (`ls $photodir/$i/*/*.jpg`) 69 | 70 | foreach img ($imglist) 71 | echo $img 72 | #Note, the #= turns off conversion and forces a uint8 value of 0 for "Above Sea Level" 73 | $exiftool -exif:GPSLatitude=$xyz[2] -exif:GPSLatitudeRef=$latref -exif:GPSLongitude=$xyz[1] -exif:GPSLongitudeRef=$lonref -exif:GPSAltitude=$xyz[3] -exif:GPSAltitudeRef\#=$altref $img 74 | end 75 | end 76 | -------------------------------------------------------------------------------- /solo_getlogs.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | #David Shean 4 | #dshean@gmail.com 5 | #9/12/16 6 | 7 | #Download logs from 3DR Solo filesystem via scp 8 | 9 | #Connect to solo via wifi first 10 | 11 | outdir=/tmp/logs 12 | 13 | if [ ! -e $outdir ] ; then 14 | mkdir $outdir 15 | fi 16 | cd $outdir 17 | 18 | #Interactive Connect 19 | #ssh root@10.1.1.1 20 | 21 | ip=10.1.1.1 22 | user=root 23 | pw=TjSDBkAu 24 | 25 | #This contains all logs, including telemetry, excluding dataflash 26 | dir=/log 27 | #This dir appears empty 28 | #dir=/log/solo/dataflash 29 | 30 | scp -rp ${user}@${ip}:${dir}/* .
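31 | 32 | #NOTE: the scp call above does not use $pw and will prompt for the root password interactively 33 | #If sshpass happens to be installed, an untested non-interactive alternative: 34 | #sshpass -p $pw scp -rp ${user}@${ip}:${dir}/* . 35 | 36 | --------------------------------------------------------------------------------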