├── __init__.py ├── .vscode ├── tasks.json ├── settings.json └── launch.json ├── .vs └── pyall │ └── v15 │ └── .suo ├── Simrad EM Datagrams Oct 2013.pdf ├── .gitattributes ├── pyall.sln ├── ggmbes.py ├── pyall.pyproj ├── timeseries.py ├── .gitignore ├── README.MD ├── multiprocesshelper.py ├── all2points.py ├── fileutils.py ├── ggmbesstandard.py ├── LICENSE ├── cloud2tif.py ├── geodetic.py └── lashelper.py /__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.vs/pyall/v15/.suo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/guardiangeomatics/pyall/HEAD/.vs/pyall/v15/.suo -------------------------------------------------------------------------------- /Simrad EM Datagrams Oct 2013.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/guardiangeomatics/pyall/HEAD/Simrad EM Datagrams Oct 2013.pdf -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | // Place your settings in this file to overwrite default and user settings. 2 | { 3 | "[python]": { 4 | "editor.defaultFormatter": "ms-python.autopep8" 5 | } 6 | } -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | "name": "Python", 6 | "type": "python", 7 | "request": "launch", 8 | "stopOnEntry": false, 9 | "program": "${file}", 10 | "python": "python.exe", 11 | "args": ["-i", "C:/development/python/0004_20110307_041009.all", "-cpu", "1"] 12 | } 13 | ] 14 | } -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | 4 | # Custom for Visual Studio 5 | *.cs diff=csharp 6 | 7 | # Standard to msysgit 8 | *.doc diff=astextplain 9 | *.DOC diff=astextplain 10 | *.docx diff=astextplain 11 | *.DOCX diff=astextplain 12 | *.dot diff=astextplain 13 | *.DOT diff=astextplain 14 | *.pdf diff=astextplain 15 | *.PDF diff=astextplain 16 | *.rtf diff=astextplain 17 | *.RTF diff=astextplain 18 | -------------------------------------------------------------------------------- /pyall.sln: -------------------------------------------------------------------------------- 1 | 2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio 15 4 | VisualStudioVersion = 15.0.28010.2041 5 | MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{888888A0-9F3D-457C-B088-3A5042F75D52}") = "pyall", "pyall.pyproj", "{713FAE3D-1FE4-495E-91F6-6D34F70BE326}" 7 | EndProject 8 | Global 9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 10 | Debug|Any CPU = Debug|Any CPU 11 | Release|Any CPU = Release|Any CPU 12 | EndGlobalSection 13 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 14 | {713FAE3D-1FE4-495E-91F6-6D34F70BE326}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 15 | 
{713FAE3D-1FE4-495E-91F6-6D34F70BE326}.Release|Any CPU.ActiveCfg = Release|Any CPU 16 | EndGlobalSection 17 | GlobalSection(SolutionProperties) = preSolution 18 | HideSolutionNode = FALSE 19 | EndGlobalSection 20 | GlobalSection(ExtensibilityGlobals) = postSolution 21 | SolutionGuid = {2229B4B1-A315-45E4-89A0-836AE81326CD} 22 | EndGlobalSection 23 | EndGlobal 24 | -------------------------------------------------------------------------------- /ggmbes.py: -------------------------------------------------------------------------------- 1 | #name: ggmbes 2 | #created: July 2017 3 | #by: p.kennedy@guardiangeomatics.com 4 | #description: python module to represent MBES data so we can QC, compute and merge. 5 | 6 | import pprint 7 | 8 | ############################################################################### 9 | class GGPING: 10 | '''used to hold the metadata associated with a ping of data.''' 11 | def __init__(self): 12 | self.timestamp = 0 13 | self.longitude = 0 14 | self.latitude = 0 15 | self.ellipsoidalheight = 0 16 | self.heading = 0 17 | self.pitch = 0 18 | self.roll = 0 19 | self.heave = 0 20 | self.tidecorrector = 0 21 | self.hydroid = 0 22 | self.hydroidsmooth = 0 23 | self.waterLevelReRefPoint_m = 0 24 | self.txtransducerdepth_m = 0 25 | self.hydroidstandarddeviation = 0 26 | 27 | ############################################################################### 28 | def __str__(self): 29 | return pprint.pformat(vars(self)) 30 | 31 | 32 | ############################################################################### 33 | class GGBeam: 34 | def __init__(self): 35 | self.east = 0 36 | self.north = 0 37 | self.depth = 0 38 | self.backscatter = 0 39 | self.id = 0 40 | self.rejectionInfo1 = 0 41 | -------------------------------------------------------------------------------- /pyall.pyproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Debug 5 | 2.0 6 | {713fae3d-1fe4-495e-91f6-6d34f70be326} 7 | 8 | pyall.py 9 | 10 | . 11 | . 12 | {888888a0-9f3d-457c-b088-3a5042f75d52} 13 | Standard Python launcher 14 | 15 | 16 | 17 | 18 | 19 | 10.0 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /timeseries.py: -------------------------------------------------------------------------------- 1 | import os 2 | import numpy as np 3 | import math 4 | 5 | ############################################################################### 6 | class ctimeSeries: 7 | '''# how to use the time series class, a 2D list of time 8 | # attitude = [[1,100],[2,200], [5,500], [10,1000]] 9 | # tsRoll = ctimeSeries(attitude) 10 | # print(tsRoll.getValueAt(6))''' 11 | 12 | 13 | ############################################################################### 14 | def __init__(self, timeOrtimeValue, values=""): 15 | '''the time series requires a 2d series of [[timestamp, value],[timestamp, value]]. It then converts this into a numpy array ready for fast interpolation''' 16 | self.name = "2D time series" 17 | # user has passed 1 list with both time and values, so handle it 18 | if len(values) == 0: 19 | if isinstance(timeOrtimeValue, np.ndarray): 20 | arr = timeOrtimeValue 21 | else: 22 | arr = np.array(timeOrtimeValue) 23 | #sort the list into ascending time order 24 | arr = arr[np.argsort(arr[:,0])] 25 | self.times = arr[:,0] 26 | self.values = arr[:,1] 27 | else: 28 | # user has passed 2 list with time and values, so handle it. 
in this case the list MUST be sorted 29 | self.times = np.array(timeOrtimeValue) 30 | self.values = np.array(values) 31 | 32 | ############################################################################### 33 | def getValueAt(self, timestamp): 34 | '''get an interpolated value for an exact time''' 35 | '''requested values for times BEFORE the''' 36 | return np.interp(timestamp, self.times, self.values, left=None, right=None) 37 | 38 | ############################################################################### 39 | def getNearestAt(self, timestamp): 40 | '''get the nearest actual value to the time provided''' 41 | idx = np.searchsorted(self.times, timestamp, side="left") 42 | if idx > 0 and (idx == len(self.times) or math.fabs(timestamp - self.times[idx-1]) < math.fabs(timestamp - self.times[idx])): 43 | return self.times[idx-1], self.values[idx-1] 44 | else: 45 | return self.times[idx], self.values[idx] 46 | 47 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | local_settings.py 55 | 56 | <<<<<<< HEAD 57 | # Flask instance folder 58 | instance/ 59 | ======= 60 | # Flask stuff: 61 | instance/ 62 | .webassets-cache 63 | >>>>>>> origin/master 64 | 65 | # Scrapy stuff: 66 | .scrapy 67 | 68 | # Sphinx documentation 69 | docs/_build/ 70 | 71 | # PyBuilder 72 | target/ 73 | 74 | # IPython Notebook 75 | .ipynb_checkpoints 76 | 77 | # pyenv 78 | .python-version 79 | 80 | # celery beat schedule file 81 | celerybeat-schedule 82 | 83 | # dotenv 84 | .env 85 | 86 | # virtualenv 87 | venv/ 88 | ENV/ 89 | 90 | # Spyder project settings 91 | .spyderproject 92 | 93 | # Rope project settings 94 | .ropeproject 95 | <<<<<<< HEAD 96 | 97 | # ========================= 98 | # Operating System Files 99 | # ========================= 100 | 101 | # OSX 102 | # ========================= 103 | 104 | .DS_Store 105 | .AppleDouble 106 | .LSOverride 107 | 108 | # Thumbnails 109 | ._* 110 | 111 | # Files that might appear in the root of a volume 112 | .DocumentRevisions-V100 113 | .fseventsd 114 | .Spotlight-V100 115 | .TemporaryItems 116 | .Trashes 117 | .VolumeIcon.icns 118 | 119 | # Directories potentially created on remote AFP share 120 | .AppleDB 121 | .AppleDesktop 122 | Network Trash Folder 123 | Temporary Items 124 | .apdisk 125 | 126 | # Windows 127 | # ========================= 128 | 129 | # Windows image file caches 130 | Thumbs.db 131 | ehthumbs.db 132 | 133 | # Folder config file 134 | Desktop.ini 135 | 136 | # Recycle Bin used on file shares 137 | $RECYCLE.BIN/ 138 | 139 | # 
Windows Installer files 140 | *.cab 141 | *.msi 142 | *.msm 143 | *.msp 144 | 145 | # Windows shortcuts 146 | *.lnk 147 | ======= 148 | >>>>>>> origin/master 149 | -------------------------------------------------------------------------------- /README.MD: -------------------------------------------------------------------------------- 1 | pyall 2 | ===== 3 | * created: August 2016 4 | * version 1.50 5 | * by: p.kennedy@fugro.com 6 | * description: python module to read a Kongsberg .ALL file 7 | * notes: See main at end of script for an example of how to use this 8 | * based on ALL file version October 2013 9 | * developed for Python version 3.4 10 | 11 | Done 12 | ==== 13 | * Added A_Attitude decode 14 | * Added n_Attitude network attitude decode 15 | * Added method to read rawbytes for an entire datagram including header. This is needed to exclude datagrams when conditioning files. 16 | * Added functionality for 'U' UNKNOWN_RECORD so we handle all records in an identical manner 17 | * Added functionality to return 'data' property for each class so we can access raw bytes 18 | * Added decode for N (travel time) records 19 | * Added backward support for python 2.7. Needed to add support for timestamp as this was only available in python 3.4 20 | * Added error trap for reading a corrupt .all file 21 | * Added support for I installation datagram into a key/value pair 22 | * Added loadnavigation method which reads all nav into a list 23 | * Trialled some struct speed-ups. no success 24 | * Patched D decode to simplify and repair missing field (maxbeams) 25 | * Patched print statement for python 3.x instead of only python 2.x 26 | * Added read for trailing bytes in 'P' records for M3 sonar which appears to write imperfect records 27 | * now read the full 5 fields in the header so we can access the record time, then reset the pointer so the full record can be read and parsed. This would appear to be IO inefficient, but it is not as the OS buffers 4 KB when reading 28 | * Basic read loop is in place. Reads the datagram header and skips to the next record. This is quick 29 | * P = 80 P Position record decode in place 30 | * X = 88 X depth record decode in place. Beam data is in a list of depths[], alongtrack[] etc instead of a beam object. This may be more efficient? 31 | * Tested X reader by making a waterfall image. Results look good 32 | 33 | 2Do 34 | === 35 | E = 69 36 | F = 70 37 | G = 71 38 | H = 72 39 | I = 73 40 | J = 74 41 | K = 75 42 | L = 76 43 | M = 77 44 | O = 79 45 | Q = 81 46 | R = 82 47 | S = 83 48 | T = 84 49 | U = 85 50 | V = 86 51 | W = 87 52 | Y = 89 53 | 54 | Example Reader 55 | ------- 56 | ``` 57 | r = allreader("C:/development/all/sampledata/EM2040/GeoFocusEM2040400kHzdual-Rx0.5degx1degPitchStabilised.all") 58 | start_time = time.time() # time the process 59 | 60 | while r.moreData(): 61 | # read a datagram. If we support it, return the datagram type and a class for that datagram 62 | # The user then needs to call the read() method for the class to undertake a fileread and binary decode. This keeps the read super quick. 63 | TypeOfDatagram, datagram = r.readDatagram() 64 | 65 | if TypeOfDatagram == 'P': 66 | datagram.read() 67 | print ("Lat: %.5f Lon: %.5f" % (datagram.Latitude, datagram.Longitude)) 68 | 69 | if TypeOfDatagram == 'X': 70 | datagram.read() 71 | 72 | print("Read Duration: %.3f seconds" % (time.time() - start_time)) # print the processing time. It is handy to keep an eye on processing performance.
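# navigation for the whole file can also be bulk-loaded (see 'Added loadnavigation method' in the Done list above).
# sketch only - the exact method name and return shape should be confirmed in pyall.py:
# navigation = r.loadNavigation()    # expected: a list of position records read in a single pass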
73 | 74 | r.rewind() 75 | print("Complete reading ALL file :-)") 76 | r.close() 77 | # see the test code in main() at the end of pyall for more details. Have Fun 78 | ``` 79 | ``` 80 | Data Type Conversions from ALL to Python:: 81 | * ALL types to python struct types 82 | * signed char = 1 byte = "b" 83 | * unsigned char = 1 byte = "B" 84 | * XTFWORD = signed int 2 bytes = h 85 | * XTFWORD = UNsigned int 2 bytes = H (for unipolar data) 86 | * DWORD = unsigned int 4 bytes = "L" 87 | * short = short integer 2 bytes = "h" 88 | * char = 1 byte = "c" 89 | ``` -------------------------------------------------------------------------------- /multiprocesshelper.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import multiprocessing 4 | import ctypes 5 | import logging 6 | from datetime import datetime, timedelta 7 | 8 | 9 | ############################################################################### 10 | def log(msg, error = False, printmsg=True): 11 | if printmsg: 12 | print (msg) 13 | if error == False: 14 | logging.info(msg) 15 | else: 16 | logging.error(msg) 17 | 18 | ######################################## 19 | def mpresult(msg): 20 | # print (msg) 21 | # g_procprogress.increment_progress(os.path.basename(msg)) 22 | g_procprogress.increment_progress() 23 | 24 | ############################################################################### 25 | def getcpucount(requestedcpu): 26 | '''control how many CPU's we use for multi processing''' 27 | if int(requestedcpu) == 0: 28 | requestedcpu = multiprocessing.cpu_count() 29 | 30 | stat = MEMORYSTATUSEX() 31 | ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) 32 | # print("MemoryLoad: %d%%" % (stat.dwMemoryLoad)) 33 | # print("MemoryAvailable: %d%%" % (stat.ullAvailPhys/(1024*1024*1024))) 34 | availablememoryingigs = stat.ullAvailPhys/(1024*1024*1024) 35 | # make sure we have enough memory per CPU 36 | requiredgigspercpu = 4 37 | 38 | maxcpu = max(1, int(availablememoryingigs/ requiredgigspercpu)) 39 | # ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) 40 | # print("MemoryLoad: %d%%" % (stat.dwMemoryLoad)) 41 | # ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) 42 | # print("MemoryLoad: %d%%" % (stat.dwMemoryLoad)) 43 | # ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) 44 | # print("MemoryLoad: %d%%" % (stat.dwMemoryLoad)) 45 | # ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) 46 | # print("MemoryLoad: %d%%" % (stat.dwMemoryLoad)) 47 | 48 | if int(requestedcpu) > maxcpu: 49 | requestedcpu = maxcpu 50 | return int(requestedcpu) 51 | 52 | ############################################################################### 53 | class MEMORYSTATUSEX(ctypes.Structure): 54 | _fields_ = [ 55 | ("dwLength", ctypes.c_ulong), 56 | ("dwMemoryLoad", ctypes.c_ulong), 57 | ("ullTotalPhys", ctypes.c_ulonglong), 58 | ("ullAvailPhys", ctypes.c_ulonglong), 59 | ("ullTotalPageFile", ctypes.c_ulonglong), 60 | ("ullAvailPageFile", ctypes.c_ulonglong), 61 | ("ullTotalVirtual", ctypes.c_ulonglong), 62 | ("ullAvailVirtual", ctypes.c_ulonglong), 63 | ("sullAvailExtendedVirtual", ctypes.c_ulonglong), 64 | ] 65 | 66 | ############################################################################### 67 | def __init__(self): 68 | # have to initialize this to the size of MEMORYSTATUSEX 69 | self.dwLength = ctypes.sizeof(self) 70 | super(MEMORYSTATUSEX, self).__init__() 71 | 72 | 
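# usage sketch for getcpucount() above (mirrors how all2points.py drives this module; 'work' and 'files' are placeholders):
# cpu = getcpucount(0)                         # 0 = use every core, capped at roughly one worker per 4 GB of free RAM
# pool = multiprocessing.Pool(cpu)
# [pool.apply_async(work, (f,), callback=mpresult) for f in files]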
############################################################################### 73 | class CPROGRESS(object): 74 | '''thread safe class to display progress in command window when in multiprocess mode''' 75 | # procprogress = CPROGRESS(1000) 76 | # for i in range(1000): 77 | # time.sleep(0.01) 78 | # procprogress.increment_progress("test", i) 79 | 80 | ########################################################################### 81 | def __init__(self, maxcount=100): 82 | self.length = 20 # modify this to change the length 83 | self.maxcount = max(maxcount,1) 84 | # self.progress = 0 85 | self.stime = datetime.now() 86 | self.value = 0 87 | self.msg = "Progress:" 88 | 89 | ########################################################################### 90 | def setmaximum(self, value, current=0): 91 | self.maxcount = value 92 | self.value = current 93 | self.stime = datetime.now() 94 | 95 | ########################################################################### 96 | def increment_progress(self, msg="", value=0): 97 | 98 | if len(str(msg)) > 0: 99 | self.msg = msg 100 | 101 | if value == 0: 102 | self.value = self.value + 1 103 | else: 104 | self.value = value 105 | 106 | self.maxcount = max(self.maxcount,1) 107 | progress = self.value/self.maxcount 108 | 109 | # print(value) 110 | secondsconsumed = (datetime.now() - self.stime).total_seconds() 111 | secondsperitem = secondsconsumed / max(self.value,1) 112 | secondsremaining = int((self.maxcount - self.value) * secondsperitem) 113 | timeremaining = str(timedelta(seconds=secondsremaining)) 114 | block = int(round(self.length*progress)) 115 | msg = "\r{0}: [{1}] {2:2.2f}% Remaining: {3}".format(self.msg, "#"*block + "-"*(self.length-block), round(progress*100, 2), timeremaining) 116 | if progress >= 1: msg += " DONE\r\n" 117 | sys.stdout.write(msg) 118 | sys.stdout.flush() 119 | 120 | ########################################################################### 121 | def complete(self, msg): 122 | length = 20 # modify this to change the length 123 | progress = 1 124 | block = int(round(length*progress)) 125 | msg = "\r{0}: [{1}] {2}%".format(msg, "#"*block + "-"*(length-block), round(progress*100, 2)) 126 | if progress >= 1: msg += " DONE\r\n" 127 | sys.stdout.write(msg) 128 | sys.stdout.flush() 129 | 130 | 131 | #class used to display the progress 132 | g_procprogress = CPROGRESS(0) 133 | -------------------------------------------------------------------------------- /all2points.py: -------------------------------------------------------------------------------- 1 | #name: all2points 2 | #created: October 2023 3 | #by: paul.kennedy@guardiangeomatics.com 4 | #description: python module to read a Kongsberg ALL file, create a point cloud 5 | 6 | #done########################################## 7 | 8 | #todo########################################## 9 | 10 | import os.path 11 | from argparse import ArgumentParser 12 | from datetime import datetime, timedelta 13 | import math 14 | import numpy as np 15 | # import open3d as o3d 16 | import sys 17 | import time 18 | import rasterio 19 | import multiprocessing as mp 20 | import shapefile 21 | import logging 22 | 23 | import pyall 24 | import fileutils 25 | import geodetic 26 | import multiprocesshelper 27 | import cloud2tif 28 | import lashelper 29 | import ggmbesstandard 30 | 31 | ########################################################################### 32 | def main(): 33 | 34 | iho = ggmbesstandard.sp44() 35 | msg = str(iho.getordernames()) 36 | 37 | parser = ArgumentParser(description='Read a ALL 
file and create point clouds.') 38 | parser.add_argument('-epsg', action='store', default="0", dest='epsg', help='Specify an output EPSG code for transforming from WGS84 to East,North, e.g. -epsg 4326') 39 | parser.add_argument('-i', action='store', default="", dest='inputfolder', help='Input filename/folder to process.') 40 | parser.add_argument('-cpu', action='store', default='0', dest='cpu', help='Number of cpu processes to use in parallel. [Default: 0, all cpu]') 41 | parser.add_argument('-odir', action='store', default="", dest='odir', help='Specify a relative output folder, e.g. -odir GIS') 42 | parser.add_argument('-debug', action='store', default="-1", dest='debug', help='Specify the number of pings to process. Good only for debugging. [Default:-1]') 43 | parser.add_argument('-tvu', action='store_true', default=False, dest='tvu', help='Use the Total Vertical Uncertainty cleaning algorithm') 44 | parser.add_argument('-verbose', action='store_true', default=False, dest='verbose', help='Write LAZ files and other supporting files. Takes some additional time!, e.g. -verbose [Default:false]') 45 | parser.add_argument('-standard', action='store', default="order1a", dest='standard', help='(optional) Specify the IHO SP44 survey order so we can set the filters to match the required specification. Select from :' + ''.join(msg) + ' [Default:order1a]' ) 46 | parser.add_argument('-near', action='store', default="7", dest='near', help='(optional) ADVANCED: Specify the MEDIAN filter kernel width for computation of the regional surface so nearest neighbours can be calculated. [Default:7]') 47 | 48 | matches = [] 49 | args = parser.parse_args() 50 | # args.inputfolder = "C:/sampledata/all/B_S2980_3005_20220220_084910.all" 51 | # args.inputfolder = r"C:\sampledata\all\ncei_order_2023-10-09T06_31_19.276Z\multibeam-item-517619\insitu_ocean\trackline\atlantis\at26-15\multibeam\data\version1\MB\em122\0000_20140521_235308_Atlantis.all.mb58\0000_20140521_235308_Atlantis.all" 52 | 53 | args.spherical = False 54 | # args.tvu = True 55 | # args.verbose = True 56 | 57 | if os.path.isfile(args.inputfolder): 58 | matches.append(args.inputfolder) 59 | 60 | if len (args.inputfolder) == 0: 61 | # no file is specified, so look for .all files in the current folder.
62 | inputfolder = os.getcwd() 63 | matches = fileutils.findFiles2(False, inputfolder, "*.all") 64 | 65 | if os.path.isdir(args.inputfolder): 66 | matches = fileutils.findFiles2(False, args.inputfolder, "*.all") 67 | 68 | #make sure we have a folder to write to 69 | args.inputfolder = os.path.dirname(matches[0]) 70 | 71 | #make an output folder 72 | if len(args.odir) == 0: 73 | args.odir = os.path.join(args.inputfolder, str("all2point_%s" % (time.strftime("%Y%m%d-%H%M%S")))) 74 | makedirs(args.odir) 75 | 76 | logging.basicConfig(filename = os.path.join(args.odir,"all2point_log.txt"), level=logging.INFO) 77 | log("configuration: %s" % (str(args))) 78 | log("Output Folder: %s" % (args.odir)) 79 | 80 | results = [] 81 | if args.cpu == '1': 82 | for file in matches: 83 | all2point(file, args) 84 | else: 85 | multiprocesshelper.log("Files to Import: %d" %(len(matches))) 86 | cpu = multiprocesshelper.getcpucount(args.cpu) 87 | log("Processing with %d CPU's" % (cpu)) 88 | 89 | pool = mp.Pool(cpu) 90 | multiprocesshelper.g_procprogress.setmaximum(len(matches)) 91 | poolresults = [pool.apply_async(all2point, (file, args), callback=multiprocesshelper.mpresult) for file in matches] 92 | pool.close() 93 | pool.join() 94 | # for idx, result in enumerate (poolresults): 95 | # results.append([file, result._value]) 96 | # print (result._value) 97 | 98 | ############################################################ 99 | def all2point(filename, args): 100 | '''we will try to auto clean beams by extracting the beam xyzF flag data and attempt to clean in scipy''' 101 | '''we then set the beam flags to reject files we think are outliers and write the all file to a new file''' 102 | 103 | #load the python proj projection object library if the user has requested it 104 | if args.epsg != "0": 105 | geo = geodetic.geodesy(args.epsg) 106 | else: 107 | args.epsg = pyall.getsuitableepsg(filename) 108 | geo = geodetic.geodesy(args.epsg) 109 | 110 | log("Processing file: %s" % (filename)) 111 | 112 | maxpings = int(args.debug) 113 | if maxpings == -1: 114 | maxpings = 999999999 115 | 116 | pingcounter = 0 117 | beamcountarray = 0 118 | 119 | log("Loading Point Cloud...") 120 | pointcloud = pyall.loaddata(filename, args) 121 | xyz = np.column_stack([pointcloud.xarr, pointcloud.yarr, pointcloud.zarr, pointcloud.qarr, pointcloud.idarr]) 122 | 123 | #report on RAW POINTS 124 | outfile = os.path.join(args.odir, os.path.basename(filename) + "_R.txt") 125 | np.savetxt(outfile, (xyz), fmt='%.10f', delimiter=',') 126 | 127 | outfilename = os.path.join(outfile + "_Raw_depth.tif") 128 | cloud2tif.saveastif(outfilename, geo, xyz, resolution=2, fill=False) 129 | 130 | log("Read complete at: %s" % (datetime.now())) 131 | return outfilename 132 | 133 | ############################################################################### 134 | def update_progress(job_title, progress): 135 | '''progress value should be a value between 0 and 1''' 136 | length = 20 # modify this to change the length 137 | block = int(round(length*progress)) 138 | msg = "\r{0}: [{1}] {2}%".format(job_title, "#"*block + "-"*(length-block), round(progress*100, 2)) 139 | if progress >= 1: msg += " DONE\r\n" 140 | sys.stdout.write(msg) 141 | sys.stdout.flush() 142 | 143 | ############################################################################### 144 | def makedirs(odir): 145 | if not os.path.isdir(odir): 146 | os.makedirs(odir, exist_ok=True) 147 | 148 | ############################################################################### 149 | def log(msg, error = 
False, printmsg=True): 150 | if printmsg: 151 | print (msg) 152 | if error == False: 153 | logging.info(msg) 154 | else: 155 | logging.error(msg) 156 | 157 | ############################################################################### 158 | if __name__ == "__main__": 159 | main() 160 | -------------------------------------------------------------------------------- /fileutils.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | import fnmatch 4 | from glob import glob 5 | import shutil 6 | 7 | from ctypes import Structure, c_int32, c_uint64, sizeof, byref, windll 8 | 9 | class MemoryStatusEx(Structure): 10 | _fields_ = [ 11 | ('length', c_int32), 12 | ('memoryLoad', c_int32), 13 | ('totalPhys', c_uint64), 14 | ('availPhys', c_uint64), 15 | ('totalPageFile', c_uint64), 16 | ('availPageFile', c_uint64), 17 | ('totalVirtual', c_uint64), 18 | ('availVirtual', c_uint64), 19 | ('availExtendedVirtual', c_uint64)] 20 | def __init__(self): 21 | self.length = sizeof(self) 22 | 23 | ############################################################################### 24 | def main(*opargs, **kwargs): 25 | '''test rig for fileutils''' 26 | 27 | filename= "c:/temp/pk_1.txt" 28 | f = open(filename, "w") 29 | f.write("gg") 30 | f.close() 31 | print(filename) 32 | filename = createOutputFileName(filename, ext="") 33 | print(filename) 34 | 35 | filename = createOutputFileName(filename, ext="") 36 | print(filename) 37 | 38 | filename = createOutputFileName(filename, ext="") 39 | print(filename) 40 | 41 | return 42 | 43 | 44 | recursive = False 45 | # local folder 46 | print ("11", findFiles2(recursive, ".", "*.py")) 47 | # absolute folder 48 | mypath = os.path.dirname(os.path.realpath(__file__)) 49 | print ("22", mypath, findFiles2(recursive, mypath, "*.py")) 50 | 51 | # recursive local folder 52 | recursive = True 53 | print ("33", mypath, findFiles2(recursive, ".", "*.py")) 54 | #recursive absolute folder 55 | mypath = os.path.dirname(os.path.realpath(__file__)) 56 | print ("44", mypath, findFiles2(recursive, mypath, "*.py")) 57 | 58 | 59 | 60 | 61 | ############################################################################### 62 | def createOutputFileName(path, ext=""): 63 | '''Create a valid output filename. if the name of the file already exists the file name is auto-incremented.''' 64 | path = os.path.expanduser(path) 65 | if not os.path.exists(os.path.dirname(path)): 66 | os.makedirs(os.path.dirname(path)) 67 | 68 | if not os.path.exists(path): 69 | return path 70 | 71 | if len(ext) == 0: 72 | root, ext = os.path.splitext(os.path.expanduser(path)) 73 | else: 74 | # use the user supplied extension 75 | root, ext2 = os.path.splitext(os.path.expanduser(path)) 76 | 77 | dir = os.path.dirname(root) 78 | fname = os.path.basename(root) 79 | candidate = fname+ext 80 | index = 1 81 | ls = set(os.listdir(dir)) 82 | candidate = "{}_{}{}".format(fname,index,ext) 83 | while candidate in ls: 84 | candidate = "{}_{}{}".format(fname,index,ext) 85 | index += 1 86 | 87 | return os.path.join(dir, candidate).replace('\\','/') 88 | 89 | ############################################################################### 90 | 91 | # ############################################################################### 92 | # def createOutputFileName(path): 93 | # '''Create a valid output filename. 
if the name of the file already exists the file name is auto-incremented.''' 94 | # path = os.path.expanduser(path) 95 | 96 | # if not os.path.exists(os.path.dirname(path)): 97 | # os.makedirs(os.path.dirname(path)) 98 | 99 | # if not os.path.exists(path): 100 | # return path 101 | 102 | # root, ext = os.path.splitext(os.path.expanduser(path)) 103 | # dir = os.path.dirname(root) 104 | # fname = os.path.basename(root) 105 | # candidate = fname+ext 106 | # index = 1 107 | # ls = set(os.listdir(dir)) 108 | # while candidate in ls: 109 | # candidate = "{}_{}{}".format(fname,index,ext) 110 | # index += 1 111 | # return os.path.join(dir, candidate) 112 | 113 | ############################################################################### 114 | def findFiles2(recursive, filespec, filter): 115 | '''tool to find files based on user request. This can be a single file, a folder start point for recursive search or a wild card''' 116 | matches = [] 117 | if recursive: 118 | matches = glob(os.path.join(filespec, "**", filter), recursive = True) 119 | else: 120 | matches = glob(os.path.join(filespec, filter)) 121 | 122 | mclean = [] 123 | for m in matches: 124 | mclean.append(m.replace('\\','/')) 125 | 126 | # if len(mclean) == 0: 127 | # print ("Nothing found to convert, quitting") 128 | # exit() 129 | return mclean 130 | ############################################################################### 131 | def findFiles(recursive, filespec, filter): 132 | '''tool to find files based on user request. This can be a single file, a folder start point for recursive search or a wild card''' 133 | filespec = filespec + "/" 134 | matches = [] 135 | if recursive: 136 | if not os.path.exists(filespec): 137 | #if the user passes a relative path, deal with it 138 | filespec = os.path.join(os.getcwd(), filespec) 139 | for root, dirnames, filenames in os.walk(os.path.dirname(filespec)): 140 | for f in fnmatch.filter(filenames, filter): 141 | matches.append(os.path.join(root, f)) 142 | print (matches[-1]) 143 | else: 144 | if os.path.exists(filespec): 145 | matches.append (os.path.abspath(filespec)) 146 | else: 147 | for filename in glob(filespec): 148 | matches.append(filename) 149 | if len(matches) == 0: 150 | print ("Nothing found to convert, quitting") 151 | return [] 152 | print ("File Find Count:", len(matches)) 153 | return matches 154 | 155 | ############################################################################### 156 | def addFileNameAppendage(path, appendage): 157 | '''Create a valid output filename. 
if the name of the file already exists the file name is auto-incremented.''' 158 | path = os.path.expanduser(path) 159 | 160 | if not os.path.exists(os.path.dirname(path)): 161 | os.makedirs(os.path.dirname(path)) 162 | 163 | # if not os.path.exists(path): 164 | # return path 165 | 166 | root, ext = os.path.splitext(os.path.expanduser(path)) 167 | dir = os.path.dirname(root) 168 | fname = os.path.basename(root) 169 | candidate = "{}{}{}".format(fname, appendage, ext) 170 | 171 | return os.path.join(dir, candidate) 172 | 173 | ############################################################################### 174 | def copyfile(srcfile, dstfile, replace=True): 175 | '''Copy a file safely''' 176 | 177 | # log ("Copying %s to %s" %(srcfile, dstfile)) 178 | 179 | if not os.path.exists(srcfile): 180 | print ("source file does not exist, skipping : %s" % (srcfile)) 181 | return 0, "" 182 | 183 | if os.path.isfile(dstfile) and replace: 184 | # Handle errors while calling os.remove() 185 | try: 186 | os.remove(dstfile) 187 | except: 188 | print("Error while deleting file %s. Maybe its in use?" % (dstfile)) 189 | 190 | # Handle errors while calling os.ulink() 191 | try: 192 | os.ulink(dstfile) 193 | except: 194 | print("Error while deleting file %s. Maybe its in use?" % (dstfile)) 195 | 196 | if os.path.exists(dstfile): 197 | print ("destination file exists, skipping : %s" % (dstfile)) 198 | return 0 , dstfile 199 | 200 | # the file does not exist so copy it. 201 | try: 202 | shutil.copy(srcfile, dstfile) 203 | return 1, dstfile 204 | except: 205 | print("Error while copying file %s" % (dstfile)) 206 | return 0, "" 207 | 208 | ############################################################################### 209 | def outfilename(filename, prefix="", appendix="", extension=""): 210 | filename = filename.replace('\\','/') 211 | root, ext = os.path.splitext(os.path.basename(filename)) 212 | if len(extension) == 0: 213 | extension = ext 214 | if not "." in extension[0]: 215 | extension = "." 
+ extension 216 | return os.path.join(os.path.dirname(filename), prefix + root + appendix + extension).replace('\\','/') 217 | 218 | ############################################################################### 219 | def deletefile(filename): 220 | if os.path.exists(filename): 221 | try: 222 | os.remove(filename) 223 | except: 224 | return 225 | #log("file is locked, cannot delete: %s " % (filename)) 226 | 227 | ############################################################################### 228 | if __name__ == "__main__": 229 | print(outfilename("c:\\temp\\pk.txt", )) 230 | print(outfilename("c:/temp/pk.txt", "", "_appendix")) 231 | print(outfilename("c:/temp/pk.txt", "prefix_", "_appendix")) 232 | print(outfilename("c:/temp/pk.txt", "prefix_", "_appendix", "shp")) 233 | #main() 234 | -------------------------------------------------------------------------------- /ggmbesstandard.py: -------------------------------------------------------------------------------- 1 | #name: ggmbesstandard 2 | #created: July 2017 3 | #by: p.kennedy@guardiangeomatics.com 4 | #description: python module to represent MBES data STANDARDS 5 | 6 | import math 7 | import pprint 8 | import rasterio 9 | import numpy as np 10 | import logging 11 | import gc 12 | 13 | ############################################################################### 14 | class sp44: 15 | '''used to hold the metadata associated with an IHO MBES standard.''' 16 | def __init__(self): 17 | self.name = "" 18 | self.longitude = 0 19 | self.standards = [] 20 | 21 | self.standards.append(standard("order2", 1.0, 0.023)) 22 | self.standards.append(standard("order1b", 0.5, 0.013)) 23 | self.standards.append(standard("order1a", 0.5, 0.013)) 24 | self.standards.append(standard("specialorder", 0.25, 0.0075)) 25 | self.standards.append(standard("exclusiveorder", 0.15, 0.0075)) 26 | 27 | self.standards.append(standard("hipp1", 0.25, 0.0075)) 28 | self.standards.append(standard("hipp2", 0.5, 0.013)) 29 | self.standards.append(standard("hippassage", 1.0, 0.023)) 30 | 31 | ############################################################################### 32 | def __str__(self): 33 | return pprint.pformat(vars(self)) 34 | 35 | ############################################################################### 36 | def getordernames(self): 37 | msg = [] 38 | for rec in self.standards: 39 | msg.append(rec.name) 40 | return msg 41 | 42 | ############################################################################### 43 | def loadstandard(self, namerequired): 44 | for rec in self.standards: 45 | if namerequired in rec.name: 46 | return rec 47 | 48 | ############################################################################### 49 | class standard: 50 | '''used to hold the metadata associated with an IHO MBES standard.''' 51 | def __init__(self, name, depthtvu_a, depthtvu_b ): 52 | self.name = name 53 | self.depthtvu_a = depthtvu_a 54 | self.depthtvu_b = depthtvu_b 55 | 56 | ############################################################################### 57 | def gettvuat(self, depth): 58 | '''TVU(d) = sqrt((a*a) + ( b * d)^2)''' 59 | tvud = math.sqrt((self.depthtvu_a * self.depthtvu_a) + (self.depthtvu_b * depth)**2) 60 | return tvud 61 | ############################################################################### 62 | def details(self): 63 | msg = "Name:" + self.name + ",a=" + str(self.depthtvu_a) + ",b=" + str(self.depthtvu_b) + ",TVU(d)=sqrt((a*a)+(b*d)^2)" 64 | return msg 65 | 66 | 
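# worked example of the TVU formula implemented in gettvuat() above, using the order1a coefficients (a=0.5, b=0.013):
# at 100 m depth, TVU = sqrt(0.5**2 + (0.013*100)**2) = sqrt(0.25 + 1.69) = sqrt(1.94), approximately 1.39 m, i.e.
# s = sp44().loadstandard("order1a")
# print(s.gettvuat(100))    # -> ~1.393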
############################################################################### 67 | def computeTVUSurface(self, filename, outfilename): 68 | '''compute the TVU for a surface array''' 69 | with rasterio.open(filename) as src: 70 | array = src.read(1) 71 | profile = src.profile 72 | NODATA = src.nodatavals[0] 73 | 74 | #now compute the TVU for the entire surface using numpy array mathematics so it's fast 75 | #preserve the NODATA value 76 | array[array==NODATA] = -9999 77 | arrayTVU = np.multiply (array, self.depthtvu_b) 78 | arrayTVU = np.square (arrayTVU, arrayTVU) 79 | arrayTVU = np.add (arrayTVU, (self.depthtvu_a*self.depthtvu_a)) 80 | arrayTVU = np.sqrt(arrayTVU) 81 | 82 | #reset the nodata value... 83 | tmp = math.floor(self.gettvuat(-9999)) 84 | arrayTVU[arrayTVU > tmp] = NODATA 85 | 86 | # Write to tif, using the same profile as the source 87 | with rasterio.open(outfilename, 'w', **profile) as dst: 88 | dst.write_band(1, arrayTVU) 89 | 90 | return outfilename 91 | 92 | ############################################################################### 93 | def computeTVUBarometer(self, allowabletvufilename, uncertaintyfilename, outfilename): 94 | '''compute the TVU barometric pressure. A low pressure represents where the TVU for a survey point is well within specification. A high pressure is where the TVU is almost using all the allowable TVU''' 95 | with rasterio.open(allowabletvufilename) as allowedsrc: 96 | allowedarray = allowedsrc.read(1) 97 | allowedprofile = allowedsrc.profile 98 | allowedNODATA = allowedsrc.nodatavals[0] 99 | allowedarray[allowedarray==allowedNODATA] = -9999 100 | allowedsrc.close() 101 | #garbage collect 102 | gc.collect() 103 | 104 | with rasterio.open(uncertaintyfilename) as uncertaintysrc: 105 | uncertaintyarray = uncertaintysrc.read(1) 106 | uncertaintyprofile = uncertaintysrc.profile 107 | uncertaintyNODATA = uncertaintysrc.nodatavals[0] 108 | uncertaintyarray[uncertaintyarray==uncertaintyNODATA] = 0 109 | uncertaintysrc.close() 110 | #garbage collect 111 | gc.collect() 112 | 113 | #now compute the TVU barometric pressure for the entire surface using numpy array mathematics so it's fast 114 | # the TVUBAROMETER is the actual uncertainty as computed by CUBE (or other software) expressed as a percentage of the allowable uncertainty 115 | # eg if the allowable uncertainty is 0.5m and the actual uncertainty is 0.25m then the TVUBAROMETER is 50% 116 | # eg if the allowable uncertainty is 0.5m and the actual uncertainty is 0.75m then the TVUBAROMETER is 150% 117 | # eg if the allowable uncertainty is 0.5m and the actual uncertainty is 0.5m then the TVUBAROMETER is 100% 118 | tvubarometerarray = np.divide (uncertaintyarray, allowedarray) 119 | tvubarometerarray = np.multiply (tvubarometerarray, 100) 120 | 121 | # Write to tif, using the same profile as the source 122 | with rasterio.open(outfilename, 'w', **allowedprofile) as dst: 123 | dst.write_band(1, tvubarometerarray) 124 | 125 | return outfilename 126 | 127 | ############################################################################### 128 | def computeDeltaZ(self, regionalfilename, depthfilename, outfilename): 129 | '''compute the DeltaZ at all points in the surface.
DeltaZ is the difference between the point depth and the regional depth''' 130 | with rasterio.open(regionalfilename) as regionalsrc: 131 | regionalarray = regionalsrc.read(1) 132 | regionalprofile = regionalsrc.profile 133 | regionalNODATA = regionalsrc.nodatavals[0] 134 | regionalarray[regionalarray==regionalNODATA] = -9999 135 | regionalsrc.close() 136 | 137 | #garbage collect 138 | gc.collect() 139 | 140 | with rasterio.open(depthfilename) as depthsrc: 141 | deptharray = depthsrc.read(1) 142 | depthprofile = depthsrc.profile 143 | depthNODATA = depthsrc.nodatavals[0] 144 | deptharray[deptharray==depthNODATA] = 9999 145 | depthsrc.close() 146 | 147 | #garbage collect 148 | gc.collect() 149 | 150 | #now compute the deltaz for the entire surface using numpy array mathematics so it's fast 151 | # deltaz = abs(griddepth-depth) 152 | deltazarray = np.subtract (regionalarray, deptharray) 153 | deltazarray = np.abs(deltazarray) 154 | 155 | deltazarray[deltazarray < -1000] = regionalNODATA 156 | deltazarray[deltazarray > 1000] = regionalNODATA 157 | # deltazarray[deltazarray == 0] = regionalNODATA 158 | 159 | # Write to tif, using the same profile as the source 160 | with rasterio.open(outfilename, 'w', **regionalprofile) as dst: 161 | dst.write_band(1, deltazarray) 162 | 163 | return outfilename 164 | 165 | ############################################################################### 166 | def findoutliers(self, tvufilename, deltazfilename, outfilename): 167 | '''given a deltaz and tvu layer find the outliers by thresholding using the TVU array''' 168 | with rasterio.open(deltazfilename) as deltazsrc: 169 | deltazarray = deltazsrc.read(1) 170 | deltazprofile = deltazsrc.profile 171 | deltazNODATA = deltazsrc.nodatavals[0] 172 | height = deltazarray.shape[0] 173 | width = deltazarray.shape[1] 174 | cols, rows = np.meshgrid(np.arange(width), np.arange(height)) 175 | xs, ys = rasterio.transform.xy(deltazsrc.transform, rows, cols) 176 | xs = np.float32(xs) 177 | ys = np.float32(ys) 178 | x = np.array(xs).flatten() 179 | y = np.array(ys).flatten() 180 | # deltazarray[deltazarray==deltazNODATA] = -9999 181 | del xs 182 | del ys 183 | deltazsrc.close() 184 | 185 | #garbage collect 186 | gc.collect() 187 | 188 | with rasterio.open(tvufilename) as tvusrc: 189 | tvuarray = tvusrc.read(1) 190 | tvuprofile = tvusrc.profile 191 | tvuNODATA = tvusrc.nodatavals[0] 192 | # tvuarray[tvuarray== tvuNODATA] = 0 193 | tvusrc.close() 194 | 195 | #garbage collect 196 | gc.collect() 197 | 198 | # make outlier array of difference in deltaz and tvu. NEGATIVE values are not outliers.
only POSITVE VALUEs are outliers 199 | # log("Computing outliers...") 200 | outliersarray = np.subtract(deltazarray, tvuarray) 201 | outliersarray[outliersarray==deltazNODATA] = deltazNODATA 202 | outliersarray[outliersarray < 0] = 0 203 | 204 | valid = (outliersarray>0) & (deltazarray < 1000) 205 | deltaz = np.where(valid, deltazarray, 0) 206 | 207 | #clean up 208 | del deltazarray 209 | del tvuarray 210 | #garbage collect 211 | gc.collect() 212 | 213 | dz = deltaz.flatten() 214 | xydz = np.stack((x,y,dz), axis=1, dtype=np.float32) 215 | #remove the values which are inliers 216 | xydz = xydz[np.all(xydz > 0.0, axis=1)] 217 | 218 | # Write to tif, using the same profile as the source 219 | # log("Writing outliers to raster file: %s" % (outfilename)) 220 | with rasterio.open(outfilename, 'w', **deltazprofile) as dst: 221 | dst.write_band(1, outliersarray) 222 | 223 | return outfilename, xydz 224 | 225 | ############################################################################### 226 | def log(self, msg, error = False, printmsg=True): 227 | if printmsg: 228 | print (msg) 229 | if error == False: 230 | logging.info(msg) 231 | else: 232 | logging.error(msg) 233 | 234 | ############################################################################### 235 | def log(msg, error = False, printmsg=True): 236 | if printmsg: 237 | print (msg) 238 | if error == False: 239 | logging.info(msg) 240 | else: 241 | logging.error(msg) 242 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /cloud2tif.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import math 4 | import rasterio 5 | from rasterio.transform import from_origin 6 | from rasterio.transform import Affine 7 | import numpy as np 8 | 9 | import geodetic 10 | import logging 11 | import gc 12 | from itertools import product 13 | 14 | import rasterio 15 | from rasterio.crs import CRS 16 | from rasterio import windows 17 | 18 | from scipy.signal import medfilt 19 | from scipy.signal import medfilt2d 20 | 21 | import fileutils 22 | 23 | ############################################################################### 24 | def getsize(filename): 25 | with rasterio.open(filename) as src: 26 | pixels = src.height * src.width 27 | SRCRESOLUTION = src.res[0] 28 | gc.collect() 29 | return pixels, SRCRESOLUTION 30 | 31 | ############################################################################### 32 | def get_tiles(ds, width=256, height=256): 33 | nols, nrows = ds.meta['width'], ds.meta['height'] 34 | offsets = product(range(0, nols, width), range(0, nrows, height)) 35 | big_window = windows.Window(col_off=0, row_off=0, width=nols, height=nrows) 36 | for col_off, row_off in offsets: 37 | window =windows.Window(col_off=col_off, row_off=row_off, width=width, height=height).intersection(big_window) 38 | transform = windows.transform(window, ds.transform) 39 | yield window, transform 40 | 41 | ############################################################################### 42 | def get_tiles2(ds, tile_width, tile_height, overlap): 43 | ncols, nrows = ds.meta['width'], ds.meta['height'] 44 | xstep = tile_width - overlap 45 | ystep = tile_height - overlap 46 | for x in range(0, ncols, xstep): 47 | if x + tile_width > ncols: 48 | x = ncols - tile_width 49 | for y in range(0, nrows, ystep): 50 | if y + tile_height > nrows: 51 | y = nrows - tile_height 52 | window = windows.Window(x, y, tile_width, tile_height) 53 | transform = windows.transform(window, ds.transform) 54 | yield window, transform 55 | 56 | ############################################################################### 57 | def tileraster(filename, odir, tilewidth = 512, tileheight = 512, tileoverlap= 10): 58 | '''use rasterio to tile a file into smaller manageable chunks''' 59 | 60 | outfilename = os.path.basename(filename) + "_TILE_" 61 | # odir = os.path.join(os.path.dirname(filename), os.path.splitext(os.path.basename(filename))[0] + "_TILED") 62 | makedirs(odir) 63 | 64 | with rasterio.open(filename) as src: 65 | metadata = src.meta.copy() 66 | log("Source file size is %d wide * %d high == %d pixels. This is potentially too large for your system memory so we will tile it.." % (metadata['width'], metadata['height'], metadata['width'] * metadata['height'])) 67 | idx = 0 68 | tilecount = len(list(get_tiles(src, tilewidth, tileheight))) 69 | log("Tiling into %s tiles..." 
% (tilecount)) 70 | for window, transform in get_tiles(src, tilewidth, tileheight): 71 | metadata['transform'] = transform 72 | metadata['width'], metadata['height'] = window.width, window.height 73 | out_filepath = os.path.join(odir, outfilename + str(window.col_off) + "_" + str(window.row_off) + ".tif") 74 | idx += 1 75 | update_progress("Tiling to conserve memory...", idx / tilecount) 76 | # print(out_filepath) 77 | with rasterio.open(out_filepath, 'w', **metadata) as dst: 78 | dst.write(src.read(window=window)) 79 | return odir 80 | ############################################################################### 81 | def getWKT(filename): 82 | 83 | if not os.path.exists(filename): 84 | return 85 | 86 | with rasterio.open(filename) as src: 87 | WKT = src._crs.wkt 88 | # pkpk = CRS.from_epsg(4326).wkt 89 | src.close() 90 | #garbage collect 91 | gc.collect() 92 | return WKT 93 | 94 | # function to caluclate hillshade 95 | ############################################################################### 96 | def hillshade(array,azimuth,angle_altitude): 97 | azimuth = 360.0 - azimuth 98 | 99 | x, y = np.gradient(array) 100 | slope = np.pi/2. - np.arctan(np.sqrt(x*x + y*y)) 101 | aspect = np.arctan2(-x, y) 102 | azm_rad = azimuth*np.pi/180. #azimuth in radians 103 | alt_rad = angle_altitude*np.pi/180. #altitude in radians 104 | 105 | shaded = np.sin(alt_rad)*np.sin(slope) + np.cos(alt_rad)*np.cos(slope)*np.cos((azm_rad - np.pi/2.) - aspect) 106 | 107 | return 255*(shaded + 1)/2 108 | 109 | ############################################################################### 110 | def smoothtif(filename, outfilename, near=5): 111 | ''' smooth a tif file using scipy. the near parameter is the size of the median filter.''' 112 | with rasterio.open(filename) as src: 113 | array = src.read(1) 114 | profile = src.profile 115 | 116 | # apply a 5x5 median filter to each band 117 | # filtered = medfilt(array, (1, 5, 5)) 118 | filtered = medfilt2d(array, near) 119 | 120 | # Write to tif, using the same profile as the source 121 | with rasterio.open(outfilename, 'w', **profile) as dst: 122 | dst.write_band(1, filtered) 123 | 124 | #garbage collect 125 | gc.collect() 126 | 127 | return outfilename 128 | 129 | ############################################################################### 130 | def saveastif(outfilename, geo, pcd, resolution=1, fill=False): 131 | '''given a numpy array of point cloud, make a floating point geotif file using rasterio''' 132 | '''the numpy point clouds define the bounding box''' 133 | 134 | if len(pcd)==0: 135 | return 136 | 137 | NODATA = -999 138 | xmin = pcd.min(axis=0)[0] 139 | ymin = pcd.min(axis=0)[1] 140 | zmin = pcd.min(axis=0)[2] 141 | 142 | xmax = pcd.max(axis=0)[0] 143 | ymax = pcd.max(axis=0)[1] 144 | zmax = pcd.max(axis=0)[2] 145 | 146 | xres = resolution 147 | yres = resolution 148 | width = math.ceil((xmax - xmin) / resolution) 149 | height = math.ceil((ymax - ymin) / resolution) 150 | 151 | transform = Affine.translation(xmin - xres / 2, ymin - yres / 2) * Affine.scale(xres, yres) 152 | 153 | log("Creating tif file... %s" % (outfilename)) 154 | transform = from_origin(xmin, ymax, xres, yres) 155 | 156 | # save to file... 157 | src= rasterio.open( 158 | outfilename, 159 | mode="w", 160 | driver="GTiff", 161 | height=height, 162 | width=width, 163 | count=1, 164 | dtype='float32', 165 | crs=geo.projection.srs, 166 | transform=transform, 167 | nodata=NODATA, 168 | ) 169 | # populate the numpy array with the values.... 
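	# Note on the row index used in the population loop below: the raster is
	# written "north up" via from_origin(xmin, ymax, xres, yres), so row 0 of the
	# array sits at the ymax edge and row indices increase downwards, while the
	# point cloud y values increase upwards from ymin. The loop therefore flips
	# the row axis: a point at y == ymin evaluates to floor(height - 0) - 1,
	# i.e. height - 1, the bottom row, which is why the trailing "- 1" is needed
	# to keep the index zero based.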
170 | arr = np.full((height, width), fill_value=NODATA, dtype=float) 171 | 172 | from numpy import ma 173 | arr = ma.masked_values(arr, NODATA) 174 | 175 | for row in pcd: 176 | px = math.floor((row[0] - xmin) / xres) 177 | py = math.floor(height - (row[1] - ymin) / yres) - 1 #lord knows why -1 178 | # py, px = src.index(row[0], row[1]) 179 | arr[py, px] = row[2] 180 | 181 | #we might want to fill in the gaps. useful sometimes... 182 | if fill: 183 | from rasterio.fill import fillnodata 184 | arr = fillnodata(arr, mask=None, max_search_distance=xres*2, smoothing_iterations=0) 185 | 186 | src.write(arr, 1) 187 | src.close() 188 | log("Creating tif file Complete.") 189 | 190 | return outfilename 191 | 192 | ############################################################################### 193 | def pcd2meantif(outfilename, geo, pcd, resolution=1, fill=False): 194 | # Current (inefficient) code to quantize into XY 'bins' and take mean Z values in each bin 195 | 196 | if len(pcd)==0: 197 | return 198 | 199 | pcd[:, 0:2] = np.round(pcd[:, 0:2]/float(resolution))*float(resolution) # Round XY values to nearest resolution value 200 | 201 | NODATA = -999 202 | xmin = pcd.min(axis=0)[0] 203 | ymin = pcd.min(axis=0)[1] 204 | 205 | xmax = pcd.max(axis=0)[0] 206 | ymax = pcd.max(axis=0)[1] 207 | 208 | xres = resolution 209 | yres = resolution 210 | width = math.ceil((xmax - xmin) / resolution) 211 | height = math.ceil((ymax - ymin) / resolution) 212 | mean_height = np.zeros((height, width)) 213 | 214 | # Loop over each x-y bin and calculate mean z value 215 | x_val = xmin 216 | for x in range(width): 217 | y_val = ymax 218 | for y in range(height): 219 | height_vals = pcd[(pcd[:,0] == float(x_val)) & (pcd[:,1] == float(y_val)), 2] 220 | if height_vals.size != 0: 221 | mean_height[y,x] = np.mean(height_vals) 222 | y_val -= resolution 223 | x_val += resolution 224 | 225 | # return mean_height 226 | arr = mean_height 227 | arr[mean_height == 0] = NODATA 228 | 229 | log("Creating tif file... %s" % (outfilename)) 230 | transform = from_origin(xmin-(xres/2), ymax + (yres/2), xres, yres) 231 | 232 | # save to file... 233 | src= rasterio.open( 234 | outfilename, 235 | mode="w", 236 | driver="GTiff", 237 | height=height, 238 | width=width, 239 | count=1, 240 | dtype='float32', 241 | crs=geo.projection.srs, 242 | transform=transform, 243 | nodata=NODATA, 244 | ) 245 | #we might want to fill in the gaps. useful sometimes... 246 | if fill: 247 | from rasterio.fill import fillnodata 248 | arr = fillnodata(arr, mask=None, max_search_distance=xres*2, smoothing_iterations=0) 249 | 250 | src.write(arr, 1) 251 | src.close() 252 | log("Creating tif file Complete.") 253 | 254 | return outfilename 255 | 256 | ############################################################################### 257 | def point2raster(outfilename, geo, pcd, resolution=1, bintype="mean", fill=False): 258 | '''given a numpy array of point cloud, make a floating point geotif file using rasterio''' 259 | '''the numpy point clouds define the bounding box''' 260 | # https://stackoverflow.com/questions/54842690/how-to-efficiently-convert-large-numpy-array-of-point-cloud-data-to-downsampled 261 | 262 | NODATA = -999 263 | 264 | if len(pcd)==0: 265 | return 266 | 267 | #take the point cloud array and transpose the xyz,xyz,xyz into xxx,yyy so we can bin them efficienctly without looping thru the data 268 | xy = pcd.T[:2] 269 | #bin the xy data into buckets. 
at present this is only integer based so 1m resolution is minimum 270 | xy = ((xy + resolution / 2) // resolution).astype(int) 271 | # xy = ((xy - resolution / 2) // resolution).astype(int) 272 | #compute the range of the data 273 | mn, mx = xy.min(axis=1), xy.max(axis=1) 274 | #compute the size of the data 275 | sz = mx + 1 - mn 276 | 277 | if bintype == 'mean': 278 | #Converts a tuple of index arrays into an array of flat indices, applying boundary modes to the multi-index. 279 | #RETURNS An array of indices into the flattened version of an array of dimensions dims. 280 | flatidx = np.ravel_multi_index(xy-mn[:, None], dims=sz) 281 | #compute the mean of each bin as efficiently as possible 282 | histo = np.bincount(flatidx, pcd[:, 2], sz.prod()) / np.maximum(1, np.bincount(flatidx, None, sz.prod())) 283 | arr = histo.reshape(sz).T 284 | arr = np.flip(arr, axis = 0) 285 | 286 | if bintype == 'count': 287 | #Converts a tuple of index arrays into an array of flat indices, applying boundary modes to the multi-index. 288 | #RETURNS An array of indices into the flattened version of an array of dimensions dims. 289 | flatidx = np.ravel_multi_index(xy-mn[:, None], dims=sz) 290 | #we can compute the count rapidly as well... 291 | histo = np.maximum(0, np.bincount(flatidx, None, sz.prod())) 292 | arr = histo.reshape(sz).T 293 | arr = np.flip(arr, axis = 0) 294 | 295 | if bintype == 'median': 296 | #calculate the medians... 297 | #https://stackoverflow.com/questions/10305964/quantile-median-2d-binning-in-python 298 | # Median is a bit harder 299 | flatidx = np.ravel_multi_index(xy-mn[:, None], dims=sz) 300 | order = flatidx.argsort() 301 | bin = flatidx[order] 302 | w = pcd[:, 2][order] 303 | edges = (bin[1:] != bin[:-1]).nonzero()[0] + 1 304 | # Median 305 | median = [np.median(i) for i in np.split(w, edges)] 306 | #construct BINSxBINS matrix with median values 307 | binvals=np.unique(bin) 308 | medvals=np.zeros([sz.prod()]) 309 | medvals[binvals]=median 310 | medvals=medvals.reshape(sz) 311 | arr = np.asarray(medvals).reshape(sz).T 312 | arr = np.flip(arr, axis = 0) 313 | 314 | if bintype == 'stddev': 315 | #https://stackoverflow.com/questions/10305964/quantile-median-2d-binning-in-python 316 | # Median is a bit harder 317 | flatidx = np.ravel_multi_index(xy-mn[:, None], dims=sz) 318 | order = flatidx.argsort() 319 | bin = flatidx[order] 320 | w = pcd[:, 2][order] 321 | edges = (bin[1:] != bin[:-1]).nonzero()[0] + 1 322 | # Standard Deviation 323 | stddev = [np.std(i) for i in np.split(w, edges)] 324 | #construct BINSxBINS matrix with median values 325 | binvals=np.unique(bin) 326 | sdvals=np.zeros([sz.prod()]) 327 | sdvals[binvals]=stddev 328 | sdvals=sdvals.reshape(sz) 329 | arr = np.asarray(sdvals).reshape(sz).T 330 | arr = np.flip(arr, axis = 0) 331 | 332 | # clear out the empty nodes and set to NODATA value 333 | arr[arr == 0] = NODATA 334 | 335 | xmin = mn[0] 336 | ymin = mn[1] 337 | xmax = mx[0] 338 | ymax = mx[1] 339 | xres = resolution 340 | yres = resolution 341 | 342 | width = math.ceil((xmax - xmin) / resolution) 343 | height = math.ceil((ymax - ymin) / resolution) 344 | 345 | log("Creating tif file... %s" % (outfilename)) 346 | transform = from_origin(xmin-(xres/2), ymax + (yres/2), xres, yres) 347 | 348 | # save to file... 
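	# For reference, the bincount binning in the 'mean' branch above works like
	# this (illustrative values only): with flatidx = [0, 0, 2],
	# z = pcd[:, 2] = [2.0, 4.0, 8.0] and sz.prod() = 4,
	#   np.bincount(flatidx, z, 4)                   -> [6.0, 0.0, 8.0, 0.0]  (sum of z per cell)
	#   np.maximum(1, np.bincount(flatidx, None, 4)) -> [2, 1, 1, 1]          (points per cell, clamped)
	# so the element-wise ratio gives the per-cell means [3.0, 0.0, 8.0, 0.0].
	# Empty cells therefore come out as 0 and are set to NODATA before writing.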
349 | src= rasterio.open( 350 | outfilename, 351 | mode="w", 352 | driver="GTiff", 353 | height=height, 354 | width=width, 355 | count=1, 356 | dtype='float32', 357 | crs=geo.projection.srs, 358 | transform=transform, 359 | nodata=NODATA, 360 | ) 361 | #we might want to fill in the gaps. useful sometimes... 362 | if fill: 363 | from rasterio.fill import fillnodata 364 | arr = fillnodata(arr, mask=None, max_search_distance=xres*2, smoothing_iterations=0) 365 | 366 | src.write(arr, 1) 367 | src.close() 368 | log("Creating tif file Complete.") 369 | 370 | return outfilename 371 | ############################################################################### 372 | def log(msg, error = False, printmsg=True): 373 | if printmsg: 374 | print (msg) 375 | if error == False: 376 | logging.info(msg) 377 | else: 378 | logging.error(msg) 379 | 380 | ############################################################################### 381 | # def createprj(outfilename, epsg, wkt=""): 382 | def createprj(outfilename, wkt=""): 383 | '''create the PRJ file''' 384 | 385 | # geo = geodetic.geodesy(epsg) 386 | # prj = open(outfilename, "w") 387 | # prj.writelines(geo.projection.crs.to_wkt(version="WKT1_ESRI", pretty=True)) 388 | # prj.close() 389 | 390 | prj = open(outfilename, "w") 391 | prj.writelines(wkt) 392 | prj.close() 393 | 394 | ############################################################################### 395 | ############################################################################### 396 | def makedirs(odir): 397 | if not os.path.isdir(odir): 398 | os.makedirs(odir, exist_ok=True) 399 | ############################################################################### 400 | def update_progress(job_title, progress): 401 | '''progress value should be a value between 0 and 1''' 402 | length = 20 # modify this to change the length 403 | block = int(round(length*progress)) 404 | msg = "\r{0}: [{1}] {2}%".format(job_title, "#"*block + "-"*(length-block), round(progress*100, 2)) 405 | if progress >= 1: msg += " DONE\r\n" 406 | sys.stdout.write(msg) 407 | sys.stdout.flush() 408 | -------------------------------------------------------------------------------- /geodetic.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # 3 | # --------------------------------------------------------------------- 4 | # | | 5 | # | geodetic.cc - a collection of geodetic functions | 6 | # | Paul Kennedy May 2016 | 7 | # | Jim Leven - Dec 99 | 8 | # | | 9 | # | originally from: | 10 | # | http://wegener.mechanik.tu-darmstadt.de/GMT-Help/Archiv/att-8710/Geodetic_py | 11 | # |ftp://pdsimage2.wr.usgs.gov/pub/pigpen/Python/Geodetic_py.py | 12 | # | | 13 | # --------------------------------------------------------------------- 14 | # 15 | # ------------------------------------------------------------------------------ 16 | # | Algrothims from Geocentric Datum of Australia Technical Manual | 17 | # | | 18 | # | http://www.anzlic.org.au/icsm/gdatum/chapter4.html | 19 | # | | 20 | # | This page last updated 11 May 1999 | 21 | # | | 22 | # | Computations on the Ellipsoid | 23 | # | | 24 | # | There are a number of formulae that are available | 25 | # | to calculate accurate geodetic positions, | 26 | # | azimuths and distances on the ellipsoid. | 27 | # | | 28 | # | Vincenty's formulae (Vincenty, 1975) may be used | 29 | # | for lines ranging from a few cm to nearly 20,000 km, | 30 | # | with millimetre accuracy. 
| 31 | # | The formulae have been extensively tested | 32 | # | for the Australian region, by comparison with results | 33 | # | from other formulae (Rainsford, 1955 & Sodano, 1965). | 34 | # | | 35 | # | * Inverse problem: azimuth and distance from known | 36 | # | latitudes and longitudes | 37 | # | * Direct problem: Latitude and longitude from known | 38 | # | position, azimuth and distance. | 39 | # | * Sample data | 40 | # | * Excel spreadsheet | 41 | # | | 42 | # | Vincenty's Inverse formulae | 43 | # | Given: latitude and longitude of two points | 44 | # | (latitude1, longitude1 and latitude2, longitude2), | 45 | # | Calculate: the ellipsoidal distance (s) and | 46 | # | forward and reverse azimuths between the points (alpha1Tp2, alpha21). | 47 | # | | 48 | # ------------------------------------------------------------------------------ 49 | 50 | import math 51 | import numpy as np 52 | import sys 53 | import os.path 54 | import pyproj 55 | 56 | ############################################################################### 57 | def main(): 58 | 59 | easting = 10 60 | northing = 10 61 | distance = 5 62 | print(calculateGridPositionFromrangeBearing(easting, northing, distance, 0)) 63 | print(calculateGridPositionFromrangeBearing(easting, northing, distance, 90)) 64 | print(calculateGridPositionFromrangeBearing(easting, northing, distance, 180)) 65 | print(calculateGridPositionFromrangeBearing(easting, northing, distance, 270)) 66 | print(calculateGridPositionFromrangeBearing(easting, northing, distance, 360)) 67 | 68 | f = 1.0 / 298.257223563 # WGS84 69 | a = 6378137.0 # metres 70 | 71 | print ("\n Ellipsoidal major axis = %12.3f metres\n" % ( a )) 72 | print ("\n Inverse flattening = %15.9f\n" % ( 1.0/f )) 73 | 74 | print ("\n Test Flinders Peak to Buninyon") 75 | print ("\n ****************************** \n") 76 | latitude1 = -(( 3.7203 / 60. + 57) / 60. + 37 ) 77 | longitude1 = ( 29.5244 / 60. + 25) / 60. + 144 78 | print ("Flinders Peak = %12.6f, %13.6f \n" % ( latitude1, longitude1 )) 79 | deg = int(latitude1) 80 | min = int(abs( ( latitude1 - deg) * 60.0 )) 81 | sec = abs(latitude1 * 3600 - deg * 3600) - min * 60 82 | print (" Flinders Peak = %3i\xF8%3i\' %6.3f\", " % ( deg, min, sec ),) 83 | deg = int(longitude1) 84 | min = int(abs( ( longitude1 - deg) * 60.0 )) 85 | sec = abs(longitude1 * 3600 - deg * 3600) - min * 60 86 | print (" %3i\xF8%3i\' %6.3f\" \n" % ( deg, min, sec )) 87 | 88 | latitude2 = -(( 10.1561 / 60. + 39) / 60. + 37 ) 89 | longitude2 = ( 35.3839 / 60. + 55) / 60. 
+ 143 90 | print ("\n Buninyon = %12.6f, %13.6f \n" % ( latitude2, longitude2 )) 91 | 92 | deg = int(latitude2) 93 | min = int(abs( ( latitude2 - deg) * 60.0 )) 94 | sec = abs(latitude2 * 3600 - deg * 3600) - min * 60 95 | print (" Buninyon = %3i\xF8%3i\' %6.3f\", " % ( deg, min, sec ),) 96 | deg = int(longitude2) 97 | min = int(abs( ( longitude2 - deg) * 60.0 )) 98 | sec = abs(longitude2 * 3600 - deg * 3600) - min * 60 99 | print (" %3i\xF8%3i\' %6.3f\" \n" % ( deg, min, sec )) 100 | 101 | # dist, alpha1Tp2, alpha21 = vinc_dist ( f, a, latitude1, longitude1, latitude2, longitude2 ) 102 | dist, alpha1Tp2, alpha21 = calculaterangeBearingFromGeographicals(longitude1, latitude1, longitude2, latitude2 ) 103 | 104 | print ("\n Ellipsoidal Distance = %15.3f metres\n should be 54972.271 m\n" % ( dist )) 105 | print ("\n Forward and back azimuths = %15.6f, %15.6f \n" % ( alpha1Tp2, alpha21 )) 106 | deg = int(alpha1Tp2) 107 | min = int( abs(( alpha1Tp2 - deg) * 60.0 ) ) 108 | sec = abs(alpha1Tp2 * 3600 - deg * 3600) - min * 60 109 | print (" Forward azimuth = %3i\xF8%3i\' %6.3f\"\n" % ( deg, min, sec )) 110 | deg = int(alpha21) 111 | min = int(abs( ( alpha21 - deg) * 60.0 )) 112 | sec = abs(alpha21 * 3600 - deg * 3600) - min * 60 113 | print (" Reverse azimuth = %3i\xF8%3i\' %6.3f\"\n" % ( deg, min, sec )) 114 | 115 | # Test the direct function */ 116 | latitude1 = -(( 3.7203 / 60. + 57) / 60. + 37 ) 117 | longitude1 = ( 29.5244 / 60. + 25) / 60. + 144 118 | dist = 54972.271 119 | alpha1Tp2 = ( 5.37 / 60. + 52) / 60. + 306 120 | latitude2 = longitude2 = 0.0 121 | alpha21 = 0.0 122 | 123 | # latitude2, longitude2, alpha21 = vincentyDirect (latitude1, longitude1, alpha1Tp2, dist ) 124 | latitude2, longitude2, alpha21 = calculateGeographicalPositionFromrangeBearing(latitude1, longitude1, alpha1Tp2, dist) 125 | 126 | print ("\n Projected point =%11.6f, %13.6f \n" % ( latitude2, longitude2 )) 127 | deg = int(latitude2) 128 | min = int(abs( ( latitude2 - deg) * 60.0 )) 129 | sec = abs( latitude2 * 3600 - deg * 3600) - min * 60 130 | print (" Projected Point = %3i\xF8%3i\' %6.3f\", " % ( deg, min, sec ),) 131 | deg = int(longitude2) 132 | min = int(abs( ( longitude2 - deg) * 60.0 )) 133 | sec = abs(longitude2 * 3600 - deg * 3600) - min * 60 134 | print (" %3i\xF8%3i\' %6.3f\"\n" % ( deg, min, sec )) 135 | print (" Should be Buninyon \n" ) 136 | print ("\n Reverse azimuth = %10.6f \n" % ( alpha21 )) 137 | deg = int(alpha21) 138 | min = int(abs( ( alpha21 - deg) * 60.0 )) 139 | sec = abs(alpha21 * 3600 - deg * 3600) - min * 60 140 | print (" Reverse azimuth = %3i\xF8%3i\' %6.3f\"\n\n" % ( deg, min, sec )) 141 | 142 | ############################################################################### 143 | def epsgfromlonglat (longitude, latitude): 144 | from pyproj import CRS 145 | from pyproj.aoi import AreaOfInterest 146 | from pyproj.database import query_utm_crs_info 147 | 148 | utm_crs_list = query_utm_crs_info( 149 | datum_name="WGS 84", 150 | area_of_interest=AreaOfInterest( 151 | west_lon_degree=longitude, 152 | south_lat_degree=latitude, 153 | east_lon_degree=longitude, 154 | north_lat_degree=latitude, 155 | ), 156 | ) 157 | utm_crs = CRS.from_epsg(utm_crs_list[0].code) 158 | return utm_crs_list[0].code 159 | 160 | ############################################################################### 161 | def medfilt (x, k): 162 | """Apply a length-k median filter to a 1D array x. 163 | Boundaries are extended by repeating endpoints. 164 | """ 165 | assert k % 2 == 1, "Median filter length must be odd." 
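	# The approach below builds an (n, k) array y whose columns are shifted copies
	# of x, padding each end by repeating the first and last samples, then takes
	# the median across each row. For example, x = [1, 9, 2, 8, 3] with k = 3
	# returns [1, 2, 8, 3, 3].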
166 | assert x.ndim == 1, "Input must be one-dimensional." 167 | k2 = (k - 1) // 2 168 | y = np.zeros ((len (x), k), dtype=x.dtype) 169 | y[:,k2] = x 170 | for i in range (k2): 171 | j = k2 - i 172 | y[j:,i] = x[:-j] 173 | y[:j,i] = x[0] 174 | y[:-j,-(i+1)] = x[j:] 175 | y[-j:,-(i+1)] = x[-1] 176 | return np.median (y, axis=1) 177 | 178 | ############################################################################### 179 | # from: http://mathforum.org/library/drmath/view/62034.html 180 | def calculaterangeBearingFromGridPosition(easting1, northing1, easting2, northing2): 181 | """given 2 east, north, pairs, compute the range and bearing""" 182 | 183 | dx = easting2-easting1 184 | dy = northing2-northing1 185 | 186 | bearing = 90 - (180/math.pi)*math.atan2(northing2-northing1, easting2-easting1) 187 | return (math.sqrt((dx*dx)+(dy*dy)), bearing) 188 | 189 | ############################################################################### 190 | def normalize360(brg): 191 | brg = brg % 360 192 | if (brg < 0): 193 | brg += 360 194 | return brg 195 | 196 | ############################################################################### 197 | # taken frm http://gis.stackexchange.com/questions/76077/how-to-create-points-based-on-the-distance-and-bearing-from-a-survey-point 198 | def calculateGridPositionFromrangeBearing(easting, northing, distance, bearing): 199 | """given an east, north, range and bearing, compute a new coordinate on the grid""" 200 | point = (easting, northing) 201 | angle = 90 - bearing 202 | bearing = math.radians(bearing) 203 | angle = math.radians(angle) 204 | 205 | # polar coordinates 206 | dist_x = distance * math.cos(angle) 207 | dist_y = distance * math.sin(angle) 208 | 209 | xfinal = point[0] + dist_x 210 | yfinal = point[1] + dist_y 211 | 212 | # direction cosines 213 | cosa = math.cos(angle) 214 | cosb = math.cos(bearing) 215 | xfinal = point[0] + (distance * cosa) 216 | yfinal = point[1] + (distance * cosb) 217 | 218 | return [xfinal, yfinal] 219 | 220 | ############################################################################## 221 | def calculateGridPositionFromBearingDxDy(x, y, heading, dx, dy): 222 | '''given a grid position, heading, Dx(metres) and Dy(metres), compute a new grid position. handle zero length x,y gracefully ''' 223 | 224 | # compute along the heading using the dy coordinate 225 | if dy != 0: 226 | x2,y2 = calculateGridPositionFromrangeBearing(x,y, dy, heading) 227 | else: 228 | x2 = x 229 | y2 = y 230 | 231 | # compute along the heading using the dx coordinate 232 | if dx != 0: 233 | x,y = calculateGridPositionFromrangeBearing(x2,y2, dx, heading+90) 234 | else: 235 | x = x2 236 | y = y2 237 | return x, y 238 | 239 | ############################################################################## 240 | def calculateGeographicalPositionFromBearingDxDy(longitude, latitude, heading, dx, dy): 241 | '''given a geographical position, heading, Dx(metres) and Dy(metres), compute a new geographical position. 
handle zero length x,y gracefully ''' 242 | 243 | # compute along the heading using the dy coordinate 244 | if dy != 0: 245 | lat, lon, az = calculateGeographicalPositionFromrangeBearing(latitude, longitude, heading, dy) 246 | else: 247 | lon = longitude 248 | lat = latitude 249 | 250 | # compute along the heading using the dx coordinate 251 | if dx != 0: 252 | lat, lon, az = calculateGeographicalPositionFromrangeBearing(lat, lon, heading + 90.0, dx) 253 | else: 254 | lon = longitude 255 | lat = latitude 256 | return lon, lat 257 | 258 | ############################################################################### 259 | def calculaterangeBearingFromGeographicals(longitude1, latitude1, longitude2, latitude2 ) : 260 | """ 261 | Returns s, the distance between two geographic points on the ellipsoid 262 | and alpha1, alpha2, the forward and reverse azimuths between these points. 263 | lats, longs and azimuths are in decimal degrees, distance in metres 264 | 265 | Returns ( s, alpha1Tp2, alpha21 ) as a tuple 266 | """ 267 | f = 1.0 / 298.257223563 # WGS84 268 | a = 6378137.0 # metres 269 | 270 | if (abs( latitude2 - latitude1 ) < 1e-8) and ( abs( longitude2 - longitude1) < 1e-8 ) : 271 | return 0.0, 0.0, 0.0 272 | 273 | piD4 = math.atan( 1.0 ) 274 | two_pi = piD4 * 8.0 275 | 276 | latitude1 = latitude1 * piD4 / 45.0 277 | longitude1 = longitude1 * piD4 / 45.0 # unfortunately lambda is a key word! 278 | latitude2 = latitude2 * piD4 / 45.0 279 | longitude2 = longitude2 * piD4 / 45.0 280 | 281 | b = a * (1.0 - f) 282 | 283 | TanU1 = (1-f) * math.tan( latitude1 ) 284 | TanU2 = (1-f) * math.tan( latitude2 ) 285 | 286 | U1 = math.atan(TanU1) 287 | U2 = math.atan(TanU2) 288 | 289 | lembda = longitude2 - longitude1 290 | last_lembda = -4000000.0 # an impossibe value 291 | omega = lembda 292 | 293 | # Iterate the following equations, 294 | # until there is no significant change in lembda 295 | 296 | while ( last_lembda < -3000000.0 or lembda != 0 and abs( (last_lembda - lembda)/lembda) > 1.0e-9 ) : 297 | 298 | sqr_sin_sigma = pow( math.cos(U2) * math.sin(lembda), 2) + \ 299 | pow( (math.cos(U1) * math.sin(U2) - \ 300 | math.sin(U1) * math.cos(U2) * math.cos(lembda) ), 2 ) 301 | 302 | Sin_sigma = math.sqrt( sqr_sin_sigma ) 303 | 304 | Cos_sigma = math.sin(U1) * math.sin(U2) + math.cos(U1) * math.cos(U2) * math.cos(lembda) 305 | 306 | sigma = math.atan2( Sin_sigma, Cos_sigma ) 307 | 308 | Sin_alpha = math.cos(U1) * math.cos(U2) * math.sin(lembda) / math.sin(sigma) 309 | alpha = math.asin( Sin_alpha ) 310 | 311 | Cos2sigma_m = math.cos(sigma) - (2 * math.sin(U1) * math.sin(U2) / pow(math.cos(alpha), 2) ) 312 | 313 | C = (f/16) * pow(math.cos(alpha), 2) * (4 + f * (4 - 3 * pow(math.cos(alpha), 2))) 314 | 315 | last_lembda = lembda 316 | 317 | lembda = omega + (1-C) * f * math.sin(alpha) * (sigma + C * math.sin(sigma) * \ 318 | (Cos2sigma_m + C * math.cos(sigma) * (-1 + 2 * pow(Cos2sigma_m, 2) ))) 319 | 320 | u2 = pow(math.cos(alpha),2) * (a*a-b*b) / (b*b) 321 | 322 | A = 1 + (u2/16384) * (4096 + u2 * (-768 + u2 * (320 - 175 * u2))) 323 | 324 | B = (u2/1024) * (256 + u2 * (-128+ u2 * (74 - 47 * u2))) 325 | 326 | delta_sigma = B * Sin_sigma * (Cos2sigma_m + (B/4) * \ 327 | (Cos_sigma * (-1 + 2 * pow(Cos2sigma_m, 2) ) - \ 328 | (B/6) * Cos2sigma_m * (-3 + 4 * sqr_sin_sigma) * \ 329 | (-3 + 4 * pow(Cos2sigma_m,2 ) ))) 330 | 331 | s = b * A * (sigma - delta_sigma) 332 | 333 | alpha1Tp2 = math.atan2( (math.cos(U2) * math.sin(lembda)), \ 334 | (math.cos(U1) * math.sin(U2) - math.sin(U1) * math.cos(U2) * 
math.cos(lembda))) 335 | 336 | alpha21 = math.atan2( (math.cos(U1) * math.sin(lembda)), \ 337 | (-math.sin(U1) * math.cos(U2) + math.cos(U1) * math.sin(U2) * math.cos(lembda))) 338 | 339 | if ( alpha1Tp2 < 0.0 ) : 340 | alpha1Tp2 = alpha1Tp2 + two_pi 341 | if ( alpha1Tp2 > two_pi ) : 342 | alpha1Tp2 = alpha1Tp2 - two_pi 343 | 344 | alpha21 = alpha21 + two_pi / 2.0 345 | if ( alpha21 < 0.0 ) : 346 | alpha21 = alpha21 + two_pi 347 | if ( alpha21 > two_pi ) : 348 | alpha21 = alpha21 - two_pi 349 | 350 | alpha1Tp2 = alpha1Tp2 * 45.0 / piD4 351 | alpha21 = alpha21 * 45.0 / piD4 352 | return s, alpha1Tp2, alpha21 353 | 354 | # END of Vincenty's Inverse formulae 355 | 356 | 357 | ############################################################################### 358 | #------------------------------------------------------------------------------- 359 | # Vincenty's Direct formulae | 360 | # Given: latitude and longitude of a point (latitude1, longitude1) and | 361 | # the geodetic azimuth (alpha1Tp2) | 362 | # and ellipsoidal distance in metres (s) to a second point, | 363 | # | 364 | # Calculate: the latitude and longitude of the second point (latitude2, longitude2) | 365 | # and the reverse azimuth (alpha21). | 366 | # | 367 | #------------------------------------------------------------------------------- 368 | def calculateGeographicalPositionFromrangeBearing(latitude1, longitude1, alpha1To2, s) : 369 | """ 370 | Returns the lat and long of projected point and reverse azimuth 371 | given a reference point and a distance and azimuth to project. 372 | lats, longs and azimuths are passed in decimal degrees 373 | 374 | Returns ( latitude2, longitude2, alpha2To1 ) as a tuple 375 | 376 | """ 377 | f = 1.0 / 298.257223563 # WGS84 378 | a = 6378137.0 # metres 379 | 380 | piD4 = math.atan( 1.0 ) 381 | two_pi = piD4 * 8.0 382 | 383 | latitude1 = latitude1 * piD4 / 45.0 384 | longitude1 = longitude1 * piD4 / 45.0 385 | alpha1To2 = alpha1To2 * piD4 / 45.0 386 | if ( alpha1To2 < 0.0 ) : 387 | alpha1To2 = alpha1To2 + two_pi 388 | if ( alpha1To2 > two_pi ) : 389 | alpha1To2 = alpha1To2 - two_pi 390 | 391 | b = a * (1.0 - f) 392 | 393 | TanU1 = (1-f) * math.tan(latitude1) 394 | U1 = math.atan( TanU1 ) 395 | sigma1 = math.atan2( TanU1, math.cos(alpha1To2) ) 396 | Sinalpha = math.cos(U1) * math.sin(alpha1To2) 397 | cosalpha_sq = 1.0 - Sinalpha * Sinalpha 398 | 399 | u2 = cosalpha_sq * (a * a - b * b ) / (b * b) 400 | A = 1.0 + (u2 / 16384) * (4096 + u2 * (-768 + u2 * \ 401 | (320 - 175 * u2) ) ) 402 | B = (u2 / 1024) * (256 + u2 * (-128 + u2 * (74 - 47 * u2) ) ) 403 | 404 | # Starting with the approximation 405 | sigma = (s / (b * A)) 406 | 407 | last_sigma = 2.0 * sigma + 2.0 # something impossible 408 | 409 | # Iterate the following three equations 410 | # until there is no significant change in sigma 411 | 412 | # two_sigma_m , delta_sigma 413 | while ( abs( (last_sigma - sigma) / sigma) > 1.0e-9 ) : 414 | two_sigma_m = 2 * sigma1 + sigma 415 | 416 | delta_sigma = B * math.sin(sigma) * ( math.cos(two_sigma_m) \ 417 | + (B/4) * (math.cos(sigma) * \ 418 | (-1 + 2 * math.pow( math.cos(two_sigma_m), 2 ) - \ 419 | (B/6) * math.cos(two_sigma_m) * \ 420 | (-3 + 4 * math.pow(math.sin(sigma), 2 )) * \ 421 | (-3 + 4 * math.pow( math.cos (two_sigma_m), 2 ))))) \ 422 | 423 | last_sigma = sigma 424 | sigma = (s / (b * A)) + delta_sigma 425 | 426 | latitude2 = math.atan2 ( (math.sin(U1) * math.cos(sigma) + math.cos(U1) * math.sin(sigma) * math.cos(alpha1To2) ), \ 427 | ((1-f) * math.sqrt( math.pow(Sinalpha, 2) + \ 428 | 
pow(math.sin(U1) * math.sin(sigma) - math.cos(U1) * math.cos(sigma) * math.cos(alpha1To2), 2)))) 429 | 430 | lembda = math.atan2( (math.sin(sigma) * math.sin(alpha1To2 )), (math.cos(U1) * math.cos(sigma) - \ 431 | math.sin(U1) * math.sin(sigma) * math.cos(alpha1To2))) 432 | 433 | C = (f/16) * cosalpha_sq * (4 + f * (4 - 3 * cosalpha_sq )) 434 | 435 | omega = lembda - (1-C) * f * Sinalpha * \ 436 | (sigma + C * math.sin(sigma) * (math.cos(two_sigma_m) + \ 437 | C * math.cos(sigma) * (-1 + 2 * math.pow(math.cos(two_sigma_m),2) ))) 438 | 439 | longitude2 = longitude1 + omega 440 | 441 | alpha21 = math.atan2 ( Sinalpha, (-math.sin(U1) * math.sin(sigma) + \ 442 | math.cos(U1) * math.cos(sigma) * math.cos(alpha1To2))) 443 | 444 | alpha21 = alpha21 + two_pi / 2.0 445 | if ( alpha21 < 0.0 ) : 446 | alpha21 = alpha21 + two_pi 447 | if ( alpha21 > two_pi ) : 448 | alpha21 = alpha21 - two_pi 449 | 450 | latitude2 = latitude2 * 45.0 / piD4 451 | longitude2 = longitude2 * 45.0 / piD4 452 | alpha21 = alpha21 * 45.0 / piD4 453 | 454 | return latitude2, longitude2, alpha21 455 | 456 | # END of Vincenty's Direct formulae 457 | 458 | ############################################################################### 459 | def est_dist( latitude1, longitude1, latitude2, longitude2 ) : 460 | """ 461 | 462 | Returns an estimate of the distance between two geographic points 463 | This is a quick and dirty vinc_dist 464 | which will generally estimate the distance to within 1% 465 | Returns distance in metres 466 | 467 | """ 468 | f = 1.0 / 298.257223563 # WGS84 469 | a = 6378137.0 # metres 470 | 471 | piD4 = 0.785398163397 472 | 473 | latitude1 = latitude1 * piD4 / 45.0 474 | longitude1 = longitude1 * piD4 / 45.0 475 | latitude2 = latitude2 * piD4 / 45.0 476 | longitude2 = longitude2 * piD4 / 45.0 477 | 478 | c = math.cos((latitude2+latitude1)/2.0) 479 | 480 | return math.sqrt( pow(math.fabs(latitude2-latitude1), 2) + \ 481 | pow(math.fabs(longitude2-longitude1)*c, 2) ) * a * ( 1.0 - f + f * c ) 482 | # END of rough estimate of the distance. 483 | 484 | ############################################################################### 485 | def getPRJFromEPSG(EPSGCode): 486 | '''read through the SRID.csv file from Pyproj to find the correct PRJ string for a given EPSG code. This is used to write out a sensible PRJ file alongside a shape file. ''' 487 | localpath = os.path.dirname(os.path.realpath(__file__)) 488 | sys.path.append(localpath) 489 | filename = os.path.join(localpath, "srid.csv") 490 | # filename = 'srid.csv' 491 | if os.path.isfile(filename): 492 | datafile = open(filename) 493 | for line in datafile: 494 | if EPSGCode in line[0:10]: 495 | prj = line.split(";")[1] 496 | #remove triple quotes 497 | prj = prj.replace('"""', '"') 498 | #remove double quotes 499 | prj = prj.replace('""', '"') 500 | #replace comma and quotes with comma 501 | prj = prj.replace(',"', ',') 502 | #replace squae bracket and quote with sqb 503 | prj = prj.replace(']"', ']') 504 | prj = prj.replace(',,,,', '') 505 | 506 | return prj 507 | return "" 508 | 509 | ############################################################################### 510 | def loadProj(EPSGCode=0, wkt=""): 511 | '''load a pyproj object using the supplied code''' 512 | # wgs84=pyproj.Proj("+init=EPSG:4326") # LatLon with WGS84 datum used by GPS units and Google Earth 513 | #note: anaconda conda install has a bug when installing. It is stupid and forgets to install the proj data folder. 
514 | 	#to fix this, you need to
515 | 	#copy the data folder from c:\ggtools\python\pyproj to # C:\ProgramData\Anaconda3\Lib\site-packages\pyproj
516 | 	#rename the datadir.py to datadir.bak and then copy the datadir.py from the c:\ggtools\python\pyproj into the folder
517 | 	projection = None
518 | 	if len(wkt) > 0:
519 | 		try:
520 | 			projection = pyproj.Proj(pyproj.CRS(wkt))
521 | 		except:
522 | 			return None
523 | 	else:
524 | 		projection = None
525 | 
526 | 	if EPSGCode > 0:
527 | 		try:
528 | 			# projection = pyproj.Proj("+init=EPSG:" + str(EPSGCode))
529 | 			projection = pyproj.Proj("EPSG:" + str(EPSGCode))
530 | 		except:
531 | 			return None
532 | 	else:
533 | 		pass	# no EPSG code supplied, so keep any projection already derived from the wkt above
534 | 
535 | 	return projection
536 | 
537 | ###############################################################################
538 | def writePRJ(filename, EPSGCode='4326'):
539 | 	'''try and find a matching PRJ string from the Proj CSV file. If we find one, write it as a PRJ file so the shape file opens nicely in GIS'''
540 | 	prjstring = getPRJFromEPSG(EPSGCode)
541 | 	prj = open(filename, 'w')
542 | 	if len(prjstring) > 0:
543 | 		prj.write(prjstring) # python will convert \n to os.linesep
544 | 	else:
545 | 		prj.write('GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]]') # python will convert \n to os.linesep
546 | 	prj.close() # you can omit in most cases as the destructor will call it
547 | 
548 | ###############################################################################
549 | def wkt2epsg(wkt=""):
550 | 	'''load a pyproj object using the WKT and return the EPSG code'''
551 | 	epsg = 0
552 | 	if len(wkt) > 0:
553 | 		try:
554 | 			crs = pyproj.CRS(wkt)
555 | 			epsg = crs.to_epsg(min_confidence=25)
556 | 		except:
557 | 			return None
558 | 	else:
559 | 		return epsg
560 | 	return epsg
561 | 
562 | ##########################################################################################################################
563 | class geodesy:
564 | 	'''a simple helper class to hold a pyproj geodesy object so we can transform with ease'''
565 | 	def __init__(self, EPSGCode = "", wkt=""):
566 | 
567 | 		#clear the GDAL paths to proj as they break the pyproj libraries
568 | 		os.environ["PROJ_LIB"] = ""
569 | 		os.environ["GDAL_DATA"] = ""
570 | 
571 | 		self.EPSGCode = EPSGCode
572 | 		self.projection = None
573 | 		self.loadProj(EPSGCode, wkt)
574 | 
575 | 	###############################################################################
576 | 	def wkt2epsg(self, wkt=""):
577 | 		'''load a pyproj object using the WKT and return the EPSG code'''
578 | 		if len(wkt) > 0:
579 | 			try:
580 | 				crs = pyproj.CRS(wkt)
581 | 				epsg = crs.to_epsg(min_confidence=25)
582 | 			except:
583 | 				return None
584 | 		else:
585 | 			return None
586 | 		return epsg
587 | 
588 | 	###############################################################################
589 | 	def loadProj(self, EPSGCode=0, wkt=""):
590 | 		'''load a pyproj object using the supplied code'''
591 | 		# wgs84=pyproj.Proj("+init=EPSG:4326") # LatLon with WGS84 datum used by GPS units and Google Earth
592 | 		#note: anaconda conda install has a bug when installing. It is stupid and forgets to install the proj data folder.
593 | #to fix this, you need to 594 | #copy the data folder from c:\ggtools\python\pyproj to # C:\ProgramData\Anaconda3\Lib\site-packages\pyproj 595 | #rename the datadir.py to datadir.bak and then copy the datadir.py from the c:\ggtools\python\pyproj into the folder 596 | projection = None 597 | 598 | if len(wkt) > 0: 599 | try: 600 | projection = pyproj.Proj(pyproj.CRS(wkt)) 601 | except: 602 | return None 603 | else: 604 | projection = None 605 | 606 | if len(EPSGCode) > 0: 607 | try: 608 | # projection = pyproj.Proj("+init=EPSG:" + str(EPSGCode)) 609 | projection = pyproj.Proj("EPSG:" + str(EPSGCode)) 610 | 611 | except: 612 | return None 613 | 614 | self.projection = projection 615 | 616 | return projection 617 | 618 | ############################################################################### 619 | def convertToGrid(self, longitude, latitude): 620 | '''convert from longitude, latitude to a projected easting, northing''' 621 | if int(self.EPSGCode) == 4326: 622 | return longitude, latitude 623 | 624 | if self.projection is None: 625 | return longitude, latitude 626 | 627 | # if self.projection.crs.is_projected == True: 628 | x,y = self.projection(float(longitude),float(latitude)) 629 | return x,y 630 | 631 | ############################################################################### 632 | def convertToGeographicals(self, easting, northing): 633 | '''convert from East, North to longitude, latitude''' 634 | if self.projection is not None: 635 | x,y = self.projection(float(easting),float(northing), inverse=True) 636 | return x,y 637 | else: 638 | return easting, northing 639 | 640 | 641 | 642 | # Test driver 643 | 644 | if __name__ == "__main__" : 645 | main() 646 | 647 | #-------------------------------------------------------------------------- 648 | # Notes: 649 | # 650 | # * "The inverse formulae may give no solution over a line 651 | # between two nearly antipodal points. This will occur when 652 | # lembda ... is greater than pi in absolute value". (Vincenty, 1975) 653 | # 654 | # * In Vincenty (1975) L is used for the difference in longitude, 655 | # however for consistency with other formulae in this Manual, 656 | # omega is used here. 657 | # 658 | # * Variables specific to Vincenty's formulae are shown below, 659 | # others common throughout the manual are shown in the Glossary. 
660 | # 661 | # 662 | # alpha = Azimuth of the geodesic at the equator 663 | # U = Reduced latitude 664 | # lembda = Difference in longitude on an auxiliary sphere (longitude1 & longitude2 665 | # are the geodetic longitudes of points 1 & 2) 666 | # sigma = Angular distance on a sphere, from point 1 to point 2 667 | # sigma1 = Angular distance on a sphere, from the equator to point 1 668 | # sigma2 = Angular distance on a sphere, from the equator to point 2 669 | # sigma_m = Angular distance on a sphere, from the equator to the 670 | # midpoint of the line from point 1 to point 2 671 | # u, A, B, C = Internal variables 672 | # 673 | # 674 | # Sample Data 675 | # 676 | # Flinders Peak 677 | # -37 57'03.72030" 678 | # 144 25'29.52440" 679 | # Buninyong 680 | # -37 39'10.15610" 681 | # 143 55'35.38390" 682 | # Ellipsoidal Distance 683 | # 54,972.271 m 684 | # 685 | # Forward Azimuth 686 | # 306 52'05.37" 687 | # 688 | # Reverse Azimuth 689 | # 127 10'25.07" 690 | # 691 | # 692 | -------------------------------------------------------------------------------- /lashelper.py: -------------------------------------------------------------------------------- 1 | #name: lashelper.py 2 | #created: jan 2020 3 | #by: paul.kennedy@guardiangeomatics.com 4 | #description: python module to spawn processes in relation to lastools 5 | #copyright Guardian Geomatics Pty Ltd 6 | # This software is explicitly prohibited by use of any non-guardian employee or subcontractor. 7 | # 8 | ################## 9 | # #DONE 10 | ################## 11 | 12 | import os 13 | import shlex 14 | import subprocess 15 | import uuid 16 | import sys 17 | import time 18 | import logging 19 | import ctypes 20 | import multiprocessing 21 | import tempfile 22 | 23 | sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'shared')) 24 | import geodetic 25 | import fileutils 26 | 27 | ############################################################################### 28 | def runner(cmd, verbose=False): 29 | '''process runner method. 
pass the command to run and True if you want to real time verbose output of errors''' 30 | 31 | cmdname = cmd.split(" ") 32 | 33 | # log('Processing command %s' % (cmdname)) 34 | 35 | args = shlex.split(cmd) 36 | 37 | stdout = [] 38 | stderr = [] 39 | popen = subprocess.Popen(args, stdout=subprocess.PIPE, universal_newlines=True, stderr=subprocess.PIPE) 40 | for stderr_line in iter(popen.stderr.readline, ""): 41 | stderr.append(stderr_line) 42 | if verbose: 43 | print(stderr_line.rstrip()) 44 | for stdout_line in iter(popen.stdout.readline, ""): 45 | stdout.append(stdout_line) 46 | popen.stdout.close() 47 | popen.stderr.close() 48 | 49 | popen.wait() 50 | 51 | return [stdout, stderr] 52 | 53 | ############################################################################### 54 | def lassort(filename, odir=""): 55 | '''sort a laz file on gpstime''' 56 | 57 | if len(odir)==0: 58 | odir = os.path.dirname(filename) 59 | 60 | odirlog = makedirs(odir) 61 | 62 | root, ext = os.path.splitext(os.path.expanduser(os.path.basename(filename))) 63 | outfilename = os.path.join(odir, root + "_S" + ext) 64 | outfilename = outfilename.replace('\\','/') 65 | 66 | cmd = "lassort.exe" + \ 67 | " -i %s" % (filename) + \ 68 | " -gps_time " + \ 69 | " -olaz " + \ 70 | " -o %s" % (outfilename) 71 | 72 | stdout, stderr = runner(cmd, False) 73 | 74 | return outfilename 75 | 76 | ############################################################################### 77 | def lasmergelof2(listoffiles, filename, odir, rect=None): 78 | '''merge all files in a folder''' 79 | # odirlog = makedirs(odir) 80 | 81 | outfilename = os.path.join(odir, filename + ".laz") 82 | outfilename = outfilename.replace('\\','/') 83 | 84 | if rect is None: 85 | clipper = "" 86 | else: 87 | # -keep_xy 630000 4834000 631000 4836000 (min_x min_y max_x max_y) 88 | clipper = " -keep_xy %s %s %s %s" % (rect.left, rect.bottom, rect.right, rect.top) 89 | 90 | cmd = "lasmerge64.exe" + \ 91 | " -lof %s" % (listoffiles) + \ 92 | clipper + \ 93 | " -olaz " + \ 94 | " -o %s" % (outfilename) 95 | 96 | stdout, stderr = runner(cmd, False) 97 | 98 | return outfilename 99 | 100 | ############################################################################### 101 | def lasmergelof(listoffiles, filename, odir): 102 | '''merge all files in a folder''' 103 | # odirlog = makedirs(odir) 104 | 105 | outfilename = os.path.join(odir, filename + ".laz") 106 | outfilename = outfilename.replace('\\','/') 107 | 108 | cmd = "lasmerge64.exe" + \ 109 | " -drop_x_below %s" % (str(10))+ \ 110 | " -lof %s" % (listoffiles) + \ 111 | " -olaz " + \ 112 | " -o %s" % (outfilename) 113 | 114 | stdout, stderr = runner(cmd, False) 115 | 116 | return outfilename 117 | 118 | ############################################################################### 119 | def lasmerge2(filespec, outfilename): 120 | '''merge all files in a folder''' 121 | 122 | cmd = "lasmerge64.exe" + \ 123 | " -i %s" % (filespec) + \ 124 | " -drop_x_below %s" % (str(10))+ \ 125 | " -olaz " + \ 126 | " -o %s" % (outfilename) 127 | 128 | stdout, stderr = runner(cmd, False) 129 | 130 | return outfilename 131 | 132 | 133 | ############################################################################### 134 | def lasmerge(filespec, filename, odir): 135 | '''merge all files in a folder''' 136 | odirlog = makedirs(odir) 137 | 138 | outfilename = os.path.join(odir, filename) 139 | outfilename = outfilename.replace('\\','/') 140 | 141 | cmd = "lasmerge64.exe" + \ 142 | " -i %s" % (filespec) + \ 143 | " -drop_x_below %s" % (str(10))+ \ 
144 | " -olaz " + \ 145 | " -o %s" % (outfilename) 146 | 147 | stdout, stderr = runner(cmd, False) 148 | 149 | return outfilename 150 | 151 | ############################################################################### 152 | def las2asc(filename): 153 | '''export to XYZ records''' 154 | 155 | odirlog = makedirs(os.path.dirname(filename)) 156 | 157 | root = os.path.splitext(os.path.basename(filename))[0] 158 | 159 | outfilename = os.path.join(os.path.dirname(filename), root + ".txt") 160 | outfilename = outfilename.replace('\\','/') 161 | 162 | cmd = "las2txt64.exe" + \ 163 | " -i %s" % (filename) + \ 164 | " -drop_x_below %s" % (str(10))+ \ 165 | " -o %s" % (outfilename) 166 | 167 | stdout, stderr = runner(cmd, False) 168 | 169 | return outfilename 170 | 171 | ############################################################################### 172 | def txt2las(filename, epsg='4326'): 173 | '''import from XYZ record to a las file''' 174 | 175 | odirlog = makedirs(os.path.dirname(filename)) 176 | 177 | root = os.path.splitext(os.path.basename(filename))[0] 178 | 179 | filename = filename.replace('\\','/') 180 | outfilename = os.path.join(os.path.dirname(filename), root + ".laz") 181 | outfilename = outfilename.replace('\\','/') 182 | 183 | cmd = "txt2las.exe" + \ 184 | " -i %s" % (filename) + \ 185 | " -epsg %s" % (epsg) + \ 186 | " -o %s" % (outfilename) 187 | 188 | stdout, stderr = runner(cmd, False) 189 | 190 | if epsg != '4326': 191 | outfilename = las2lasEPSG(outfilename, epsg=epsg) 192 | return outfilename 193 | 194 | ############################################################################### 195 | def lasgrid(filename, resolution): 196 | '''use lasgrid to grid a file efficiently at the user specified resolution''' 197 | 198 | odirlog = makedirs(os.path.dirname(filename)) 199 | 200 | root = os.path.splitext(os.path.basename(filename))[0] 201 | 202 | outfilename = os.path.join(os.path.dirname(filename), root + ".tif") 203 | outfilename = outfilename.replace('\\','/') 204 | 205 | cmd = "lasgrid64.exe" + \ 206 | " -i %s" % (filename) + \ 207 | " -drop_x_below %s" % (str(10))+ \ 208 | " -mem %s" % (str(1900)) + \ 209 | " -step %s" % (str(resolution)) + \ 210 | " -%s" % ('average') + \ 211 | " -o %s" % (outfilename) 212 | 213 | stdout, stderr = runner(cmd, False) 214 | 215 | return outfilename 216 | ############################################################################### 217 | def lasgrid4(filename, outfilename, resolution, epsg='31984'): 218 | '''use lasgrid to grid a file efficiently at the user specified resolution''' 219 | 220 | odirlog = makedirs(os.path.dirname(filename)) 221 | 222 | root = os.path.splitext(os.path.basename(filename))[0] 223 | 224 | if len(outfilename) == 0: 225 | outfilename = os.path.join(os.path.dirname(filename), root + ".tif") 226 | outfilename = outfilename.replace('\\','/') 227 | else: 228 | outfilename = outfilename.replace('\\','/') 229 | 230 | cmd = "lasgrid64.exe" + \ 231 | " -i %s" % (filename) + \ 232 | " -epsg %s" % (epsg) + \ 233 | " -mem %s" % (str(1900)) + \ 234 | " -step %s" % (str(resolution)) + \ 235 | " -%s" % ('average') + \ 236 | " -o %s" % (outfilename) 237 | 238 | stdout, stderr = runner(cmd, False) 239 | 240 | return outfilename 241 | 242 | ############################################################################### 243 | def lasgridsubcircle(filename, outfilename, resolution, epsg='31984', subcircle=1): 244 | '''use lasgrid to grid a file efficiently at the user specified resolution''' 245 | 246 | odirlog = 
makedirs(os.path.dirname(filename))
247 | 
248 | 	root = os.path.splitext(os.path.basename(filename))[0]
249 | 
250 | 	if len(outfilename) == 0:
251 | 		outfilename = os.path.join(os.path.dirname(filename), root + ".tif")
252 | 		outfilename = outfilename.replace('\\','/')
253 | 	else:
254 | 		outfilename = outfilename.replace('\\','/')
255 | 
256 | 	cmd = "lasgrid64.exe" + \
257 | 		" -i %s" % (filename) + \
258 | 		" -epsg %s" % (epsg) + \
259 | 		" -mem %s" % (str(1900)) + \
260 | 		" -step %s" % (str(resolution)) + \
261 | 		" -subcircle %s" % (str(subcircle)) + \
262 | 		" -%s" % ('average') + \
263 | 		" -o %s" % (outfilename)
264 | 
265 | 	stdout, stderr = runner(cmd, False)
266 | 
267 | 	return outfilename
268 | 
269 | ###############################################################################
270 | def blast(srcfolder, dstfolder, filespec, resolution, outfilename, gridtype="hillshade", kill="3", outtype=".laz", RECT=None, epsg='31984'):
271 | 	'''make a slope raster file for QC purposes'''
272 | 
273 | 	odirlog = makedirs(dstfolder)
274 | 
275 | 	filespec = os.path.join(srcfolder, filespec)
276 | 	filespec = filespec.replace('\\','/')
277 | 	dstfolder = dstfolder.replace('\\','/')
278 | 	odirlog = makedirs(dstfolder)
279 | 
280 | 	# if the user provides a bounding box, use it
281 | 	keepxy=""
282 | 	if RECT is not None:
283 | 		keepxy= " -keep_xy %.3f %.3f %.3f %.3f" % (RECT.left, RECT.bottom, RECT.right, RECT.top)	# -keep_xy expects min_x min_y max_x max_y
284 | 
285 | 	cmd = "blast2dem.exe" + \
286 | 		" -i %s" % (filespec) + \
287 | 		" -drop_x_below %s" % (str(10))+ \
288 | 		" -odir %s" % (dstfolder)+ \
289 | 		" -step %s" % (str(resolution)) + \
290 | 		" -merged" + \
291 | 		" -float_precision 0.1" + \
292 | 		" -nbits 32" + \
293 | 		keepxy + \
294 | 		" -odir %s" % (dstfolder)+ \
295 | 		" -kill %s" % (str(kill)) + \
296 | 		" -epsg %s" % (epsg) + \
297 | 		" -%s" % (gridtype) + \
298 | 		" -o %s" % (outfilename)
299 | 
300 | 	stdout, stderr = runner(cmd, False)
301 | 
302 | 	return outfilename
303 | 
304 | ###############################################################################
305 | def blast2iso(srcfolder, filespec, dstfolder, outfilename, resolution, kill="3", RECT=None, epsg='31984'):
306 | 	'''make a contour map'''
307 | 
308 | 	odirlog = makedirs(dstfolder)
309 | 
310 | 	filespec = os.path.join(srcfolder, filespec)
311 | 	filespec = filespec.replace('\\','/')
312 | 	dstfolder = dstfolder.replace('\\','/')
313 | 	odirlog = makedirs(dstfolder)
314 | 
315 | 	# if the user provides a bounding box, use it
316 | 	keepxy=""
317 | 	if RECT is not None:
318 | 		keepxy= " -keep_xy %.3f %.3f %.3f %.3f" % (RECT.left, RECT.bottom, RECT.right, RECT.top)	# -keep_xy expects min_x min_y max_x max_y
319 | 
320 | 	cmd = "blast2iso.exe" + \
321 | 		" -i %s" % (filespec) + \
322 | 		" -drop_x_below %s" % (str(10))+ \
323 | 		" -odir %s" % (dstfolder)+ \
324 | 		" -iso_every %s" % (str(resolution)) + \
325 | 		" -smooth %s" % (str(float(resolution) * 10)) + \
326 | 		" -clean %s" % (str(resolution)) + \
327 | 		" -merged" + \
328 | 		keepxy + \
329 | 		" -odir %s" % (dstfolder)+ \
330 | 		" -kill %s" % (str(float(kill)* 10)) + \
331 | 		" -oshp" + \
332 | 		" -epsg %s" % (epsg) + \
333 | 		" -o %s" % (outfilename)
334 | 
335 | 	stdout, stderr = runner(cmd, False)
336 | 
337 | 	prjfilename = os.path.join(dstfolder, outfilename.replace('.shp','.prj')).replace('\\','/')
338 | 	geodetic.writePRJ(prjfilename, epsg)
339 | 
340 | 	return outfilename
341 | 
342 | ###############################################################################
343 | def lasgrid2(srcfolder, dstfolder, filespec, resolution, outfilename, gridtype="average", fill="0", outtype=".laz", epsg='31984',
verbose=False): 344 | '''grid a folder of laz files in a reliable manner''' 345 | 346 | filespec = os.path.join(srcfolder, filespec) 347 | filespec = filespec.replace('\\','/') 348 | dstfolder = dstfolder.replace('\\','/') 349 | odirlog = makedirs(dstfolder) 350 | #outfilename = os.path.join(dstfolder, outtype) 351 | #outfilename = outfilename.replace('\\','/') 352 | #no point running multiple cores as we are emerging to a single file so MP is not appropriate. 353 | #tried splat 354 | #tried mem. default for 64 bit is now 6GB 355 | cmd = "lasgrid64.exe" + \ 356 | " -i %s" % (filespec) + \ 357 | " -drop_x_below %s" % (str(10))+ \ 358 | " -step %s" % (str(resolution)) + \ 359 | " -merged" + \ 360 | " -odir %s" % (dstfolder)+ \ 361 | " -%s" % (gridtype.lower()) + \ 362 | " -fill %s" % (str(fill)) + \ 363 | " -epsg %s" % (epsg) + \ 364 | " -o %s" % (outfilename) 365 | 366 | stdout, stderr = runner(cmd, verbose) 367 | 368 | # " -average" + \ 369 | # args = shlex.split(cmd) 370 | # # " -fill %s" % (str(fill)) + \ 371 | # # " -mem %s" % (2000)+ \ 372 | 373 | return os.path.join(dstfolder, outfilename) 374 | 375 | ############################################################################### 376 | def lasgrid3(srcfolder, dstfolder, filespec, resolution, outfilename, gridtype="average", fill="0", outtype=".laz", epsg='31984', verbose=False): 377 | '''grid a folder of laz files in a reliable manner''' 378 | 379 | filespec = os.path.join(srcfolder, filespec) 380 | filespec = filespec.replace('\\','/') 381 | dstfolder = dstfolder.replace('\\','/') 382 | odirlog = makedirs(dstfolder) 383 | #outfilename = os.path.join(dstfolder, outtype) 384 | #outfilename = outfilename.replace('\\','/') 385 | #no point running multiple cores as we are emerging to a single file so MP is not appropriate. 386 | #tried splat 387 | #tried mem. default for 64 bit is now 6GB 388 | cmd = "lasgrid64.exe" + \ 389 | " -i %s" % (filespec) + \ 390 | " -drop_x_below %s" % (str(10))+ \ 391 | " -step %s" % (str(resolution)) + \ 392 | " -merged" + \ 393 | " -odir %s" % (dstfolder)+ \ 394 | " -%s" % (gridtype.lower()) + \ 395 | " -fill %s" % (str(fill)) + \ 396 | " -epsg %s" % (epsg) + \ 397 | " -v"+ \ 398 | " -o %s" % (outfilename) 399 | 400 | # " -average" + \ 401 | stdout, stderr = runner(cmd, verbose) 402 | 403 | return os.path.join(dstfolder, outfilename) 404 | 405 | ############################################################################### 406 | def lasoverage(srcfolder, dstfolder, filespec, resolution, overageresolution, epsg='31984', verbose=False): 407 | '''clip out overlapping data from a series of files, to produce non-overlapped data files''' 408 | 409 | # make a list and sort instead of a wildcard. maybe this helps how overage works? 
410 | # files = findFiles2(False, srcfolder, "*.laz") 411 | # files.sort() 412 | # filespec = "" 413 | # for f in files: 414 | # filespec = filespec + " " + f 415 | 416 | cpu = getcpucount(0) 417 | strcores = " -cores %s" % (cpu) 418 | log("Processing with %d CPU's" %(cpu)) 419 | 420 | filespec = os.path.join(srcfolder, filespec) 421 | filespec = filespec.replace('\\','/') 422 | 423 | print ("******Overage files to process: %s" % (filespec)) 424 | print ("******Overage output folder: %s" % (dstfolder)) 425 | 426 | dstfolder = dstfolder.replace('\\','/') 427 | odirlog = makedirs(dstfolder) 428 | 429 | if float(overageresolution) == 0: 430 | cutresolution = float(resolution) 431 | else: 432 | cutresolution = float(overageresolution) 433 | print ("******Overage Resolution: %.3f, Grid Resolution %.3f" % (cutresolution, float(resolution))) 434 | 435 | # #we need to ensure we dont cause edge effects 436 | # resolution = float(resolution) / 4 #pkpk we needed to make this 1 for the cross lines in A14 as the infill did not work well. not sure whats is happening yet. 437 | 438 | cmd = "lasoverage.exe" + \ 439 | " -i %s" % (filespec) + \ 440 | " -step %.3f" % (cutresolution) + \ 441 | " -odir %s" % (dstfolder)+ \ 442 | " -cpu64" + \ 443 | strcores + \ 444 | " -v" + \ 445 | " -odix _overage" + \ 446 | " -remove_overage" + \ 447 | " -epsg %s" % (epsg) + \ 448 | " -olaz" 449 | 450 | stdout, stderr = runner(cmd, verbose) 451 | 452 | ############################################################################### 453 | def lasoveragenew(srcfolder, dstfolder, filespec, resolution=1, overageresolution=1, epsg='31984', verbose=False): 454 | 455 | '''clip out overlapping data from a series of files, to produce non-overlapped data files''' 456 | 457 | # make a list and sort instead of a wildcard. maybe this helps how overage works? 458 | # files = findFiles2(False, srcfolder, "*.laz") 459 | # files.sort() 460 | # filespec = "" 461 | # for f in files: 462 | # filespec = filespec + " " + f 463 | 464 | cpu = getcpucount(0) 465 | strcores = " -cores %s" % (cpu) 466 | log("Processing with %d CPU's" %(cpu)) 467 | 468 | filespec = os.path.join(srcfolder, filespec) 469 | filespec = filespec.replace('\\','/') 470 | 471 | print ("******Overage2 files to process: %s" % (filespec)) 472 | print ("******Overage2 output folder: %s" % (dstfolder)) 473 | 474 | dstfolder = dstfolder.replace('\\','/') 475 | # odirlog = makedirs(dstfolder) 476 | 477 | if float(overageresolution) == 0: 478 | cutresolution = float(resolution) 479 | else: 480 | cutresolution = float(overageresolution) 481 | print ("******Overage Resolution: %.3f, Grid Resolution %.3f" % (cutresolution, float(resolution))) 482 | 483 | # #we need to ensure we dont cause edge effects 484 | # resolution = float(resolution) / 4 #pkpk we needed to make this 1 for the cross lines in A14 as the infill did not work well. not sure whats is happening yet. 
485 | 486 | cmd = "lasoverage.exe" + \ 487 | " -i %s" % (filespec) + \ 488 | " -step %.3f" % (cutresolution) + \ 489 | " -odir %s" % (dstfolder)+ \ 490 | " -v" + \ 491 | " -remove_overage" + \ 492 | " -odix _overage" + \ 493 | " -epsg %s" % (epsg) + \ 494 | " -olaz" + \ 495 | " -cpu64" + \ 496 | strcores + \ 497 | " -files_are_flightlines" 498 | 499 | stdout, stderr = runner(cmd, verbose) 500 | return [stdout, stderr] 501 | 502 | ############################################################################### 503 | def lasduplicate2(filename, outfilename): 504 | '''remove duplicate records from a file and rename the file at the end''' 505 | 506 | # odirlog = makedirs(os.path.dirname(filename)) 507 | filename = filename.replace('\\','/') 508 | outfilename = outfilename.replace('\\','/') 509 | 510 | cmd = "lasduplicate64.exe" + \ 511 | " -i %s" % (filename) + \ 512 | " -olaz " + \ 513 | " -o %s" % (outfilename) 514 | 515 | # " -drop_x_below %s" % (str(10))+ \ 516 | stdout, stderr = runner(cmd, False) 517 | 518 | return outfilename 519 | 520 | ############################################################################### 521 | def lasduplicate(filename): 522 | '''remove duplicate records from a file and rename the file at the end''' 523 | 524 | odirlog = makedirs(os.path.dirname(filename)) 525 | 526 | outfilename = os.path.join(os.path.dirname(filename), "uniq.laz") 527 | outfilename = outfilename.replace('\\','/') 528 | 529 | cmd = "lasduplicate64.exe" + \ 530 | " -i %s" % (filename) + \ 531 | " -drop_x_below %s" % (str(10))+ \ 532 | " -olaz " + \ 533 | " -o %s" % (outfilename) 534 | 535 | stdout, stderr = runner(cmd, False) 536 | 537 | #now delete the original filename and rename the temp file to the original 538 | try: 539 | fileutils.deletefile(filename) 540 | os.rename(outfilename, filename) 541 | except: 542 | log("Error while duplicating & renaming file %s" % (outfilename), True) 543 | 544 | return filename 545 | 546 | ############################################################################### 547 | def hillshade(filename, odir, resolution): 548 | '''make a hillshade png file for QC purposes''' 549 | 550 | odirlog = makedirs(odir) 551 | 552 | filename = os.path.abspath(filename).replace('\\','/') 553 | root = os.path.splitext(os.path.basename(filename))[0] 554 | outfilename = os.path.join(odir, root + '_hillshade.png') 555 | outfilename = outfilename.replace('\\','/') 556 | 557 | cmd = "blast2dem.exe" + \ 558 | " -i %s" % (filename) + \ 559 | " -drop_x_below %s" % (str(10))+ \ 560 | " -step %s" % (str(resolution)) + \ 561 | " -kill %s" % (str(resolution*10)) + \ 562 | " -hillshade" + \ 563 | " -opng " + \ 564 | " -o %s" % (outfilename) 565 | 566 | stdout, stderr = runner(cmd, False) 567 | 568 | return outfilename 569 | 570 | ############################################################################### 571 | def lasclipbb(filename, rect, odir, resolution, nodata=0, prefix=""): 572 | '''clip a laz file using a rectangle file''' 573 | #-keep_xy 630000 4834000 631000 4836000 (min_x min_y max_x max_y) 574 | 575 | odirlog = makedirs(odir) 576 | 577 | filename = os.path.abspath(filename).replace('\\','/') 578 | root = os.path.splitext(os.path.basename(filename))[0] 579 | # outfilename = os.path.join(odir, prefix + '_clipped.laz') 580 | outfilename = os.path.join(odir, prefix + root + '_G_C.laz') 581 | outfilename = outfilename.replace('\\','/') 582 | 583 | #ensure the las file has positive up. some files are positive down. 
this is not ideal, so we check whether the file has positive depths and set blast2dem to invert them on the fly 584 | scale_z = 1.0 585 | lasrect = getlazboundingbox(filename, odir) 586 | if ispositivedepths(lasrect): 587 | scale_z = -1.0 588 | 589 | cmd = "blast2dem.exe" + \ 590 | " -i %s" % (filename) + \ 591 | " -keep_xy %.3f %.3f %.3f %.3f" % (rect.left, rect.bottom, rect.right, rect.top) + \ 592 | " -drop_x_below %s" % (str(10))+ \ 593 | " -scale_z %s" % (str(scale_z)) + \ 594 | " -step %s" % (str(resolution)) + \ 595 | " -kill %s" % (str(resolution*100)) + \ 596 | " -olaz " + \ 597 | " -o %s" % (outfilename) 598 | 599 | stdout, stderr = runner(cmd, False) 600 | 601 | return outfilename 602 | 603 | ############################################################################### 604 | def lasclip(filename, shp, odir, nodata=0, prefix="", rejectinterior=True): 605 | '''clip a laz file using a shape file''' 606 | 607 | odirlog = makedirs(odir) 608 | 609 | cpu = getcpucount(0) 610 | strcores = " -cores %s" % (cpu) 611 | 612 | #decide if we are to keep the points inside or outside the area. 613 | if rejectinterior: 614 | rejectinterior = " -interior" 615 | else: 616 | rejectinterior = "" 617 | filename = os.path.abspath(filename).replace('\\','/') 618 | root = os.path.splitext(os.path.basename(filename))[0] 619 | # outfilename = os.path.join(odir, prefix + '_clipped.laz') 620 | outfilename = os.path.join(odir, prefix + root + '_clipped.laz') 621 | outfilename = outfilename.replace('\\','/') 622 | shp = os.path.abspath(shp).replace('\\','/') 623 | cmd = "lasclip.exe" + \ 624 | " -i %s" % (filename) + \ 625 | rejectinterior + \ 626 | " -cpu64 " + \ 627 | " -donuts " + \ 628 | strcores + \ 629 | " -quiet " + \ 630 | " -olaz " + \ 631 | " -drop_x_below %s" % (str(10))+ \ 632 | " -poly %s" % (shp) + \ 633 | " -o %s" % (outfilename) 634 | 635 | stdout, stderr = runner(cmd, False) 636 | 637 | return outfilename 638 | 639 | ############################################################################### 640 | def lasclip2(inputs, shp, odir, nodata=0, prefix="", rejectinterior=True): 641 | '''clip laz files using a shape file''' 642 | 643 | # odirlog = makedirs(odir) 644 | 645 | cpu = getcpucount(0) 646 | strcores = " -cores %s" % (cpu) 647 | 648 | odix = "_clipped" 649 | #decide if we are to keep the points inside or outside the area.
650 | if rejectinterior: 651 | rejectinterior = " -interior" 652 | else: 653 | rejectinterior = "" 654 | # filename = os.path.abspath(filename).replace('\\','/') 655 | # root = os.path.splitext(os.path.basename(filename))[0] 656 | # outfilename = os.path.join(odir, prefix + '_clipped.laz') 657 | # outfilename = os.path.join(odir, prefix + root + '_clipped.laz') 658 | # outfilename = outfilename.replace('\\','/') 659 | shp = os.path.abspath(shp).replace('\\','/') 660 | cmd = "lasclip.exe" + \ 661 | " -i %s" % (inputs) + \ 662 | rejectinterior + \ 663 | " -cpu64 " + \ 664 | " -donuts " + \ 665 | strcores + \ 666 | " -quiet " + \ 667 | " -olaz " + \ 668 | " -drop_x_below %s" % (str(10))+ \ 669 | " -poly %s" % (shp) + \ 670 | " -odir %s" % (odir) + \ 671 | " -odix %s" % (odix) 672 | 673 | # " -o %s" % (outfilename) 674 | 675 | stdout, stderr = runner(cmd, False) 676 | 677 | return 678 | ############################################################################### 679 | def demzip2(filename, outfilename, nodata=0, replace=False): 680 | '''convert the 1-band tif file to a laz file ''' 681 | 682 | odir = os.path.dirname(outfilename) 683 | odirlog = makedirs(odir) 684 | 685 | #the file already exists and the user is not wanting to replace... 686 | if os.path.exists(outfilename) and replace == False: 687 | return outfilename 688 | 689 | if os.path.exists(outfilename) and replace == True: 690 | fileutils.deletefile(outfilename) 691 | 692 | cmd = "demzip.exe" + \ 693 | " -i %s" % (os.path.abspath(filename).replace('\\','/')) + \ 694 | " -nodata_value %s" % (str(nodata)) + \ 695 | " -olaz " + \ 696 | " -o %s" % (outfilename.replace('\\','/')) 697 | 698 | stdout, stderr = runner(cmd, False) 699 | 700 | return outfilename 701 | 702 | ############################################################################### 703 | def demzip(filename, odir, nodata=0, prefix="", replace=False): 704 | '''convert the 1-band tif file to a laz file ''' 705 | 706 | odirlog = makedirs(odir) 707 | 708 | root = os.path.splitext(os.path.basename(filename))[0] 709 | # outfilename = os.path.join(odir, prefix + '_1band.laz') 710 | outfilename = os.path.join(odir, prefix + root + '.laz') 711 | 712 | #the file already exists and the user is not wanting to replace... 
713 | if os.path.exists(outfilename) and replace == False: 714 | return outfilename 715 | 716 | if os.path.exists(outfilename) and replace == True: 717 | fileutils.deletefile(outfilename) 718 | 719 | cmd = "demzip.exe" + \ 720 | " -i %s" % (os.path.abspath(filename).replace('\\','/')) + \ 721 | " -nodata_value %s" % (str(nodata)) + \ 722 | " -olaz " + \ 723 | " -o %s" % (outfilename.replace('\\','/')) 724 | 725 | stdout, stderr = runner(cmd, False) 726 | 727 | return outfilename 728 | 729 | ############################################################################### 730 | def lasindex(inputs, rebuild=False): 731 | '''index the laz files for performance''' 732 | 733 | inputs = os.path.abspath(inputs).replace('\\','/')+"/*.laz" 734 | 735 | # log('index the laz files for performance') 736 | odir = os.path.dirname(inputs) 737 | 738 | if rebuild == False: 739 | indexfiles = fileutils.findFiles2(False, odir, "*.lax") 740 | if len(indexfiles) > 0: 741 | return 742 | 743 | odirlog = makedirs(odir) 744 | 745 | cpu = getcpucount(0) 746 | strcores = " -cores %s" % (cpu) 747 | # log("Indexing with %d CPUs" %(cpu)) 748 | 749 | cmd = "lasindex.exe" + \ 750 | " -i %s" % (inputs) + \ 751 | strcores 752 | 753 | stdout, stderr = runner(cmd, False) 754 | 755 | return [stdout, stderr] 756 | 757 | ############################################################################### 758 | def lastile(inputs, odir, tile_size=5000, prefix="", rebuild=False, verbose=False): 759 | '''tile the laz files into a regular grid of tiles so we can scale indefinitely''' 760 | 761 | inputs = os.path.abspath(inputs).replace('\\','/')+"/*.laz" 762 | 763 | log('Tile the laz files into a regular grid of tiles so we can scale indefinitely') 764 | 765 | #check to see if we really need to rebuild 766 | # if rebuild == True: 767 | # files = fileutils.findFiles2(False, odir, "*.laz") 768 | # ds.deletefolder(odir) 769 | # else: 770 | # if len(files) > 0: 771 | # #files exist so quit 772 | # return odir 773 | 774 | # starttime = time.time() 775 | # odirlog = makedirs(odir) 776 | 777 | cpu = getcpucount(0) 778 | strcores = " -cores %s" % (cpu) 779 | strcores = " -cores 4" # pkpk 780 | log("Tiling %s with %d CPUs" %(inputs, cpu)) 781 | 782 | cmd = "lastile.exe" + \ 783 | " -i %s" % (inputs) + \ 784 | " -cpu64 " + \ 785 | strcores + \ 786 | " -v" + \ 787 | " -drop_x_below %s" % (str(10))+ \ 788 | " -tile_size %s " % (str(tile_size)) + \ 789 | " -olaz " + \ 790 | " -odir %s" % (odir) 791 | 792 | # " -reversible " + \ 793 | stdout, stderr = runner(cmd, verbose) 794 | 795 | # log("Tile Elapsed time: %.1f seconds" %(float(time.time() - starttime)), True) 796 | 797 | return odir 798 | 799 | ############################################################################### 800 | def lasthin(filename, odir, resolution=1, fill=0, epsg='4326', prefix=""): 801 | '''thin the laz file using a step of 3x the requested resolution''' 802 | 803 | odirlog = makedirs(odir) 804 | 805 | root, ext = os.path.splitext(os.path.basename(filename)) 806 | #outfilename = os.path.join(odir, prefix + '_1band.txt') 807 | outfilename = os.path.join(odir, prefix + root + '_thin'+ ext).replace('\\','/') 808 | 809 | resolution = float(resolution) * 3 810 | cmd = "lasthin64.exe" + \ 811 | " -i %s" % (filename) + \ 812 | " -step %s " % (str(resolution)) + \ 813 | " -o %s" % (outfilename) 814 | 815 | stdout, stderr = runner(cmd, False) 816 | 817 | return outfilename 818 | 819 | ############################################################################### 820 | def lasboundaries(inputs, odir, resolution=1, fill=0,
epsg='4326', prefix="", verbose=False): 821 | '''convert the laz files to a coverage shp file''' 822 | 823 | odirlog = makedirs(odir) 824 | 825 | cpu = getcpucount(0) 826 | strcores = " -cores %s" % (cpu) 827 | # log("Processing with %d CPUs" %(cpu)) 828 | 829 | outfilename = os.path.join(odir, prefix + '_boundary.shp').replace('\\','/') 830 | 831 | # concavity with a smaller number more closely follows the tif file 832 | # disjoint forces individual polygons rather than connecting lines. 833 | 834 | # concavity = float(resolution) + float(fill) 835 | # concavity = float(fill) 836 | # concavity = float(resolution) + float(fill) + float(fill) 837 | concavity = float(fill) + float(fill) 838 | concavity = max(concavity,1) 839 | 840 | cmd = "lasboundary.exe" + \ 841 | " -i %s" % (inputs) + \ 842 | " -drop_x_below %s" % (str(10))+ \ 843 | " -concavity %s " % (str(concavity)) + \ 844 | " -epsg %s " % (str(epsg)) + \ 845 | " -cpu64 " + \ 846 | strcores + \ 847 | " -merged " + \ 848 | " -labels " + \ 849 | " -disjoint " + \ 850 | " -holes" + \ 851 | " -o %s" % (outfilename) 852 | 853 | # " -v" + \ 854 | stdout, stderr = runner(cmd, verbose) 855 | 856 | return outfilename 857 | 858 | ############################################################################### 859 | # def lasboundary(filename, odir, nodata=0, resolution=1, prefix="", outfilename = "", extension="txt"): 860 | def lasboundary(filename, outfilename, nodata=0, resolution=1, replace=False): 861 | '''convert the laz file to a coverage shape file''' 862 | 863 | # odirlog = makedirs(os.path.dirname(outfilename)) 864 | 865 | # if len(outfilename) == 0: 866 | # root = os.path.splitext(os.path.basename(filename))[0] 867 | # #outfilename = os.path.join(odir, prefix + '_1band.txt') 868 | # outfilename = os.path.join(odir, prefix + root + '_boundary.'+ extension).replace('\\','/') 869 | 870 | #the file already exists and the user is not wanting to replace... 871 | if os.path.exists(outfilename) and replace == False: 872 | return outfilename 873 | 874 | cpu = getcpucount(0) 875 | strcores = " -cores %s" % (cpu) 876 | log("Processing with %d CPUs" %(cpu)) 877 | 878 | #lasboundary -i pk.laz -disjoint -o pkdisjoint.txt 879 | # concavity with a smaller number more closely follows the tif file 880 | # disjoint forces individual polygons rather than connecting lines.
881 | 882 | nodatamin = float(nodata) - 0.1 883 | nodatamax = float(nodata) + 0.1 884 | resolution = float(resolution) * 1.5 885 | 886 | cmd = "lasboundary.exe" + \ 887 | " -i %s" % (os.path.abspath(filename).replace('\\','/')) + \ 888 | " -drop_x_below %s" % (str(10))+ \ 889 | " -concavity %s " % (str(resolution)) + \ 890 | " -cpu64 " + \ 891 | strcores + \ 892 | " -v" + \ 893 | " -disjoint " + \ 894 | " -drop_z %s %s" % (str(nodatamin), str(nodatamax)) + \ 895 | " -o %s" % (outfilename.replace('\\','/')) 896 | 897 | stdout, stderr = runner(cmd, False) 898 | 899 | return outfilename 900 | 901 | ############################################################################### 902 | def lasoverlap(filename1, filename2, odir, resolution=1, prefix=""): 903 | '''compute the overlap between 2 files''' 904 | 905 | odirlog = makedirs(odir) 906 | 907 | filename1 = os.path.abspath(filename1).replace('\\','/') 908 | filename2 = os.path.abspath(filename2).replace('\\','/') 909 | 910 | root1 = os.path.splitext(os.path.basename(filename1))[0] 911 | root2 = os.path.splitext(os.path.basename(filename2))[0] 912 | outfilename = os.path.join(odir, prefix + "_" + root1 + "_" + root2 + '.laz') 913 | outfilename = outfilename.replace('\\','/') 914 | 915 | cmd = "lasoverlap64.exe" + \ 916 | " -i %s %s" % (filename1, filename2) + \ 917 | " -drop_x_below %s" % (str(10))+ \ 918 | " -values " + \ 919 | " -faf " + \ 920 | " -no_over " + \ 921 | " -o %s" % (outfilename) 922 | 923 | stdout, stderr = runner(cmd, False) 924 | 925 | return outfilename 926 | 927 | ############################################################################### 928 | def las2lasEPSG(filename, odir="", epsg="4326"): 929 | '''apply EPSG code to a las file''' 930 | 931 | filename = os.path.abspath(filename).replace('\\','/') 932 | 933 | root = os.path.splitext(os.path.basename(filename))[0] 934 | outfilename = os.path.join(odir, root + "_EPSG_" + epsg + '.laz') 935 | outfilename = outfilename.replace('\\','/') 936 | 937 | outfilename = os.path.join(os.path.dirname(filename), root + ".laz") 938 | # outfilename = os.path.join(os.path.dirname(filename), root + "_EPSG_" + epsg + ".laz") 939 | outfilename = outfilename.replace('\\','/') 940 | 941 | prefix = str(uuid.uuid1()) 942 | path = os.path.join(os.path.dirname(filename), prefix + ".laz") 943 | fileutils.copyfile(filename, path) 944 | 945 | cmd = "las2las64.exe" + \ 946 | " -i %s " % (path) + \ 947 | " -epsg %s " % (epsg) + \ 948 | " -o %s" % (outfilename) 949 | 950 | stdout, stderr = runner(cmd, False) 951 | fileutils.deletefile(path) 952 | 953 | return outfilename 954 | 955 | 956 | ############################################################################### 957 | def las2las(filename, odir, zcorrection=0, suffix="_Z"): 958 | '''correct a laz file by adding a user supplied correction''' 959 | 960 | # odirlog = makedirs(odir) 961 | 962 | filename = os.path.abspath(filename).replace('\\','/') 963 | 964 | root = os.path.splitext(os.path.basename(filename))[0] 965 | outfilename = os.path.join(odir, root + suffix + '.laz') 966 | outfilename = outfilename.replace('\\','/') 967 | 968 | cmd = "las2las64.exe" + \ 969 | " -i %s " % (filename) + \ 970 | " -drop_x_below %s" % (str(10))+ \ 971 | " -translate_z %.3f " % (zcorrection) + \ 972 | " -o %s" % (outfilename) 973 | 974 | stdout, stderr = runner(cmd, False) 975 | 976 | return outfilename 977 | 978 | ############################################################################### 979 | def las2lasclipstarttime(filename, outfilename, 
clipstarttime=0): 980 | '''clip records from a laz file before a user supplied gps start time''' 981 | 982 | # odirlog = makedirs(odir) 983 | 984 | filename = os.path.abspath(filename).replace('\\','/') 985 | outfilename = os.path.abspath(outfilename).replace('\\','/') 986 | 987 | cmd = "las2las64.exe" + \ 988 | " -i %s " % (filename) + \ 989 | " -drop_x_below %s" % (str(10))+ \ 990 | " -drop_gpstime_below %.3f " % (clipstarttime) + \ 991 | " -o %s" % (outfilename) 992 | 993 | stdout, stderr = runner(cmd, False) 994 | 995 | return outfilename 996 | 997 | ############################################################################### 998 | def ispositivedepths(rect): 999 | if rect.minz > 0 or rect.maxz > 0: 1000 | return True 1001 | if rect.minz < 0 or rect.maxz < 0: 1002 | return False 1003 | ############################################################################### 1004 | def getlazfirstlast(filename, odir, prefix="las2txt"): 1005 | '''get the laz first and last records as fast as possible''' 1006 | '''las2txt -i CATHX_D2019-08-29T00-36-18-702Z_None_X0.laz -thin_with_grid 1 -stdout | more''' 1007 | 1008 | if not os.path.isdir(odir): 1009 | os.makedirs(odir) 1010 | 1011 | outfilename = os.path.join(odir, prefix + '_info.txt') 1012 | fstd = open(outfilename, 'w') 1013 | 1014 | cmd = "las2txt64.exe" + \ 1015 | " -i %s" % (os.path.abspath(filename).replace('\\','/')) + \ 1016 | " -o %s" % (os.path.abspath(outfilename).replace('\\','/')) + \ 1017 | " -thin_with_grid 1" 1018 | 1019 | args = shlex.split(cmd) 1020 | 1021 | proc = subprocess.Popen(args, stdout=fstd, stderr=subprocess.PIPE) 1022 | proc.wait() 1023 | 1024 | '''now extract the position information ''' 1025 | startx = 0 1026 | starty = 0 1027 | startz = 0 1028 | endx = 0 1029 | endy = 0 1030 | endz = 0 1031 | 1032 | f = open(outfilename, 'r') 1033 | for line in f: 1034 | line = line.strip() 1035 | words = line.split(" ") 1036 | if startx == 0: 1037 | startx = float(words[0]) 1038 | starty = float(words[1]) 1039 | startz = float(words[2]) 1040 | 1041 | endx = float(words[0]) 1042 | endy = float(words[1]) 1043 | endz = float(words[2]) 1044 | 1045 | return startx, starty, startz, endx, endy, endz 1046 | 1047 | ############################################################################### 1048 | def getlazmeandepth(filename, odir, prefix="lasinfo"): 1049 | '''get the laz metadata mean depth''' 1050 | '''lasinfo -i lidar.laz -o _info -otxt -histo z 1''' 1051 | 1052 | if not os.path.isdir(odir): 1053 | os.makedirs(odir) 1054 | 1055 | prefix = str(uuid.uuid1()) 1056 | outfilename = os.path.join(odir, prefix + '_infomd.txt').replace('\\','/') 1057 | fstd = open(outfilename, 'w') 1058 | 1059 | cmd = "lasinfo64.exe" + \ 1060 | " -i %s" % (os.path.abspath(filename).replace('\\','/')) + \ 1061 | " -o %s" % (os.path.abspath(outfilename).replace('\\','/')) + \ 1062 | " -nv" + \ 1063 | " -histo z 1 " 1064 | args = shlex.split(cmd) 1065 | 1066 | proc = subprocess.Popen(args, stdout=fstd, stderr=subprocess.PIPE) 1067 | proc.wait() 1068 | fstd.close() 1069 | 1070 | '''now extract the position information ''' 1071 | #point data format: 0 1072 | meanz = 0 1073 | recordcount = 0 1074 | f = open(outfilename, 'r') 1075 | for line in f: 1076 | if line.lstrip().lower().startswith('number of point records'): 1077 | line = line.lstrip() 1078 | line = line.rstrip() 1079 | line = line.replace("(", "") 1080 | line = line.replace(")", "") 1081 | line = line.replace(" ", " ") 1082 | line = line.replace(" ", " ") 1083 | line = line.replace(" ", " ") 1084 | line =
line.replace(" ", " ") 1085 | line = line.replace(" ", " ") 1086 | line = line.replace(" ", " ") 1087 | line = line.replace(" ", " ") 1088 | line = line.replace(" ", " ") 1089 | line = line.replace(",", "") 1090 | words = line.split(" ") 1091 | recordcount= float(words[4]) 1092 | if recordcount == 0: 1093 | return meanz, recordcount 1094 | 1095 | if line.lstrip().lower().startswith('average z'): 1096 | line = line.lstrip() 1097 | line = line.rstrip() 1098 | line = line.replace("(", "") 1099 | line = line.replace(")", "") 1100 | line = line.replace(" ", " ") 1101 | line = line.replace(" ", " ") 1102 | line = line.replace(" ", " ") 1103 | line = line.replace(" ", " ") 1104 | line = line.replace(" ", " ") 1105 | line = line.replace(" ", " ") 1106 | line = line.replace(" ", " ") 1107 | line = line.replace(" ", " ") 1108 | line = line.replace(",", "") 1109 | words = line.split(" ") 1110 | meanz= float(words[3]) 1111 | f.close() 1112 | fileutils.deletefile(outfilename) 1113 | return meanz, recordcount 1114 | 1115 | ############################################################################### 1116 | def getlazboundingbox(filename, odir, prefix="lasinfo"): 1117 | '''get the laz metadata and return the bounding box rectangle''' 1118 | '''lasinfo -i lidar.laz -odix _info -otxt''' 1119 | 1120 | x = 0 1121 | y = 0 1122 | 1123 | if not os.path.isdir(odir): 1124 | os.makedirs(odir) 1125 | 1126 | prefix = str(uuid.uuid1()) 1127 | outfilename = os.path.join(odir, prefix + '_infobb.txt') 1128 | f = open(outfilename, 'w') 1129 | 1130 | cmd = "lasinfo64.exe" + \ 1131 | " -i %s" % (os.path.abspath(filename).replace('\\','/')) + \ 1132 | " -o %s" % (os.path.abspath(outfilename).replace('\\','/')) + \ 1133 | " -nv" + \ 1134 | " -nc" 1135 | args = shlex.split(cmd) 1136 | 1137 | proc = subprocess.Popen(args, stdout=f, stderr=subprocess.PIPE) 1138 | proc.wait() 1139 | 1140 | '''now extract the position informaiton ''' 1141 | # scale factor x y z: 0.0001 0.0001 0.0001 1142 | # offset x y z: 467000 7786000 1000 1143 | # min x y z: 467106.6526 7786476.5504 1612.2718 1144 | # max x y z: 467161.9851 7786494.3574 1612.8824 1145 | 1146 | 1147 | p1 = POINT(0,0) 1148 | p2 = POINT(1,1) 1149 | rectangle = RECT(p1, p2) 1150 | 1151 | f = open(outfilename, 'r') 1152 | for line in f: 1153 | if line.lstrip().lower().startswith('min x'): 1154 | line = line.lstrip() 1155 | line = line.rstrip() 1156 | line = line.replace("(", "") 1157 | line = line.replace(")", "") 1158 | line = line.replace(" ", " ") 1159 | line = line.replace(" ", " ") 1160 | line = line.replace(" ", " ") 1161 | line = line.replace(" ", " ") 1162 | line = line.replace(" ", " ") 1163 | line = line.replace(" ", " ") 1164 | line = line.replace(" ", " ") 1165 | line = line.replace(" ", " ") 1166 | line = line.replace(",", "") 1167 | words = line.split(" ") 1168 | x = float(words[4]) 1169 | y = float(words[5]) 1170 | z = float(words[6]) 1171 | ll = POINT(x,y,z) 1172 | if line.lstrip().lower().startswith('max x'): 1173 | line = line.lstrip() 1174 | line = line.rstrip() 1175 | line = line.replace("(", "") 1176 | line = line.replace(")", "") 1177 | line = line.replace(" ", " ") 1178 | line = line.replace(" ", " ") 1179 | line = line.replace(" ", " ") 1180 | line = line.replace(" ", " ") 1181 | line = line.replace(" ", " ") 1182 | line = line.replace(" ", " ") 1183 | line = line.replace(" ", " ") 1184 | line = line.replace(" ", " ") 1185 | line = line.replace(",", "") 1186 | words = line.split(" ") 1187 | x = float(words[4]) 1188 | y = float(words[5]) 1189 | z = 
float(words[6]) 1190 | ur = POINT(x,y,z) 1191 | rectangle = RECT(ll, ur) 1192 | 1193 | f.close() 1194 | fileutils.deletefile(outfilename) 1195 | return rectangle 1196 | 1197 | ############################################################################### 1198 | def makedirs(odir): 1199 | if not os.path.isdir(odir): 1200 | os.makedirs(odir, exist_ok=True) 1201 | odirlog = os.path.join(odir, "log").replace('\\','/') 1202 | if not os.path.isdir(odirlog): 1203 | os.makedirs(odirlog) 1204 | return odirlog 1205 | 1206 | ############################################################################### 1207 | def getcpucount(requestedcpu): 1208 | '''control how many CPU's we use for multi processing''' 1209 | if int(requestedcpu) == 0: 1210 | requestedcpu = multiprocessing.cpu_count() 1211 | 1212 | stat = MEMORYSTATUSEX() 1213 | ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) 1214 | # print("MemoryLoad: %d%%" % (stat.dwMemoryLoad)) 1215 | # print("MemoryAvailable: %d%%" % (stat.ullAvailPhys/(1024*1024*1024))) 1216 | availablememoryingigs = stat.ullAvailPhys/(1024*1024*1024) 1217 | # make sure we have enough memory per CPU 1218 | requiredgigspercpu = 4 1219 | 1220 | maxcpu = max(1, int(availablememoryingigs/ requiredgigspercpu)) 1221 | # ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) 1222 | # print("MemoryLoad: %d%%" % (stat.dwMemoryLoad)) 1223 | # ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) 1224 | # print("MemoryLoad: %d%%" % (stat.dwMemoryLoad)) 1225 | # ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) 1226 | # print("MemoryLoad: %d%%" % (stat.dwMemoryLoad)) 1227 | # ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) 1228 | # print("MemoryLoad: %d%%" % (stat.dwMemoryLoad)) 1229 | 1230 | if int(requestedcpu) > maxcpu: 1231 | requestedcpu = maxcpu 1232 | return int(requestedcpu) 1233 | 1234 | ############################################################################### 1235 | def log(msg, error = False, printmsg=True): 1236 | if printmsg: 1237 | print (msg) 1238 | if error == False: 1239 | logging.info(msg) 1240 | else: 1241 | logging.error(msg) 1242 | 1243 | ############################################################################### 1244 | class POINT(object): 1245 | def __init__(self, x, y, z=0.0): 1246 | self.x = x 1247 | self.y = y 1248 | self.z = z 1249 | 1250 | ############################################################################### 1251 | class RECT(object): 1252 | def __init__(self, p1, p2): 1253 | '''Store the top, bottom, left and right values for points 1254 | p1 and p2 are the (corners) in either order 1255 | ''' 1256 | self.left = min(p1.x, p2.x) 1257 | self.right = max(p1.x, p2.x) 1258 | self.bottom = min(p1.y, p2.y) 1259 | self.top = max(p1.y, p2.y) 1260 | self.minz = min(p1.z, p2.z) 1261 | self.maxz = max(p1.z, p2.z) 1262 | 1263 | class MEMORYSTATUSEX(ctypes.Structure): 1264 | _fields_ = [ 1265 | ("dwLength", ctypes.c_ulong), 1266 | ("dwMemoryLoad", ctypes.c_ulong), 1267 | ("ullTotalPhys", ctypes.c_ulonglong), 1268 | ("ullAvailPhys", ctypes.c_ulonglong), 1269 | ("ullTotalPageFile", ctypes.c_ulonglong), 1270 | ("ullAvailPageFile", ctypes.c_ulonglong), 1271 | ("ullTotalVirtual", ctypes.c_ulonglong), 1272 | ("ullAvailVirtual", ctypes.c_ulonglong), 1273 | ("sullAvailExtendedVirtual", ctypes.c_ulonglong), 1274 | ] 1275 | 1276 | def __init__(self): 1277 | # have to initialize this to the size of MEMORYSTATUSEX 1278 | self.dwLength = ctypes.sizeof(self) 1279 | super(MEMORYSTATUSEX, self).__init__() 
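###############################################################################
# a minimal usage sketch, assuming the LAStools executables wrapped above
# (lasindex.exe, lastile.exe, lasgrid64.exe, blast2dem.exe) are available on the PATH.
# the folder names, EPSG code and resolution below are placeholder values only;
# the sketch simply shows the typical chain of these helpers: index -> tile -> grid -> hillshade for QC.
def _exampleworkflow(srcfolder="c:/survey/laz", workfolder="c:/survey/work", epsg="31984", resolution=1.0):
	'''example only: index, tile, grid and hillshade a folder of laz files.'''
	# build .lax spatial indexes so downstream tools can read the files efficiently
	lasindex(srcfolder)
	# tile the survey into regular tiles so very large datasets remain manageable
	tiledir = lastile(srcfolder, os.path.join(workfolder, "tiles"), tile_size=5000)
	# grid the tiles into a single merged laz surface at the requested resolution
	surface = lasgrid2(tiledir, os.path.join(workfolder, "grid"), "*.laz", resolution, "surface.laz", gridtype="average", epsg=epsg)
	# render a hillshade png of the gridded surface for a quick visual QC
	return hillshade(surface, os.path.join(workfolder, "qc"), resolution)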
1280 | 1281 | 1282 | ################################################################################################### 1283 | if __name__ == "__main__": 1284 | print("lashelper.py copyright GuardianGeomatics Pty Ltd") 1285 | --------------------------------------------------------------------------------