├── .flake8 ├── .gitattributes ├── .github └── workflows │ └── testing.yml ├── .gitignore ├── AUTHORS ├── CMakeLists.txt ├── LICENSE.txt ├── MANIFEST.in ├── Makefile ├── README.md ├── datasets └── EastCoast.nc ├── install_cgal.bat ├── install_oceanmesh.bat ├── oceanmesh ├── __init__.py ├── _version.py ├── boundary.py ├── clean.py ├── cpp │ ├── HamiltonJacobi.cpp │ ├── __init__.py │ ├── delaunay_class.cpp │ └── fast_geometry.cpp ├── edgefx.py ├── edges.py ├── filterfx.py ├── fix_mesh.py ├── geodata.py ├── grid.py ├── idw.py ├── mesh_generator.py ├── region.py └── signed_distance_function.py ├── pyproject.toml ├── setup.cfg ├── setup.py ├── tests ├── GSHHS_i_L1.dbf ├── GSHHS_i_L1.prj ├── GSHHS_i_L1.shp ├── GSHHS_i_L1.shx ├── __init__.py ├── galv_sub.nc ├── galv_sub.tif ├── global │ ├── global_latlon.cpg │ ├── global_latlon.dbf │ ├── global_latlon.prj │ ├── global_latlon.shp │ ├── global_latlon.shx │ ├── global_stereo.cpg │ ├── global_stereo.dbf │ ├── global_stereo.prj │ ├── global_stereo.shp │ └── global_stereo.shx ├── islands.cpg ├── islands.dbf ├── islands.prj ├── islands.shp ├── islands.shx ├── ocean.cpg ├── ocean.dbf ├── ocean.prj ├── ocean.shp ├── ocean.shx ├── test_bathymetric_gradient_function.py ├── test_circ_rect.py ├── test_delete_exterior.py ├── test_delete_interior.py ├── test_edgefx.py ├── test_edges.py ├── test_geodata.py ├── test_global_stereo.py ├── test_grade.py ├── test_irregular_domain.py ├── test_mesh_generator_simple.py ├── test_multiscale.py └── test_signed_distance_function.py ├── tox.ini └── versioneer.py /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = E203, E266, E501, W503 3 | max-line-length = 80 4 | max-complexity = 18 5 | select = B,C,E,F,W,T4,B9 6 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | oceanmesh/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.github/workflows/testing.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | 9 | jobs: 10 | lint: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/setup-python@v2 14 | with: 15 | python-version: "3.9" 16 | - uses: actions/checkout@v2 17 | - name: Lint with flake8 18 | run: | 19 | pip install flake8 20 | flake8 . 21 | - name: Lint with black 22 | run: | 23 | pip install black 24 | black --check . 
25 | build: 26 | runs-on: ubuntu-latest 27 | strategy: 28 | fail-fast: false 29 | matrix: 30 | python-version: [ 3.8, 3.9, '3.10', '3.11'] 31 | steps: 32 | - uses: actions/checkout@v2 33 | - name: Set up Python ${{ matrix.python-version }} 34 | uses: actions/setup-python@v2 35 | with: 36 | python-version: ${{ matrix.python-version }} 37 | - name: Update 38 | run: | 39 | sudo apt update 40 | - name: Install CGAL 5 41 | run: | 42 | sudo apt install -y libcgal-dev 43 | - name: Install other dependencies 44 | run: | 45 | sudo apt install -y libopenmpi3 libopenmpi-dev openmpi-bin 46 | sudo apt install -y libhdf5-dev 47 | - name: pre install numpy (for inpoly) 48 | run: | 49 | pip install numpy 50 | - name: Test with tox 51 | run: | 52 | pip install tox 53 | tox 54 | - uses: codecov/codecov-action@v1 55 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.nc 2 | *.pyc 3 | *.swp 4 | *.prof 5 | dist/ 6 | build/ 7 | .coverage 8 | .cache/ 9 | *.egg-info/ 10 | .pytest_cache/ 11 | *.segy 12 | *.vtk 13 | *.hdf5 14 | *.msh 15 | *.so 16 | *.png 17 | tests/*.txt 18 | *.zip 19 | tests/gshhg-shp-2.3.7/ 20 | 21 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | Main author: 2 | 3 | Keith J. Roberts 4 | email: keithrbt0@gmail.com 5 | www: http://www.keithroberts.site 6 | -------------------------------------------------------------------------------- /CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8.12) 2 | 3 | project(oceanmesh) 4 | 5 | ############################################################################## 6 | 7 | # Add custom CMake modules 8 | set(CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake ${CMAKE_MODULE_PATH}) 9 | 10 | # Some screen output 11 | message(STATUS "OS detected: ${CMAKE_SYSTEM_NAME}") 12 | message(STATUS "CXX Compiler detected: ${CMAKE_CXX_COMPILER_ID}") 13 | message(STATUS "CMake additional search path for libraries: ${CMAKE_LIBRARY_PATH}") 14 | 15 | # CGAL and its components 16 | find_package(CGAL) 17 | message(STATUS "CGAL version: ${CGAL_VERSION}") 18 | 19 | if( CGAL_VERSION VERSION_LESS 5.0) 20 | message(FATAL_ERROR "This project requires at least CGAL 5.0 library and will not be compiled.") 21 | endif() 22 | 23 | 24 | # include helper file 25 | include( ${CGAL_USE_FILE}) 26 | 27 | set (SRC "oceanmesh/cpp") 28 | 29 | include_directories (${SRC}) 30 | 31 | find_package(pybind11) 32 | message(STATUS "Found pybind11 v${pybind11_VERSION}: ${pybind11_INCLUDE_DIRS}") 33 | 34 | #add_subdirectory(pybind11) 35 | pybind11_add_module(delaunay_class ${SOURCES} "${SRC}/delaunay_class.cpp") 36 | pybind11_add_module(HamiltonJacobi ${SOURCES} "${SRC}/HamiltonJacobi.cpp") 37 | pybind11_add_module(fast_geometry ${SOURCES} "${SRC}/fast_geometry.cpp") 38 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include oceanmesh/cpp/*.cpp 2 | include tests/* 3 | 4 | include README.md LICENSE.txt 5 | global-include CMakeLists.txt 6 | include versioneer.py 7 | include oceanmesh/_version.py 8 | -------------------------------------------------------------------------------- /Makefile: 
-------------------------------------------------------------------------------- 1 | VERSION=$(shell python3 -c "from configparser import ConfigParser; p = ConfigParser(); p.read('setup.cfg'); print(p['metadata']['version'])") 2 | 3 | default: 4 | @echo "\"make publish\"?" 5 | 6 | clean: 7 | @find . | grep -E "(__pycache__|\.pyc|\.pyo$$)" | xargs rm -rf 8 | @rm -rf build/* 9 | @rm -rf oceanmesh.egg-info/ 10 | @rm -rf dist/ 11 | 12 | format: 13 | isort -rc oceanmesh/ tests/*.py 14 | black setup.py oceanmesh/ tests/*.py 15 | blacken-docs README.md 16 | -------------------------------------------------------------------------------- /datasets/EastCoast.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CHLNDDEV/oceanmesh/478fe78128e8d33a6c0404a276fb6dbad72d0779/datasets/EastCoast.nc -------------------------------------------------------------------------------- /install_cgal.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | setlocal 3 | 4 | REM This is because the latest vcpkg uses a portable python 3.10 that won't run on Windows 7 5 | REM 6 | for /F %%v in ('powershell -Command [environment]::OSVersion.Version.Major') do set OS_VER=%%v 7 | if "%OS_VER%" LSS "10" ( 8 | echo This install script requires Windows 10 or later [you have version "%OS_VER%"] 9 | echo You will need to install CGAL by other means. 10 | goto FAIL 11 | ) 12 | 13 | REM This was for Windows 7... Windows 10 comes with a suitable powershell version 14 | REM 15 | for /F %%v in ('powershell -Command $Host.Version.Major') do set PS_VER=%%v 16 | if "%PS_VER%" LSS "3" ( 17 | echo This install script requires Windows PowerShell Version 3 or later [you have version "%PS_VER%"] 18 | echo See "https://docs.microsoft.com/en-us/powershell/" 19 | goto FAIL 20 | ) 21 | 22 | REM check prerequisites to build C/C++ using vcpkg 23 | REM 24 | if not exist "%ProgramFiles(x86)%\Microsoft Visual Studio\2015" ( 25 | if not exist "%ProgramFiles(x86)%\Microsoft Visual Studio\2017" ( 26 | if not exist "%ProgramFiles(x86)%\Microsoft Visual Studio\2019" ( 27 | if not exist "%ProgramFiles%\Microsoft Visual Studio\2022" ( 28 | echo This install script requires "Visual Studio" 29 | echo See "https://visualstudio.microsoft.com/downloads/" 30 | goto FAIL 31 | ) 32 | ) 33 | ) 34 | ) 35 | if not exist "%ProgramFiles%\CMake" ( 36 | echo This install script requires "CMake" 37 | echo See "https://cmake.org/download/" 38 | goto FAIL 39 | ) 40 | if not exist "%ProgramFiles%\Git" ( 41 | echo This install script requires "git" 42 | echo See "https://git-scm.com/download/" 43 | goto FAIL 44 | ) 45 | 46 | REM get cgal libraries from vcpkg and build 47 | REM 48 | set INSTALL_DIR=%USERPROFILE%\OceanMesh 49 | if not exist "%INSTALL_DIR%" mkdir "%INSTALL_DIR%" 50 | 51 | pushd "%INSTALL_DIR%" 52 | 53 | set LOG=%INSTALL_DIR%\build.log 54 | 55 | set FRESH=NO 56 | if not exist vcpkg set FRESH=YES 57 | 58 | if "%FRESH%" EQU "YES" git clone https://github.com/Microsoft/vcpkg.git 59 | 60 | if not exist vcpkg ( 61 | echo 'git clone' failed... if you have just installed git, then close this command prompt before re-running this script. 62 | popd 63 | goto FAIL 64 | ) 65 | 66 | pushd vcpkg 67 | if "%FRESH%" EQU "YES" ( 68 | call bootstrap-vcpkg.bat 69 | ) else ( 70 | vcpkg upgrade --no-dry-run 71 | ) 72 | echo building cgal... this generally takes several minutes. 
73 | vcpkg install cgal:x64-windows > "%LOG%" 74 | popd 75 | 76 | popd 77 | 78 | set BIN_DIR=%INSTALL_DIR%\vcpkg\installed\x64-windows\bin 79 | 80 | if not exist "%BIN_DIR%\gmp.dll" ( 81 | echo build has failed or is incomplete... see "%LOG%". 82 | goto FAIL 83 | ) 84 | 85 | REM permanently set environment variable to location of DLLs 86 | REM 87 | echo build seems to have succeeded. 88 | endlocal & set CGAL_BIN=%BIN_DIR% 89 | setx CGAL_BIN "%CGAL_BIN%" 90 | pause 91 | exit /b 0 92 | 93 | :FAIL 94 | pause 95 | exit /b 1 96 | -------------------------------------------------------------------------------- /install_oceanmesh.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | setlocal 3 | 4 | set ENV=OM 5 | if "%~1" NEQ "" set ENV=%~1 6 | 7 | where conda > NUL 8 | if errorlevel 1 ( 9 | echo This script is designed for a conda distribution. 10 | echo If you are using another distro with virtualenv/pip, then you 11 | echo will need to follow the steps below [e.g. for python 3.9.x]: 12 | echo 1. obtain wheels from https://www.lfd.uci.edu/~gohlke/pythonlibs: 13 | echo GDAL, Fiona, rasterio 14 | echo 2. Run: 15 | echo pip install GDAL-3.4.1-cp39-cp39-win_amd64.whl 16 | echo pip install Fiona-1.8.21-cp39-cp39-win_amd64.whl 17 | echo pip install rasterio-1.2.10-cp39-cp39-win_amd64.whl 18 | echo 3. Run: 19 | echo pip install pybind11 20 | echo python setup.py install 21 | pause 22 | exit /b 1 23 | ) 24 | 25 | echo creating OceanMesh conda environment "%ENV%" 26 | call conda create --name %ENV% 27 | call conda activate %ENV% 28 | 29 | echo installing OceanMesh conda packages 30 | call conda install -c conda-forge geopandas rasterio scikit-fmm pybind11 31 | 32 | echo Installing OceanMesh package 33 | python setup.py install 34 | 35 | echo Done 36 | pause 37 | exit /b 0 38 | -------------------------------------------------------------------------------- /oceanmesh/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | # DPZ patches for miniconda on windows using vcpkg to install cgal and dependencies 4 | # 5 | if os.name == "nt": 6 | assert os.environ.get( 7 | "CGAL_BIN", None 8 | ), "The environment variable CGAL_BIN must be set." 
9 | os.add_dll_directory(os.environ["CGAL_BIN"]) 10 | 11 | from oceanmesh.boundary import identify_ocean_boundary_sections 12 | from oceanmesh.clean import ( 13 | delete_boundary_faces, 14 | delete_exterior_faces, 15 | delete_faces_connected_to_one_face, 16 | delete_interior_faces, 17 | laplacian2, 18 | make_mesh_boundaries_traversable, 19 | mesh_clean, 20 | ) 21 | from oceanmesh.edgefx import ( 22 | bathymetric_gradient_sizing_function, 23 | distance_sizing_from_line_function, 24 | distance_sizing_from_point_function, 25 | distance_sizing_function, 26 | enforce_mesh_gradation, 27 | enforce_mesh_size_bounds_elevation, 28 | feature_sizing_function, 29 | multiscale_sizing_function, 30 | wavelength_sizing_function, 31 | ) 32 | from oceanmesh.edges import draw_edges, get_poly_edges 33 | from oceanmesh.filterfx import filt2 34 | from oceanmesh.geodata import ( 35 | DEM, 36 | Shoreline, 37 | create_circle_coords, 38 | get_polygon_coordinates, 39 | ) 40 | from oceanmesh.grid import Grid, compute_minimum 41 | from oceanmesh.region import ( 42 | Region, 43 | stereo_to_3d, 44 | to_3d, 45 | to_lat_lon, 46 | to_stereo, 47 | warp_coordinates, 48 | ) 49 | from oceanmesh.signed_distance_function import ( 50 | Difference, 51 | Domain, 52 | Intersection, 53 | Union, 54 | create_bbox, 55 | create_circle, 56 | multiscale_signed_distance_function, 57 | signed_distance_function, 58 | ) 59 | 60 | from .fix_mesh import fix_mesh, simp_qual, simp_vol 61 | from .mesh_generator import ( 62 | generate_mesh, 63 | generate_multiscale_mesh, 64 | plot_mesh_bathy, 65 | plot_mesh_connectivity, 66 | write_to_fort14, 67 | write_to_t3s, 68 | ) 69 | 70 | __all__ = [ 71 | "create_bbox", 72 | "Region", 73 | "stereo_to_3d", 74 | "to_lat_lon", 75 | "to_3d", 76 | "to_stereo", 77 | "compute_minimum", 78 | "create_circle_coords", 79 | "bathymetric_gradient_sizing_function", 80 | "multiscale_sizing_function", 81 | "delete_boundary_faces", 82 | "delete_faces_connected_to_one_face", 83 | "distance_sizing_from_point_function", 84 | "distance_sizing_from_line_function", 85 | "plot_mesh_connectivity", 86 | "plot_mesh_bathy", 87 | "make_mesh_boundaries_traversable", 88 | "enforce_mesh_size_bounds_elevation", 89 | "laplacian2", 90 | "delete_interior_faces", 91 | "delete_exterior_faces", 92 | "mesh_clean", 93 | "Grid", 94 | "DEM", 95 | "Domain", 96 | "create_circle", 97 | "Union", 98 | "Difference", 99 | "Intersection", 100 | "identify_ocean_boundary_sections", 101 | "Shoreline", 102 | "generate_multiscale_mesh", 103 | "get_polygon_coordinates", 104 | "distance_sizing_function", 105 | "feature_sizing_function", 106 | "enforce_mesh_gradation", 107 | "wavelength_sizing_function", 108 | "multiscale_signed_distance_function", 109 | "signed_distance_function", 110 | "filt2", 111 | "get_poly_edges", 112 | "draw_edges", 113 | "generate_mesh", 114 | "fix_mesh", 115 | "simp_vol", 116 | "simp_qual", 117 | "warp_coordinates", 118 | "write_to_fort14", 119 | "write_to_t3s", 120 | ] 121 | 122 | from . import _version 123 | 124 | __version__ = _version.get_versions()["version"] 125 | 
-------------------------------------------------------------------------------- /oceanmesh/_version.py: -------------------------------------------------------------------------------- 1 | # This file helps to compute a version number in source trees obtained from 2 | # git-archive tarball (such as those provided by github's download-from-tag 3 | # feature). Distribution tarballs (built by setup.py sdist) and build 4 | # directories (produced by setup.py build) will contain a much shorter file 5 | # that just contains the computed version number. 6 | 7 | # This file is released into the public domain. Generated by 8 | # versioneer-0.21 (https://github.com/python-versioneer/python-versioneer) 9 | 10 | """Git implementation of _version.py.""" 11 | 12 | import errno 13 | import os 14 | import re 15 | import subprocess 16 | import sys 17 | from typing import Callable, Dict 18 | 19 | 20 | def get_keywords(): 21 | """Get the keywords needed to look up the version information.""" 22 | # these strings will be replaced by git during git-archive. 23 | # setup.py/versioneer.py will grep for the variable names, so they must 24 | # each be defined on a line of their own. _version.py will just call 25 | # get_keywords(). 26 | git_refnames = " (HEAD -> master, tag: V0.5.0)" 27 | git_full = "478fe78128e8d33a6c0404a276fb6dbad72d0779" 28 | git_date = "2025-01-12 21:20:53 -0500" 29 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 30 | return keywords 31 | 32 | 33 | class VersioneerConfig: 34 | """Container for Versioneer configuration parameters.""" 35 | 36 | 37 | def get_config(): 38 | """Create, populate and return the VersioneerConfig() object.""" 39 | # these strings are filled in when 'setup.py versioneer' creates 40 | # _version.py 41 | cfg = VersioneerConfig() 42 | cfg.VCS = "git" 43 | cfg.style = "pep440" 44 | cfg.tag_prefix = "v" 45 | cfg.parentdir_prefix = "None" 46 | cfg.versionfile_source = "oceanmesh/_version.py" 47 | cfg.verbose = False 48 | return cfg 49 | 50 | 51 | class NotThisMethod(Exception): 52 | """Exception raised if a method is not valid for the current scenario.""" 53 | 54 | 55 | LONG_VERSION_PY: Dict[str, str] = {} 56 | HANDLERS: Dict[str, Dict[str, Callable]] = {} 57 | 58 | 59 | def register_vcs_handler(vcs, method): # decorator 60 | """Create decorator to mark a method as the handler of a VCS.""" 61 | 62 | def decorate(f): 63 | """Store f in HANDLERS[vcs][method].""" 64 | if vcs not in HANDLERS: 65 | HANDLERS[vcs] = {} 66 | HANDLERS[vcs][method] = f 67 | return f 68 | 69 | return decorate 70 | 71 | 72 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): 73 | """Call the given command(s).""" 74 | assert isinstance(commands, list) 75 | process = None 76 | for command in commands: 77 | try: 78 | dispcmd = str([command] + args) 79 | # remember shell=False, so use git.cmd on windows, not just git 80 | process = subprocess.Popen( 81 | [command] + args, 82 | cwd=cwd, 83 | env=env, 84 | stdout=subprocess.PIPE, 85 | stderr=(subprocess.PIPE if hide_stderr else None), 86 | ) 87 | break 88 | except OSError: 89 | e = sys.exc_info()[1] 90 | if e.errno == errno.ENOENT: 91 | continue 92 | if verbose: 93 | print("unable to run %s" % dispcmd) 94 | print(e) 95 | return None, None 96 | else: 97 | if verbose: 98 | print("unable to find command, tried %s" % (commands,)) 99 | return None, None 100 | stdout = process.communicate()[0].strip().decode() 101 | if 
process.returncode != 0: 102 | if verbose: 103 | print("unable to run %s (error)" % dispcmd) 104 | print("stdout was %s" % stdout) 105 | return None, process.returncode 106 | return stdout, process.returncode 107 | 108 | 109 | def versions_from_parentdir(parentdir_prefix, root, verbose): 110 | """Try to determine the version from the parent directory name. 111 | 112 | Source tarballs conventionally unpack into a directory that includes both 113 | the project name and a version string. We will also support searching up 114 | two directory levels for an appropriately named parent directory 115 | """ 116 | rootdirs = [] 117 | 118 | for _ in range(3): 119 | dirname = os.path.basename(root) 120 | if dirname.startswith(parentdir_prefix): 121 | return { 122 | "version": dirname[len(parentdir_prefix) :], 123 | "full-revisionid": None, 124 | "dirty": False, 125 | "error": None, 126 | "date": None, 127 | } 128 | rootdirs.append(root) 129 | root = os.path.dirname(root) # up a level 130 | 131 | if verbose: 132 | print( 133 | "Tried directories %s but none started with prefix %s" 134 | % (str(rootdirs), parentdir_prefix) 135 | ) 136 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 137 | 138 | 139 | @register_vcs_handler("git", "get_keywords") 140 | def git_get_keywords(versionfile_abs): 141 | """Extract version information from the given file.""" 142 | # the code embedded in _version.py can just fetch the value of these 143 | # keywords. When used from setup.py, we don't want to import _version.py, 144 | # so we do it with a regexp instead. This function is not used from 145 | # _version.py. 146 | keywords = {} 147 | try: 148 | with open(versionfile_abs, "r") as fobj: 149 | for line in fobj: 150 | if line.strip().startswith("git_refnames ="): 151 | mo = re.search(r'=\s*"(.*)"', line) 152 | if mo: 153 | keywords["refnames"] = mo.group(1) 154 | if line.strip().startswith("git_full ="): 155 | mo = re.search(r'=\s*"(.*)"', line) 156 | if mo: 157 | keywords["full"] = mo.group(1) 158 | if line.strip().startswith("git_date ="): 159 | mo = re.search(r'=\s*"(.*)"', line) 160 | if mo: 161 | keywords["date"] = mo.group(1) 162 | except OSError: 163 | pass 164 | return keywords 165 | 166 | 167 | @register_vcs_handler("git", "keywords") 168 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 169 | """Get version information from git keywords.""" 170 | if "refnames" not in keywords: 171 | raise NotThisMethod("Short version file found") 172 | date = keywords.get("date") 173 | if date is not None: 174 | # Use only the last line. Previous lines may contain GPG signature 175 | # information. 176 | date = date.splitlines()[-1] 177 | 178 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant 179 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 180 | # -like" string, which we must then edit to make compliant), because 181 | # it's been around since git-1.5.3, and it's too difficult to 182 | # discover which version we're using, or to work around using an 183 | # older one. 184 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 185 | refnames = keywords["refnames"].strip() 186 | if refnames.startswith("$Format"): 187 | if verbose: 188 | print("keywords are unexpanded, not using") 189 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 190 | refs = {r.strip() for r in refnames.strip("()").split(",")} 191 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 192 | # just "foo-1.0". 
If we see a "tag: " prefix, prefer those. 193 | TAG = "tag: " 194 | tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)} 195 | if not tags: 196 | # Either we're using git < 1.8.3, or there really are no tags. We use 197 | # a heuristic: assume all version tags have a digit. The old git %d 198 | # expansion behaves like git log --decorate=short and strips out the 199 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 200 | # between branches and tags. By ignoring refnames without digits, we 201 | # filter out many common branch names like "release" and 202 | # "stabilization", as well as "HEAD" and "master". 203 | tags = {r for r in refs if re.search(r"\d", r)} 204 | if verbose: 205 | print("discarding '%s', no digits" % ",".join(refs - tags)) 206 | if verbose: 207 | print("likely tags: %s" % ",".join(sorted(tags))) 208 | for ref in sorted(tags): 209 | # sorting will prefer e.g. "2.0" over "2.0rc1" 210 | if ref.startswith(tag_prefix): 211 | r = ref[len(tag_prefix) :] 212 | # Filter out refs that exactly match prefix or that don't start 213 | # with a number once the prefix is stripped (mostly a concern 214 | # when prefix is '') 215 | if not re.match(r"\d", r): 216 | continue 217 | if verbose: 218 | print("picking %s" % r) 219 | return { 220 | "version": r, 221 | "full-revisionid": keywords["full"].strip(), 222 | "dirty": False, 223 | "error": None, 224 | "date": date, 225 | } 226 | # no suitable tags, so version is "0+unknown", but full hex is still there 227 | if verbose: 228 | print("no suitable tags, using unknown + full revision id") 229 | return { 230 | "version": "0+unknown", 231 | "full-revisionid": keywords["full"].strip(), 232 | "dirty": False, 233 | "error": "no suitable tags", 234 | "date": None, 235 | } 236 | 237 | 238 | @register_vcs_handler("git", "pieces_from_vcs") 239 | def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): 240 | """Get version from 'git describe' in the root of the source tree. 241 | 242 | This only gets called if the git-archive 'subst' keywords were *not* 243 | expanded, and _version.py hasn't already been rewritten with a short 244 | version string, meaning we're inside a checked out source tree. 
245 | """ 246 | GITS = ["git"] 247 | TAG_PREFIX_REGEX = "*" 248 | if sys.platform == "win32": 249 | GITS = ["git.cmd", "git.exe"] 250 | TAG_PREFIX_REGEX = r"\*" 251 | 252 | _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) 253 | if rc != 0: 254 | if verbose: 255 | print("Directory %s not under git control" % root) 256 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 257 | 258 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 259 | # if there isn't one, this yields HEX[-dirty] (no NUM) 260 | describe_out, rc = runner( 261 | GITS, 262 | [ 263 | "describe", 264 | "--tags", 265 | "--dirty", 266 | "--always", 267 | "--long", 268 | "--match", 269 | "%s%s" % (tag_prefix, TAG_PREFIX_REGEX), 270 | ], 271 | cwd=root, 272 | ) 273 | # --long was added in git-1.5.5 274 | if describe_out is None: 275 | raise NotThisMethod("'git describe' failed") 276 | describe_out = describe_out.strip() 277 | full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) 278 | if full_out is None: 279 | raise NotThisMethod("'git rev-parse' failed") 280 | full_out = full_out.strip() 281 | 282 | pieces = {} 283 | pieces["long"] = full_out 284 | pieces["short"] = full_out[:7] # maybe improved later 285 | pieces["error"] = None 286 | 287 | branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root) 288 | # --abbrev-ref was added in git-1.6.3 289 | if rc != 0 or branch_name is None: 290 | raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") 291 | branch_name = branch_name.strip() 292 | 293 | if branch_name == "HEAD": 294 | # If we aren't exactly on a branch, pick a branch which represents 295 | # the current commit. If all else fails, we are on a branchless 296 | # commit. 297 | branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) 298 | # --contains was added in git-1.5.4 299 | if rc != 0 or branches is None: 300 | raise NotThisMethod("'git branch --contains' returned error") 301 | branches = branches.split("\n") 302 | 303 | # Remove the first line if we're running detached 304 | if "(" in branches[0]: 305 | branches.pop(0) 306 | 307 | # Strip off the leading "* " from the list of branches. 308 | branches = [branch[2:] for branch in branches] 309 | if "master" in branches: 310 | branch_name = "master" 311 | elif not branches: 312 | branch_name = None 313 | else: 314 | # Pick the first branch that is returned. Good or bad. 315 | branch_name = branches[0] 316 | 317 | pieces["branch"] = branch_name 318 | 319 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 320 | # TAG might have hyphens. 321 | git_describe = describe_out 322 | 323 | # look for -dirty suffix 324 | dirty = git_describe.endswith("-dirty") 325 | pieces["dirty"] = dirty 326 | if dirty: 327 | git_describe = git_describe[: git_describe.rindex("-dirty")] 328 | 329 | # now we have TAG-NUM-gHEX or HEX 330 | 331 | if "-" in git_describe: 332 | # TAG-NUM-gHEX 333 | mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) 334 | if not mo: 335 | # unparsable. Maybe git-describe is misbehaving? 
336 | pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out 337 | return pieces 338 | 339 | # tag 340 | full_tag = mo.group(1) 341 | if not full_tag.startswith(tag_prefix): 342 | if verbose: 343 | fmt = "tag '%s' doesn't start with prefix '%s'" 344 | print(fmt % (full_tag, tag_prefix)) 345 | pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( 346 | full_tag, 347 | tag_prefix, 348 | ) 349 | return pieces 350 | pieces["closest-tag"] = full_tag[len(tag_prefix) :] 351 | 352 | # distance: number of commits since tag 353 | pieces["distance"] = int(mo.group(2)) 354 | 355 | # commit: short hex revision ID 356 | pieces["short"] = mo.group(3) 357 | 358 | else: 359 | # HEX: no tags 360 | pieces["closest-tag"] = None 361 | count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) 362 | pieces["distance"] = int(count_out) # total number of commits 363 | 364 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 365 | date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() 366 | # Use only the last line. Previous lines may contain GPG signature 367 | # information. 368 | date = date.splitlines()[-1] 369 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 370 | 371 | return pieces 372 | 373 | 374 | def plus_or_dot(pieces): 375 | """Return a + if we don't already have one, else return a .""" 376 | if "+" in pieces.get("closest-tag", ""): 377 | return "." 378 | return "+" 379 | 380 | 381 | def render_pep440(pieces): 382 | """Build up version string, with post-release "local version identifier". 383 | 384 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 385 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 386 | 387 | Exceptions: 388 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] 389 | """ 390 | if pieces["closest-tag"]: 391 | rendered = pieces["closest-tag"] 392 | if pieces["distance"] or pieces["dirty"]: 393 | rendered += plus_or_dot(pieces) 394 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 395 | if pieces["dirty"]: 396 | rendered += ".dirty" 397 | else: 398 | # exception #1 399 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) 400 | if pieces["dirty"]: 401 | rendered += ".dirty" 402 | return rendered 403 | 404 | 405 | def render_pep440_branch(pieces): 406 | """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . 407 | 408 | The ".dev0" means not master branch. Note that .dev0 sorts backwards 409 | (a feature branch will appear "older" than the master branch). 410 | 411 | Exceptions: 412 | 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] 413 | """ 414 | if pieces["closest-tag"]: 415 | rendered = pieces["closest-tag"] 416 | if pieces["distance"] or pieces["dirty"]: 417 | if pieces["branch"] != "master": 418 | rendered += ".dev0" 419 | rendered += plus_or_dot(pieces) 420 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 421 | if pieces["dirty"]: 422 | rendered += ".dirty" 423 | else: 424 | # exception #1 425 | rendered = "0" 426 | if pieces["branch"] != "master": 427 | rendered += ".dev0" 428 | rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) 429 | if pieces["dirty"]: 430 | rendered += ".dirty" 431 | return rendered 432 | 433 | 434 | def pep440_split_post(ver): 435 | """Split pep440 version string at the post-release segment. 436 | 437 | Returns the release segments before the post-release and the 438 | post-release version number (or -1 if no post-release segment is present). 
439 | """ 440 | vc = str.split(ver, ".post") 441 | return vc[0], int(vc[1] or 0) if len(vc) == 2 else None 442 | 443 | 444 | def render_pep440_pre(pieces): 445 | """TAG[.postN.devDISTANCE] -- No -dirty. 446 | 447 | Exceptions: 448 | 1: no tags. 0.post0.devDISTANCE 449 | """ 450 | if pieces["closest-tag"]: 451 | if pieces["distance"]: 452 | # update the post release segment 453 | tag_version, post_version = pep440_split_post(pieces["closest-tag"]) 454 | rendered = tag_version 455 | if post_version is not None: 456 | rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) 457 | else: 458 | rendered += ".post0.dev%d" % (pieces["distance"]) 459 | else: 460 | # no commits, use the tag as the version 461 | rendered = pieces["closest-tag"] 462 | else: 463 | # exception #1 464 | rendered = "0.post0.dev%d" % pieces["distance"] 465 | return rendered 466 | 467 | 468 | def render_pep440_post(pieces): 469 | """TAG[.postDISTANCE[.dev0]+gHEX] . 470 | 471 | The ".dev0" means dirty. Note that .dev0 sorts backwards 472 | (a dirty tree will appear "older" than the corresponding clean one), 473 | but you shouldn't be releasing software with -dirty anyways. 474 | 475 | Exceptions: 476 | 1: no tags. 0.postDISTANCE[.dev0] 477 | """ 478 | if pieces["closest-tag"]: 479 | rendered = pieces["closest-tag"] 480 | if pieces["distance"] or pieces["dirty"]: 481 | rendered += ".post%d" % pieces["distance"] 482 | if pieces["dirty"]: 483 | rendered += ".dev0" 484 | rendered += plus_or_dot(pieces) 485 | rendered += "g%s" % pieces["short"] 486 | else: 487 | # exception #1 488 | rendered = "0.post%d" % pieces["distance"] 489 | if pieces["dirty"]: 490 | rendered += ".dev0" 491 | rendered += "+g%s" % pieces["short"] 492 | return rendered 493 | 494 | 495 | def render_pep440_post_branch(pieces): 496 | """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . 497 | 498 | The ".dev0" means not master branch. 499 | 500 | Exceptions: 501 | 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] 502 | """ 503 | if pieces["closest-tag"]: 504 | rendered = pieces["closest-tag"] 505 | if pieces["distance"] or pieces["dirty"]: 506 | rendered += ".post%d" % pieces["distance"] 507 | if pieces["branch"] != "master": 508 | rendered += ".dev0" 509 | rendered += plus_or_dot(pieces) 510 | rendered += "g%s" % pieces["short"] 511 | if pieces["dirty"]: 512 | rendered += ".dirty" 513 | else: 514 | # exception #1 515 | rendered = "0.post%d" % pieces["distance"] 516 | if pieces["branch"] != "master": 517 | rendered += ".dev0" 518 | rendered += "+g%s" % pieces["short"] 519 | if pieces["dirty"]: 520 | rendered += ".dirty" 521 | return rendered 522 | 523 | 524 | def render_pep440_old(pieces): 525 | """TAG[.postDISTANCE[.dev0]] . 526 | 527 | The ".dev0" means dirty. 528 | 529 | Exceptions: 530 | 1: no tags. 0.postDISTANCE[.dev0] 531 | """ 532 | if pieces["closest-tag"]: 533 | rendered = pieces["closest-tag"] 534 | if pieces["distance"] or pieces["dirty"]: 535 | rendered += ".post%d" % pieces["distance"] 536 | if pieces["dirty"]: 537 | rendered += ".dev0" 538 | else: 539 | # exception #1 540 | rendered = "0.post%d" % pieces["distance"] 541 | if pieces["dirty"]: 542 | rendered += ".dev0" 543 | return rendered 544 | 545 | 546 | def render_git_describe(pieces): 547 | """TAG[-DISTANCE-gHEX][-dirty]. 548 | 549 | Like 'git describe --tags --dirty --always'. 550 | 551 | Exceptions: 552 | 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) 553 | """ 554 | if pieces["closest-tag"]: 555 | rendered = pieces["closest-tag"] 556 | if pieces["distance"]: 557 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 558 | else: 559 | # exception #1 560 | rendered = pieces["short"] 561 | if pieces["dirty"]: 562 | rendered += "-dirty" 563 | return rendered 564 | 565 | 566 | def render_git_describe_long(pieces): 567 | """TAG-DISTANCE-gHEX[-dirty]. 568 | 569 | Like 'git describe --tags --dirty --always --long'. 570 | The distance/hash is unconditional. 571 | 572 | Exceptions: 573 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 574 | """ 575 | if pieces["closest-tag"]: 576 | rendered = pieces["closest-tag"] 577 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 578 | else: 579 | # exception #1 580 | rendered = pieces["short"] 581 | if pieces["dirty"]: 582 | rendered += "-dirty" 583 | return rendered 584 | 585 | 586 | def render(pieces, style): 587 | """Render the given version pieces into the requested style.""" 588 | if pieces["error"]: 589 | return { 590 | "version": "unknown", 591 | "full-revisionid": pieces.get("long"), 592 | "dirty": None, 593 | "error": pieces["error"], 594 | "date": None, 595 | } 596 | 597 | if not style or style == "default": 598 | style = "pep440" # the default 599 | 600 | if style == "pep440": 601 | rendered = render_pep440(pieces) 602 | elif style == "pep440-branch": 603 | rendered = render_pep440_branch(pieces) 604 | elif style == "pep440-pre": 605 | rendered = render_pep440_pre(pieces) 606 | elif style == "pep440-post": 607 | rendered = render_pep440_post(pieces) 608 | elif style == "pep440-post-branch": 609 | rendered = render_pep440_post_branch(pieces) 610 | elif style == "pep440-old": 611 | rendered = render_pep440_old(pieces) 612 | elif style == "git-describe": 613 | rendered = render_git_describe(pieces) 614 | elif style == "git-describe-long": 615 | rendered = render_git_describe_long(pieces) 616 | else: 617 | raise ValueError("unknown style '%s'" % style) 618 | 619 | return { 620 | "version": rendered, 621 | "full-revisionid": pieces["long"], 622 | "dirty": pieces["dirty"], 623 | "error": None, 624 | "date": pieces.get("date"), 625 | } 626 | 627 | 628 | def get_versions(): 629 | """Get version information or return default if unable to do so.""" 630 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 631 | # __file__, we can work backwards from there to the root. Some 632 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 633 | # case we can only use expanded keywords. 634 | 635 | cfg = get_config() 636 | verbose = cfg.verbose 637 | 638 | try: 639 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) 640 | except NotThisMethod: 641 | pass 642 | 643 | try: 644 | root = os.path.realpath(__file__) 645 | # versionfile_source is the relative path from the top of the source 646 | # tree (where the .git directory might live) to this file. Invert 647 | # this to find the root from __file__. 
648 | for _ in cfg.versionfile_source.split("/"): 649 | root = os.path.dirname(root) 650 | except NameError: 651 | return { 652 | "version": "0+unknown", 653 | "full-revisionid": None, 654 | "dirty": None, 655 | "error": "unable to find root of source tree", 656 | "date": None, 657 | } 658 | 659 | try: 660 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 661 | return render(pieces, cfg.style) 662 | except NotThisMethod: 663 | pass 664 | 665 | try: 666 | if cfg.parentdir_prefix: 667 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 668 | except NotThisMethod: 669 | pass 670 | 671 | return { 672 | "version": "0+unknown", 673 | "full-revisionid": None, 674 | "dirty": None, 675 | "error": "unable to compute version", 676 | "date": None, 677 | } 678 | -------------------------------------------------------------------------------- /oceanmesh/boundary.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | import numpy as np 3 | 4 | from .edges import get_winded_boundary_edges 5 | 6 | __all__ = ["identify_ocean_boundary_sections"] 7 | 8 | 9 | def identify_ocean_boundary_sections( 10 | points, 11 | cells, 12 | topobathymetry, 13 | depth_threshold=-50.0, 14 | min_nodes_threshold=10, 15 | plot=False, 16 | ): 17 | """Identify the contiguous sections on the ocean boundary based on depth 18 | that could be forced in a numerical model as ocean-type boundaries (e.g., elevation-specified) 19 | 20 | Parameters 21 | ---------- 22 | points: numpy.ndarray 23 | Array of points (x,y) 24 | cells : numpy.ndarray 25 | Array of cells 26 | topobathymetry : numpy.ndarray 27 | Array of topobathymetry values (depth below datum negative) 28 | depth_threshold : float, optional 29 | Depth threshold to identify ocean boundary nodes, by default -50 m below the datum 30 | min_nodes_threshold : int, optional 31 | Minimum number of nodes to be considered a boundary section, by default 10 32 | plot : bool, optional 33 | Plot the mesh and the identified boundary sections, by default False 34 | 35 | Returns 36 | -------- 37 | boundary_sections : list 38 | List of tuples of the nodes that define the ocean boundary sections 39 | Note these map back into the points array. 
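Examples
--------
A sketch of a typical call; ``points``, ``cells``, and ``topobathymetry``
here are assumed to come from a mesh that was already generated and are
not defined in this docstring:

>>> sections = identify_ocean_boundary_sections(
...     points, cells, topobathymetry, depth_threshold=-10.0, min_nodes_threshold=5
... )
>>> for start_node, end_node in sections:
...     print(start_node, end_node)  # node indices into `points`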
40 | 41 | """ 42 | # Identify the nodes on the boundary of the mesh 43 | boundary_edges = get_winded_boundary_edges(cells) 44 | boundary_edges = boundary_edges.flatten() 45 | unique_indexes = np.unique(boundary_edges, return_index=True)[1] 46 | boundary_nodes_unmasked = [ 47 | boundary_edges[unique_index] for unique_index in sorted(unique_indexes) 48 | ] 49 | # Extract the bathymetry values at the boundary nodes 50 | bathymetry_on_boundary = topobathymetry[boundary_nodes_unmasked] 51 | # Append a NaN value to the array to align with the original 52 | bathymetry_on_boundary = np.append(bathymetry_on_boundary, np.nan) 53 | stops = np.nonzero(bathymetry_on_boundary <= depth_threshold)[0] 54 | 55 | # Plot the mesh 56 | if plot: 57 | fig, ax = plt.subplots() 58 | ax.triplot(points[:, 0], points[:, 1], cells, color="k", lw=0.1) 59 | 60 | first = True 61 | boundary_sections = [] 62 | start_node = None 63 | end_node = None 64 | for idx, (s1, s2) in enumerate(zip(stops[:-1], stops[1:])): 65 | if s2 - s1 < min_nodes_threshold: 66 | if first: 67 | start_node = s1 68 | first = False 69 | # We've reached the end of the list 70 | elif idx == len(stops) - 2: 71 | # Append the start and end nodes to the boundary sections list 72 | end_node = s2 73 | boundary_sections.append([start_node, end_node]) 74 | # It's not the end and we haven't found a section yet 75 | else: 76 | end_node = s2 77 | elif s2 - s1 >= min_nodes_threshold and not first: 78 | # Append the start and end nodes to the boundary sections list 79 | boundary_sections.append([start_node, end_node]) 80 | # Reset the start node, the last node didn't satisfy the threshold 81 | # and it appears we have a new section 82 | start_node = s1 83 | first = True 84 | # We've reached the end of the list 85 | elif idx == len(stops) - 2: 86 | # Save the end node 87 | end_node = s2 88 | # Append the start and end nodes to the boundary sections list and finish 89 | boundary_sections.append([start_node, end_node]) 90 | if plot: 91 | for s1, s2 in boundary_sections: 92 | ax.scatter( 93 | points[boundary_nodes_unmasked[s1:s2], 0], 94 | points[boundary_nodes_unmasked[s1:s2], 1], 95 | 5, 96 | c="r", 97 | ) 98 | ax.set_title("Identified ocean boundary sections") 99 | plt.show() 100 | 101 | # Map back to the original node indices associated with the points array 102 | boundary_sections = [ 103 | (boundary_nodes_unmasked[s1], boundary_nodes_unmasked[s2]) 104 | for s1, s2 in boundary_sections 105 | ] 106 | return boundary_sections 107 | -------------------------------------------------------------------------------- /oceanmesh/clean.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import logging 3 | 4 | import numpy as np 5 | import scipy.sparse as spsparse 6 | 7 | from . import edges 8 | from .fix_mesh import fix_mesh, simp_qual, simp_vol 9 | 10 | logger = logging.getLogger(__name__) 11 | 12 | __all__ = [ 13 | "make_mesh_boundaries_traversable", 14 | "delete_interior_faces", 15 | "delete_exterior_faces", 16 | "delete_faces_connected_to_one_face", 17 | "delete_boundary_faces", 18 | "laplacian2", 19 | "mesh_clean", 20 | ] 21 | 22 | 23 | def mesh_clean( 24 | points, 25 | cells, 26 | min_element_qual=0.01, 27 | min_percent_disconnected_area=0.05, 28 | max_iter=20, 29 | tol=0.01, 30 | pfix=None, 31 | ): 32 | """Clean a mesh by removing bad quality elements and boundary faces. 33 | 34 | Parameters 35 | ---------- 36 | points : array-like 37 | Mesh vertices. 38 | cells : array-like 39 | Mesh connectivity table. 
40 | min_element_qual : float, optional 41 | Minimum quality of elements to enforce; elements below this quality are deleted. The default is 0.01. 42 | min_percent_disconnected_area : float, optional 43 | Delete disconnected areas smaller than this threshold. The default is 0.05 x 44 | the total area of the mesh. 45 | max_iter : int, optional 46 | Maximum number of iterations for the Laplacian smoothing. The default is 20. 47 | tol : float, optional 48 | Tolerance for the Laplacian smoothing. The default is 0.01. Laplacian terminates 49 | after max_iter or when the maximum change in any vertex is less than tol. 50 | pfix : array-like, optional 51 | Coordinates of points to fix during the Laplacian smoothing. The default is None. 52 | 53 | Returns 54 | ------- 55 | points : array-like 56 | Mesh vertices cleaned. 57 | cells : array-like 58 | Mesh connectivity table cleaned. 59 | 60 | """ 61 | points, cells = make_mesh_boundaries_traversable( 62 | points, cells, min_disconnected_area=min_percent_disconnected_area 63 | ) 64 | points, cells = delete_boundary_faces(points, cells, min_qual=min_element_qual) 65 | points, cells = delete_faces_connected_to_one_face(points, cells) 66 | points, cells = laplacian2(points, cells, max_iter=max_iter, tol=tol, pfix=pfix) 67 | points, cells = make_mesh_boundaries_traversable( 68 | points, cells, min_disconnected_area=min_percent_disconnected_area 69 | ) 70 | points, cells, _ = fix_mesh(points, cells, delete_unused=True) 71 | return points, cells 72 | 73 | 74 | def _arg_sortrows(arr): 75 | """Argsort rows by multiple columns, like MATLAB's sortrows""" 76 | i = arr[:, 1].argsort() # First sort doesn't need to be stable. 77 | j = arr[i, 0].argsort(kind="mergesort") 78 | return i[j] 79 | 80 | 81 | def _face_to_face(t): 82 | """Face to face connectivity table. 83 | Face `i` is connected to faces `ftof[ix[i]:ix[i+1], 1]` 84 | Connected here means the two faces share a mutual edge. 85 | 86 | Parameters 87 | ---------- 88 | t: array-like 89 | Mesh connectivity table. 90 | 91 | Returns 92 | ------- 93 | ftof: array-like 94 | Face numbers connected to faces. 95 | ix: array-like 96 | indices into `ftof` 97 | 98 | """ 99 | nt = len(t) 100 | t = np.sort(t, axis=1) 101 | e = t[:, [[0, 1], [0, 2], [1, 2]]].reshape((nt * 3, 2)) 102 | trinum = np.repeat(np.arange(nt), 3) 103 | j = _arg_sortrows(e) 104 | e = e[j, :] 105 | trinum = trinum[j] 106 | k = np.argwhere(~np.diff(e, axis=0).any(axis=1)) 107 | ftof = np.concatenate((trinum[k], trinum[k + 1]), axis=1) 108 | dmy1 = ftof[:, 0].argsort() 109 | dmy2 = ftof[:, 1].argsort() 110 | tmp = np.vstack( 111 | ( 112 | ftof[dmy1, :], 113 | np.fliplr(ftof[dmy2]), 114 | np.column_stack((np.arange(nt), np.arange(nt))), 115 | ) 116 | ) 117 | j = _arg_sortrows(tmp) 118 | ftof = tmp[j, :] 119 | ix = np.argwhere(np.diff(ftof[:, 0])) + 1 120 | ix = np.insert(ix, 0, 0) 121 | ix = np.append(ix, len(ftof)) 122 | return ftof, ix 123 | 124 | 125 | def _vertex_to_face(vertices, faces): 126 | """Determine which faces are connected to which vertices. 127 | 128 | Parameters 129 | ---------- 130 | vertices: array-like 131 | Vertices of the mesh. 132 | faces: array-like 133 | Mesh connectivity table. 134 | 135 | Returns 136 | ------- 137 | vtoc: array-like 138 | face numbers connected to vertices. 
139 | ix: array-like 140 | indices into `vtoc` 141 | 142 | """ 143 | num_faces = len(faces) 144 | 145 | ext = np.tile(np.arange(0, num_faces), (3, 1)).reshape(-1, order="F") 146 | ve = np.reshape(faces, (-1,)) 147 | ve = np.vstack((ve, ext)).T 148 | ve = ve[ve[:, 0].argsort(), :] 149 | 150 | idx = np.insert(np.diff(ve[:, 0]), 0, 0) 151 | vtoc_pointer = np.argwhere(idx) 152 | vtoc_pointer = np.insert(vtoc_pointer, 0, 0) 153 | vtoc_pointer = np.append(vtoc_pointer, num_faces * 3) 154 | 155 | vtoc = ve[:, 1] 156 | 157 | return vtoc, vtoc_pointer 158 | 159 | 160 | def make_mesh_boundaries_traversable(vertices, faces, min_disconnected_area=0.05): 161 | """ 162 | A mesh described by vertices and faces is "cleaned" and returned. 163 | Alternates between checking "interior" and "exterior" portions 164 | of the mesh until convergence is obtained. Convergence is defined as: 165 | having no vertices connected to more than two boundary edges. 166 | 167 | Parameters 168 | ---------- 169 | vertices: array-like 170 | The vertices of the "uncleaned" mesh. 171 | faces: array-like 172 | The "uncleaned" mesh connectivity. 173 | min_disconnected_area: float, optional 174 | A decimal percentage (max 1.0) used to decide whether to keep or remove 175 | disconnected portions of the meshing domain. 176 | 177 | 178 | Returns 179 | ------- 180 | vertices: array-like 181 | The vertices of the "cleaned" mesh. 182 | 183 | faces: array-like 184 | The "cleaned" mesh connectivity. 185 | 186 | Notes 187 | ----- 188 | 189 | Interior Check: Deletes faces that are within the interior of the 190 | mesh so that no vertices are connected to more than two boundary edges. For 191 | example, a barrier island could become very thin in a middle portion so that you 192 | have a vertex connected to two faces but four boundary edges, in a 193 | bow-tie type formation. 194 | 195 | This code will delete one of those connecting 196 | faces to ensure the spit is `clean` in the sense that two boundary edges 197 | are connected to that vertex. In the case of a choice between faces to 198 | delete, the one with the lowest quality is chosen. 199 | 200 | Exterior Check: Finds small disjoint portions of the mesh and removes 201 | them using a depth-first search. The individual disjoint portions are 202 | removed based on `min_disconnected_area` which is a decimal representing a fractional 203 | threshold component of the total mesh. 
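Examples
--------
A sketch of a typical cleaning pass; ``vertices`` and ``faces`` are
assumed to come from an earlier meshing step and are not defined here:

>>> vertices, faces = make_mesh_boundaries_traversable(
...     vertices, faces, min_disconnected_area=0.05
... )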
204 | 205 | """ 206 | 207 | boundary_edges, boundary_vertices = _external_topology(vertices, faces) 208 | 209 | logger.info("Performing mesh cleaning operations...") 210 | # NB: when this inequality is not met, the mesh boundary is not valid and non-manifold 211 | while len(boundary_edges) > len(boundary_vertices): 212 | faces = delete_exterior_faces(vertices, faces, min_disconnected_area) 213 | vertices, faces, _ = fix_mesh(vertices, faces, delete_unused=True) 214 | 215 | faces, _ = delete_interior_faces(vertices, faces) 216 | vertices, faces, _ = fix_mesh(vertices, faces, delete_unused=True) 217 | 218 | boundary_edges, boundary_vertices = _external_topology(vertices, faces) 219 | 220 | return vertices, faces 221 | 222 | 223 | def _external_topology(vertices, faces): 224 | """Get edges and vertices that make up the boundary of the mesh""" 225 | boundary_edges = edges.get_boundary_edges(faces) 226 | boundary_vertices = vertices[np.unique(boundary_edges.reshape(-1))] 227 | return boundary_edges, boundary_vertices 228 | 229 | 230 | def delete_exterior_faces(vertices, faces, min_disconnected_area): 231 | """Deletes portions of the mesh that are "outside" or not 232 | connected to the majority which represent a fractional 233 | area less than `min_disconnected_area`. 234 | """ 235 | t1 = copy.deepcopy(faces) 236 | t = np.array([]) 237 | # Calculate the total area of the patch 238 | A = np.sum(simp_vol(vertices, faces)) 239 | An = A 240 | # Based on area proportion 241 | while (An / A) > min_disconnected_area: 242 | # Perform the depth-First-Search to get `nflag` 243 | nflag = _depth_first_search(t1) 244 | 245 | # Get new triangulation and its area 246 | t2 = t1[nflag == 1, :] 247 | An = np.sum(simp_vol(vertices, t2)) 248 | 249 | # If large enough, retain this component 250 | if (An / A) > min_disconnected_area: 251 | if len(t) == 0: 252 | t = t2 253 | else: 254 | t = np.concatenate((t, t2)) 255 | 256 | # Delete where nflag == 1 from tmp t1 mesh 257 | t1 = np.delete(t1, nflag == 1, axis=0) 258 | logger.info( 259 | f"ACCEPTED: Deleting {int(np.sum(nflag == 0))} faces outside the main mesh" 260 | ) 261 | 262 | # Calculate the remaining area 263 | An = np.sum(simp_vol(vertices, t1)) 264 | 265 | return t 266 | 267 | 268 | def delete_interior_faces(vertices, faces): 269 | """Delete interior faces that have vertices with more than 270 | two vertices declared as boundary vertices 271 | """ 272 | # Get updated boundary topology 273 | boundary_edges, boundary_vertices = _external_topology(vertices, faces) 274 | etbv = boundary_edges.reshape(-1) 275 | # Count how many edges a vertex appears in. 276 | uebtv, count = np.unique(etbv, return_counts=True) 277 | # Get the faces connected to the vertices 278 | vtoc, nne = _vertex_to_face(vertices, faces) 279 | # Vertices which appear more than twice (implying they are shared by 280 | # more than two boundary edges) 281 | del_face_idx = [] 282 | for ix in uebtv[count > 2]: 283 | conn_faces = vtoc[nne[ix] : nne[ix + 1]] 284 | del_face = [] 285 | for conn_face in conn_faces: 286 | II = etbv == faces[conn_face, 0] 287 | JJ = etbv == faces[conn_face, 1] 288 | KK = etbv == faces[conn_face, 2] 289 | if np.any(II) and np.any(JJ) and np.any(KK): 290 | del_face.append(conn_face) 291 | 292 | if len(del_face) == 1: 293 | del_face_idx.append(del_face[0]) 294 | elif len(del_face) > 1: 295 | # Delete worst quality qualifying face. 
296 | qual = simp_qual(vertices, faces[del_face]) 297 | idx = np.argmin(qual) 298 | del_face_idx.append(del_face[idx]) 299 | else: 300 | # No connected faces have all vertices on boundary edge so we 301 | # select the worst quality connecting face. 302 | qual = simp_qual(vertices, faces[conn_faces]) 303 | idx = np.argmin(qual) 304 | del_face_idx.append(conn_faces[idx]) 305 | 306 | logger.info(f"ACCEPTED: Deleting {len(del_face_idx)} faces inside the main mesh") 307 | faces = np.delete(faces, del_face_idx, 0) 308 | 309 | return faces, del_face_idx 310 | 311 | 312 | def _depth_first_search(faces): 313 | """Depth-First-Search (DFS) across the triangulation""" 314 | 315 | # Get graph connectivity. 316 | ftof, idx = _face_to_face(faces) 317 | 318 | nt = len(faces) 319 | 320 | # select a random face 321 | selected = np.random.randint(0, nt, 1) 322 | 323 | nflag = np.zeros(nt) 324 | 325 | searching = True 326 | 327 | visited = [] 328 | visited.append(*selected) 329 | 330 | # Traverse through connected mesh 331 | while searching: 332 | searching = False 333 | for c in visited: 334 | # Flag the current face as visited 335 | nflag[c] = 1 336 | # Search connected faces 337 | neis = [nei for nei in ftof[idx[c] : idx[c + 1], 1]] 338 | # Flag connected faces as visited 339 | for nei in neis: 340 | if nflag[nei] == 0: 341 | nflag[nei] = 1 342 | # Append visited cells to a list 343 | visited.append(nei) 344 | searching = True 345 | return nflag 346 | 347 | 348 | def delete_faces_connected_to_one_face(vertices, faces, max_iter=np.inf): 349 | """Iteratively deletes faces connected to one face. 350 | 351 | Parameters 352 | ---------- 353 | vertices: array-like 354 | The vertices of the "uncleaned" mesh. 355 | faces: array-like 356 | The "uncleaned" mesh connectivity. 357 | max_iter: float, optional 358 | The number of iterations to repeatedly delete faces connected to one face 359 | If the mesh is well-formed, this will converge in a finite 360 | number of iterations. Default is np.inf. 361 | 362 | Returns 363 | ------- 364 | vertices: array-like 365 | The vertices of the "cleaned" mesh. 366 | 367 | faces: array-like 368 | The "cleaned" mesh connectivity. 369 | 370 | """ 371 | assert max_iter > 0, "max_iter set too low" 372 | 373 | count = 0 374 | start_len = len(faces) 375 | while count < max_iter: 376 | _, idx = _face_to_face(faces) 377 | nn = np.diff(idx, 1) 378 | delete = np.argwhere(nn == 2) 379 | if len(delete) > 0: 380 | logger.info(f"ACCEPTED: Deleting {int(len(delete))} faces") 381 | faces = np.delete(faces, delete, axis=0) 382 | vertices, faces, _ = fix_mesh(vertices, faces, delete_unused=True) 383 | count += 1 384 | else: 385 | break 386 | logger.info( 387 | f"Deleted {int(start_len - len(faces))} faces after {int(count)} iterations" 388 | ) 389 | return vertices, faces 390 | 391 | 392 | def _sparse(Ix, J, S, shape=None, dtype=None): 393 | """ 394 | Similar to MATLAB's SPARSE(I, J, S, ...) 395 | """ 396 | 397 | # Advanced usage: allow J and S to be scalars. 398 | if np.isscalar(J): 399 | x = J 400 | J = np.empty(Ix.shape, dtype=int) 401 | J.fill(x) 402 | if np.isscalar(S): 403 | x = S 404 | S = np.empty(Ix.shape) 405 | S.fill(x) 406 | 407 | # Turn these into 1-d arrays for processing. 
408 | S = S.flat 409 | II = Ix.flat 410 | J = J.flat 411 | return spsparse.coo_matrix((S, (II, J)), shape, dtype) 412 | 413 | 414 | def laplacian2(vertices, entities, max_iter=20, tol=0.01, pfix=None): 415 | """Move vertices to the average position of their connected neighbors 416 | to improve geometric entity quality. 417 | :param vertices: vertex coordinates of mesh 418 | :type vertices: numpy.ndarray[`float` x dim] 419 | :param entities: the mesh connectivity 420 | :type entities: numpy.ndarray[`int` x (dim+1)] 421 | :param max_iter: maximum number of iterations to perform 422 | :type max_iter: `int`, optional 423 | :param tol: iterations will cease when movement < tol 424 | :type tol: `float`, optional 425 | :param pfix: coordinates that you don't wish to move 426 | :type pfix: array-like 427 | :return vertices: updated vertices of mesh 428 | :rtype: numpy.ndarray[`float` x dim] 429 | :return: entities: updated mesh connectivity 430 | :rtype: numpy.ndarray[`int` x (dim+1)] 431 | """ 432 | if vertices.ndim != 2: 433 | raise NotImplementedError("Laplacian smoothing only works in 2D for now") 434 | 435 | def _closest_node(node, nodes): 436 | nodes = np.asarray(nodes) 437 | deltas = nodes - node 438 | dist_2 = np.einsum("ij,ij->i", deltas, deltas) 439 | return np.argmin(dist_2) 440 | 441 | eps = np.finfo(float).eps 442 | 443 | n = len(vertices) 444 | 445 | S = _sparse( 446 | entities[:, [0, 0, 1, 1, 2, 2]], 447 | entities[:, [1, 2, 0, 2, 0, 1]], 448 | 1, 449 | shape=(n, n), 450 | ) 451 | # bnd = get_boundary_vertices(entities) 452 | edge = edges.get_edges(entities) 453 | boundary_edges, _ = _external_topology(vertices, entities) 454 | bnd = np.unique(boundary_edges.reshape(-1)) 455 | if pfix is not None: 456 | ifix = [] 457 | for fix in pfix: 458 | ifix.append(_closest_node(fix, vertices)) 459 | ifix = np.asarray(ifix) 460 | bnd = np.concatenate((bnd, ifix)) 461 | 462 | W = np.sum(S, 1) 463 | if np.any(W == 0): 464 | print("Invalid mesh. Disjoint vertices found. Returning", flush=True) 465 | print(np.argwhere(W == 0), flush=True) 466 | return vertices, entities 467 | 468 | L = np.sqrt( 469 | np.sum(np.square(vertices[edge[:, 0], :] - vertices[edge[:, 1], :]), axis=1) 470 | ) 471 | L[L < eps] = eps 472 | # note: L is kept 1-D so that (Lnew - L) below is an elementwise edge comparison 473 | for it in range(max_iter): 474 | pnew = np.divide(S @ np.array(vertices), np.hstack((W, W))) 475 | pnew[bnd, :] = vertices[bnd, :] 476 | vertices = pnew 477 | Lnew = np.sqrt( 478 | np.sum(np.square(vertices[edge[:, 0], :] - vertices[edge[:, 1], :]), axis=1) 479 | ) 480 | Lnew[Lnew < eps] = eps 481 | move = np.amax(np.divide((Lnew - L), Lnew)) 482 | if move < tol: 483 | logger.info(f"Movement tolerance reached after {it} iterations... exiting") 484 | break 485 | L = Lnew 486 | vertices = np.array(vertices) 487 | return vertices, entities 488 | 489 | 490 | def get_boundary_entities(vertices, entities, dim=2): 491 | """Determine the entities that lie on the boundary of the mesh. 492 | :param vertices: vertex coordinates of mesh 493 | :type vertices: numpy.ndarray[`float` x dim] 494 | :param entities: the mesh connectivity 495 | :type entities: numpy.ndarray[`int` x (dim+1)] 496 | :param dim: dimension of the mesh 497 | :type dim: `int`, optional 498 | :return: bele: indices of entities on the boundary of the mesh. 
499 |     :rtype: numpy.ndarray[`int` x 1]
500 |     """
501 |     boundary_edges, _ = _external_topology(vertices, entities)
502 |     boundary_vertices = np.unique(boundary_edges.reshape(-1))
503 |     vtoe, ptr = _vertex_to_face(vertices, entities)
504 |     bele = np.array([], dtype=int)
505 |     for vertex in boundary_vertices:
506 |         for ele in vtoe[ptr[vertex] : ptr[vertex + 1]]:
507 |             bele = np.append(bele, ele)
508 |     bele = np.unique(bele)
509 |     return bele
510 | 
511 | 
512 | def delete_boundary_faces(vertices, entities, dim=2, min_qual=0.10, verbose=1):
513 |     """Delete boundary faces with poor geometric quality (i.e., quality < `min_qual`)
514 |     :param vertices: vertex coordinates of mesh
515 |     :type vertices: numpy.ndarray[`float` x dim]
516 |     :param entities: the mesh connectivity
517 |     :type entities: numpy.ndarray[`int` x (dim+1)]
518 |     :param dim: dimension of the mesh
519 |     :type dim: `int`, optional
520 |     :param min_qual: minimum geometric quality below which a boundary face is considered "poor" and is deleted
521 |     :type min_qual: `float`, optional
522 |     :return: vertices: updated vertex array of mesh
523 |     :rtype: numpy.ndarray[`float` x dim]
524 |     :return: entities: updated mesh connectivity
525 |     :rtype: numpy.ndarray[`int` x (dim+1)]
526 |     """
527 |     qual = simp_qual(vertices, entities)
528 |     bele = get_boundary_entities(vertices, entities, dim=dim)
529 |     qualBou = qual[bele]
530 |     delete = qualBou < min_qual
531 |     logger.info(f"Deleting {np.sum(delete)} poor quality boundary entities...")
532 |     delete = np.argwhere(delete == 1)
533 |     entities = np.delete(entities, bele[delete], axis=0)
534 |     vertices, entities, _ = fix_mesh(vertices, entities, delete_unused=True, dim=dim)
535 |     return vertices, entities
536 | 
--------------------------------------------------------------------------------
/oceanmesh/cpp/HamiltonJacobi.cpp:
--------------------------------------------------------------------------------
1 | /* solve the Hamilton-Jacobi equation to smooth a raster field
2 | Persson, PO. Engineering with Computers (2006) 22: 95.
3 | https://doi.org/10.1007/s00366-006-0014-1 kjr, usp, 2019 4 | */ 5 | #include 6 | #include 7 | #include 8 | #include 9 | 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | 20 | #include 21 | 22 | #define EPS 1e-9 23 | 24 | namespace py = pybind11; 25 | using py::ssize_t; 26 | 27 | // for column major order component indexes [zpos][col][row] returning linear index (all indexes 1-based) 28 | int sub2ind(const int row, const int col, const int zpos, const int nrows, const int ncols) { 29 | return 1 + (zpos - 1)*ncols*nrows + (col - 1)*nrows + (row - 1); 30 | } 31 | 32 | // for column major order linear index returning [k][j][i] component indexes (all indexes 1-based) 33 | void ind2sub(const int index, const int nrows, const int ncols, int *i, int *j, int *k) { 34 | int nij = nrows * ncols; 35 | int ij = (index - 1) % nij; 36 | *k = 1 + (index - 1) / nij; 37 | *j = 1 + ij / nrows; 38 | *i = 1 + ij % nrows; 39 | assert(*i > 0); 40 | assert(*j > 0); 41 | assert(*k > 0); 42 | } 43 | 44 | // 45 | bool IsNegative(int i) { return (i < 0); } 46 | 47 | // find indices in linear time where A==value 48 | std::vector findIndices(const std::vector &A, const int value) { 49 | std::vector B; 50 | for (std::size_t i = 0; i < A.size(); i++) { 51 | if (A[i] == value) { 52 | B.push_back((int)i); 53 | } 54 | } 55 | return B; 56 | } 57 | 58 | // solve the Hamilton-Jacobi equation 59 | std::vector c_gradient_limit(const std::vector &dims, 60 | const double &elen, 61 | const double &dfdx, const int &imax, 62 | const std::vector &ffun) { 63 | 64 | assert(dims[0] > 0 && dims[1] > 0 && dims[2] > 0); 65 | 66 | std::vector aset(dims[0] * dims[1] * dims[2], -1); 67 | 68 | double ftol = *(std::min_element(ffun.begin(), ffun.end())) * std::sqrt(EPS); 69 | 70 | std::array npos; 71 | npos.fill(0); 72 | 73 | double elend = elen * std::sqrt(2.0); 74 | 75 | // allocate output 76 | std::vector ffun_s; 77 | ffun_s.resize(ffun.size()); 78 | ffun_s = ffun; 79 | 80 | 81 | int maxSz = dims[0] * dims[1] * dims[2]; 82 | 83 | for (int iter = 0; iter < imax; iter++) { 84 | 85 | //------------------------- find "active" nodes this pass 86 | auto aidx = findIndices(aset, iter - 1); 87 | 88 | //------------------------- convergence 89 | if (aidx.empty()) { 90 | // std::cout << "INFO: Converged in " << iter << " iterations." << 91 | // std::endl; 92 | break; 93 | } 94 | 95 | for (std::size_t i = 0; i < aidx.size(); i++) { 96 | 97 | //----- map triply indexed to singly indexed 98 | int inod = aidx[i] + 1; // add one to match 1-based indexing 99 | 100 | // NOTE: 1-based indexing is used by ind2sub(), sub2ind(), and min/max clamping below. 101 | // could easily refactor to maintain 0-based indexing throughout. 
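      // Illustrative round trip for dims = {3, 4, 1} (nrows=3, ncols=4):
      //   sub2ind(row=2, col=3, zpos=1, 3, 4) = 1 + 0 + (3-1)*3 + (2-1) = 8
      //   ind2sub(8, 3, 4, ...) recovers i=2, j=3, k=1.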
102 | 103 | //----- calculate the i,j,k position 104 | int ipos, jpos, kpos; 105 | ind2sub(inod, dims[0], dims[1], &ipos, &jpos, &kpos); 106 | 107 | // ---- gather indices centered on inod 108 | npos[0] = inod; 109 | // right 110 | npos[1] = 111 | sub2ind(ipos, std::min(jpos + 1, dims[1]), kpos, dims[0], dims[1]); 112 | // left 113 | npos[2] = sub2ind(ipos, std::max(jpos - 1, 1), kpos, dims[0], dims[1]); 114 | // top 115 | npos[3] = sub2ind(std::min(ipos + 1, dims[0]), jpos, kpos, dims[0], dims[1]); 116 | // bottom 117 | npos[4] = sub2ind(std::max(ipos - 1, 1), jpos, kpos, dims[0], dims[1]); 118 | 119 | // top right diagonal 120 | npos[5] = sub2ind(std::min(ipos +1, dims[0]), std::min(jpos +1, dims[1]), kpos, dims[0], dims[1]); 121 | // top left diagonal 122 | npos[6] = sub2ind(std::max(ipos -1 , 1), std::min(jpos +1, dims[1]), kpos, dims[0], dims[1]); 123 | // bottom left diagonal 124 | npos[7] = sub2ind(std::max(ipos -1 , 1), std::max(jpos -1, 1), kpos, dims[0], dims[1]); 125 | // bottom right diagonal 126 | npos[8] = sub2ind(std::min(ipos +1 , dims[0]), std::min(jpos+1, dims[1]), kpos, dims[0], dims[1]); 127 | 128 | for (std::size_t u = 0; u < 9; u++) // subtract one to revert to 0-based indexing 129 | npos[u]--; 130 | 131 | int nod1 = npos[0]; 132 | assert(nod1 < ffun_s.size()); 133 | assert(nod1 > -1); 134 | 135 | for (std::size_t p = 1; p < 9; p++) { 136 | 137 | int nod2 = npos[p]; 138 | assert(nod2 < ffun_s.size()); 139 | assert(nod2 > -1); 140 | 141 | // use non-diagonal element length for p = [1..4] 142 | double elenp = elen; 143 | 144 | // use diagonal element length for p = [5..8] 145 | if (p > 4) { 146 | elenp = elend; 147 | } 148 | 149 | //----------------- calc. limits about min.-value 150 | if (ffun_s[nod1] > ffun_s[nod2]) { 151 | 152 | double fun1 = ffun_s[nod2] + elenp * dfdx; 153 | if (ffun_s[nod1] > fun1 + ftol) { 154 | ffun_s[nod1] = fun1; 155 | aset[nod1] = iter; 156 | } 157 | 158 | } else { 159 | 160 | double fun2 = ffun_s[nod1] + elenp * dfdx; 161 | if (ffun_s[nod2] > fun2 + ftol) { 162 | ffun_s[nod2] = fun2; 163 | aset[nod2] = iter; 164 | } 165 | } 166 | } 167 | } 168 | // std::cout << "ITER: " << iter << std::endl; 169 | } 170 | return ffun_s; 171 | } 172 | 173 | // Python wrapper 174 | py::array 175 | gradient_limit(py::array_t dims, 176 | const double elen, const double dfdx, const int imax, 177 | py::array_t ffun) { 178 | int num_points = (int)ffun.size(); 179 | 180 | std::vector cffun(num_points); 181 | std::vector cdims(3); 182 | 183 | std::memcpy(cffun.data(), ffun.data(), num_points * sizeof(double)); 184 | std::memcpy(cdims.data(), dims.data(), 3 * sizeof(int)); 185 | 186 | std::vector sffun = c_gradient_limit(cdims, elen, dfdx, imax, cffun); 187 | 188 | ssize_t sodble = (ssize_t)sizeof(double); 189 | std::vector shape = {num_points, 1}; 190 | std::vector strides = {sodble, sodble}; 191 | 192 | // return 2-D NumPy array 193 | return py::array( 194 | py::buffer_info(sffun.data(), /* data as contiguous array */ 195 | sizeof(double), /* size of one scalar */ 196 | py::format_descriptor::format(), /* data type */ 197 | 2, /* number of dimensions */ 198 | shape, /* shape of the matrix */ 199 | strides /* strides for each axis */ 200 | )); 201 | } 202 | 203 | PYBIND11_MODULE(_HamiltonJacobi, m) { 204 | 205 | m.doc() = "pybind11 module for gradient limiting a scalar field"; 206 | 207 | m.def("gradient_limit", &gradient_limit, 208 | "The function which gradient limits a scalar field reshaped to a " 209 | "vector."); 210 | } 211 | 
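// ---------------------------------------------------------------------------
// Usage sketch (not compiled; illustrative only). Once built, this module is
// importable from Python as `_HamiltonJacobi`. The field is passed flattened
// in column-major order to match sub2ind()/ind2sub() above; all numerical
// values below are hypothetical.
//
//   import numpy as np
//   from _HamiltonJacobi import gradient_limit
//
//   nrows, ncols = 200, 150
//   field = np.random.uniform(100.0, 1000.0, (nrows, ncols))  # sizes in meters
//   dims = np.array([nrows, ncols, 1], dtype=np.int32)
//   elen = 50.0    # grid spacing
//   dfdx = 0.15    # allowed size gradient per unit length
//   smoothed = gradient_limit(dims, elen, dfdx, 10000, field.flatten("F"))
//   smoothed = smoothed.reshape((nrows, ncols), order="F")
// ---------------------------------------------------------------------------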
-------------------------------------------------------------------------------- /oceanmesh/cpp/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CHLNDDEV/oceanmesh/478fe78128e8d33a6c0404a276fb6dbad72d0779/oceanmesh/cpp/__init__.py -------------------------------------------------------------------------------- /oceanmesh/cpp/delaunay_class.cpp: -------------------------------------------------------------------------------- 1 | #if _MSC_VER 2 | # pragma warning(disable : 4267) // size_t to int warning on line 91 3 | #endif 4 | 5 | #include 6 | #include 7 | #include 8 | #include 9 | 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | 17 | #include 18 | 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | 25 | namespace py = pybind11; 26 | using py::ssize_t; 27 | 28 | using K = CGAL::Exact_predicates_inexact_constructions_kernel; 29 | using Vb = CGAL::Triangulation_vertex_base_with_info_2; 30 | using Fb = CGAL::Triangulation_face_base_with_info_2; 31 | using Tds = CGAL::Triangulation_data_structure_2; 32 | using DT = CGAL::Delaunay_triangulation_2; 33 | 34 | using Point = K::Point_2; 35 | using Vertex_handle = DT::Vertex_handle; 36 | using Vi = DT::Finite_vertices_iterator; 37 | 38 | template class TypedInputIterator { 39 | public: 40 | using iterator_category = std::input_iterator_tag; 41 | using difference_type = std::ptrdiff_t; 42 | using value_type = T; 43 | using pointer = T *; 44 | using reference = T &; 45 | 46 | explicit TypedInputIterator(py::iterator &py_iter) : py_iter_(py_iter) {} 47 | 48 | explicit TypedInputIterator(py::iterator &&py_iter) : py_iter_(py_iter) {} 49 | 50 | value_type operator*() { return (*py_iter_).template cast(); } 51 | 52 | TypedInputIterator operator++(int) { 53 | auto copy = *this; 54 | ++py_iter_; 55 | return copy; 56 | } 57 | 58 | TypedInputIterator &operator++() { 59 | ++py_iter_; 60 | return *this; 61 | } 62 | 63 | bool operator!=(TypedInputIterator &rhs) { return py_iter_ != rhs.py_iter_; } 64 | 65 | bool operator==(TypedInputIterator &rhs) { return py_iter_ == rhs.py_iter_; } 66 | 67 | private: 68 | py::iterator py_iter_; 69 | }; 70 | 71 | PYBIND11_MODULE(_delaunay_class, m) { 72 | py::class_(m, "Point") 73 | .def(py::init(), py::arg("x"), py::arg("y")) 74 | .def(py::init(), py::arg("x"), py::arg("y")) 75 | .def_property_readonly("x", &Point::x) 76 | .def_property_readonly("y", &Point::y) 77 | .def("__repr__", [](const Point &p) { 78 | std::string r("Point("); 79 | r += boost::lexical_cast(p.x()); 80 | r += ", "; 81 | r += boost::lexical_cast(p.y()); 82 | r += ")"; 83 | return r; 84 | }); 85 | 86 | py::class_(m, "VertexHandle") 87 | .def_property_readonly("point", [](const Vertex_handle &vertex_handle) { 88 | return vertex_handle->point(); 89 | }); 90 | 91 | py::class_
(m, "DelaunayTriangulation") 92 | 93 | .def(py::init()) 94 | 95 | .def("insert", 96 | [](DT &dt, const std::vector &p) { 97 | std::vector> points; 98 | int num_points = p.size() / 2; 99 | // start adding at the end of the current table 100 | int start = dt.number_of_vertices(); 101 | for (std::size_t i = 0; i < num_points; ++i) { 102 | // add index information to form face table later 103 | points.push_back( 104 | std::make_pair(Point(p[i * 2 + 0], p[i * 2 + 1]), start)); 105 | start += 1; 106 | } 107 | return dt.insert(points.begin(), points.end()); 108 | }) 109 | 110 | .def("remove", 111 | [](DT &dt, const std::vector &to_remove) { 112 | int num_to_remove = to_remove.size(); 113 | std::vector handles; 114 | for (Vi vi = dt.finite_vertices_begin(); 115 | vi != dt.finite_vertices_end(); vi++) { 116 | handles.push_back(vi); 117 | } 118 | for (std::size_t i = 0; i < num_to_remove; ++i) { 119 | dt.remove(handles[to_remove[i]]); 120 | } 121 | return dt; 122 | }) 123 | 124 | .def("move", 125 | [](DT &dt, const std::vector &to_move, 126 | const std::vector &new_positions) { 127 | std::vector handles; 128 | std::vector new_pos; 129 | int num_to_move = to_move.size(); 130 | // store all vertex handles 131 | for (Vi vi = dt.finite_vertices_begin(); 132 | vi != dt.finite_vertices_end(); vi++) { 133 | handles.push_back(vi); 134 | } 135 | // store new positions as a vector of Point 136 | for (std::size_t i = 0; i < num_to_move; ++i) { 137 | new_pos.push_back( 138 | Point(new_positions[2 * i], new_positions[2 * i + 1])); 139 | } 140 | // 141 | for (std::size_t i = 0; i < num_to_move; ++i) { 142 | dt.move(handles[to_move[i]], new_pos[i]); 143 | } 144 | return dt; 145 | }) 146 | 147 | .def("number_of_vertices", &DT::number_of_vertices) 148 | 149 | .def("number_of_faces", 150 | [](DT &dt) { 151 | int count = 0; 152 | for (DT::Finite_faces_iterator fit = dt.finite_faces_begin(); 153 | fit != dt.finite_faces_end(); ++fit) { 154 | count += 1; 155 | } 156 | return count; 157 | }) 158 | 159 | .def("finite_vertices", 160 | [](DT &dt) -> py::iterator { 161 | return py::make_iterator(dt.finite_vertices_begin(), 162 | dt.finite_vertices_end()); 163 | }) 164 | 165 | .def("get_finite_cells", 166 | [](DT &dt) { 167 | // ouput the face table 168 | // YOU MUST CALL get_finite_vertices before if any incremental 169 | // operations were performed 170 | std::vector faces; 171 | faces.resize(dt.number_of_faces() * 3); 172 | 173 | int i = 0; 174 | for (DT::Finite_faces_iterator fit = dt.finite_faces_begin(); 175 | fit != dt.finite_faces_end(); ++fit) { 176 | 177 | DT::Face_handle face = fit; 178 | // critical! 
update the face index table so faces comes out correctly 179 | face->info() = i; 180 | faces[i * 3] = face->vertex(0)->info(); 181 | faces[i * 3 + 1] = face->vertex(1)->info(); 182 | faces[i * 3 + 2] = face->vertex(2)->info(); 183 | i += 1; 184 | } 185 | ssize_t soint = sizeof(int); 186 | ssize_t num_faces = faces.size() / 3; 187 | ssize_t ndim = 2; 188 | std::vector shape = {num_faces, 3}; 189 | std::vector strides = {soint * 3, soint}; 190 | 191 | // return 2-D NumPy array 192 | return py::array(py::buffer_info( 193 | faces.data(), /* data as contiguous array */ 194 | sizeof(int), /* size of one scalar */ 195 | py::format_descriptor::format(), /* data type */ 196 | 2, /* number of dimensions */ 197 | shape, /* shape of the matrix */ 198 | strides /* strides for each axis */ 199 | )); 200 | }) 201 | 202 | .def("get_finite_face_neighbors", 203 | [](DT &dt) { 204 | // ouput the neighbors of each face 205 | // infite neighbors are labeled as -1 206 | std::vector nei_faces; 207 | nei_faces.resize(dt.number_of_faces() * 3); 208 | 209 | int i = 0; 210 | for (DT::Finite_faces_iterator fit = dt.finite_faces_begin(); 211 | fit != dt.finite_faces_end(); ++fit) { 212 | // get the handle 213 | DT::Face_handle face = fit; 214 | // circulate around the neighbors 215 | for (size_t j=0; j < 3; ++j){ 216 | DT::Face_handle fh = face->neighbor(j); 217 | if(dt.is_infinite(fh)){ 218 | nei_faces[i * 3 + j] = -1; 219 | } 220 | else { 221 | nei_faces[i * 3 + j] = fh->info(); 222 | } 223 | } 224 | i += 1; 225 | } 226 | ssize_t soint = sizeof(int); 227 | ssize_t num_faces = nei_faces.size() / 3; 228 | ssize_t ndim = 2; 229 | std::vector shape = {num_faces, 3}; 230 | std::vector strides = {soint * 3, soint}; 231 | 232 | // return 2-D NumPy array 233 | return py::array(py::buffer_info( 234 | nei_faces.data(), /* data as contiguous array */ 235 | sizeof(int), /* size of one scalar */ 236 | py::format_descriptor::format(), /* data type */ 237 | 2, /* number of dimensions */ 238 | shape, /* shape of the matrix */ 239 | strides /* strides for each axis */ 240 | )); 241 | }) 242 | 243 | 244 | .def("get_finite_vertices", [](DT &dt) { 245 | // ouput the vertices 246 | std::vector vertices; 247 | vertices.resize(dt.number_of_vertices() * 2); 248 | 249 | int i = 0; 250 | for (DT::Finite_vertices_iterator fit = dt.finite_vertices_begin(); 251 | fit != dt.finite_vertices_end(); ++fit) { 252 | 253 | Vertex_handle vertex = fit; 254 | // critical! 
update the point index table so faces comes out correctly 255 | vertex->info() = i; 256 | vertices[i * 2] = vertex->point().x(); 257 | vertices[i * 2 + 1] = vertex->point().y(); 258 | i += 1; 259 | } 260 | ssize_t sdble = sizeof(double); 261 | ssize_t num_vertices = vertices.size() / 2; 262 | ssize_t ndim = 2; 263 | std::vector shape = {num_vertices, 2}; 264 | std::vector strides = {sdble * 2, sdble}; 265 | 266 | // return 2-D NumPy array 267 | return py::array(py::buffer_info( 268 | vertices.data(), /* data as contiguous array */ 269 | sizeof(double), /* size of one scalar */ 270 | py::format_descriptor::format(), /* data type */ 271 | 2, /* number of dimensions */ 272 | shape, /* shape of the matrix */ 273 | strides /* strides for each axis */ 274 | )); 275 | }); 276 | 277 | py::class_(m, "Vertex") 278 | .def_property_readonly( 279 | "point", [](DT::Finite_vertices_iterator::value_type &vertex) { 280 | return vertex.point(); 281 | }); 282 | 283 | py::class_(m, "Face").def( 284 | "vertex_handle", 285 | [](DT::Finite_faces_iterator::value_type &face, int index) { 286 | return face.vertex(index); 287 | }, 288 | py::arg("index")); 289 | } 290 | -------------------------------------------------------------------------------- /oceanmesh/cpp/fast_geometry.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | 9 | #include 10 | #include 11 | #include 12 | 13 | namespace py = pybind11; 14 | using py::ssize_t; 15 | 16 | #if 0 // DPZ non-portable 17 | class Timer { 18 | public: 19 | Timer() { clock_gettime(CLOCK_REALTIME, &beg_); } 20 | 21 | double elapsed() { 22 | clock_gettime(CLOCK_REALTIME, &end_); 23 | return end_.tv_sec - beg_.tv_sec + 24 | (end_.tv_nsec - beg_.tv_nsec) / 1000000000.; 25 | } 26 | 27 | void reset() { clock_gettime(CLOCK_REALTIME, &beg_); } 28 | 29 | private: 30 | timespec beg_, end_; 31 | }; 32 | #endif 33 | class Timer { 34 | public: 35 | Timer() { reset(); } 36 | 37 | double elapsed() { 38 | end_ = std::chrono::high_resolution_clock::now(); 39 | std::chrono::duration dur = end_ - beg_; 40 | return dur.count(); 41 | } 42 | 43 | void reset() { beg_ = std::chrono::high_resolution_clock::now(); } 44 | 45 | private: 46 | std::chrono::time_point beg_, end_; 47 | }; 48 | 49 | 50 | template 51 | std::vector vectorSortIntArr(std::vector> v) { 52 | std::sort(v.begin(), v.end()); 53 | // double t = tmr.elapsed(); 54 | // tmr.reset(); 55 | auto iter = std::unique(v.begin(), v.end()); 56 | // t = tmr.elapsed(); 57 | // std::cout << t << std::endl; 58 | 59 | size_t len = iter - v.begin(); 60 | std::vector outvec; 61 | outvec.reserve(len * 2); 62 | for (auto i = v.begin(); i != iter; ++i) { 63 | outvec.push_back(i->at(0)); 64 | outvec.push_back(i->at(1)); 65 | } 66 | return outvec; 67 | } 68 | 69 | py::array unique_edges( 70 | py::array_t edges) { 71 | 72 | std::vector cedges(edges.size()); 73 | std::memcpy(cedges.data(), edges.data(), edges.size() * sizeof(int)); 74 | 75 | std::vector> tl; 76 | 77 | tl.reserve(cedges.size()); 78 | for (size_t i = 0; i < cedges.size(); i += 2) { 79 | tl.push_back({std::min(cedges[i], cedges[i + 1]), 80 | std::max(cedges[i], cedges[i + 1])}); 81 | } 82 | 83 | auto u_edges = vectorSortIntArr(std::move(tl)); 84 | 85 | int num_edges = (int)u_edges.size(); 86 | ssize_t sint = sizeof(int); 87 | std::vector shape = {num_edges / 2, 2}; 88 | std::vector strides = {sint * 2, sint}; 89 | return py::array( 90 | py::buffer_info(u_edges.data(), 
/* data as contiguous array */ 91 | sizeof(int), /* size of one scalar */ 92 | py::format_descriptor::format(), /* data type */ 93 | 2, /* number of dimensions */ 94 | shape, /* shape of the matrix */ 95 | strides /* strides for each axis */ 96 | )); 97 | } 98 | 99 | PYBIND11_MODULE(_fast_geometry, m) { 100 | m.def("unique_edges", &unique_edges); 101 | } 102 | -------------------------------------------------------------------------------- /oceanmesh/edges.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | import numpy as np 3 | from matplotlib import collections as mc 4 | 5 | nan = np.nan 6 | 7 | 8 | def get_poly_edges(poly): 9 | """Given a winded polygon represented as a set of ascending line segments 10 | with separated features indicated by nans, this function calculates 11 | the edges of the polygon such that each edge indexes the start and end 12 | coordinates of each line segment of the polygon. 13 | 14 | Parameters 15 | ---------- 16 | poly: array-like, float 17 | A 2D array of point coordinates with features sepearated by NaNs 18 | 19 | Returns 20 | ------- 21 | edges: array-like, int 22 | A 2D array of integers containing indexes into the `poly` array. 23 | 24 | """ 25 | ix = np.argwhere(np.isnan(poly[:, 0])).ravel() 26 | ix = np.insert(ix, 0, -1) 27 | 28 | edges = [] 29 | for s in range(len(ix) - 1): 30 | ix_start = ix[s] + 1 31 | ix_end = ix[s + 1] - 1 32 | col1 = np.arange(ix_start, ix_end - 1) 33 | col2 = np.arange(ix_start + 1, ix_end) 34 | tmp = np.vstack((col1, col2)).T 35 | tmp = np.append(tmp, [[ix_end, ix_start]], axis=0) 36 | edges.append(tmp) 37 | return np.concatenate(edges, axis=0) 38 | 39 | 40 | def draw_edges(poly, edges): 41 | """Visualizes the polygon as a bunch of line segments 42 | 43 | Parameters 44 | ---------- 45 | poly: array-like, float 46 | A 2D array of point coordinates with features sepearated by NaNs. 47 | edges: array-like, int 48 | A 2D array of integers indexing into the `poly` array. 49 | 50 | Returns 51 | ------- 52 | None 53 | 54 | """ 55 | lines = [] 56 | for edge in edges: 57 | lines.append([poly[edge[0]], poly[edge[1]]]) 58 | lc = mc.LineCollection(lines, linewidths=2) 59 | fig, ax = plt.subplots() 60 | ax.add_collection(lc) 61 | ax.autoscale() 62 | plt.show() 63 | 64 | 65 | def unique_row_view(data): 66 | """https://github.com/numpy/numpy/issues/11136""" 67 | b = np.ascontiguousarray(data).view( 68 | np.dtype((np.void, data.dtype.itemsize * data.shape[1])) 69 | ) 70 | u, cnts = np.unique(b, return_counts=True) 71 | u = u.view(data.dtype).reshape(-1, data.shape[1]) 72 | return u, cnts 73 | 74 | 75 | def get_edges(entities, dim=2): 76 | """Get the undirected edges of mesh in no order (NB: are repeated) 77 | 78 | :param entities: the mesh connectivity 79 | :type entities: numpy.ndarray[`int` x (dim+1)] 80 | :param dim: dimension of the mesh 81 | :type dim: `int`, optional 82 | 83 | :return: edges: the edges that make up the mesh 84 | :rtype: numpy.ndarray[`int`x 2] 85 | """ 86 | 87 | num_entities = len(entities) 88 | entities = np.array(entities) 89 | if dim == 2: 90 | edges = entities[:, [[0, 1], [0, 2], [1, 2]]] 91 | edges = edges.reshape((num_entities * 3, 2)) 92 | elif dim == 3: 93 | edges = entities[:, [[0, 1], [1, 2], [2, 0], [0, 3], [1, 3], [2, 3]]] 94 | edges = edges.reshape((num_entities * 6, 2)) 95 | return edges 96 | 97 | 98 | def get_boundary_edges(entities, dim=2): 99 | """Get the boundary edges of the mesh. 
Boundary edges only appear (dim-1) times 100 | 101 | :param entities: the mesh connectivity 102 | :type entities: numpy.ndarray[`int` x (dim+1)] 103 | :param dim: dimension of the mesh 104 | :type dim: `int`, optional 105 | 106 | :return: boundary_edges: the edges that make up the boundary of the mesh 107 | :rtype: numpy.ndarray[`int` x 2] 108 | """ 109 | edges = get_edges(entities, dim=dim) 110 | edges = np.sort(edges, axis=1) 111 | unq, cnt = unique_row_view(edges) 112 | boundary_edges = np.array([e for e, c in zip(unq, cnt) if c == (dim - 1)]) 113 | return boundary_edges 114 | 115 | 116 | def get_winded_boundary_edges(entities, vFirst=None): 117 | """Order the boundary edges of the mesh in a winding fashion 118 | 119 | :param entities: the mesh connectivity 120 | :type entities: numpy.ndarray[`int` x (dim+1)] 121 | :param vFirst: vertex index of any edge element to trace boundary along 122 | :type vFirst: `int` 123 | 124 | :return: boundary_edges: the edges that make up the boundary of the mesh in a winding order 125 | :rtype: numpy.ndarray[`int` x 2] 126 | """ 127 | 128 | boundary_edges = get_boundary_edges(entities) 129 | _bedges = boundary_edges.copy() 130 | 131 | choice = 0 132 | if vFirst is not None: 133 | choice = next((i for i, j in enumerate(_bedges) if any(vFirst == j)), 0) 134 | 135 | isVisited = np.zeros((len(_bedges))) 136 | ordering = np.array([choice]) 137 | isVisited[choice] = 1 138 | 139 | vStart, vNext = _bedges[choice, :] 140 | while True: 141 | locs = np.column_stack(np.where(_bedges == vNext)) 142 | rows = locs[:, 0] 143 | choices = [row for row in rows if isVisited[row] == 0] 144 | if len(choices) == 0: 145 | break 146 | choice = choices[0] 147 | ordering = np.append(ordering, [choice]) 148 | isVisited[choice] = 1 149 | nextEdge = _bedges[choice, :] 150 | tmp = [v for v in nextEdge if v != vNext] 151 | vNext = tmp[0] 152 | boundary_edges = boundary_edges[ordering, :] 153 | return boundary_edges 154 | -------------------------------------------------------------------------------- /oceanmesh/filterfx.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Joseph Elmes: NERC-Funded PhD Researcher in Applied Mathematics 5 | University of Leeds : Leeds LS2 9JT : ml14je@leeds.ac.uk 6 | 7 | Python 3.8 8 | Created on Wed Sep 29 17:01:56 2021 9 | """ 10 | import numpy as np 11 | 12 | __all__ = ["filt2", "gaussfilter"] 13 | 14 | 15 | def filt2(Z, res, wl, filtertype, truncate=2.6): 16 | filtertype = filtertype.lower() 17 | if len(Z.shape) != 2: 18 | raise TypeError("Z should b a 2D array") 19 | 20 | if not np.isscalar(res): 21 | raise TypeError("res must be a scalar value.") 22 | 23 | if filtertype not in ["lowpass", "highpass", "bandpass", "bandstop"]: 24 | raise TypeError( 25 | "filtertype must be either lowpass, \ 26 | highpass, bandpass or bandstop" 27 | ) 28 | 29 | if hasattr(wl, "__len__") and ~isinstance(type(wl), np.ndarray): 30 | wl = np.array(wl) 31 | 32 | if np.any(wl <= 2 * res): 33 | print( 34 | "WARNING:: Nyquist says the wavelength should exceed two times \ 35 | the resolution of the dataset, which is an unmet condition based on these inputs" 36 | ) 37 | 38 | if filtertype in ["bandpass", "bandstop"]: 39 | if hasattr(wl, "__len__"): 40 | if len(wl) != 2 or isinstance(wl, str): 41 | raise TypeError( 42 | "Wavelength lambda must be a two-element array for a bandpass filter." 
43 | ) 44 | 45 | if ~isinstance(wl, np.ndarray): 46 | wl = np.array(list(wl)) 47 | 48 | else: 49 | raise TypeError( 50 | "Wavelength lambda must be a two-element array for a bandpass filter." 51 | ) 52 | else: # so must be either hp or lp 53 | if hasattr(wl, "__len__"): 54 | raise TypeError( 55 | "Wavelength lambda must be a scalar for lowpass or highpass filters." 56 | ) 57 | 58 | sigma = (wl / res) / (2 * np.pi) 59 | 60 | if filtertype == "lowpass": 61 | return gaussfilter( 62 | Z, sigma, truncate 63 | ) # ndnanfilter is Carlos Adrian Vargas Aguilera's excellent function, which is included as a subfunction below. 64 | 65 | elif filtertype == "highpass": 66 | return Z - gaussfilter(Z, sigma, truncate) 67 | 68 | elif filtertype == "bandpass": 69 | return filt2(filt2(Z, res, np.max(wl), "highpass"), res, np.min(wl), "lowpass") 70 | 71 | else: # Leaves the case of 'bs' 72 | return filt2(Z, res, np.max(wl), "lowpass") - filt2( 73 | Z, res, np.min(wl), "highpass" 74 | ) 75 | 76 | 77 | def gaussfilter(Z, sigma, truncate): 78 | from scipy.ndimage import gaussian_filter 79 | 80 | return gaussian_filter(Z, sigma, truncate=truncate, mode="nearest") 81 | 82 | 83 | if __name__ == "__main__": 84 | res = 0.2 # 200 m resolution 85 | 86 | x = np.arange(0, 100 + res, res) # eastings from 0 to 100 km 87 | y = np.arange(0, 100 + res, res) # northings from 0 to 100 km 88 | X, Y = np.meshgrid(x, y) 89 | 90 | # Z contains 25 km features, ~5 km diagonal features, and noise: 91 | Z = ( 92 | np.cos(2 * np.pi * X / 25) 93 | + np.cos(2 * np.pi * (X + Y) / 7) 94 | + np.random.randn(X.shape[0], X.shape[1]) 95 | ) 96 | 97 | import matplotlib.pyplot as pt 98 | 99 | pt.matshow(Z, aspect="auto", extent=[0, 100, 0, 100]) 100 | pt.xlabel("eastings (km)") 101 | pt.ylabel("northings (km)") 102 | pt.title("Original with Noise") 103 | pt.show() 104 | 105 | Zlow = filt2(Z, res, 15, "lowpass") 106 | pt.matshow(Zlow, aspect="auto", extent=[0, 100, 0, 100]) 107 | pt.xlabel("eastings (km)") 108 | pt.ylabel("northings (km)") 109 | pt.title("15 km lowpass filtered data") 110 | pt.show() 111 | 112 | Zhi = filt2(Z, res, 15, "highpass") 113 | pt.matshow(Zhi, aspect="auto", extent=[0, 100, 0, 100]) 114 | pt.xlabel("eastings (km)") 115 | pt.ylabel("northings (km)") 116 | pt.title("15 km highpass filtered data") 117 | pt.show() 118 | 119 | Zbp = filt2(Z, res, [4, 7], "bandpass") 120 | pt.matshow(Zbp, aspect="auto", extent=[0, 100, 0, 100]) 121 | pt.xlabel("eastings (km)") 122 | pt.ylabel("northings (km)") 123 | pt.title("4 to 7 km bandpass filtered data") 124 | pt.show() 125 | 126 | Zbs = filt2(Z, res, [3, 12], "bandstop") 127 | pt.matshow(Zbs, aspect="auto", extent=[0, 100, 0, 100]) 128 | pt.xlabel("eastings (km)") 129 | pt.ylabel("northings (km)") 130 | pt.title("3 to 12 km bandstop filtered data") 131 | -------------------------------------------------------------------------------- /oceanmesh/fix_mesh.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | 3 | import numpy as np 4 | 5 | 6 | def simp_qual(p, t): 7 | """Simplex quality radius-to-edge ratio 8 | :param p: vertex coordinates of mesh 9 | :type p: numpy.ndarray[`float` x dim] 10 | :param t: mesh connectivity 11 | :type t: numpy.ndarray[`int` x (dim + 1)] 12 | :return: signed mesh quality: signed mesh quality (1.0 is perfect) 13 | :rtype: numpy.ndarray[`float` x 1] 14 | """ 15 | assert p.ndim == 2 and t.ndim == 2 and p.shape[1] + 1 == t.shape[1] 16 | 17 | def length(p1): 18 | return np.sqrt((p1**2).sum(1)) 19 | 20 | a = 
20 |     a = length(p[t[:, 1]] - p[t[:, 0]])
21 |     b = length(p[t[:, 2]] - p[t[:, 0]])
22 |     c = length(p[t[:, 2]] - p[t[:, 1]])
23 |     # Suppress Runtime warnings here because we know that mult1/denom1 can be negative
24 |     # as the mesh is being cleaned
25 |     with warnings.catch_warnings():
26 |         warnings.simplefilter("ignore")
27 |         mult1 = (b + c - a) * (c + a - b) * (a + b - c) / (a + b + c)
28 |         denom1 = np.sqrt((a + b + c) * (b + c - a) * (c + a - b) * (a + b - c))
29 |         r = 0.5 * np.sqrt(mult1)  # inradius; nan for inverted elements (mult1 < 0)
30 |         R = a * b * c / denom1
31 |     return 2 * r / R
32 | 
33 | 
34 | def fix_mesh(p, t, ptol=2e-13, dim=2, delete_unused=False):
35 |     """Remove duplicated/unused vertices and entities and
36 |     ensure orientation of entities is CCW.
37 |     :param p: point coordinates of mesh
38 |     :type p: numpy.ndarray[`float` x dim]
39 |     :param t: mesh connectivity
40 |     :type t: numpy.ndarray[`int` x (dim + 1)]
41 |     :param ptol: point tolerance to detect duplicates
42 |     :type ptol: `float`, optional
43 |     :param dim: dimension of mesh
44 |     :type dim: `int`, optional
45 |     :param delete_unused: flag to delete disjoint vertices.
46 |     :type delete_unused: `boolean`, optional
47 |     :return: p: updated point coordinates of mesh
48 |     :rtype: numpy.ndarray[`float` x dim]
49 |     :return: t: updated mesh connectivity
50 |     :rtype: numpy.ndarray[`int` x (dim+1)]
51 |     """
52 | 
53 |     # duplicate vertices
54 |     snap = (p.max(0) - p.min(0)).max() * ptol
55 |     _, ix, jx = unique_rows(np.round(p / snap) * snap, True, True)
56 | 
57 |     p = p[ix]
58 |     t = jx[t]
59 | 
60 |     # duplicate entities
61 |     t = np.sort(t, axis=1)
62 |     t = unique_rows(t)
63 | 
64 |     # delete disjoint vertices
65 |     if delete_unused:
66 |         pix, _, jx = np.unique(t, return_index=True, return_inverse=True)
67 |         t = np.reshape(jx, (t.shape))
68 |         p = p[pix, :]
69 | 
70 |     # entity orientation is CCW
71 |     flip = simp_vol(p, t) < 0
72 |     t[flip, :2] = t[flip, 1::-1]
73 | 
74 |     return p, t, jx
75 | 
76 | 
77 | def unique_rows(A, return_index=False, return_inverse=False):
78 |     """Similar to MATLAB's unique(A, 'rows'), this returns B, I, J
79 |     where B is the unique rows of A and I and J satisfy
80 |     A = B[J,:] and B = A[I,:]
81 |     :param A: array of data
82 |     :type A: numpy.ndarray[`int`/`float` x N]
83 |     :param return_index: whether to return the indices of unique data
84 |     :type return_index: `boolean`, optional
85 |     :param return_inverse: whether to return the inverse mapping back to A from B.
86 |     :type return_inverse: `boolean`, optional
87 |     :return: B: array of data with duplicates removed
88 |     :rtype: numpy.ndarray[`int`/`float` x N]
89 |     :return: I: array of indices to unique data B.
90 |     :rtype: numpy.ndarray[`int` x 1]
91 |     :return: J: array of indices to A from B.
92 |     :rtype: numpy.ndarray[`int` x 1]
93 |     """
94 |     A = np.require(A, requirements="C")
95 |     assert A.ndim == 2, "array must be 2-dim'l"
96 | 
97 |     orig_dtype = A.dtype
98 |     ncolumns = A.shape[1]
99 |     dtype = np.dtype((f"S{orig_dtype.itemsize * ncolumns}"))
100 |     B, I, J = np.unique(A.view(dtype), return_index=True, return_inverse=True)
101 |     # NUMPY 2 compatibility
102 |     J = J.reshape(-1)
103 | 
104 |     B = B.view(orig_dtype).reshape((-1, ncolumns), order="C")
105 | 
106 |     # There must be a better way to do this:
107 |     if return_index:
108 |         if return_inverse:
109 |             return B, I, J
110 |         else:
111 |             return B, I
112 |     else:
113 |         if return_inverse:
114 |             return B, J
115 |         else:
116 |             return B
117 | 
118 | 
119 | def simp_vol(p, t):
120 |     """Signed volumes of the simplex elements in the mesh.
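    In 2D this is the signed triangle area from the cross product,
    ((p1 - p0) x (p2 - p0)) / 2, positive for counter-clockwise winding;
    fix_mesh above uses the sign to enforce CCW orientation.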
121 | :param p: point coordinates of mesh 122 | :type p: numpy.ndarray[`float` x dim] 123 | :param t: mesh connectivity 124 | :type t: numpy.ndarray[`int` x (dim + 1)] 125 | :return: volume: signed volume of entity/simplex. 126 | :rtype: numpy.ndarray[`float` x 1] 127 | """ 128 | 129 | dim = p.shape[1] 130 | if dim == 1: 131 | d01 = p[t[:, 1]] - p[t[:, 0]] 132 | return d01 133 | elif dim == 2: 134 | d01 = p[t[:, 1]] - p[t[:, 0]] 135 | d02 = p[t[:, 2]] - p[t[:, 0]] 136 | return (d01[:, 0] * d02[:, 1] - d01[:, 1] * d02[:, 0]) / 2 137 | elif dim == 3: 138 | d01 = p[t[:, 1], :] - p[t[:, 0], :] 139 | d02 = p[t[:, 2], :] - p[t[:, 0], :] 140 | d03 = p[t[:, 3], :] - p[t[:, 0], :] 141 | return np.einsum("ij,ij->i", np.cross(d01, d02), d03) / 6 142 | else: 143 | raise NotImplementedError 144 | -------------------------------------------------------------------------------- /oceanmesh/grid.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import matplotlib.pyplot as plt 4 | import numpy as np 5 | import scipy.spatial 6 | from scipy.interpolate import RegularGridInterpolator 7 | 8 | from .idw import Invdisttree 9 | from .region import Region, to_stereo 10 | 11 | logger = logging.getLogger(__name__) 12 | 13 | 14 | def compute_minimum(edge_lengths): 15 | """Determine the minimum of all edge lengths in the domain""" 16 | _crs = edge_lengths[0].crs 17 | msg = "All edgelengths must have the same CRS" 18 | for el in edge_lengths[1::]: 19 | assert _crs == el.crs, msg 20 | # project all edge_lengths onto the grid of the first one 21 | base_edge_length = edge_lengths[0] 22 | edge_lengths = [ 23 | edge_length.interpolate_to(base_edge_length) 24 | for edge_length in edge_lengths[1::] 25 | ] 26 | edge_lengths.insert(0, base_edge_length) 27 | 28 | minimum_values = np.minimum.reduce( 29 | [edge_length.values for edge_length in edge_lengths] 30 | ) 31 | min_edgelength = np.amin(minimum_values) 32 | # construct a new grid object with these values 33 | grid = Grid( 34 | bbox=base_edge_length.bbox, 35 | dx=base_edge_length.dx, 36 | dy=base_edge_length.dy, 37 | hmin=min_edgelength, 38 | values=minimum_values, 39 | extrapolate=True, 40 | crs=base_edge_length.crs, 41 | ) 42 | 43 | grid.build_interpolant() 44 | return grid 45 | 46 | 47 | class Grid(Region): 48 | """Abstracts a structured grid along with 49 | primitive operations (e.g., min, project, etc.) and 50 | stores data `values` defined at each grid point. 51 | Parameters 52 | ---------- 53 | bbox: tuple 54 | domain extent 55 | dx: float 56 | spacing between grid points along x-axis 57 | dy: float, optional 58 | spacing grid grid points along y-axis 59 | crs: pyproj.PROJ, optional 60 | Well-known text (WKT) 61 | hmin: float, optional 62 | minimum grid spacing in domain 63 | values: scalar or array-like 64 | values at grid points. If scalar, then an array of 65 | the value is created matching the extent. 
66 |     extrapolate: boolean, optional
67 |         Whether the grid can extrapolate outside its bbox
68 | 
69 |     Attributes
70 |     ----------
71 |     x0y0: tuple
72 |         bottom left corner coordinate
73 |     nx: int
74 |         number of grid points in x-direction
75 |     ny: int
76 |         number of grid points in y-direction
77 |     eval: func
78 |         A function that takes a vector of points
79 |         and returns a vector of values
80 |     """
81 | 
82 |     def __init__(
83 |         self,
84 |         bbox,
85 |         dx,
86 |         dy=None,
87 |         crs="EPSG:4326",
88 |         hmin=None,
89 |         values=None,
90 |         extrapolate=False,
91 |     ):
92 |         super().__init__(bbox, crs)
93 |         if dy is None:
94 |             dy = dx  # equidistant grid in both x and y dirs if not passed
95 |         self.bbox = bbox
96 |         self.x0y0 = (bbox[0], bbox[2])  # bottom left corner coordinates (x,y)
97 |         self.dx = dx
98 |         self.dy = dy
99 |         self.nx = None  # int((self.bbox[1] - self.bbox[0]) // self.dx) + 1
100 |         self.ny = None  # int((self.bbox[3] - self.bbox[2]) // self.dy) + 1
101 |         self.values = values
102 |         self.eval = None
103 |         self.extrapolate = extrapolate
104 |         self.hmin = hmin
105 | 
106 |     @property
107 |     def dx(self):
108 |         return self.__dx
109 | 
110 |     @dx.setter
111 |     def dx(self, value):
112 |         if value <= 0:
113 |             raise ValueError("Grid spacing (dx) must be > 0.0")
114 |         self.__dx = value
115 | 
116 |     @property
117 |     def dy(self):
118 |         return self.__dy
119 | 
120 |     @dy.setter
121 |     def dy(self, value):
122 |         if value <= 0:
123 |             raise ValueError("Grid spacing (dy) must be > 0.0")
124 |         self.__dy = value
125 | 
126 |     @property
127 |     def values(self):
128 |         return self.__values
129 | 
130 |     @values.setter
131 |     def values(self, data):
132 |         if np.isscalar(data):
133 |             self.nx = int((self.bbox[1] - self.bbox[0]) // self.dx) + 1
134 |             self.ny = abs(int((self.bbox[3] - self.bbox[2]) // self.dy) + 1)
135 |             data = np.tile(data, (self.nx, self.ny))
136 |         self.__values = data
137 |         self.nx, self.ny = data.shape
138 | 
139 |     @staticmethod
140 |     def get_border(arr):
141 |         """Get the border values of a 2D array"""
142 |         return np.concatenate(
143 |             [arr[0, :-1], arr[:-1, -1], arr[-1, ::-1], arr[-2:0:-1, 0]], axis=0
144 |         )
145 | 
146 |     def create_vectors(self):
147 |         """Build coordinate vectors
148 | 
149 |         Parameters
150 |         ----------
151 |         None
152 | 
153 |         Returns
154 |         -------
155 |         x: ndarray
156 |             1D array containing the x-coordinates with `float` type.
157 |         y: ndarray
158 |             1D array containing the y-coordinates with `float` type.
159 | 
160 |         """
161 |         x = self.x0y0[0] + np.arange(0, self.nx) * self.dx  # ascending monotonically
162 |         y = self.x0y0[1] + np.arange(0, self.ny) * abs(self.dy)
163 |         # y = y[::-1]  # descending monotonically
164 |         return x, y
165 | 
166 |     def create_grid(self):
167 |         """Build a structured grid
168 | 
169 |         Parameters
170 |         ----------
171 |         None
172 | 
173 |         Returns
174 |         -------
175 |         xg: ndarray
176 |             2D array containing data with `float` type.
177 |         yg: ndarray
178 |             2D array containing data with `float` type.
179 | 
180 |         """
181 |         x, y = self.create_vectors()
182 |         xg, yg = np.meshgrid(x, y, indexing="ij")
183 |         return xg, yg
184 | 
185 |     def find_indices(self, points, lon, lat, tree=None, k=1):
186 |         """Find linear indices into a 2D array that select the closest
187 |         k point(s) in the structured grid defined by `lon` and `lat`
188 |         to `points`.
189 | 
190 |         Parameters
191 |         ----------
192 |         points: ndarray
193 |             Query points. 2D array with `float` type.
194 |         lon: ndarray
195 |             Grid points in x-dimension. 2D array with `float` type.
196 |         lat: ndarray
197 |             Grid points in y-dimension. 2D array with `float` type.
198 |         tree: :obj:`scipy.spatial.ckdtree`, optional
199 |             A KDtree with coordinates from :class:`Shoreline`
200 |         k: int, optional
201 |             Number of closest points to return
202 | 
203 |         Returns
204 |         -------
205 |         indices: ndarray
206 |             Indices into an array. 1D array with `int` type.
207 | 
208 |         """
209 |         points = points[~np.isnan(points[:, 0]), :]
210 |         if tree is None:
211 |             lonlat = np.column_stack((lon.ravel(), lat.ravel()))
212 |             tree = scipy.spatial.cKDTree(lonlat)
213 |         try:
214 |             dist, idx = tree.query(points, k=k, workers=-1)
215 |         except (Exception,):
216 |             dist, idx = tree.query(points, k=k, n_jobs=-1)
217 |         return np.unravel_index(idx, lon.shape)
218 | 
219 |     def interpolate_to(self, grid2, method="nearest"):
220 |         """Interpolates self.values onto :class:`Grid` grid2 forming a new
221 |         :class:`Grid` object grid3.
222 |         Note
223 |         ----
224 |         In other words, in areas of overlap, grid1 values
225 |         take precedence; elsewhere grid2 values are retained. Grid3 has
226 |         dx & dy grid spacing following the resolution of grid2.
227 |         Parameters
228 |         ----------
229 |         grid2: :obj:`Grid`
230 |             A :obj:`Grid` with `values`.
231 |         method: str, optional
232 |             Way to interpolate data between grids
233 |         Returns
234 |         -------
235 |         grid3: :obj:`Grid`
236 |             A new :obj:`Grid` with projected `values`.
237 |         """
238 |         # is grid2 even a grid object?
239 |         assert isinstance(grid2, Grid), "Object must be Grid."
240 |         # check if they overlap
241 |         x1min, x1max, y1min, y1max = self.bbox
242 |         x2min, x2max, y2min, y2max = grid2.bbox
243 |         overlap = x1min < x2max and x2min < x1max and y1min < y2max and y2min < y1max
244 |         assert overlap, "Grid objects do not overlap."
245 |         lon1, lat1 = self.create_vectors()
246 |         lon2, lat2 = grid2.create_vectors()
247 |         if self.extrapolate:
248 |             _FILL = None
249 |         else:
250 |             _FILL = -999
251 |         # take data from grid1 --> grid2
252 |         fp = RegularGridInterpolator(
253 |             (lon1, lat1),
254 |             self.values,
255 |             method=method,
256 |             bounds_error=False,
257 |             fill_value=_FILL,
258 |         )
259 |         xg, yg = np.meshgrid(lon2, lat2, indexing="ij", sparse=True)
260 |         new_values = fp((xg, yg))
261 |         # where fill replace with grid2 values
262 |         new_values[new_values == _FILL] = grid2.values[new_values == _FILL]
263 |         return Grid(
264 |             bbox=grid2.bbox,
265 |             dx=grid2.dx,
266 |             dy=grid2.dy,
267 |             hmin=grid2.hmin,
268 |             values=new_values,
269 |             crs=grid2.crs,
270 |         )
271 | 
272 |     def blend_into(self, coarse, blend_width=10, p=1, nnear=6, eps=0.0):
273 |         """Blend self.values into the values of the coarse grid so that the
274 |         values transition smoothly. The kwargs control the blending procedure.
275 |         Parameters
276 |         ----------
277 |         coarse: :class:`Grid`
278 |         blend_width: int, optional
279 |             The width of the padding in number of grid points
280 |         p: int, optional
281 |             The polynomial order in the distance weighting scheme
282 |         nnear: int, optional
283 |             The number of nearest points to use to interpolate each point
284 |         eps: float, optional
285 |             Points less than `eps` are considered the same point
286 |         Returns
287 |         -------
288 |         _coarse_w_fine: :class:`Grid`
289 |             The coarse grid with the finer grid interpolated and blended.
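        Example (a sketch; the bbox/dx values are hypothetical)
        --------------------------------------------------------
        >>> coarse = Grid(bbox=(-80.0, -60.0, 30.0, 50.0), dx=0.1, values=1.0)
        >>> fine = Grid(bbox=(-75.0, -70.0, 38.0, 42.0), dx=0.01, values=0.1)
        >>> blended = fine.blend_into(coarse, blend_width=20)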
290 | """ 291 | _FILL = -99999 # uncommon value 292 | if not isinstance(coarse, Grid): 293 | raise ValueError("Object must be Grid.") 294 | # check if they overlap 295 | x1min, x1max, y1min, y1max = coarse.bbox 296 | x2min, x2max, y2min, y2max = self.bbox 297 | overlap = (x1min < x2min) & (x1max > x2max) & (y1min < y2min) & (y1max > y2max) 298 | if not overlap: 299 | logger.warning("Grid objects do not overlap.") 300 | return coarse 301 | _fine = self.values 302 | # 1. Pad the finer grid's values 303 | _fine_w_pad_values = np.pad( 304 | _fine, pad_width=blend_width, mode="constant", constant_values=_FILL 305 | ) 306 | # 2. Create a new Grid fine_w_pad 307 | _add_length = self.dx * blend_width 308 | _add_height = self.dy * blend_width 309 | _new_fine_bbox = ( 310 | self.bbox[0] - _add_length, 311 | self.bbox[1] + _add_length, 312 | self.bbox[2] - _add_height, 313 | self.bbox[3] + _add_height, 314 | ) 315 | _fine_w_pad = Grid( 316 | _new_fine_bbox, 317 | self.dx, 318 | dy=self.dy, 319 | values=_fine_w_pad_values, 320 | extrapolate=self.extrapolate, 321 | ) 322 | # 2. Interpolate _fine_w_pad onto coarse 323 | _coarse_w_fine = _fine_w_pad.interpolate_to(coarse, method="nearest") 324 | 325 | # 3. Perform inverse distance weighting on the points with 0 326 | _xg, _yg = _coarse_w_fine.create_grid() 327 | _pts = np.column_stack((_xg.flatten(), _yg.flatten())) 328 | _vals = _coarse_w_fine.values.flatten() 329 | 330 | # find buffer 331 | ask_index = _vals == _FILL 332 | known_index = _vals != _FILL 333 | 334 | _tree = Invdisttree(_pts[known_index], _vals[known_index]) 335 | _vals[ask_index] = _tree(_pts[ask_index], nnear=nnear, eps=eps, p=p) 336 | 337 | _hmin = np.amin(_vals[ask_index]) 338 | _coarse_w_fine.hmin = _hmin 339 | # put it back 340 | _coarse_w_fine.values = _vals.reshape(*_coarse_w_fine.values.shape) 341 | 342 | return _coarse_w_fine 343 | 344 | def plot( 345 | self, 346 | ax=None, 347 | xlabel=None, 348 | ylabel=None, 349 | title=None, 350 | holding=False, 351 | coarsen=1, 352 | plot_colorbar=False, 353 | cbarlabel=None, 354 | stereo=False, 355 | xlim=None, 356 | ylim=None, 357 | filename=None, 358 | **kwargs, 359 | ): 360 | """Visualize the values in :obj:`Grid` 361 | 362 | Parameters 363 | ---------- 364 | holding: boolean, optional 365 | Whether to create a new plot axis. 366 | 367 | Returns 368 | ------- 369 | fig: 370 | ax: handle to axis of plot 371 | handle to axis of plot. 372 | 373 | """ 374 | _xg, _yg = self.create_grid() 375 | if stereo: 376 | _xg, _yg = to_stereo(_xg, _yg) 377 | if ax is None: 378 | fig, ax = plt.subplots() 379 | ax.axis("equal") 380 | pc = ax.pcolor( 381 | _xg[::coarsen, ::coarsen], 382 | _yg[::coarsen, ::coarsen], 383 | self.values[::coarsen, ::coarsen], 384 | **kwargs, 385 | ) 386 | 387 | if xlabel is not None: 388 | ax.set_xlabel(xlabel) 389 | if ylabel is not None: 390 | ax.set_ylabel(ylabel) 391 | if title is not None: 392 | ax.set_title(title) 393 | if xlim is not None: 394 | ax.set_xlim(xlim) 395 | if ylim is not None: 396 | ax.set_ylim(ylim) 397 | 398 | if cbarlabel is not None: 399 | plot_colorbar = True 400 | if plot_colorbar or cbarlabel: 401 | cbar = fig.colorbar(pc) 402 | cbar.set_label(cbarlabel) 403 | 404 | if filename is not None: 405 | plt.savefig(filename) 406 | if holding is False: 407 | plt.show() 408 | return fig, ax, pc 409 | 410 | def build_interpolant(self): 411 | """Construct a RegularGriddedInterpolant sizing function stores it as 412 | the `eval` field. 
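        Consumers of the sizing function (e.g., the mesh generator) then
        call `eval` to query values at arbitrary points.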
413 | 
414 |         Parameters
415 |         ----------
416 |         values: array-like
417 |             An array of values that forms the gridded interpolant
418 | 
419 |         """
420 |         lon1, lat1 = self.create_vectors()
421 |         if self.extrapolate:
422 |             _FILL = None
423 |         else:
424 |             _FILL = 999999
425 | 
426 |         # for a global mesh, make the values cyclical (ported from MATLAB)
427 |         if (abs(self.bbox[0]) == 180) & (abs(self.bbox[1]) == 180):
428 |             self.values[[0, -1], :] = self.values[[-1, 0], :]
429 | 
430 |         fp = RegularGridInterpolator(
431 |             (lon1, lat1),
432 |             self.values,
433 |             method="linear",
434 |             bounds_error=False,
435 |             fill_value=_FILL,
436 |         )
437 | 
438 |         def sizing_function(x):
439 |             return fp(x)
440 | 
441 |         self.eval = sizing_function
442 |         return self
443 | 
--------------------------------------------------------------------------------
/oceanmesh/idw.py:
--------------------------------------------------------------------------------
1 | """ invdisttree.py: inverse-distance-weighted interpolation using KDTree
2 |     fast, solid, local
3 | """
4 | 
5 | from __future__ import division
6 | 
7 | import numpy as np
8 | from scipy.spatial import cKDTree as KDTree
9 | 
10 | # http://docs.scipy.org/doc/scipy/reference/spatial.html
11 | 
12 | __date__ = "2010-11-09 Nov"  # weights, doc
13 | 
14 | # ...............................................................................
15 | 
16 | 
17 | class Invdisttree:
18 |     """inverse-distance-weighted interpolation using KDTree:
19 |     invdisttree = Invdisttree( X, z )  -- data points, values
20 |     interpol = invdisttree( q, nnear=3, eps=0, p=1, weights=None, stat=0 )
21 |         interpolates z from the 3 points nearest each query point q;
22 |     For example, interpol[ a query point q ]
23 |     finds the 3 data points nearest q, at distances d1 d2 d3
24 |     and returns the IDW average of the values z1 z2 z3
25 |         (z1/d1 + z2/d2 + z3/d3)
26 |         / (1/d1 + 1/d2 + 1/d3)
27 |         = .55 z1 + .27 z2 + .18 z3  for distances 1 2 3
28 | 
29 |     q may be one point, or a batch of points.
30 |     eps: approximate nearest, dist <= (1 + eps) * true nearest
31 |     p: use 1 / distance**p
32 |     weights: optional multipliers for 1 / distance**p, of the same shape as q
33 |     stat: accumulate wsum, wn for average weights
34 | 
35 |     How many nearest neighbors should one take ?
36 |     a) start with 8 11 14 .. 28 in 2d 3d 4d .. 10d; see Wendel's formula
37 |     b) make 3 runs with nnear= e.g. 6 8 10, and look at the results --
38 |         |interpol 6 - interpol 8| etc., or |f - interpol*| if you have f(q).
39 |         I find that runtimes don't increase much at all with nnear -- ymmv.
40 | 
41 |     p=1, p=2 ?
42 |         p=2 weights nearer points more, farther points less.
43 |         In 2d, the circles around query points have areas ~ distance**2,
44 |         so p=2 is inverse-area weighting. For example,
45 |             (z1/area1 + z2/area2 + z3/area3)
46 |             / (1/area1 + 1/area2 + 1/area3)
47 |             = .74 z1 + .18 z2 + .08 z3  for distances 1 2 3
48 |         Similarly, in 3d, p=3 is inverse-volume weighting.
49 | 
50 |     Scaling:
51 |         if different X coordinates measure different things, Euclidean distance
52 |         can be way off. For example, if X0 is in the range 0 to 1
53 |         but X1 0 to 1000, the X1 distances will swamp X0;
54 |         rescale the data, i.e. make X0.std() ~= X1.std() .
55 | 
56 |     A nice property of IDW is that it's scale-free around query points:
57 |     if I have values z1 z2 z3 from 3 points at distances d1 d2 d3,
58 |     the IDW average
59 |         (z1/d1 + z2/d2 + z3/d3)
60 |         / (1/d1 + 1/d2 + 1/d3)
61 |     is the same for distances 1 2 3, or 10 20 30 -- only the ratios matter.
62 | In contrast, the commonly-used Gaussian kernel exp( - (distance/h)**2 ) 63 | is exceedingly sensitive to distance and to h. 64 | 65 | """ 66 | 67 | # anykernel( dj / av dj ) is also scale-free 68 | # error analysis, |f(x) - idw(x)| ? todo: regular grid, nnear ndim+1, 2*ndim 69 | 70 | def __init__(self, X, z, leafsize=10, stat=0): 71 | assert len(X) == len(z), "len(X) %d != len(z) %d" % (len(X), len(z)) 72 | self.tree = KDTree(X, leafsize=leafsize) # build the tree 73 | self.z = z 74 | self.stat = stat 75 | self.wn = 0 76 | self.wsum = None 77 | 78 | def __call__(self, q, nnear=6, eps=0, p=1, weights=None): 79 | # nnear nearest neighbours of each query point -- 80 | q = np.asarray(q) 81 | qdim = q.ndim 82 | if qdim == 1: 83 | q = np.array([q]) 84 | if self.wsum is None: 85 | self.wsum = np.zeros(nnear) 86 | 87 | self.distances, self.ix = self.tree.query(q, k=nnear, eps=eps) 88 | interpol = np.zeros((len(self.distances),) + np.shape(self.z[0])) 89 | jinterpol = 0 90 | for dist, ix in zip(self.distances, self.ix): 91 | if nnear == 1: 92 | wz = self.z[ix] 93 | elif dist[0] < 1e-10: 94 | wz = self.z[ix[0]] 95 | else: # weight z s by 1/dist -- 96 | w = 1 / dist**p 97 | if weights is not None: 98 | w *= weights[ix] # >= 0 99 | w /= np.sum(w) 100 | wz = np.dot(w, self.z[ix]) 101 | if self.stat: 102 | self.wn += 1 103 | self.wsum += w 104 | interpol[jinterpol] = wz 105 | jinterpol += 1 106 | return interpol if qdim > 1 else interpol[0] 107 | 108 | 109 | # ............................................................................... 110 | if __name__ == "__main__": 111 | import sys 112 | 113 | N = 10000 114 | Ndim = 2 115 | Nask = N # N Nask 1e5: 24 sec 2d, 27 sec 3d on mac g4 ppc 116 | Nnear = 8 # 8 2d, 11 3d => 5 % chance one-sided -- Wendel, mathoverflow.com 117 | leafsize = 10 118 | eps = 0.1 # approximate nearest, dist <= (1 + eps) * true nearest 119 | p = 1 # weights ~ 1 / distance**p 120 | cycle = 0.25 121 | seed = 1 122 | 123 | exec("\n".join(sys.argv[1:])) # python this.py N= ... 124 | np.random.seed(seed) 125 | np.set_printoptions(3, threshold=100, suppress=True) # .3f 126 | 127 | print( 128 | "\nInvdisttree: N %d Ndim %d Nask %d Nnear %d leafsize %d eps %.2g p %.2g" 129 | % (N, Ndim, Nask, Nnear, leafsize, eps, p) 130 | ) 131 | 132 | def terrain(x): 133 | """~ rolling hills""" 134 | return np.sin((2 * np.pi / cycle) * np.mean(x, axis=-1)) 135 | 136 | known = np.random.uniform(size=(N, Ndim)) ** 0.5 # 1/(p+1): density x^p 137 | z = terrain(known) 138 | ask = np.random.uniform(size=(Nask, Ndim)) 139 | 140 | # ............................................................................... 
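    # Build the IDW interpolant from the scattered samples, then evaluate
    # it at the Nask query points.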
141 |     invdisttree = Invdisttree(known, z, leafsize=leafsize, stat=1)
142 |     interpol = invdisttree(ask, nnear=Nnear, eps=eps, p=p)
143 | 
144 |     import matplotlib.pyplot as plt
145 | 
146 |     plt.scatter(ask[:, 0], ask[:, 1], c=interpol)
147 |     plt.show()
148 | 
149 |     print(
150 |         "average distances to nearest points: %s"
151 |         % np.mean(invdisttree.distances, axis=0)
152 |     )
153 |     print("average weights: %s" % (invdisttree.wsum / invdisttree.wn))
154 |     # see Wikipedia Zipf's law
155 |     err = np.abs(terrain(ask) - interpol)
156 |     print("average |terrain() - interpolated|: %.2g" % np.mean(err))
157 | 
158 |     # print "interpolate a single point: %.2g" % \
159 |     #     invdisttree( known[0], nnear=Nnear, eps=eps )
160 | 
--------------------------------------------------------------------------------
/oceanmesh/mesh_generator.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import logging
3 | import os
4 | import time
5 | 
6 | import matplotlib.pyplot as plt
7 | import matplotlib.tri as tri
8 | import numpy as np
9 | import scipy.sparse as spsparse
10 | from _delaunay_class import DelaunayTriangulation as DT
11 | from _fast_geometry import unique_edges
12 | 
13 | from .clean import _external_topology
14 | from .edgefx import multiscale_sizing_function
15 | from .fix_mesh import fix_mesh
16 | from .grid import Grid
17 | from .region import to_lat_lon, to_stereo
18 | from .signed_distance_function import Domain, multiscale_signed_distance_function
19 | 
20 | logger = logging.getLogger(__name__)
21 | 
22 | __all__ = [
23 |     "generate_mesh",
24 |     "generate_multiscale_mesh",
25 |     "plot_mesh_connectivity",
26 |     "plot_mesh_bathy",
27 |     "write_to_fort14",
28 |     "write_to_t3s",
29 | ]
30 | 
31 | 
32 | def write_to_fort14(
33 |     points,
34 |     cells,
35 |     filepath,
36 |     topobathymetry=None,
37 |     project_name="Created with oceanmesh",
38 |     flip_bathymetry=False,
39 | ):
40 |     """
41 |     Parameters
42 |     -----------
43 |     points (numpy.ndarray): An array of shape (np, 2) containing the x, y coordinates of the mesh nodes.
44 |     cells (numpy.ndarray): An array of shape (ne, 3) containing the indices of the nodes that form each mesh element.
45 |     filepath (str): The file path to write the fort.14 file to.
46 |     topobathymetry (numpy.ndarray): An array of shape (np, 1) containing the topobathymetry values at each node.
47 |     project_name (str): The name of the project to be written to the fort.14 file.
48 |     flip_bathymetry (bool): If True, the bathymetry values will be multiplied by -1.
49 | 
50 |     Returns:
51 |     --------
52 |     str: a message confirming the mesh was written to `filepath`.
53 |     Note that `cells` is shifted to 1-based indexing in place (the fort.14
54 |     convention), so pass a copy if the zero-based array is still needed.
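    Example (a sketch; the array names are hypothetical)
    -----------------------------------------------------
    >>> msg = write_to_fort14(points, cells.copy(), "fort.14", topobathymetry=depths)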
55 | """ 56 | logger.info("Exporting mesh to fort.14 file...") 57 | 58 | # Calculate number of nodes and elements 59 | npoints = np.size(points, 0) 60 | nelements = np.size(cells, 0) 61 | 62 | if topobathymetry is not None: 63 | assert ( 64 | len(topobathymetry) == npoints 65 | ), "topobathymetry must be the same length as points" 66 | else: 67 | topobathymetry = np.zeros((npoints, 1)) 68 | 69 | if flip_bathymetry: 70 | topobathymetry *= -1 71 | 72 | # Shift cell indices by 1 (fort.14 uses 1-based indexing) 73 | cells += 1 74 | 75 | # Open file for writing 76 | with open(filepath, "w") as f_id: 77 | # Write mesh name 78 | if flip_bathymetry: 79 | f_id.write(f"{project_name} (bathymetry flipped) \n") 80 | else: 81 | f_id.write(f"{project_name} \n") 82 | 83 | # Write number of nodes and elements 84 | np.savetxt( 85 | f_id, 86 | np.column_stack((nelements, npoints)), 87 | delimiter=" ", 88 | fmt="%i", 89 | newline="\n", 90 | ) 91 | 92 | # Write node coordinates 93 | for k in range(npoints): 94 | np.savetxt( 95 | f_id, 96 | np.column_stack((k + 1, points[k][0], points[k][1], topobathymetry[k])), 97 | delimiter=" ", 98 | fmt="%i %f %f %f", 99 | newline="\n", 100 | ) 101 | 102 | # Write element connectivity 103 | for k in range(nelements): 104 | np.savetxt( 105 | f_id, 106 | np.column_stack((k + 1, 3, cells[k][0], cells[k][1], cells[k][2])), 107 | delimiter=" ", 108 | fmt="%i %i %i %i %i ", 109 | newline="\n", 110 | ) 111 | 112 | # Write zero for each boundary condition (4 total) 113 | for k in range(4): 114 | f_id.write("%d \n" % 0) 115 | 116 | return f"Wrote the mesh to {filepath}..." 117 | 118 | 119 | def write_to_t3s(points, cells, filepath): 120 | """ 121 | Write mesh data to a t3s file. 122 | 123 | Parameters: 124 | points (numpy.ndarray): An array of shape (np, 2) containing the x, y coordinates of the mesh nodes. 125 | cells (numpy.ndarray): An array of shape (ne, 3) containing the indices of the nodes that form each mesh element. 126 | filepath (str): The file path to write the t3s file to. 
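    Note: .t3s is the ASCII "2D T3 Scalar Mesh" format used by the Canadian
    Hydraulics Centre's BlueKenue package (see the header written below).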
127 | 
128 | Returns:
129 | str: A confirmation message that the mesh was written.
130 | """
131 | logger.info("Exporting mesh to t3s file...")
132 | 
133 | # Calculate number of nodes and elements
134 | npoints = np.size(points, 0)
135 | nelements = np.size(cells, 0)
136 | 
137 | # Open file for writing
138 | with open(filepath, "w") as f_id:
139 | # Write header
140 | today = datetime.datetime.now()
141 | date_time = today.strftime("%m/%d/%Y, %H:%M:%S")
142 | t3head = (
143 | """#########################################################################\n
144 | :FileType t3s ASCII EnSim 1.0\n
145 | # Canadian Hydraulics Centre/National Research Council (c) 1998-2004\n
146 | # DataType 2D T3 Scalar Mesh\n
147 | #
148 | :Application BlueKenue\n
149 | :Version 3.0.44\n
150 | :WrittenBy pyoceanmesh\n
151 | :CreationDate """
152 | + date_time
153 | + """\n
154 | #
155 | #------------------------------------------------------------------------\n
156 | #
157 | :Projection Cartesian\n
158 | :Ellipsoid Unknown\n
159 | #
160 | :NodeCount """
161 | + str(npoints)
162 | + """\n
163 | :ElementCount """
164 | + str(nelements)
165 | + """\n
166 | :ElementType T3\n
167 | #
168 | :EndHeader"""
169 | ) # END HEADER
170 | t3head = os.linesep.join([s for s in t3head.splitlines() if s])
171 | f_id.write(t3head)
172 | f_id.write("\n")
173 | 
174 | # Write node coordinates
175 | for k in range(npoints):
176 | np.savetxt(
177 | f_id,
178 | np.column_stack((points[k][0], points[k][1], 0.0)),
179 | delimiter=" ",
180 | fmt="%f %f %f",
181 | newline="\n",
182 | )
183 | 
184 | # Write element connectivity
185 | for k in range(nelements):
186 | np.savetxt(
187 | f_id,
188 | np.column_stack((cells[k][0], cells[k][1], cells[k][2])),
189 | delimiter=" ",
190 | fmt="%i %i %i",
191 | newline="\n",
192 | )
193 | 
194 | return f"Wrote the mesh to {filepath}..."
195 | 
196 | 
197 | def plot_mesh_connectivity(points, cells, show_plot=True):
198 | """Plot the mesh connectivity using matplotlib's triplot function.
199 | 
200 | Parameters
201 | ----------
202 | points : numpy.ndarray
203 | A 2D array containing the x and y coordinates of the points.
204 | cells : numpy.ndarray
205 | A 2D array containing the connectivity information for the triangles.
206 | show_plot : bool, optional
207 | Whether to show the plot or not. The default is True.
208 | 
209 | Returns
210 | -------
211 | ax : matplotlib.axes.Axes
212 | The axes object containing the plot.
213 | """
214 | triang = tri.Triangulation(points[:, 0], points[:, 1], cells)
215 | fig, ax = plt.subplots(figsize=(10, 10))
216 | ax.triplot(triang)
217 | ax.set_aspect("equal", adjustable="box")
218 | ax.set_title("Mesh connectivity")
219 | if show_plot:
220 | plt.show(block=False)
221 | return ax
222 | 
223 | 
224 | def plot_mesh_bathy(points, bathymetry, connectivity, show_plot=True):
225 | """
226 | Create a tricontourf plot of the bathymetry data associated with the points,
227 | using the triangle connectivity information to plot the contours.
228 | 
229 | Parameters
230 | ----------
231 | points : numpy.ndarray
232 | A 2D array containing the x and y coordinates of the points.
233 | bathymetry : numpy.ndarray
234 | A 1D array containing the bathymetry values associated with each point.
235 | connectivity : numpy.ndarray
236 | A 2D array containing the connectivity information for the triangles.
237 | show_plot : bool, optional
238 | Whether or not to display the plot. Default is True.
239 | 
240 | Returns
241 | -------
242 | matplotlib.axes.Axes
243 | The axis handle of the plot.
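[Editor's note] An illustrative sketch (not part of the module) showing the two plotting helpers above on a tiny hand-made mesh; the coordinates and bathymetry values are invented for the example:

```python
import numpy as np
from oceanmesh.mesh_generator import plot_mesh_connectivity, plot_mesh_bathy

# Made-up two-triangle mesh with per-node bathymetry values.
points = np.array([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]])
cells = np.array([[0, 1, 2], [0, 2, 3]])
bathy = np.array([2.0, 4.0, 6.0, 8.0])

ax1 = plot_mesh_connectivity(points, cells, show_plot=False)  # wireframe
ax2 = plot_mesh_bathy(points, bathy, cells, show_plot=False)  # filled contours
```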
244 | 
245 | """
246 | # Create a Triangulation object using the points and connectivity table
247 | triangulation = tri.Triangulation(points[:, 0], points[:, 1], connectivity)
248 | 
249 | # Create a figure and axis object
250 | fig, ax = plt.subplots(figsize=(10, 10))
251 | 
252 | # Plot the tricontourf
253 | tricontourf = ax.tricontourf(triangulation, bathymetry, cmap="jet")
254 | 
255 | # Add colorbar
256 | plt.colorbar(tricontourf)
257 | 
258 | # Set axis labels
259 | ax.set_xlabel("Longitude")
260 | ax.set_ylabel("Latitude")
261 | 
262 | # Set title
263 | ax.set_title("Mesh Topobathymetry")
264 | 
265 | # Show the plot if requested
266 | if show_plot:
267 | plt.show()
268 | 
269 | return ax
270 | 
271 | 
272 | def _parse_kwargs(kwargs):
273 | for key in kwargs:
274 | if key in {
275 | "nscreen",
276 | "max_iter",
277 | "seed",
278 | "pfix",
279 | "points",
280 | "domain",
281 | "edge_length",
282 | "bbox",
283 | "min_edge_length",
284 | "plot",
285 | "blend_width",
286 | "blend_polynomial",
287 | "blend_max_iter",
288 | "blend_nnear",
289 | "lock_boundary",
290 | "pseudo_dt",
291 | "stereo",
292 | }:
293 | pass
294 | else:
295 | raise ValueError(
296 | "Option %s with parameter %s not recognized" % (key, kwargs[key])
297 | )
298 | 
299 | 
300 | def _check_bbox(bbox):
301 | assert isinstance(bbox, tuple), "`bbox` must be a tuple"
302 | assert len(bbox) == 4, "`bbox` must contain 4 values (xmin, xmax, ymin, ymax)"
303 | 
304 | 
305 | def generate_multiscale_mesh(domains, edge_lengths, **kwargs):
306 | r"""Generate a 2D triangular mesh using callbacks to several
307 | sizing functions `edge_lengths` and several signed distance functions.
308 | See the kwargs for `generate_mesh`.
309 | 
310 | Parameters
311 | ----------
312 | domains: A list of function objects.
313 | A list of functions that take a point and return the signed nearest distance to the domain boundary Ω.
314 | edge_lengths: A list of function objects.
315 | A list of functions that can evaluate a point and return a mesh size.
316 | \**kwargs:
317 | See below for kwargs in addition to the ones available for `generate_mesh`
318 | 
319 | :Keyword Arguments:
320 | * *blend_width* (``float``) --
321 | The width of the element size transition region between nest and parent.
322 | * *blend_polynomial* (``int``) --
323 | The rate of transition scales with 1/dist^blend_polynomial.
324 | * *blend_max_iter* (``int``) --
325 | The number of mesh generation iterations to blend the nest and parent.
326 | * *blend_nnear* (``int``) --
327 | The number of nearest neighbors in the IDW interpolation.
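[Editor's note] A usage sketch for `generate_multiscale_mesh`, condensed from `tests/test_multiscale.py` further below in this dump; the shapefile path "my_coastline.shp" is a placeholder you must supply, and the extents are invented:

```python
import oceanmesh as om

# Coarse outer region and a finer nested region, each with its own
# signed distance function and sizing function.
region1 = om.Region(extent=(-75.0, -70.0, 40.0, 42.0), crs=4326)
shore1 = om.Shoreline("my_coastline.shp", region1.bbox, 0.01)
sdf1 = om.signed_distance_function(shore1)
el1 = om.distance_sizing_function(shore1, max_edge_length=0.125)

region2 = om.Region(extent=(-74.0, -73.0, 40.4, 40.9), crs=4326)
shore2 = om.Shoreline("my_coastline.shp", region2.bbox, 0.005)
sdf2 = om.signed_distance_function(shore2)
el2 = om.distance_sizing_function(shore2)

# Lists are ordered from parent to nest; `blend_*` kwargs control the
# element-size transition between them.
points, cells = om.generate_multiscale_mesh(
    [sdf1, sdf2], [el1, el2], blend_width=1000, blend_max_iter=50
)
```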
328 | 
329 | """
330 | assert (
331 | len(domains) > 1 and len(edge_lengths) > 1
332 | ), "This function takes a list of domains and sizing functions"
333 | assert len(domains) == len(
334 | edge_lengths
335 | ), "The same number of domains must be passed as sizing functions"
336 | opts = {
337 | "max_iter": 100,
338 | "seed": 0,
339 | "pfix": None,
340 | "points": None,
341 | "min_edge_length": None,
342 | "plot": 999999,
343 | "blend_width": 2500,
344 | "blend_polynomial": 2,
345 | "blend_max_iter": 20,
346 | "blend_nnear": 256,
347 | "lock_boundary": False,
348 | }
349 | opts.update(kwargs)
350 | _parse_kwargs(kwargs)
351 | 
352 | master_edge_length, edge_lengths_smoothed = multiscale_sizing_function(
353 | edge_lengths,
354 | blend_width=opts["blend_width"],
355 | nnear=opts["blend_nnear"],
356 | p=opts["blend_polynomial"],
357 | )
358 | union, nests = multiscale_signed_distance_function(domains)
359 | _p = []
360 | global_minimum = 9999
361 | for domain_number, (sdf, edge_length) in enumerate(
362 | zip(nests, edge_lengths_smoothed)
363 | ):
364 | logger.info(f"--> Building domain #{domain_number}")
365 | global_minimum = np.amin([global_minimum, edge_length.hmin])
366 | _tmpp, _ = generate_mesh(sdf, edge_length, **kwargs)
367 | _p.append(_tmpp)
368 | 
369 | _p = np.concatenate(_p, axis=0)
370 | 
371 | # merge the domains together; kwargs set explicitly below are filtered out
372 | logger.info("--> Blending the domains together...")
373 | _p, _t = generate_mesh(
374 | domain=union,
375 | edge_length=master_edge_length,
376 | min_edge_length=global_minimum,
377 | points=_p,
378 | max_iter=opts["blend_max_iter"],
379 | lock_boundary=True,
380 | **{k: v for k, v in kwargs.items() if k not in ("domain", "edge_length", "min_edge_length", "points", "max_iter", "lock_boundary")},
381 | )
382 | 
383 | return _p, _t
384 | 
385 | 
386 | def generate_mesh(domain, edge_length, **kwargs):
387 | r"""Generate a 2D triangular mesh using callbacks to a
388 | sizing function `edge_length` and signed distance function.
389 | 
390 | Parameters
391 | ----------
392 | domain: A function object.
393 | A function that takes a point and returns the signed nearest distance to the domain boundary Ω.
394 | edge_length: A function object.
395 | A function that can evaluate a point and return a mesh size.
396 | \**kwargs:
397 | See below
398 | 
399 | :Keyword Arguments:
400 | * *bbox* (``tuple``) --
401 | Bounding box containing domain extents. REQUIRED IF `edge_length` IS NOT A :class:`Grid`
402 | * *max_iter* (``int``) --
403 | Maximum number of meshing iterations. (default==50)
404 | * *seed* (``int``) --
405 | Pseudo-random seed to initialize meshing points. (default==0)
406 | * *pfix* (`array-like`) --
407 | An array of points to constrain in the mesh. (default==None)
408 | * *min_edge_length* (``float``) --
409 | The minimum element size in the domain. REQUIRED IF `edge_length` IS NOT A :class:`Grid`
410 | * *plot* (``int``) --
411 | The mesh is visualized every `plot` meshing iterations.
412 | * *pseudo_dt* (``float``) --
413 | The pseudo time step for the meshing algorithm. (default==0.2)
414 | * *stereo* (``bool``) --
415 | Mesh the entire globe in a stereographic projection. (default==False)
416 | 
417 | Returns
418 | -------
419 | points: array-like
420 | vertex coordinates of mesh
421 | t: array-like
422 | mesh connectivity table.
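[Editor's note] A minimal, self-contained sketch of calling `generate_mesh` with plain callback arguments, mirroring `tests/test_mesh_generator_simple.py` further below; when `edge_length` is a plain function rather than a `Grid`, `bbox` and `min_edge_length` must be passed explicitly:

```python
import numpy as np
from oceanmesh import generate_mesh

bbox = (0.0, 1.0, 0.0, 1.0)  # unit square: (xmin, xmax, ymin, ymax)

def domain(p):
    # Signed distance to the unit square: negative inside, positive outside.
    m = np.minimum
    return -m(m(m(p[:, 1], 1.0 - p[:, 1]), p[:, 0]), 1.0 - p[:, 0])

def edge_length(p):
    return np.full(len(p), 0.1)  # uniform target edge length of 0.1

points, cells = generate_mesh(
    domain=domain, edge_length=edge_length, min_edge_length=0.1, bbox=bbox
)
```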
423 | 
424 | """
425 | _DIM = 2
426 | opts = {
427 | "max_iter": 50,
428 | "seed": 0,
429 | "pfix": None,
430 | "points": None,
431 | "min_edge_length": None,
432 | "plot": 999999,
433 | "lock_boundary": False,
434 | "pseudo_dt": 0.2,
435 | "stereo": False,
436 | }
437 | opts.update(kwargs)
438 | _parse_kwargs(kwargs)
439 | 
440 | fd, bbox = _unpack_domain(domain, opts)
441 | fh, min_edge_length = _unpack_sizing(edge_length, opts)
442 | 
443 | _check_bbox(bbox)
444 | bbox = np.array(bbox).reshape(-1, 2)
445 | 
446 | assert min_edge_length > 0, "`min_edge_length` must be > 0"
447 | 
448 | assert opts["max_iter"] > 0, "`max_iter` must be > 0"
449 | max_iter = opts["max_iter"]
450 | 
451 | np.random.seed(opts["seed"])
452 | 
453 | L0mult = 1 + 0.4 / 2 ** (_DIM - 1)
454 | delta_t = opts["pseudo_dt"]
455 | geps = 1e-3 * np.amin(min_edge_length)
456 | deps = np.sqrt(np.finfo(np.double).eps) # * np.amin(min_edge_length)
457 | 
458 | pfix, nfix = _unpack_pfix(_DIM, opts)
459 | lock_boundary = opts["lock_boundary"]
460 | 
461 | if opts["points"] is None:
462 | p = _generate_initial_points(
463 | min_edge_length,
464 | geps,
465 | bbox,
466 | fh,
467 | fd,
468 | pfix,
469 | opts["stereo"],
470 | )
471 | else:
472 | p = opts["points"]
473 | 
474 | N = p.shape[0]
475 | 
476 | assert N > 0, "No vertices to mesh with!"
477 | 
478 | logger.info(
479 | f"Commencing mesh generation with {N} vertices; performing up to {max_iter} iterations."
480 | )
481 | for count in range(max_iter):
482 | start = time.time()
483 | 
484 | # (Re)-triangulation by the Delaunay algorithm
485 | dt = DT()
486 | dt.insert(p.ravel().tolist())
487 | 
488 | # Get the current topology of the triangulation
489 | p, t = _get_topology(dt)
490 | 
491 | ifix = []
492 | if lock_boundary:
493 | _, bpts = _external_topology(p, t)
494 | for fix in bpts:
495 | ifix.append(_closest_node(fix, p))
496 | nfix = len(ifix)
497 | 
498 | # Find where pfix went
499 | if nfix > 0:
500 | for fix in pfix:
501 | ind = _closest_node(fix, p)
502 | ifix.append(ind)
503 | p[ind] = fix
504 | 
505 | # Remove points outside the domain
506 | t = _remove_triangles_outside(p, t, fd, geps)
507 | 
508 | # Number of iterations reached, stop.
509 | if count == (max_iter - 1):
510 | p, t, _ = fix_mesh(p, t, dim=_DIM, delete_unused=True)
511 | logger.info("Termination reached...maximum number of iterations.")
512 | return p, t
513 | 
514 | # Compute the forces on the bars
515 | Ftot = _compute_forces(p, t, fh, min_edge_length, L0mult, opts)
516 | 
517 | # Force = 0 at fixed points
518 | Ftot[:nfix] = 0
519 | 
520 | # Update positions
521 | p += delta_t * Ftot
522 | 
523 | # Bring outside points back to the boundary
524 | p = _project_points_back(p, fd, deps)
525 | 
526 | # Show the user some progress so they know something is happening
527 | maxdp = delta_t * np.sqrt((Ftot**2).sum(1)).max()
528 | 
529 | logger.info(
530 | f"Iteration #{count + 1}, max movement is {maxdp}, there are {len(p)} vertices and {len(t)} cells"
531 | )
532 | 
533 | end = time.time()
534 | logger.info(f"Elapsed wall-clock time {end - start} seconds")
535 | 
536 | 
537 | def _unpack_sizing(edge_length, opts):
538 | if isinstance(edge_length, Grid):
539 | fh = edge_length.eval
540 | min_edge_length = edge_length.hmin
541 | elif callable(edge_length):
542 | fh = edge_length
543 | min_edge_length = opts["min_edge_length"]
544 | else:
545 | raise ValueError(
546 | "`edge_length` must either be a function or a `Grid` object"
547 | )
548 | return fh, min_edge_length
549 | 
550 | 
551 | def _unpack_domain(domain, opts):
552 | if isinstance(domain, Domain):
553 | bbox = domain.bbox
554 | fd = domain.eval
555 | elif callable(domain):
556 | bbox = opts["bbox"]
557 | fd = domain
558 | else:
559 | raise ValueError(
560 | "`domain` must be a function or a :class:`Domain` object"
561 | )
562 | return fd, bbox
563 | 
564 | 
565 | def _get_bars(t):
566 | """Describe each bar by a unique pair of nodes"""
567 | bars = np.concatenate([t[:, [0, 1]], t[:, [1, 2]], t[:, [2, 0]]])
568 | return unique_edges(bars)
569 | 
570 | 
571 | # Persson-Strang
572 | def _compute_forces(p, t, fh, min_edge_length, L0mult, opts):
573 | """Compute the forces on each edge based on the sizing function"""
574 | N = p.shape[0]
575 | bars = _get_bars(t)
576 | barvec = p[bars[:, 0]] - p[bars[:, 1]] # List of bar vectors
577 | L = np.sqrt((barvec**2).sum(1)) # L = Bar lengths
578 | L[L == 0] = np.finfo(float).eps
579 | if opts["stereo"]:
580 | p1 = p[bars].sum(1) / 2
581 | x, y = to_lat_lon(p1[:, 0], p1[:, 1])
582 | p2 = np.asarray([x, y]).T
583 | hbars = fh(p2) * _stereo_distortion_dist(y)
584 | else:
585 | hbars = fh(p[bars].sum(1) / 2)
586 | L0 = hbars * L0mult * (np.nanmedian(L) / np.nanmedian(hbars))
587 | F = L0 - L
588 | F[F < 0] = 0 # Bar forces (scalars)
589 | Fvec = (
590 | F[:, None] / L[:, None].dot(np.ones((1, 2))) * barvec
591 | ) # Bar forces (x,y components)
592 | Ftot = _dense(
593 | bars[:, [0] * 2 + [1] * 2],
594 | np.repeat([list(range(2)) * 2], len(F), axis=0),
595 | np.hstack((Fvec, -Fvec)),
596 | shape=(N, 2),
597 | )
598 | return Ftot
599 | 
600 | 
601 | # Bossen-Heckbert
602 | # def _compute_forces(p, t, fh, min_edge_length, L0mult):
603 | # """Compute the forces on each edge based on the sizing function"""
604 | # N = p.shape[0]
605 | # bars = _get_bars(t)
606 | # barvec = p[bars[:, 0]] - p[bars[:, 1]] # List of bar vectors
607 | # L = np.sqrt((barvec ** 2).sum(1)) # L = Bar lengths
608 | # L[L == 0] = np.finfo(float).eps
609 | # hbars = fh(p[bars].sum(1) / 2)
610 | # L0 = hbars * L0mult * (np.nanmedian(L) / np.nanmedian(hbars))
611 | # LN = L / L0
612 | # F = (1 - LN ** 4) * np.exp(-(LN ** 4)) / LN
613 | # Fvec = (
614 | # F[:, None] / LN[:, None].dot(np.ones((1, 2))) * 
barvec
615 | # ) # Bar forces (x,y components)
616 | # Ftot = _dense(
617 | # bars[:, [0] * 2 + [1] * 2],
618 | # np.repeat([list(range(2)) * 2], len(F), axis=0),
619 | # np.hstack((Fvec, -Fvec)),
620 | # shape=(N, 2),
621 | # )
622 | # return Ftot
623 | 
624 | 
625 | def _dense(Ix, J, S, shape=None, dtype=None):
626 | """
627 | Similar to MATLAB's SPARSE(I, J, S, ...), but instead returning a
628 | dense array.
629 | """
630 | 
631 | # Advanced usage: allow J and S to be scalars.
632 | if np.isscalar(J):
633 | x = J
634 | J = np.empty(Ix.shape, dtype=int)
635 | J.fill(x)
636 | if np.isscalar(S):
637 | x = S
638 | S = np.empty(Ix.shape)
639 | S.fill(x)
640 | 
641 | # Turn these into 1-d arrays for processing.
642 | S = S.flat
643 | II = Ix.flat
644 | J = J.flat
645 | return spsparse.coo_matrix((S, (II, J)), shape, dtype).toarray()
646 | 
647 | 
648 | def _remove_triangles_outside(p, t, fd, geps):
649 | """Remove triangles with centroids outside the domain"""
650 | pmid = p[t].sum(1) / 3 # Compute centroids
651 | return t[fd(pmid) < -geps] # Keep interior triangles
652 | 
653 | 
654 | def _project_points_back(p, fd, deps):
655 | """Project points outside the domain back to its boundary"""
656 | d = fd(p)
657 | ix = d > 0 # Find points outside (d>0)
658 | if ix.any():
659 | 
660 | def _deps_vec(i):
661 | a = [0] * 2
662 | a[i] = deps
663 | return a
664 | 
665 | try:
666 | dgrads = [
667 | (fd(p[ix] + _deps_vec(i)) - d[ix]) / deps for i in range(2)
668 | ] # old method
669 | except ValueError: # an error is thrown if all points passed to fd are
670 | # outside the bbox domain, so instead evaluate fd at
671 | # all points and then keep only the ones outside
672 | dgrads = [(fd(p + _deps_vec(i)) - d) / deps for i in range(2)]
673 | dgrads = list(np.array(dgrads)[:, ix])
674 | dgrad2 = sum(dgrad**2 for dgrad in dgrads)
675 | dgrad2 = np.where(dgrad2 < deps, deps, dgrad2)
676 | p[ix] -= (d[ix] * np.vstack(dgrads) / dgrad2).T # Project
677 | return p
678 | 
679 | 
680 | def _stereo_distortion(lat):
681 | # we use here the stereographic projection of the sphere
682 | # from the north pole onto the plane
683 | # https://en.wikipedia.org/wiki/Stereographic_projection
684 | lat0 = 90
685 | ll = lat + lat0
686 | lrad = ll / 180 * np.pi
687 | res = 2 / (1 + np.sin(lrad))
688 | return res
689 | 
690 | 
691 | def _stereo_distortion_dist(lat):
692 | lrad = np.radians(lat)
693 | # Calculate the scale factor for the stereographic projection
694 | res = 2 / (1 + np.sin(lrad)) / 180 * np.pi
695 | return res
696 | 
697 | 
698 | def _generate_initial_points(min_edge_length, geps, bbox, fh, fd, pfix, stereo=False):
699 | """Create initial distribution in bounding box (equilateral triangles)"""
700 | if stereo:
701 | bbox = np.array([[-180, 180], [-89, 89]])
702 | p = np.mgrid[
703 | tuple(slice(lo, hi + min_edge_length, min_edge_length) for lo, hi in bbox)
704 | ].astype(float)
705 | if stereo:
706 | # for global meshes in stereographic projections,
707 | # we need to reproject the points from lon/lat to the stereo projection
708 | # then, we need to rectify their coordinates to lat/lon for the sizing function
709 | p0 = p.reshape(2, -1).T
710 | x, y = to_stereo(p0[:, 0], p0[:, 1])
711 | p = np.asarray([x, y]).T
712 | r0 = fh(to_lat_lon(p[:, 0], p[:, 1])) * _stereo_distortion(p0[:, 1])
713 | else:
714 | p = p.reshape(2, -1).T
715 | r0 = fh(p)
716 | r0m = np.min(r0[r0 >= min_edge_length])
717 | p = p[np.random.rand(p.shape[0]) < r0m**2 / r0**2]
718 | p = p[fd(p) < geps] # Keep only d<0 points
719 | return np.vstack(
720 | (
721 | pfix,
722 | p,
723 | )
724 | )
725 | 
726 | 
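[Editor's note] The thinning step in `_generate_initial_points` above keeps a candidate point with probability `r0m**2 / r0**2`, i.e. proportional to 1/h(x)^2, so regions with a smaller target edge length h receive quadratically more seed points. A standalone sketch of the same idea, with a made-up sizing field, purely for illustration:

```python
import numpy as np

rng = np.random.default_rng(seed=0)
cand = rng.uniform(0.0, 1.0, size=(10_000, 2))  # uniform candidate points
h = 0.05 + 0.2 * cand[:, 0]                     # hypothetical sizing field h(x)
keep = rng.random(len(cand)) < (h.min() ** 2) / h**2
seeds = cand[keep]  # denser where h(x) is small (near x = 0 here)
```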
727 | def _dist(p1, p2):
728 | """Euclidean distance between two sets of points"""
729 | return np.sqrt(((p1 - p2) ** 2).sum(1))
730 | 
731 | 
732 | def _unpack_pfix(dim, opts):
733 | """Unpack fixed points"""
734 | pfix = np.empty((0, dim))
735 | nfix = 0
736 | if opts["pfix"] is not None:
737 | pfix = np.array(opts["pfix"], dtype="d")
738 | nfix = len(pfix)
739 | logger.info(f"Constraining {nfix} fixed points...")
740 | return pfix, nfix
741 | 
742 | 
743 | def _get_topology(dt):
744 | """Get points and entities from :class:`CGAL:DelaunayTriangulation2/3` object"""
745 | return dt.get_finite_vertices(), dt.get_finite_cells()
746 | 
747 | 
748 | def _closest_node(node, nodes):
749 | nodes = np.asarray(nodes)
750 | deltas = nodes - node
751 | dist_2 = np.einsum("ij,ij->i", deltas, deltas)
752 | return np.argmin(dist_2)
753 | 
--------------------------------------------------------------------------------
/oceanmesh/region.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from pyproj import CRS, Transformer
3 | 
4 | __all__ = ["Region", "warp_coordinates"]
5 | 
6 | 
7 | def warp_coordinates(points, src_crs, dst_crs):
8 | src_crs = CRS.from_epsg(src_crs)
9 | dst_crs = CRS.from_epsg(dst_crs)
10 | transformer = Transformer.from_crs(src_crs, dst_crs, always_xy=True)
11 | points = transformer.transform(points[:, 0], points[:, 1])
12 | return np.asarray(points).T
13 | 
14 | 
15 | def stereo_to_3d(u, v, R=1):
16 | # to 3D
17 | # c=4*R**2/(u**2+v**2+4*R**2)
18 | # x=c*u
19 | # y=c*v
20 | # z=2*c*R-R
21 | 
22 | rp2 = u**2 + v**2
23 | x = -2 * R * u / (1 + rp2)
24 | y = -2 * R * v / (1 + rp2)
25 | z = R * (1 - rp2) / (1 + rp2)
26 | 
27 | return x, y, z
28 | 
29 | 
30 | def to_lat_lon(x, y, z=None, R=1):
31 | if z is None:
32 | x, y, z = stereo_to_3d(x, y, R=R)
33 | 
34 | # to lat/lon
35 | rad = x**2 + y**2 + z**2
36 | rad = np.sqrt(rad)
37 | 
38 | rad[rad == 0] = rad.max()
39 | 
40 | rlat = np.arcsin(z / rad)
41 | rlon = np.arctan2(y, x)
42 | 
43 | rlat = rlat * 180 / np.pi
44 | rlon = rlon * 180 / np.pi
45 | 
46 | return rlon, rlat
47 | 
48 | 
49 | def to_3d(x, y, R=1):
50 | lon = np.array(x)
51 | lat = np.array(y)
52 | # to 3D
53 | kx = np.cos(lat / 180 * np.pi) * np.cos(lon / 180 * np.pi) * R
54 | ky = np.cos(lat / 180 * np.pi) * np.sin(lon / 180 * np.pi) * R
55 | kz = np.sin(lat / 180 * np.pi) * R
56 | 
57 | return kx, ky, kz
58 | 
59 | 
60 | def to_stereo(x, y, R=1):
61 | kx, ky, kz = to_3d(x, y, R)
62 | 
63 | # to 2D in stereo
64 | # u = 2*R*kx/(R+kz)
65 | # v = 2*R*ky/(R+kz)
66 | u = -kx / (R + kz)
67 | v = -ky / (R + kz)
68 | 
69 | return u, v
70 | 
71 | 
72 | class Region:
73 | def __init__(self, extent, crs):
74 | self.bbox = extent
75 | self._crs = CRS.from_user_input(crs)
76 | 
77 | @property
78 | def crs(self):
79 | return self._crs
80 | 
81 | @property
82 | def bbox(self):
83 | return self.__bbox
84 | 
85 | @property
86 | def total_bounds(self):
87 | if isinstance(self.bbox, tuple):
88 | return self.bbox
89 | else:
90 | return (
91 | self.bbox[:, 0].min(),
92 | self.bbox[:, 0].max(),
93 | self.bbox[:, 1].min(),
94 | self.bbox[:, 1].max(),
95 | )
96 | 
97 | @bbox.setter
98 | def bbox(self, value):
99 | if isinstance(value, tuple):
100 | if len(value) < 4:
101 | raise ValueError("bbox has wrong number of values.")
102 | if value[1] < value[0]:
103 | raise ValueError("bbox has wrong values: xmax must be >= xmin.")
104 | if value[3] < value[2]:
105 | raise ValueError("bbox has wrong values: ymax must be >= ymin.")
106 | # otherwise polygon
107 | self.__bbox = value
108 | 
109 | def transform_to(self,
dst_crs):
110 | """Transform extents ``bbox`` to ``dst_crs``"""
111 | dst_crs = CRS.from_user_input(dst_crs)
112 | if not self._crs.equals(dst_crs):
113 | transformer = Transformer.from_crs(self.crs, dst_crs, always_xy=True)
114 | if isinstance(self.bbox, tuple):
115 | xmin, xmax, ymin, ymax = self.bbox
116 | (xmin, xmax), (ymin, ymax) = transformer.transform(
117 | (xmin, xmax), (ymin, ymax)
118 | )
119 | self.bbox = (xmin, xmax, ymin, ymax)
120 | else:
121 | # for polygon case
122 | self.bbox = np.asarray(
123 | transformer.transform(self.bbox[:, 0], self.bbox[:, 1])
124 | ).T
125 | 
126 | self._crs = dst_crs
127 | return self
128 | 
--------------------------------------------------------------------------------
/oceanmesh/signed_distance_function.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import math
3 | import random
4 | 
5 | import matplotlib.pyplot as plt
6 | import numpy as np
7 | import scipy.spatial
8 | from inpoly import inpoly2
9 | 
10 | from . import Shoreline, edges
11 | 
12 | logger = logging.getLogger(__name__)
13 | 
14 | __all__ = [
15 | "multiscale_signed_distance_function",
16 | "signed_distance_function",
17 | "Domain",
18 | "Union",
19 | "Difference",
20 | "Intersection",
21 | "create_circle",
22 | "create_bbox",
23 | ]
24 | 
25 | nan = np.nan
26 | 
27 | 
28 | def create_circle(center, radius):
29 | """Create a circle centered on `center` and with
30 | radius `radius` in WGS84 degrees"""
31 | stepSize = 0.1
32 | positions = []
33 | t = 0
34 | while t < 2 * math.pi:
35 | positions.append(
36 | (radius * math.cos(t) + center[0], radius * math.sin(t) + center[1])
37 | )
38 | t += stepSize
39 | 
40 | return np.array(positions)
41 | 
42 | 
43 | def create_bbox(bbox):
44 | """
45 | Returns a :class:`Domain` object which provides a bbox and a signed
46 | distance function for the domain defined by `bbox`.
47 | """
48 | 
49 | 
50 | x0, xN, y0, yN = bbox
51 | 
52 | def func(p):
53 | """Signed distance function for the rectangle with corners (x1,y1),
54 | (x2,y1), (x1,y2), (x2,y2). The distance is inexact at the four
55 | corners, but that is not a big deal in practice.
56 | """
57 | min = np.minimum
58 | return -min(min(min(-y0 + p[:, 1], yN - p[:, 1]), -x0 + p[:, 0]), xN - p[:, 0])
59 | 
60 | domain = Domain(bbox, func)
61 | 
62 | return domain
63 | 
64 | 
65 | def _generate_samples(bbox, dim, N):
66 | N = int(N)
67 | points = []
68 | _xrange = (bbox[0] - 0.01, bbox[1] + 0.01)
69 | _yrange = (bbox[2] - 0.01, bbox[3] + 0.01)
70 | if dim == 2:
71 | points.append(
72 | [
73 | (
74 | random.uniform(*_xrange),
75 | random.uniform(*_yrange),
76 | )
77 | for i in range(N)
78 | ]
79 | )
80 | elif dim == 3:
81 | _zrange = (bbox[4] - 0.01, bbox[5] + 0.01)
82 | points.append(
83 | [
84 | (
85 | random.uniform(*_xrange),
86 | random.uniform(*_yrange),
87 | random.uniform(*_zrange),
88 | )
89 | for i in range(N)
90 | ]
91 | )
92 | points = np.asarray(points)
93 | points = points.reshape(-1, dim)
94 | return points
95 | 
96 | 
97 | def _plot(geo, filename=None, samples=100000):
98 | p = _generate_samples(geo.bbox, 2, N=samples)
99 | d = geo.eval(p)
100 | ix = np.logical_and(d > -0.0001, d < 0.0001)
101 | 
102 | fig = plt.figure()
103 | ax = fig.add_subplot(111) # , projection="3d")
104 | im = ax.scatter(p[ix, 0], p[ix, 1], c=d[ix], marker=".", s=5.0)
105 | ax.set_xlabel("X-axis")
106 | ax.set_ylabel("Y-axis")
107 | plt.title("Approximate 0-level set")
108 | fig.colorbar(im, ax=ax)
109 | im.set_clim(-0.001, 0.001)
110 | ax.set_aspect("auto")
111 | 
112 | if filename is None:
113 | plt.show()
114 | else:
115 | plt.savefig(filename)
116 | 
117 | 
118 | class Domain:
119 | def __init__(self, bbox, func, covering=None):
120 | self.bbox = bbox
121 | self.domain = func
122 | self.covering = covering
123 | 
124 | def eval(self, x):
125 | return self.domain(x)
126 | 
127 | def plot(self, filename=None, samples=10000):
128 | _plot(self, filename=filename, samples=samples)
129 | 
130 | 
131 | def _compute_bbox(domains):
132 | bbox = (
133 | min(d.bbox[0] for d in domains),
134 | max(d.bbox[1] for d in domains),
135 | min(d.bbox[2] for d in domains),
136 | max(d.bbox[3] for d in domains),
137 | )
138 | return bbox
139 | 
140 | 
141 | # Note to self: these primitive operations are inexact.
142 | # Recall: https://www.iquilezles.org/www/articles/distfunctions2d/distfunctions2d.htm
143 | class Union(Domain):
144 | def __init__(self, domains):
145 | bbox = _compute_bbox(domains)
146 | super().__init__(bbox, domains)
147 | 
148 | def eval(self, x):
149 | d = [d.eval(x) for d in self.domain]
150 | return np.minimum.reduce(d)
151 | 
152 | 
153 | class Intersection(Domain):
154 | def __init__(self, domains):
155 | bbox = _compute_bbox(domains)
156 | super().__init__(bbox, domains)
157 | 
158 | def eval(self, x):
159 | d = [d.eval(x) for d in self.domain]
160 | return np.maximum.reduce(d)
161 | 
162 | 
163 | class Difference(Domain):
164 | def __init__(self, domains):
165 | bbox = _compute_bbox(domains)
166 | super().__init__(bbox, domains)
167 | 
168 | def eval(self, x):
169 | return np.maximum.reduce(
170 | [-d.eval(x) if n > 0 else d.eval(x) for n, d in enumerate(self.domain)]
171 | )
172 | 
173 | 
174 | def signed_distance_function(shoreline, invert=False):
175 | """Takes a :class:`Shoreline` object containing linear segments representing meshing boundaries
176 | and calculates a signed distance function with it under the assumption that all polygons are closed.
177 | The returned function `func` becomes a bound method of the :class:`Domain` and is queried during
178 | mesh generation several times per iteration.
179 | 
180 | Parameters
181 | ----------
182 | shoreline: a :class:`Shoreline` object
183 | The processed shapefile data from :class:`Geodata`
184 | invert: boolean, optional
185 | Invert the definition of the domain.
186 | 
187 | Returns
188 | -------
189 | domain: a :class:`Domain` object
190 | Contains a signed distance function along with an extent `bbox`
191 | 
192 | """
193 | logger.info("Building a signed distance function...")
194 | 
195 | assert isinstance(shoreline, Shoreline), "shoreline is not a Shoreline object"
196 | poly = np.vstack((shoreline.inner, shoreline.boubox))
197 | tree = scipy.spatial.cKDTree(
198 | poly[~np.isnan(poly[:, 0]), :], balanced_tree=False, leafsize=50
199 | )
200 | e = edges.get_poly_edges(poly)
201 | 
202 | boubox = np.nan_to_num(shoreline.boubox)
203 | e_box = edges.get_poly_edges(shoreline.boubox)
204 | 
205 | def func(x):
206 | # Initialize d with some positive number larger than geps
207 | dist = np.zeros(len(x)) + 1.0
208 | # are points inside the boubox?
209 | in_boubox, _ = inpoly2(x, boubox, e_box)
210 | # are points inside the shoreline?
211 | in_shoreline, _ = inpoly2(x, np.nan_to_num(poly), e)
212 | # compute dist to shoreline
213 | try:
214 | d, _ = tree.query(x, k=1, workers=-1)
215 | except TypeError: # older scipy releases call `workers` `n_jobs`
216 | d, _ = tree.query(x, k=1, n_jobs=-1)
217 | # d is negative inside the intersection of the
218 | # two areas and positive outside of it.
219 | cond = np.logical_and(in_shoreline, in_boubox)
220 | dist = (-1) ** (cond) * d
221 | if invert:
222 | dist *= -1
223 | return dist
224 | 
225 | poly2 = shoreline.boubox
226 | tree2 = scipy.spatial.cKDTree(
227 | poly2[~np.isnan(poly2[:, 0]), :], balanced_tree=False, leafsize=50
228 | )
229 | 
230 | def func_covering(x):
231 | # Initialize d with some positive number larger than geps
232 | dist = np.zeros(len(x)) + 1.0
233 | # are points inside the boubox?
234 | in_boubox, _ = inpoly2(x, boubox, e_box)
235 | # compute dist to shoreline
236 | try:
237 | d, _ = tree2.query(x, k=1, workers=-1)
238 | except TypeError: # older scipy releases call `workers` `n_jobs`
239 | d, _ = tree2.query(x, k=1, n_jobs=-1)
240 | # d is negative inside the boubox
241 | # and positive outside of it.
242 | dist = (-1) ** (in_boubox) * d
243 | return dist
244 | 
245 | return Domain(shoreline.bbox, func, covering=func_covering)
246 | 
247 | 
248 | def _create_boubox(bbox):
249 | """Create a bounding box from domain extents `bbox`. Path orientation will be CCW."""
250 | xmin, xmax, ymin, ymax = bbox
251 | return np.array(
252 | [[xmin, ymin], [xmax, ymin], [xmax, ymax], [xmin, ymax], [xmin, ymin]],
253 | dtype=float,
254 | )
255 | 
256 | 
257 | def multiscale_signed_distance_function(signed_distance_functions):
258 | """Takes a list of :class:`Domain` objects and calculates a signed distance
259 | function from each one that represents a multiscale meshing domain.
260 | 
261 | Parameters
262 | ----------
263 | signed_distance_functions: a list of `signed_distance_function` objects
264 | 
265 | Returns
266 | -------
267 | union: a :class:`Union` object
268 | The union of the `signed_distance_functions`
269 | nests: a list of :class:`Difference` objects
270 | Nested domains are set differenced from their parent domains.
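[Editor's note] An illustrative sketch (not part of the module) of composing the `Domain` primitives defined above; it punches a hole in a box with `Difference` and checks the sign convention (negative inside, positive outside). All coordinates are invented:

```python
import numpy as np
from oceanmesh.signed_distance_function import create_bbox, Union, Difference

outer = create_bbox((0.0, 2.0, 0.0, 2.0))  # 2x2 box
inner = create_bbox((0.5, 1.5, 0.5, 1.5))  # nested 1x1 box

hole = Difference([outer, inner])  # outer with inner removed
both = Union([outer, inner])       # same region as outer, since inner is nested

pts = np.array([[1.0, 1.0], [0.25, 0.25], [3.0, 3.0]])
print(hole.eval(pts))  # center point is now outside (positive),
                       # the ring point is negative, the far point positive
```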
271 | """ 272 | logger.info("Building a multiscale signed distance function...") 273 | 274 | msg = "`signed_distance_functions` is not a list" 275 | assert isinstance(signed_distance_functions, list), msg 276 | assert len(signed_distance_functions) > 1, "Use `signed_distance_function` instead" 277 | msg = "list does not contain all `signed_distance_function`" 278 | for sdf in signed_distance_functions: 279 | assert isinstance(sdf, Domain), msg 280 | 281 | # calculate the boolean/set difference from the base sdf and subsequent nests 282 | nests = [] 283 | for i, sdf in enumerate(signed_distance_functions): 284 | # set eval method to covering 285 | tmp = [Domain(s.bbox, s.covering) for s in signed_distance_functions[i + 1 :]] 286 | nests.append(Difference([sdf, *tmp])) 287 | 288 | union = Union(nests) 289 | 290 | return union, nests 291 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=42", "wheel", "pybind11>=2.6.0", "versioneer-518"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [tool.black] 6 | target-version = ['py39'] 7 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = oceanmesh 3 | url = https://github.com/krober10nd/oceanmesh 4 | author = Keith Roberts 5 | email = keithrbt0@gmail.com 6 | description = Automatic coastal ocean mesh generation. 7 | long_description = file: README.md 8 | long_description_content_type = text/markdown 9 | license = GPL-3.0-or-later 10 | license_files = LICENSE.txt 11 | classifiers = 12 | Development Status :: 4 - Beta 13 | License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+) 14 | Operating System :: OS Independent 15 | Programming Language :: Python :: 3.7 16 | Programming Language :: Python :: 3.8 17 | Programming Language :: Python :: 3.9 18 | Topic :: Scientific/Engineering 19 | Topic :: Scientific/Engineering :: Mathematics 20 | Topic :: Scientific/Engineering :: Physics 21 | Topic :: Scientific/Engineering :: Visualization 22 | [options] 23 | packages = find: 24 | install_requires = 25 | numpy 26 | inpoly 27 | meshio 28 | pyproj 29 | scipy 30 | geopandas 31 | fiona 32 | shapely 33 | matplotlib 34 | rasterio>=1.3a1 35 | rioxarray 36 | scikit-fmm 37 | scikit-image 38 | python_requires = >=3.0 39 | 40 | [versioneer] 41 | VCS = git 42 | style = pep440 43 | versionfile_source = oceanmesh/_version.py 44 | versionfile_build = oceanmesh/_version.py 45 | tag_prefix = v 46 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import configparser 4 | 5 | from pybind11.setup_helpers import Pybind11Extension, build_ext 6 | from setuptools import setup # , find_packages 7 | 8 | import versioneer 9 | 10 | sys.path.append(os.path.dirname(__file__)) 11 | 12 | # https://github.com/pybind/python_example/ 13 | is_called = [ 14 | "_HamiltonJacobi", 15 | "_delaunay_class", 16 | "_fast_geometry", 17 | ] 18 | 19 | files = [ 20 | "oceanmesh/cpp/HamiltonJacobi.cpp", 21 | "oceanmesh/cpp/delaunay_class.cpp", 22 | "oceanmesh/cpp/fast_geometry.cpp", 23 | ] 24 | 25 | if os.name == "nt": 26 | home = os.environ["USERPROFILE"].replace("\\", "/") 27 | vcpkg = 
f"{home}/OceanMesh/vcpkg/installed/x64-windows" 28 | ext_modules = [ 29 | Pybind11Extension( 30 | loc, 31 | [fi], 32 | include_dirs=[f"{vcpkg}/include"], 33 | extra_link_args=[f"/LIBPATH:{vcpkg}/lib"], 34 | libraries=["gmp", "mpfr"], 35 | ) 36 | for fi, loc in zip(files, is_called) 37 | ] 38 | else: 39 | # no CGAL libraries necessary from CGAL 5.0 onwards 40 | ext_modules = [ 41 | Pybind11Extension(loc, [fi], libraries=["gmp", "mpfr"]) 42 | for fi, loc in zip(files, is_called) 43 | ] 44 | 45 | cmdclass = versioneer.get_cmdclass() 46 | cmdclass.update({"build_ext": build_ext}) 47 | 48 | 49 | def get_requirements(): 50 | """ 51 | Fix 52 | """ 53 | 54 | config = configparser.ConfigParser() 55 | config.read("setup.cfg") 56 | requirements = config["options"]["install_requires"].split() 57 | 58 | if sys.version_info < (3, 9): 59 | requirements.remove("fiona") 60 | requirements.append("fiona<1.10") 61 | 62 | return requirements 63 | 64 | 65 | if __name__ == "__main__": 66 | setup( 67 | install_requires=get_requirements(), 68 | cmdclass=cmdclass, 69 | version=versioneer.get_version(), 70 | ext_modules=ext_modules, 71 | zip_safe=False, 72 | ) 73 | -------------------------------------------------------------------------------- /tests/GSHHS_i_L1.dbf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CHLNDDEV/oceanmesh/478fe78128e8d33a6c0404a276fb6dbad72d0779/tests/GSHHS_i_L1.dbf -------------------------------------------------------------------------------- /tests/GSHHS_i_L1.prj: -------------------------------------------------------------------------------- 1 | GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]] -------------------------------------------------------------------------------- /tests/GSHHS_i_L1.shp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CHLNDDEV/oceanmesh/478fe78128e8d33a6c0404a276fb6dbad72d0779/tests/GSHHS_i_L1.shp -------------------------------------------------------------------------------- /tests/GSHHS_i_L1.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CHLNDDEV/oceanmesh/478fe78128e8d33a6c0404a276fb6dbad72d0779/tests/GSHHS_i_L1.shx -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CHLNDDEV/oceanmesh/478fe78128e8d33a6c0404a276fb6dbad72d0779/tests/__init__.py -------------------------------------------------------------------------------- /tests/galv_sub.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CHLNDDEV/oceanmesh/478fe78128e8d33a6c0404a276fb6dbad72d0779/tests/galv_sub.nc -------------------------------------------------------------------------------- /tests/galv_sub.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CHLNDDEV/oceanmesh/478fe78128e8d33a6c0404a276fb6dbad72d0779/tests/galv_sub.tif -------------------------------------------------------------------------------- /tests/global/global_latlon.cpg: -------------------------------------------------------------------------------- 1 | ISO-8859-1 
-------------------------------------------------------------------------------- /tests/global/global_latlon.dbf: -------------------------------------------------------------------------------- 1 | { =AFIDN 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 -------------------------------------------------------------------------------- /tests/global/global_latlon.prj: -------------------------------------------------------------------------------- 1 | GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]] -------------------------------------------------------------------------------- /tests/global/global_latlon.shp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CHLNDDEV/oceanmesh/478fe78128e8d33a6c0404a276fb6dbad72d0779/tests/global/global_latlon.shp -------------------------------------------------------------------------------- /tests/global/global_latlon.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CHLNDDEV/oceanmesh/478fe78128e8d33a6c0404a276fb6dbad72d0779/tests/global/global_latlon.shx -------------------------------------------------------------------------------- /tests/global/global_stereo.cpg: -------------------------------------------------------------------------------- 1 | ISO-8859-1 -------------------------------------------------------------------------------- /tests/global/global_stereo.dbf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CHLNDDEV/oceanmesh/478fe78128e8d33a6c0404a276fb6dbad72d0779/tests/global/global_stereo.dbf -------------------------------------------------------------------------------- /tests/global/global_stereo.prj: -------------------------------------------------------------------------------- 1 | GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]] -------------------------------------------------------------------------------- /tests/global/global_stereo.shp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CHLNDDEV/oceanmesh/478fe78128e8d33a6c0404a276fb6dbad72d0779/tests/global/global_stereo.shp -------------------------------------------------------------------------------- /tests/global/global_stereo.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CHLNDDEV/oceanmesh/478fe78128e8d33a6c0404a276fb6dbad72d0779/tests/global/global_stereo.shx -------------------------------------------------------------------------------- /tests/islands.cpg: -------------------------------------------------------------------------------- 1 | UTF-8 -------------------------------------------------------------------------------- /tests/islands.dbf: -------------------------------------------------------------------------------- 1 | yA idN 2 | 1 2 2 3 -------------------------------------------------------------------------------- /tests/islands.prj: -------------------------------------------------------------------------------- 1 | 
GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]]
--------------------------------------------------------------------------------
/tests/islands.shp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CHLNDDEV/oceanmesh/478fe78128e8d33a6c0404a276fb6dbad72d0779/tests/islands.shp
--------------------------------------------------------------------------------
/tests/islands.shx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CHLNDDEV/oceanmesh/478fe78128e8d33a6c0404a276fb6dbad72d0779/tests/islands.shx
--------------------------------------------------------------------------------
/tests/ocean.cpg:
--------------------------------------------------------------------------------
1 | UTF-8
--------------------------------------------------------------------------------
/tests/ocean.dbf:
--------------------------------------------------------------------------------
1 | yA idN
2 | 1
--------------------------------------------------------------------------------
/tests/ocean.prj:
--------------------------------------------------------------------------------
1 | GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]]
--------------------------------------------------------------------------------
/tests/ocean.shp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CHLNDDEV/oceanmesh/478fe78128e8d33a6c0404a276fb6dbad72d0779/tests/ocean.shp
--------------------------------------------------------------------------------
/tests/ocean.shx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CHLNDDEV/oceanmesh/478fe78128e8d33a6c0404a276fb6dbad72d0779/tests/ocean.shx
--------------------------------------------------------------------------------
/tests/test_bathymetric_gradient_function.py:
--------------------------------------------------------------------------------
1 | import os
2 | 
3 | import pytest
4 | 
5 | import oceanmesh as om
6 | 
7 | fname = os.path.join(os.path.dirname(__file__), "GSHHS_i_L1.shp")
8 | fdem = os.path.join(os.path.dirname(__file__), "../datasets/EastCoast.nc")
9 | 
10 | 
11 | def generate_mesh(name, signed_distance, edge_length, max_iterations=50):
12 | points, cells = om.generate_mesh(
13 | signed_distance, edge_length, max_iter=max_iterations
14 | )
15 | # Makes sure the vertices of each triangle are arranged in an anti-clockwise manner
16 | points, cells, jx = om.fix_mesh(points, cells)
17 | # mesh_plot(points, cells, "1 Original Mesh")
18 | 
19 | # remove degenerate mesh faces and other common problems in the mesh
20 | points, cells = om.make_mesh_boundaries_traversable(points, cells)
21 | # mesh_plot(points, cells, "2 Degenerate Elements Removed")
22 | 
23 | # Remove faces connected to only one other face
24 | points, cells = om.delete_faces_connected_to_one_face(points, cells)
25 | # mesh_plot(points, cells, "3 Deleted Faces Connected to One Face")
26 | 
27 | # remove boundary elements with quality lower than 15%
28 | points, cells = om.delete_boundary_faces(points, cells, min_qual=0.15)
29 | # mesh_plot(points, cells, "4 Deleted Boundary Faces")
30 | 
31 | # apply a Laplacian smoother
32 | P, T = om.laplacian2(points, cells) # Final
post-processed mesh
33 | mesh_plot(P, T, f"Post-processed mesh with {name} edge function")
34 | 
35 | return P, T
36 | 
37 | 
38 | def mesh_plot(points, cells, plot_title=""):
39 | import matplotlib.pyplot as pt
40 | 
41 | fig, ax = pt.subplots()
42 | ax.set_xlabel("longitude (EPSG:4326 degrees)")
43 | ax.set_ylabel("latitude (EPSG:4326 degrees)")
44 | ax.set_title(plot_title)
45 | 
46 | X, Y = points.T
47 | ax.plot(X, Y, "bx", markersize=1)
48 | ax.triplot(X, Y, cells, linewidth=0.2, color="red")
49 | ax.set_aspect("equal")
50 | pt.show()
51 | 
52 | 
53 | @pytest.mark.skip(reason="not implemented yet")
54 | def test_bathymetric_gradient_function():
55 | EPSG = 4326 # EPSG:4326 or WGS84
56 | bbox = (-74.4, -73.4, 40.2, 41.2)
57 | extent = om.Region(extent=bbox, crs=EPSG)
58 | dem = om.DEM(fdem, crs=4326)
59 | 
60 | min_edge_length = 0.0025 # minimum mesh size in domain in projection
61 | max_edge_length = 0.10 # maximum mesh size in domain in projection
62 | shoreline = om.Shoreline(fname, extent.bbox, min_edge_length)
63 | sdf = om.signed_distance_function(shoreline)
64 | 
65 | edge_length1 = om.feature_sizing_function(
66 | shoreline,
67 | sdf,
68 | max_edge_length=max_edge_length,
69 | crs=EPSG,
70 | )
71 | edge_length2 = om.bathymetric_gradient_sizing_function(
72 | dem,
73 | slope_parameter=5.0,
74 | filter_quotient=50,
75 | min_edge_length=min_edge_length,
76 | max_edge_length=max_edge_length,
77 | crs=EPSG,
78 | )
79 | edge_length3 = om.compute_minimum([edge_length1, edge_length2])
80 | edge_length3 = om.enforce_mesh_gradation(edge_length3, gradation=0.15)
81 | 
82 | for name_, edge_length in zip(
83 | [
84 | "Feature Sizing",
85 | "Bathymetric Gradient",
86 | "Feature Sizing & Bathymetric Gradient",
87 | ],
88 | [edge_length1, edge_length2, edge_length3],
89 | ):
90 | print(f"Generating mesh associated with {name_}")
91 | edge_length_ = om.enforce_mesh_gradation(edge_length, gradation=0.15)
92 | generate_mesh(name_, sdf, edge_length_)
--------------------------------------------------------------------------------
/tests/test_circ_rect.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os
3 | 
4 | import fiona
5 | import geopandas as gpd
6 | import matplotlib.pyplot as pyplot
7 | import numpy
8 | 
9 | import oceanmesh
10 | 
11 | shp0 = os.path.join(os.path.dirname(__file__), "ocean.shp")
12 | shp = os.path.join(os.path.dirname(__file__), "islands.shp")
13 | 
14 | logger = logging.getLogger(__name__)
15 | 
16 | 
17 | def test_circ():
18 | logger.info(f"--> Fiona version: {fiona.__version__}")
19 | logger.info(f"--> Geopandas version: {gpd.__version__}")
20 | file = gpd.read_file(shp0)
21 | for g in file.geometry:
22 | bbox = numpy.asarray(g.exterior.coords.xy).T
23 | bbox = numpy.append(bbox, [[numpy.nan, numpy.nan]], axis=0)
24 | 
25 | min_edge_length = 0.02 # units: EPSG:4326
26 | max_edge_length = 0.1
27 | 
28 | region = oceanmesh.Region(bbox, 4326)
29 | shore = oceanmesh.Shoreline(shp, region.bbox, min_edge_length)
30 | edge_length = oceanmesh.distance_sizing_function(
31 | shore, rate=0.10, max_edge_length=max_edge_length
32 | )
33 | domain = oceanmesh.signed_distance_function(shore)
34 | 
35 | points, cells = oceanmesh.generate_mesh(domain, edge_length)
36 | 
37 | pyplot.figure(1)
38 | pyplot.clf()
39 | pyplot.triplot(points[:, 0], points[:, 1], cells, "-", lw=0.5, color="0.5")
40 | pyplot.plot(shore.boubox[:, 0], shore.boubox[:, 1], "-", color="r", markersize=0)
41 | pyplot.plot(shore.inner[:, 0], shore.inner[:, 1], ".",
color="gray", markersize=2) 42 | pyplot.plot( 43 | shore.mainland[:, 0], shore.mainland[:, 1], "-", color="green", linewidth=0.5 44 | ) 45 | pyplot.gca().axis("equal") 46 | pyplot.show() 47 | 48 | 49 | def test_rect(): 50 | bbox = (0.4, 1.6, -0.6, 0.6) 51 | 52 | min_edge_length = 0.02 # units: EPSG:4326 53 | max_edge_length = 0.1 54 | 55 | region = oceanmesh.Region(bbox, 4326) 56 | shore = oceanmesh.Shoreline(shp, region.bbox, min_edge_length) 57 | edge_length = oceanmesh.distance_sizing_function( 58 | shore, rate=0.10, max_edge_length=max_edge_length 59 | ) 60 | domain = oceanmesh.signed_distance_function(shore) 61 | 62 | points, cells = oceanmesh.generate_mesh(domain, edge_length) 63 | 64 | pyplot.figure(1) 65 | pyplot.clf() 66 | pyplot.triplot(points[:, 0], points[:, 1], cells, "-", lw=0.5, color="0.5") 67 | pyplot.plot(shore.boubox[:, 0], shore.boubox[:, 1], "-", color="r", markersize=0) 68 | pyplot.plot(shore.inner[:, 0], shore.inner[:, 1], ".", color="gray", markersize=2) 69 | pyplot.plot( 70 | shore.mainland[:, 0], shore.mainland[:, 1], "-", color="green", linewidth=0.5 71 | ) 72 | pyplot.gca().axis("equal") 73 | pyplot.show() 74 | -------------------------------------------------------------------------------- /tests/test_delete_exterior.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | import oceanmesh 4 | 5 | 6 | def test_del_exterior(): 7 | # a mesh that has 1/5 of the total area disjoint. 8 | p = np.array( 9 | [ 10 | [0, 0], 11 | [0, 0.33333], 12 | [0, 0.66667], 13 | [0, 1], 14 | [0.33333, 0], 15 | [0.33333, 0.33333], 16 | [0.33333, 0.66667], 17 | [0.33333, 1], 18 | [0.66667, 0], 19 | [0.66667, 0.33333], 20 | [0.66667, 0.66667], 21 | [0.66667, 1], 22 | [1, 0], 23 | [1, 0.33333], 24 | [1, 0.66667], 25 | [1, 1], 26 | [1.5, 1.5], 27 | [1.5, 1.625], 28 | [1.5, 1.75], 29 | [1.5, 1.875], 30 | [1.5, 2], 31 | [1.625, 1.5], 32 | [1.625, 1.625], 33 | [1.625, 1.75], 34 | [1.625, 1.875], 35 | [1.625, 2], 36 | [1.75, 1.5], 37 | [1.75, 1.625], 38 | [1.75, 1.75], 39 | [1.75, 1.875], 40 | [1.75, 2], 41 | [1.875, 1.5], 42 | [1.875, 1.625], 43 | [1.875, 1.75], 44 | [1.875, 1.875], 45 | [1.875, 2], 46 | [2, 1.5], 47 | [2, 1.625], 48 | [2, 1.75], 49 | [2, 1.875], 50 | [2, 2], 51 | ] 52 | ) 53 | t = np.array( 54 | [ 55 | [14, 10, 13], 56 | [5, 1, 4], 57 | [9, 5, 8], 58 | [0, 4, 1], 59 | [2, 1, 5], 60 | [6, 2, 5], 61 | [5, 4, 8], 62 | [7, 3, 6], 63 | [9, 6, 5], 64 | [3, 2, 6], 65 | [11, 7, 10], 66 | [11, 10, 14], 67 | [7, 6, 10], 68 | [15, 11, 14], 69 | [9, 8, 12], 70 | [10, 9, 13], 71 | [10, 6, 9], 72 | [9, 12, 13], 73 | [24, 19, 23], 74 | [18, 17, 22], 75 | [33, 32, 37], 76 | [34, 33, 38], 77 | [28, 23, 27], 78 | [21, 17, 16], 79 | [18, 23, 19], 80 | [21, 22, 17], 81 | [31, 36, 32], 82 | [26, 27, 22], 83 | [22, 21, 26], 84 | [23, 18, 22], 85 | [33, 34, 29], 86 | [19, 24, 20], 87 | [27, 23, 22], 88 | [29, 25, 24], 89 | [28, 24, 23], 90 | [25, 20, 24], 91 | [29, 30, 25], 92 | [29, 34, 30], 93 | [39, 40, 35], 94 | [39, 34, 38], 95 | [28, 27, 32], 96 | [35, 34, 39], 97 | [35, 30, 34], 98 | [37, 32, 36], 99 | [33, 28, 32], 100 | [29, 28, 33], 101 | [29, 24, 28], 102 | [27, 26, 31], 103 | [27, 31, 32], 104 | [38, 33, 37], 105 | ], 106 | dtype=int, 107 | ) 108 | A1 = np.sum(oceanmesh.simp_vol(p, t)) 109 | t2 = oceanmesh.delete_exterior_faces(p, t, 0.20) 110 | A2 = np.sum(oceanmesh.simp_vol(p, t2)) 111 | assert (A1 - A2) == 0.25 112 | -------------------------------------------------------------------------------- 
/tests/test_delete_interior.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | 
3 | import oceanmesh
4 | 
5 | 
6 | def test_del_interior():
7 | # a mesh with two interior faces that delete_interior_faces should remove
8 | p = np.array(
9 | [
10 | [0, 0],
11 | [0.5000, 0.5000],
12 | [1.0000, 0],
13 | [-0.5000, 0.5000],
14 | [1.5000, 0.5000],
15 | [0, 0.7500],
16 | [0.5000, 1.2500],
17 | [1.0000, 0.7500],
18 | [-0.5000, 1.2500],
19 | [1.5000, 1.2500],
20 | [0, 0.7500],
21 | [0.5000, 0.5000],
22 | [-0.5000, 0.5000],
23 | ]
24 | )
25 | 
26 | t = np.array(
27 | [
28 | [1, 2, 3],
29 | [4, 1, 2],
30 | [5, 2, 3],
31 | [6, 7, 8],
32 | [9, 6, 7],
33 | [10, 7, 8],
34 | [4, 6, 2],
35 | [2, 8, 5],
36 | ],
37 | dtype=int,
38 | )
39 | 
40 | t -= 1
41 | 
42 | cells, deleted = oceanmesh.delete_interior_faces(p, t)
43 | assert np.allclose(deleted, [6, 7])
--------------------------------------------------------------------------------
/tests/test_edgefx.py:
--------------------------------------------------------------------------------
1 | import os
2 | 
3 | import oceanmesh as om
4 | from oceanmesh import Shoreline, distance_sizing_function
5 | 
6 | dfname = os.path.join(os.path.dirname(__file__), "galv_sub.nc")
7 | fname = os.path.join(os.path.dirname(__file__), "GSHHS_i_L1.shp")
8 | 
9 | 
10 | def test_edgefx():
11 | region1 = om.Region(extent=(-75.0, -70.0, 38.0, 42.0), crs=4326)
12 | shore1 = Shoreline(fname, region1.bbox, 0.01)
13 | shore1.plot()
14 | 
15 | dis1 = distance_sizing_function(shore1)
16 | 
17 | region2 = om.Region(extent=(-74.0, -73.0, 40.0, 41.0), crs=4326)
18 | shore2 = Shoreline(fname, region2.bbox, 0.001)
19 | 
20 | dis2 = distance_sizing_function(shore2)
21 | dis2.extrapolate = False
22 | 
23 | dis3 = dis2.interpolate_to(dis1)
24 | 
25 | fig, ax, pc = dis3.plot(holding=True)
26 | shore1.plot(ax)
27 | 
28 | dis4 = dis1.interpolate_to(dis2)
29 | _, ax, _ = dis4.plot(holding=False)
30 | 
31 | 
32 | def test_edgefx_elevation_bounds():
33 | region = om.Region(extent=(-95.24, -95.21, 28.95, 29.00), crs=4326)
34 | 
35 | dem = om.DEM(dfname, bbox=region, crs=4326)
36 | 
37 | sho = om.Shoreline(fname, region.bbox, 0.005)
38 | sho.plot()
39 | 
40 | edge_length = om.distance_sizing_function(sho)
41 | 
42 | bounds = [[0.02, 0.03, -10, -5], [0.01, 0.015, -5, -1]]
43 | edge_length = om.enforce_mesh_size_bounds_elevation(edge_length, dem, bounds)
44 | edge_length.plot()
45 | 
46 | 
47 | def test_edgefx_medial_axis():
48 | region, min_edge_length = (
49 | om.Region(extent=(-75.000, -70.001, 40.0001, 41.9000), crs=4326),
50 | 0.01,
51 | )
52 | 
53 | shoreline = om.Shoreline(fname, region.bbox, min_edge_length)
54 | sdf = om.signed_distance_function(shoreline)
55 | 
56 | # Visualize the medial points
57 | edge_length = om.feature_sizing_function(
58 | shoreline, sdf, max_edge_length=5e3, plot=True
59 | )
60 | fig, ax, pc = edge_length.plot(
61 | xlabel="longitude (WGS84 degrees)",
62 | ylabel="latitude (WGS84 degrees)",
63 | title="Feature sizing function",
64 | cbarlabel="mesh size (degrees)",
65 | holding=True,
66 | xlim=[-74.3, -73.8],
67 | ylim=[40.3, 40.8],
68 | )
69 | shoreline.plot(ax=ax)
--------------------------------------------------------------------------------
/tests/test_edges.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | 
3 | from oceanmesh import edges
4 | 
5 | nan = np.nan
6 | 
7 | 
8 | def test_edges():
9 | poly = np.array(
10 | [
11 | [0.0, 0.0],
12 | [1.0, 0.0],
13 | [1.0, 1.0],
14
| [0.0, 1.0],
15 | [0.0, 0.0],
16 | [nan, nan],
17 | [0.2, 0.2],
18 | [0.4, 0.2],
19 | [0.3, 0.3],
20 | [0.2, 0.2],
21 | [nan, nan],
22 | ]
23 | )
24 | 
25 | e = edges.get_poly_edges(poly)
26 | edges.draw_edges(poly, e)
--------------------------------------------------------------------------------
/tests/test_geodata.py:
--------------------------------------------------------------------------------
1 | import os
2 | 
3 | import pytest
4 | 
5 | from oceanmesh import DEM, Region, Shoreline, edges
6 | 
7 | fname = os.path.join(os.path.dirname(__file__), "GSHHS_i_L1.shp")
8 | dfname = os.path.join(os.path.dirname(__file__), "galv_sub.nc")
9 | tfname = os.path.join(os.path.dirname(__file__), "galv_sub.tif")
10 | 
11 | 
12 | @pytest.mark.parametrize(
13 | "boxes_h0",
14 | [((166.0, 176.0, -48.0, -40.0), 0.01), ((-74.0, -70.0, 35.0, 42.0), 0.005)],
15 | )
16 | def test_shoreline(boxes_h0):
17 | """Read in a shoreline shapefile at different scales h0
18 | and test that you get the right output"""
19 | bbox, h0 = boxes_h0
20 | region = Region(bbox, 4326)
21 | shp = Shoreline(fname, region.bbox, h0, crs=region.crs)
22 | assert len(shp.inner) > 0
23 | assert len(shp.mainland) > 0
24 | e = edges.get_poly_edges(shp.inner)
25 | edges.draw_edges(shp.inner, e)
26 | 
27 | 
28 | @pytest.mark.parametrize(
29 | "files_bboxes",
30 | [
31 | (
32 | dfname,
33 | (-95.24, -95.21, 28.95, 29.00),
34 | ),
35 | (
36 | tfname,
37 | (-95.24, -95.21, 28.95, 29.00),
38 | ),
39 | ],
40 | )
41 | def test_geodata(files_bboxes):
42 | """Read in a subset of a DEM from netcdf/tif"""
43 | 
44 | f, bbox = files_bboxes
45 | region = Region(bbox, 4326)
46 | dem = DEM(f, bbox=region, crs=region.crs)
47 | assert isinstance(dem, DEM), "DEM class did not form"
--------------------------------------------------------------------------------
/tests/test_global_stereo.py:
--------------------------------------------------------------------------------
1 | import os
2 | 
3 | import oceanmesh as om
4 | 
5 | # Note: global_stereo.shp has been generated using global_tag in pyposeidon
6 | # https://github.com/ec-jrc/pyPoseidon/blob/9cfd3bbf5598c810004def83b1f43dc5149addd0/pyposeidon/boundary.py#L452
7 | fname = os.path.join(os.path.dirname(__file__), "global", "global_latlon.shp")
8 | fname2 = os.path.join(os.path.dirname(__file__), "global", "global_stereo.shp")
9 | 
10 | 
11 | def test_global_stereo():
12 | # it is necessary to define all the coastlines at once:
13 | # the Shoreline class will then detect the biggest coastline (Antarctica)
14 | # and define it as the outside boundary
15 | 
16 | EPSG = 4326 # EPSG:4326 or WGS84
17 | bbox = (-180.00, 180.00, -89.00, 90.00)
18 | extent = om.Region(extent=bbox, crs=4326)
19 | 
20 | min_edge_length = 0.5 # minimum mesh size in domain in degrees
21 | max_edge_length = 2 # maximum mesh size in domain in degrees
22 | shoreline = om.Shoreline(fname, extent.bbox, min_edge_length)
23 | sdf = om.signed_distance_function(shoreline)
24 | edge_length0 = om.distance_sizing_function(shoreline, rate=0.11)
25 | edge_length1 = om.feature_sizing_function(
26 | shoreline,
27 | sdf,
28 | min_edge_length=min_edge_length,
29 | max_edge_length=max_edge_length,
30 | crs=EPSG,
31 | )
32 | 
33 | edge_length = om.compute_minimum([edge_length0, edge_length1])
34 | edge_length = om.enforce_mesh_gradation(edge_length, gradation=0.09, stereo=True)
35 | 
36 | # once the size functions have been defined, we need to mesh the domain in
37 | # stereographic projection.
38 |     # has already been translated into a stereographic projection
39 |     shoreline_stereo = om.Shoreline(fname2, extent.bbox, min_edge_length, stereo=True)
40 |     domain = om.signed_distance_function(shoreline_stereo)
41 |
42 |     points, cells = om.generate_mesh(domain, edge_length, stereo=True, max_iter=100)
43 |
44 |     # remove degenerate mesh faces and other common problems in the mesh
45 |     points, cells = om.make_mesh_boundaries_traversable(points, cells)
46 |     points, cells = om.delete_faces_connected_to_one_face(points, cells)
47 |
48 |     # apply a Laplacian smoother
49 |     points, cells = om.laplacian2(points, cells, max_iter=100)
50 |
51 |     # plot
52 |     fig, ax, _ = edge_length.plot(
53 |         holding=True,
54 |         plot_colorbar=True,
55 |         stereo=True,
56 |         vmax=max_edge_length,
57 |     )
58 |
59 |     ax.triplot(points[:, 0], points[:, 1], cells, color="gray", linewidth=0.5)
60 |     shoreline_stereo.plot(ax=ax)
61 |
--------------------------------------------------------------------------------
/tests/test_grade.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import matplotlib.pyplot as plt
4 | import matplotlib.tri as tri
5 |
6 | import oceanmesh as om
7 |
8 |
9 | def test_grade():
10 |     fname = os.path.join(os.path.dirname(__file__), "GSHHS_i_L1.shp")
11 |
12 |     region = om.Region((-75.000, -70.001, 40.0001, 41.9000), 4326)
13 |     min_edge_length = 0.01
14 |
15 |     shore = om.Shoreline(fname, region.bbox, min_edge_length)
16 |
17 |     edge_length = om.distance_sizing_function(shore, rate=0.35)
18 |
19 |     test_edge_length = om.enforce_mesh_gradation(edge_length, gradation=0.20)
20 |     test_edge_length.plot(filename="test_grade_edge_length.png")
21 |
22 |     domain = om.signed_distance_function(shore)
23 |
24 |     points, cells = om.generate_mesh(domain, test_edge_length, max_iter=100)
25 |
26 |     points, cells = om.make_mesh_boundaries_traversable(points, cells)
27 |
28 |     points, cells = om.delete_faces_connected_to_one_face(points, cells)
29 |
30 |     # plot
31 |     fig, ax = plt.subplots()
32 |     ax.set_aspect("equal")
33 |     triang = tri.Triangulation(points[:, 0], points[:, 1], cells)
34 |     ax.triplot(triang, "-", lw=1)
35 |     plt.show()
36 |
--------------------------------------------------------------------------------
/tests/test_irregular_domain.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import matplotlib.pyplot as plt
4 | import matplotlib.tri as tri
5 | import numpy as np
6 |
7 | import oceanmesh as om
8 |
9 |
10 | def test_irregular_domain():
11 |     fname = os.path.join(os.path.dirname(__file__), "GSHHS_i_L1.shp")
12 |
13 |     # New York Lower Bay and Jamaica Bay
14 |     bbox = np.array(
15 |         [
16 |             [-74.1588, 40.5431],
17 |             [-74.1215, 40.4847],
18 |             [-74.0261, 40.4660],
19 |             [-73.9369, 40.5034],
20 |             [-73.8166, 40.5104],
21 |             [-73.7524, 40.5711],
22 |             [-73.7627, 40.6669],
23 |             [-73.8436, 40.6809],
24 |             [-73.9473, 40.6552],
25 |             [-74.0883, 40.6155],
26 |             [-74.1588, 40.5431],
27 |         ]
28 |     )
29 |
30 |     min_edge_length = 0.001
31 |
32 |     region = om.Region(bbox, 4326)
33 |     shore = om.Shoreline(fname, region.bbox, min_edge_length)
34 |     shore.plot(file_name="test_irregular_domain.png", show=False)
35 |
36 |     edge_length = om.distance_sizing_function(shore, max_edge_length=0.01)
37 |
38 |     domain = om.signed_distance_function(shore)
39 |
40 |     points, cells = om.generate_mesh(domain, edge_length, max_iter=50)
41 |
42 |     points, cells = om.make_mesh_boundaries_traversable(points, cells)
43 |
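    # remove singly connected elements (elements connected to only one other element)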
44 |     points, cells = om.delete_faces_connected_to_one_face(points, cells)
45 |
46 |     # plot
47 |     fig, ax = plt.subplots()
48 |     ax.set_aspect("equal")
49 |     triang = tri.Triangulation(points[:, 0], points[:, 1], cells)
50 |     ax.triplot(triang, "-", lw=1)
51 |     plt.savefig("test_irregular_domain_mesh.png")
52 |     plt.show()
53 |
--------------------------------------------------------------------------------
/tests/test_mesh_generator_simple.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from oceanmesh import generate_mesh, simp_vol
4 |
5 |
6 | def test_mesh_generator_rectangle():
7 |     min_edge_length = 0.1
8 |     bbox = (0.0, 1.0, 0.0, 1.0)
9 |
10 |     def drectangle(p, x1, x2, y1, y2):
11 |         min = np.minimum
12 |         return -min(min(min(-y1 + p[:, 1], y2 - p[:, 1]), -x1 + p[:, 0]), x2 - p[:, 0])
13 |
14 |     def domain(x):
15 |         return drectangle(x, *bbox)
16 |
17 |     def edge_length(p):
18 |         return np.array([0.1] * len(p))
19 |
20 |     points, cells = generate_mesh(
21 |         domain=domain,
22 |         edge_length=edge_length,
23 |         min_edge_length=min_edge_length,
24 |         bbox=bbox,
25 |     )
26 |
27 |     assert np.isclose(np.sum(simp_vol(points, cells)), 1.0, 0.01)
28 |
--------------------------------------------------------------------------------
/tests/test_multiscale.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import matplotlib.gridspec as gridspec
4 | import matplotlib.pyplot as plt
5 | import matplotlib.tri as tri
6 | import numpy as np
7 |
8 | import oceanmesh as om
9 |
10 | fname = os.path.join(os.path.dirname(__file__), "GSHHS_i_L1.shp")
11 | EPSG = 4326  # EPSG:4326 or WGS84
12 |
13 |
14 | def test_multiscale_overlap():
15 |     extent1 = om.Region(extent=(-75.00, -70.001, 40.0001, 41.9000), crs=EPSG)
16 |     min_edge_length1 = 1000.0e-5  # ~1.0km
17 |     max_edge_length1 = 12500.0e-5  # ~12.5km
18 |
19 |     bbox2 = np.array(
20 |         [
21 |             [-73.9481, 40.6028],
22 |             [-74.0186, 40.5688],
23 |             [-73.9366, 40.5362],
24 |             [-73.7269, 40.5626],
25 |             [-73.7231, 40.6459],
26 |             [-73.8242, 40.6758],
27 |             [-73.9481, 40.6028],
28 |         ],
29 |         dtype=float,
30 |     )
31 |     extent2 = om.Region(extent=bbox2, crs=EPSG)
32 |     min_edge_length2 = 600.0e-5  # ~600m
33 |
34 |     bbox3 = np.array(
35 |         [
36 |             [-73.8262, 40.6500],
37 |             [-73.8230, 40.6000],
38 |             [-73.7500, 40.6030],
39 |             [-73.7450, 40.6430],
40 |             [-73.8262, 40.6500],
41 |         ],
42 |         dtype=float,
43 |     )
44 |     extent3 = om.Region(extent=bbox3, crs=EPSG)
45 |     min_edge_length3 = 300.0e-5  # ~300m
46 |
47 |     s1 = om.Shoreline(fname, extent1.bbox, min_edge_length1)
48 |     sdf1 = om.signed_distance_function(s1)
49 |     el1 = om.distance_sizing_function(s1, max_edge_length=max_edge_length1)
50 |
51 |     s2 = om.Shoreline(fname, extent2.bbox, min_edge_length2)
52 |     sdf2 = om.signed_distance_function(s2)
53 |     el2 = om.distance_sizing_function(s2)
54 |
55 |     s3 = om.Shoreline(fname, extent3.bbox, min_edge_length3)
56 |     sdf3 = om.signed_distance_function(s3)
57 |     el3 = om.distance_sizing_function(s3)
58 |
59 |     # Control the element size transition
60 |     # from coarse to fine with the kwargs prefixed with `blend`
61 |     points, cells = om.generate_multiscale_mesh(
62 |         [sdf1, sdf2, sdf3], [el1, el2, el3], blend_width=1000, blend_max_iter=100
63 |     )
64 |     # remove degenerate mesh faces and other common problems in the mesh
65 |     points, cells = om.make_mesh_boundaries_traversable(points, cells)
66 |     # remove singly connected elements (elements
67 |     # connected to only one other element)
68 |     points, cells = om.delete_faces_connected_to_one_face(points, cells)
69 |     # remove poor boundary elements with quality < 15%
70 |     points, cells = om.delete_boundary_faces(points, cells, min_qual=0.15)
71 |     # apply a Laplacian smoother that preserves the mesh size distribution
72 |     points, cells = om.laplacian2(points, cells)
73 |
74 |     triang = tri.Triangulation(points[:, 0], points[:, 1], cells)
75 |     gs = gridspec.GridSpec(3, 1)
76 |     gs.update(wspace=0.1)
77 |     plt.figure(figsize=[4.8, 6.4])
78 |
79 |     ax = plt.subplot(gs[0, 0])
80 |     ax.set_aspect("equal")
81 |     ax.triplot(triang, "-", lw=0.5)
82 |     ax.plot(bbox2[:, 0], bbox2[:, 1], "r--")
83 |     ax.plot(bbox3[:, 0], bbox3[:, 1], "r--")
84 |
85 |     ax = plt.subplot(gs[1, 0])
86 |     buf = 0.07
87 |     ax.set_xlim([min(bbox2[:, 0]) - buf, max(bbox2[:, 0]) + buf])
88 |     ax.set_ylim([min(bbox2[:, 1]) - buf, max(bbox2[:, 1]) + buf])
89 |     ax.set_aspect("equal")
90 |     ax.triplot(triang, "-", lw=0.5)
91 |     ax.plot(bbox2[:, 0], bbox2[:, 1], "r--")
92 |
93 |     ax = plt.subplot(gs[2, 0])
94 |     buf = 0.07
95 |     ax.set_xlim([min(bbox3[:, 0]) - buf, max(bbox3[:, 0]) + buf])
96 |     ax.set_ylim([min(bbox3[:, 1]) - buf, max(bbox3[:, 1]) + buf])
97 |     ax.set_aspect("equal")
98 |     ax.triplot(triang, "-", lw=0.5)
99 |     ax.plot(bbox3[:, 0], bbox3[:, 1], "r--")
100 |
101 |     plt.show()
102 |
103 |
104 | def test_multiscale_non_overlap():
105 |     extent1 = om.Region(extent=(-75.00, -70.001, 40.0001, 41.9000), crs=EPSG)
106 |     min_edge_length1 = 1000.0e-5  # ~1.0km
107 |     max_edge_length1 = 12500.0e-5  # ~12.5km
108 |
109 |     bbox2 = np.array(
110 |         [
111 |             [-73.9481, 40.6028],
112 |             [-74.0186, 40.5688],
113 |             [-73.9366, 40.5362],
114 |             [-73.7269, 40.5626],
115 |             [-73.7231, 40.6459],
116 |             [-73.8242, 40.6758],
117 |             [-73.9481, 40.6028],
118 |         ],
119 |         dtype=float,
120 |     )
121 |     extent2 = om.Region(extent=bbox2, crs=EPSG)
122 |     min_edge_length2 = 500.0e-5  # ~500m
123 |
124 |     bbox3 = np.array(
125 |         [
126 |             [-71.4700, 41.8500],
127 |             [-71.4700, 41.4000],
128 |             [-71.1500, 41.4000],
129 |             [-71.1500, 41.8000],
130 |             [-71.4700, 41.8500],
131 |         ],
132 |         dtype=float,
133 |     )
134 |     extent3 = om.Region(extent=bbox3, crs=EPSG)
135 |     min_edge_length3 = 500.0e-5  # ~500m
136 |
137 |     s1 = om.Shoreline(fname, extent1.bbox, min_edge_length1)
138 |     sdf1 = om.signed_distance_function(s1)
139 |     el1 = om.distance_sizing_function(s1, max_edge_length=max_edge_length1)
140 |
141 |     s2 = om.Shoreline(fname, extent2.bbox, min_edge_length2)
142 |     sdf2 = om.signed_distance_function(s2)
143 |     el2 = om.distance_sizing_function(s2)
144 |
145 |     s3 = om.Shoreline(fname, extent3.bbox, min_edge_length3)
146 |     sdf3 = om.signed_distance_function(s3)
147 |     el3 = om.distance_sizing_function(s3)
148 |
149 |     # Control the element size transition from
150 |     # coarse to fine with the kwargs prefixed with `blend`.
151 |     # Function objects must appear in order of descending `min_edge_length`.
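    # (here min_edge_length1 = 1000.0e-5 is the largest, so the coarse outer
    # region's sdf1/el1 lead both lists, ahead of the two finer 500.0e-5 insets)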
152 |     points, cells = om.generate_multiscale_mesh(
153 |         [sdf1, sdf2, sdf3], [el1, el2, el3], blend_width=1000, blend_max_iter=100
154 |     )
155 |     # remove degenerate mesh faces and other common problems in the mesh
156 |     points, cells = om.make_mesh_boundaries_traversable(points, cells)
157 |     # remove singly connected elements (elements
158 |     # connected to only one other element)
159 |     points, cells = om.delete_faces_connected_to_one_face(points, cells)
160 |     # remove poor boundary elements with quality < 15%
161 |     points, cells = om.delete_boundary_faces(points, cells, min_qual=0.15)
162 |     # apply a Laplacian smoother that preserves the mesh size distribution
163 |     points, cells = om.laplacian2(points, cells)
164 |
165 |     triang = tri.Triangulation(points[:, 0], points[:, 1], cells)
166 |     gs = gridspec.GridSpec(3, 1)
167 |     gs.update(wspace=0.1)
168 |     plt.figure(figsize=[4.8, 6.4])
169 |
170 |     ax = plt.subplot(gs[0, 0])
171 |     ax.set_aspect("equal")
172 |     ax.triplot(triang, "-", lw=0.5)
173 |     ax.plot(bbox2[:, 0], bbox2[:, 1], "r--")
174 |     ax.plot(bbox3[:, 0], bbox3[:, 1], "r--")
175 |
176 |     ax = plt.subplot(gs[1, 0])
177 |     buf = 0.07
178 |     ax.set_xlim([min(bbox2[:, 0]) - buf, max(bbox2[:, 0]) + buf])
179 |     ax.set_ylim([min(bbox2[:, 1]) - buf, max(bbox2[:, 1]) + buf])
180 |     ax.set_aspect("equal")
181 |     ax.triplot(triang, "-", lw=0.5)
182 |     ax.plot(bbox2[:, 0], bbox2[:, 1], "r--")
183 |
184 |     ax = plt.subplot(gs[2, 0])
185 |     buf = 0.07
186 |     ax.set_xlim([min(bbox3[:, 0]) - buf, max(bbox3[:, 0]) + buf])
187 |     ax.set_ylim([min(bbox3[:, 1]) - buf, max(bbox3[:, 1]) + buf])
188 |     ax.set_aspect("equal")
189 |     ax.triplot(triang, "-", lw=0.5)
190 |     ax.plot(bbox3[:, 0], bbox3[:, 1], "r--")
191 |
192 |     plt.show()
193 |
--------------------------------------------------------------------------------
/tests/test_signed_distance_function.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from oceanmesh import Domain, Region, Shoreline, signed_distance_function
4 |
5 |
6 | def test_signed_distance_function():
7 |     fname = os.path.join(os.path.dirname(__file__), "GSHHS_i_L1.shp")
8 |
9 |     region = Region((-74.0, -70.0, 40.0, 42.0), 4326)
10 |     h0 = 0.005
11 |
12 |     shp = Shoreline(fname, region.bbox, h0, crs=region.crs)
13 |
14 |     domain = signed_distance_function(shp)
15 |
16 |     assert isinstance(domain, Domain)
17 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | # usage:
2 | # tox --> default, runs pytest
3 |
4 | [tox]
5 | envlist = py3
6 | isolated_build = True
7 |
8 | [testenv]
9 | deps =
10 |     pytest
11 |     pytest-codeblocks
12 |     requests
13 |     git+https://github.com/dengwirda/inpoly-python.git@bootstrap_numpy
14 | extras = all
15 | setenv =
16 |     MPLBACKEND = agg
17 | commands =
18 |     pytest {posargs} -v --codeblocks
19 |
--------------------------------------------------------------------------------
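Taken together, these tests exercise one pipeline: build a Region, extract a Shoreline, derive a sizing function and a signed-distance function, generate the mesh, then clean and smooth it. The sketch below distills that sequence using only calls that appear verbatim in the tests above; the shapefile path, bounding box, and size values are illustrative stand-ins, so treat it as a template rather than a guaranteed recipe.

import oceanmesh as om

# illustrative inputs -- swap in your own coastline shapefile and extent
fname = "GSHHS_i_L1.shp"  # stand-in path; the tests read this file from tests/
region = om.Region(extent=(-75.000, -70.001, 40.0001, 41.9000), crs=4326)
min_edge_length = 0.01  # in degrees for EPSG:4326

# coastline, mesh sizing function, and meshing domain
shore = om.Shoreline(fname, region.bbox, min_edge_length)
edge_length = om.distance_sizing_function(shore, max_edge_length=0.05)
domain = om.signed_distance_function(shore)

# generate, then apply the cleanup steps used throughout the tests
points, cells = om.generate_mesh(domain, edge_length, max_iter=100)
points, cells = om.make_mesh_boundaries_traversable(points, cells)
points, cells = om.delete_faces_connected_to_one_face(points, cells)
points, cells = om.laplacian2(points, cells)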