├── .codecov.yml ├── .gitattributes ├── .github ├── dependabot.yml └── workflows │ ├── ci.yml │ └── pixi_auto_update.yml ├── .gitignore ├── .pre-commit-config.yaml ├── LICENSE ├── README.rst ├── data ├── README.rst ├── provinces-nl.geojson └── south-america.geojson ├── docs ├── Makefile ├── _static │ ├── deltares-blue.svg │ ├── deltares-white.svg │ ├── enabling-delta-life.svg │ ├── pandamesh-demo.png │ ├── pandamesh-logo.svg │ └── theme-deltares.css ├── _templates │ └── enums.rst ├── api │ ├── changelog.rst │ └── index.rst ├── conf.py ├── index.rst └── make.bat ├── examples ├── 01_triangle-basic.py ├── 02_gmsh-basic.py ├── 03_gmsh-fields.py ├── 04_triangle-geospatial.py ├── 05_preprocessing.py └── README.rst ├── pandamesh ├── __init__.py ├── common.py ├── data │ ├── __init__.py │ └── sample_data.py ├── enum_base.py ├── gmsh_enums.py ├── gmsh_fields.py ├── gmsh_geometry.py ├── gmsh_mesher.py ├── plot.py ├── preprocessor.py ├── snapping.py ├── triangle_enums.py ├── triangle_geometry.py └── triangle_mesher.py ├── pixi.lock ├── pyproject.toml └── tests ├── __init__.py ├── test_common.py ├── test_data.py ├── test_enum.py ├── test_fields.py ├── test_gmsh_geometry.py ├── test_meshers.py ├── test_plot.py ├── test_preprocessor.py ├── test_snapping.py └── test_triangle_geometry.py /.codecov.yml: -------------------------------------------------------------------------------- 1 | comment: false 2 | github_checks: 3 | annotations: false -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # GitHub syntax highlighting 2 | pixi.lock linguist-language=YAML 3 | 4 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # 
https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 2 | version: 2 3 | updates: 4 | - package-ecosystem: "github-actions" 5 | directory: "/" # Location of package manifests 6 | schedule: 7 | interval: "weekly" 8 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | branches: 9 | - main 10 | 11 | concurrency: 12 | group: ${{ github.workflow }}-${{ github.ref }} 13 | cancel-in-progress: true 14 | 15 | jobs: 16 | lint: 17 | runs-on: ubuntu-latest 18 | steps: 19 | - name: Check out repo 20 | uses: actions/checkout@v4 21 | - name: Set up Python 22 | uses: actions/setup-python@v5 23 | - name: Run pre-commit 24 | uses: pre-commit/action@v3.0.1 25 | test: 26 | name: ${{ matrix.pixi-environment }} - ${{ matrix.os }} 27 | runs-on: ${{ matrix.os }} 28 | strategy: 29 | fail-fast: false 30 | matrix: 31 | os: 32 | - ubuntu-latest 33 | - macOS-latest 34 | - windows-latest 35 | pixi-environment: 36 | - py313 37 | - py312 38 | - py311 39 | - py310 40 | - py309 41 | steps: 42 | - name: Check out repo 43 | uses: actions/checkout@v4 44 | - name: Setup Pixi 45 | uses: prefix-dev/setup-pixi@v0.8.8 46 | with: 47 | manifest-path: pyproject.toml 48 | - name: Run Tests 49 | run: pixi run --environment ${{ matrix.pixi-environment }} test 50 | 51 | build: 52 | runs-on: ubuntu-latest 53 | steps: 54 | - name: Check out repo 55 | uses: actions/checkout@v4 56 | - name: Setup Pixi 57 | uses: prefix-dev/setup-pixi@v0.8.8 58 | with: 59 | manifest-path: pyproject.toml 60 | - name: Run Tests 61 | run: pixi run test 62 | - name: Publish Code Coverage 63 | uses: codecov/codecov-action@v5 64 | with: 65 | token: ${{ secrets.CODECOV_TOKEN }} 66 | fail_ci_if_error: false 67 | - name: Build Docs 68 | run: pixi run docs 69 | - name: Deploy to 
Github Pages 70 | if: github.ref == 'refs/heads/main' 71 | uses: peaceiris/actions-gh-pages@v4 72 | with: 73 | github_token: ${{ secrets.GITHUB_TOKEN }} 74 | publish_dir: ./docs/_build 75 | -------------------------------------------------------------------------------- /.github/workflows/pixi_auto_update.yml: -------------------------------------------------------------------------------- 1 | name: Pixi auto update 2 | 3 | on: 4 | schedule: 5 | # At 03:00 on day 3 of the month 6 | - cron: "0 3 3 * *" 7 | # on demand 8 | workflow_dispatch: 9 | 10 | jobs: 11 | auto-update: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v4 15 | with: 16 | ssh-key: ${{ secrets.SSH_PRIVATE_KEY }} 17 | - uses: prefix-dev/setup-pixi@v0.8.8 18 | with: 19 | pixi-version: "latest" 20 | cache: false 21 | - name: Update pixi lock file 22 | run: pixi update 23 | - uses: peter-evans/create-pull-request@v7 24 | with: 25 | token: ${{ secrets.GITHUB_TOKEN }} 26 | branch: update/pixi-lock 27 | title: Update pixi lock file 28 | commit-message: "Update `pixi.lock`" 29 | body: Update pixi dependencies to the latest version. 
30 | author: "GitHub " -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.egg-info 3 | 4 | # Sphinx documentation 5 | docs/_build/ 6 | docs/examples/ 7 | examples/* 8 | !examples/*.py 9 | !examples/*.rst 10 | 11 | # IDE 12 | .vscode 13 | 14 | # tox 15 | .tox/* 16 | 17 | # Distribution 18 | build/* 19 | dist/* 20 | 21 | # pixi environments 22 | .pixi 23 | 24 | # coverage 25 | .coverage 26 | coverage.xml 27 | 28 | sg_execution_times.rst 29 | docs/api/api/* 30 | 31 | __pycache__ -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/astral-sh/ruff-pre-commit 3 | # Ruff version. 4 | rev: v0.1.5 5 | hooks: 6 | # Run the linter. 7 | - id: ruff 8 | args: [--fix, --exit-non-zero-on-fix] 9 | # Run the formatter. 10 | - id: ruff-format -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Deltares 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | pandamesh 2 | ========= 3 | 4 | .. image:: https://img.shields.io/github/actions/workflow/status/deltares/pandamesh/ci.yml?style=flat-square 5 | :target: https://github.com/deltares/pandamesh/actions?query=workflows%3Aci 6 | .. image:: https://img.shields.io/codecov/c/github/deltares/pandamesh.svg?style=flat-square 7 | :target: https://app.codecov.io/gh/deltares/pandamesh 8 | .. image:: https://img.shields.io/badge/code%20style-black-000000.svg?style=flat-square 9 | :target: https://github.com/psf/black 10 | 11 | This package translates geospatial vector data (points, lines, or polygons) to 12 | unstructured meshes. 13 | 14 | .. code:: python 15 | 16 | import pandamesh as pm 17 | 18 | # Get some sample data in geopandas form. 19 | south_america = pm.data.south_america() 20 | 21 | # Explode any multi-polygon, and project it to UTM20. 22 | south_america = south_america.explode(index_parts=True).reset_index().to_crs(epsg=32620) 23 | 24 | # Set a maximum cell size of 500 km and generate a mesh. 25 | south_america["cellsize"] = 500_000.0 26 | mesher = pm.TriangleMesher(south_america) 27 | vertices, faces = mesher.generate() 28 | 29 | .. 
image:: https://raw.githubusercontent.com/Deltares/pandamesh/main/docs/_static/pandamesh-demo.png 30 | :target: https://github.com/deltares/pandamesh 31 | 32 | The package converts geospatial data, presented as 33 | `geopandas`_ `GeoDataFrames`_, to unstructured meshes using the open source 34 | high quality mesh generators: 35 | 36 | * Christophe Geuzaine and Jean-François Remacle's `Gmsh`_ 37 | * Jonathan Shewchuk's `Triangle`_ 38 | 39 | utilizing the respective Python API's, available at: 40 | 41 | * https://pypi.org/project/gmsh/ 42 | * https://pypi.org/project/triangle/ 43 | 44 | For completeness, the source code of both projects can be found at: 45 | 46 | * https://gitlab.onelab.info/gmsh/gmsh, under ``api/gmsh.py`` 47 | * https://github.com/drufat/triangle 48 | 49 | These APIs are wrapped in two lightweight classes: ``pandamesh.TriangleMesher`` 50 | and ``pandamesh.GmshMesher``. Both are initialized with a GeoDataFrame defining 51 | the geometry features of the mesh. During initialization, geometries are 52 | checked for overlaps and intersections, as the mesh generators cannot deal with 53 | these. Generated meshes are returned as two numpy arrays: the coordinates of 54 | the vertices, and the connectivity of the mesh faces to these vertices (as is 55 | `usual`_ for many unstructured grid representations). 56 | 57 | GeoPandas is not suited for geometries that "wrap around" the world. 58 | Consequently, this package cannot generate meshes for e.g. a sphere. 59 | 60 | Installation 61 | ------------ 62 | 63 | .. code:: console 64 | 65 | pip install pandamesh 66 | 67 | Documentation 68 | ------------- 69 | 70 | .. image:: https://img.shields.io/github/actions/workflow/status/deltares/pandamesh/ci.yml?style=flat-square 71 | :target: https://deltares.github.io/pandamesh/ 72 | 73 | The documentation can be found `here`_. 
74 | 75 | Other projects 76 | -------------- 77 | 78 | Pandamesh has been developed because none of the existing packages provide a 79 | straightforward scripting based approach to converting 2D vector geometries to 80 | 2D unstructured grids. 81 | 82 | Examples of other packages which work with unstructured meshes are listed below. 83 | 84 | See also `this list`_ for many other mesh generation tools. 85 | 86 | pygmsh 87 | ****** 88 | 89 | The `pygmsh Python package`_ provides useful abstractions from Gmsh's own 90 | Python interface so you can create complex geometries more easily. It also 91 | provides tools for 3D operations (e.g. extrusions). 92 | 93 | qgis-gsmh 94 | ********* 95 | 96 | qgis-gmsh generates geometry input files for the GMSH mesh generator and 97 | converts the Gmsh mesh files to shapefiles that can be imported into QGIS. 98 | 99 | * Lambrechts, J., Comblen, R., Legat, V., Geuzaine, C., & Remacle, J. F. (2008). 100 | Multiscale mesh generation on the sphere. Ocean Dynamics, 58(5-6), 461-473. 101 | * Remacle, J. F., & Lambrechts, J. (2018). Fast and robust mesh generation on 102 | the sphere—Application to coastal domains. Computer-Aided Design, 103, 14-23. 103 | https://doi.org/10.1016/j.cad.2018.03.002 104 | 105 | Source: https://github.com/ccorail/qgis-gmsh 106 | 107 | Shingle 108 | ******* 109 | 110 | Shingle provides generalised self-consistent and automated domain 111 | discretisation for multi-scale geophysical models. 112 | 113 | * Candy, A. S., & Pietrzak, J. D. (2018). Shingle 2.0: generalising 114 | self-consistent and automated domain discretisation for multi-scale 115 | geophysical models. Geoscientific Model Development, 11(1), 213-234. 116 | https://doi.org/10.5194/gmd-11-213-2018 117 | 118 | Source: https://github.com/shingleproject/Shingle 119 | 120 | Website: http://shingleproject.org/index_shingle1.0.html 121 | 122 | .. _here: https://deltares.github.io/pandamesh/ 123 | .. _geopandas: https://geopandas.org/ 124 | .. 
_GeoDataFrames: https://geopandas.org/en/stable/docs/reference/api/geopandas.GeoDataFrame.html 125 | .. _Gmsh: https://gmsh.info/ 126 | .. _Triangle: https://www.cs.cmu.edu/~quake/triangle.html 127 | .. _usual: https://ugrid-conventions.github.io/ugrid-conventions/ 128 | .. _pygmsh Python package: https://github.com/nschloe/pygmsh 129 | .. _this list: https://github.com/nschloe/awesome-scientific-computing#meshing 130 | -------------------------------------------------------------------------------- /data/README.rst: -------------------------------------------------------------------------------- 1 | Sample data sets 2 | ================ 3 | 4 | These files are used as sample data in Xugrid and are downloaded by 5 | ``pandamesh.data`` functions. 6 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/_static/deltares-blue.svg: -------------------------------------------------------------------------------- 1 | Artboard 1 -------------------------------------------------------------------------------- /docs/_static/deltares-white.svg: -------------------------------------------------------------------------------- 1 | Artboard 1 -------------------------------------------------------------------------------- /docs/_static/enabling-delta-life.svg: -------------------------------------------------------------------------------- 1 | Artboard 1 -------------------------------------------------------------------------------- /docs/_static/pandamesh-demo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/pandamesh/e3b7f461a95020a2c4c4d0dbc760f6b28cbb57de/docs/_static/pandamesh-demo.png -------------------------------------------------------------------------------- /docs/_static/pandamesh-logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 17 | 41 | 43 | 44 | 45 | 47 | 2021-11-22T22:01:51.591547 48 | image/svg+xml 49 | 50 | 51 | Matplotlib v3.4.3, https://matplotlib.org/ 52 | 53 | 54 | 55 | 56 | 57 | 58 | 61 | 64 | 65 | 69 | 75 | 78 | 83 | 84 | 87 | 90 | 96 | 97 | 100 | 103 | 106 | 109 | 115 | 121 | 127 | 133 | 139 | 145 | 146 | 147 | 149 | 152 | 159 | 160 | 161 | 162 | -------------------------------------------------------------------------------- /docs/_static/theme-deltares.css: -------------------------------------------------------------------------------- 1 | /* enlarge deltares & github icon size; only works with local/url svg files; not with fa icons */ 2 | img.icon-link-image { 3 | height: 2.5em !important; 4 | } 5 | 6 | [data-theme="dark"] 
img.icon-link-image[src*="deltares-blue.svg"] { 7 | filter: brightness(0) saturate(100%) invert(100%); 8 | } 9 | [data-theme="dark"] img.icon-link-image[src*="Octicons-mark-github.svg"] { 10 | filter: brightness(0) saturate(100%) invert(100%); 11 | } -------------------------------------------------------------------------------- /docs/_templates/enums.rst: -------------------------------------------------------------------------------- 1 | {{ fullname | escape | underline}} 2 | 3 | .. currentmodule:: {{ module }} 4 | 5 | .. autoclass:: {{ objname }} 6 | :members: 7 | :member-order: bysource 8 | -------------------------------------------------------------------------------- /docs/api/changelog.rst: -------------------------------------------------------------------------------- 1 | Changelog 2 | ========= 3 | 4 | All notable changes to this project will be documented in this file. 5 | 6 | The format is based on `Keep a Changelog`_, and this project adheres to 7 | `Semantic Versioning`_. 8 | 9 | Unreleased 10 | ---------- 11 | 12 | [0.2.3] 2025-01-06 13 | ------------------ 14 | 15 | Added 16 | ~~~~~ 17 | - Support for Python 3.13 18 | 19 | [0.2.2] 2024-09-26 20 | ------------------ 21 | 22 | Fixed 23 | ~~~~~ 24 | 25 | - Version 0.2.1 introduced new logic relying on geopandas 1.0+ shapely-wrapped 26 | functions, breaking compatibility with earlier geopandas versions. These method 27 | calls have been replaced with direct shapely functions, ensuring compatibility 28 | with older versions such as geopandas 0.14. 29 | 30 | [0.2.1] 2024-09-04 31 | ------------------ 32 | 33 | Added 34 | ~~~~~ 35 | 36 | - Added :func:`find_proximate_perimeter_points` to identify (near) dangling 37 | edges and extremely short edges in polygon perimeters. 
38 | 39 | Changed 40 | ~~~~~~~ 41 | 42 | - Added a ``minimum_perimeter_spacing`` keyword to 43 | :class:`pandamesh.TriangleMesher` and :class:`pandamesh.GmshMesher` to define 44 | a tolerance for (near) dangling edges and very short edges in polygon 45 | perimeters. This tolerance is set to 1.0e-3 by default; it means that the 46 | meshers will error during initialization if slivers of 0.001 or thinner are 47 | found, or if consecutive vertices are within 0.001 of each other. 48 | 49 | [0.2.0] 2024-09-03 50 | ------------------ 51 | 52 | Fixed 53 | ~~~~~ 54 | 55 | - Previously, :class:`pandamesh.TriangleMesher` would not respect specified 56 | cell sizes in areas that are fully bounded by linestrings (rather than 57 | polygons), e.g. three separate lines forming a triangular zone. The reason is 58 | that Triangle identifies such a zone as a separate region, and the point 59 | specifying the maximum area is isolated. This has been fixed by checking 60 | whether linestrings form any coincendental polygons, and including these 61 | polygons are separate zones. 62 | 63 | Added 64 | ~~~~~ 65 | 66 | - :meth:`pandamesh.TriangleMesher.generate_geodataframe()` and - 67 | :meth:`pandamesh.GmshMesher.generate_geodataframe()` have been added to 68 | return generated meshes as geodataframes. 69 | - Added :attr:`pandamesh.MeshAlgorithm.QUASI_STRUCTURED_QUAD` as an option. 70 | - Added :class:`pandamesh.Preprocessor` to assist in preparing and cleaning 71 | geospatial data prior to meshing. 72 | - Added :meth:`pandamesh.GmshMesher.add_threshold_distance_field`, 73 | :meth:`pandamesh.GmshMesher.add_matheval_distance_field`, 74 | :meth:`pandamesh.GmshMesher.add_structured_field`, and 75 | :meth:`pandamesh.GmshMesher.add_structured_field_from_dataarray` to enable 76 | Gmsh fields from geometry or from raster data. 77 | - Added ``finalize`` keyword to :meth:`pandamesh.GmshMesher.generate` to 78 | automatically finalize after mesh generation. 
79 | - Added :func:`pandamesh.find_edge_intersections` to locate unresolved 80 | intersection between polygon boundary, linestring, and linearring edges. 81 | 82 | Changed 83 | ~~~~~~~ 84 | 85 | - :class:`pandamesh.TriangleMesher` does a cell size to area conversion. This 86 | previously assumed right-angled triangles. This has been changed to assume 87 | equilateral triangles instead. This may result in slightly smaller triangles. 88 | - Mesher properties set with :class:`pandamesh.DelaunayAlgorithm`, 89 | :class:`pandamesh.FieldCombination`, :class:`pandamesh.GeneralVerbosity`, 90 | :class:`pandamesh.GmshMesher`, :class:`pandamesh.MeshAlgorithm`, or 91 | :class:`pandamesh.SubdivisionAlgorithm` will now accept one of these enums, 92 | or the enum member name as a string. 93 | - :class:`pandamesh.TriangleMesher` and :class:`pandamesh.GmshMesher` now take 94 | a ``shift_origin`` argument to temporarily shift the coordinate system to the 95 | centroid of the geometries' bounding box to mitigate floating point precision 96 | problems. This is enabled by default. 97 | - :func:`pandamesh.gmsh_env` now finalizes an existing Gmsh instance prior to 98 | initializing Gmsh anew. 99 | - :class:`pandamesh.TriangleMesher` and :class:`pandamesh.GmshMesher` will now 100 | also accept LinearRing geometries (previously only Polygons, LineStrings, and 101 | Points). 102 | - Added an ``edge_intersection`` keyword to :class:`pandamesh.TriangleMesher` 103 | and :class:`pandamesh.GmshMesher` to control whether to error, warn, or 104 | ignore unresolved edge intersections of polygon boundaries, linestrings, and 105 | linearrings. By default, both meshers will now error if unresolved 106 | intersections are encountered. 107 | 108 | [0.1.6] 2024-07-17 109 | ------------------ 110 | 111 | Added 112 | ~~~~~ 113 | 114 | - :class:`pandamesh.GmshMesher` now takes ``read_config_files`` and ``interruptible`` 115 | as initialization arguments for ``gmsh.``. 
116 | 117 | Fixed 118 | ~~~~~ 119 | 120 | - Compatibility changes for Numpy 2.0. 121 | 122 | 123 | [0.1.5] 2024-02-06 124 | ------------------ 125 | 126 | Fixed 127 | ~~~~~ 128 | 129 | - Inside of :class:`pandamesh.GmshMesher` a check now occurs before finalization. 130 | This keeps ``gmsh`` from printing (harmless) errors to the console, which 131 | previously commonly happened at initialization. 132 | - ``pandamesh`` can now be imported in a sub-thread. ``gmsh`` will not run 133 | outside of the main interpreter thread, but it previously also prevented 134 | the entire import of ``pandamesh``. Attempting to use the 135 | :class:`pandamesh.GmshMesher` outside of the main thread will result in a 136 | ``RuntimeError``. 137 | 138 | Added 139 | ~~~~~ 140 | 141 | - :class:`pandamesh.GeneralVerbosity` has been added to control the verbosity 142 | of Gmsh. It can be set via the :attr:`GmshMesher.general_verbosity` 143 | property. Its default value is ``SILENT``. 144 | 145 | Changed 146 | ~~~~~~~ 147 | 148 | - A number of deprecations have been fixed. Most notable is the deprecation 149 | of ``geopandas.datasets``. The South America geodataframe can now be 150 | fetched via :func:`pandamesh.data.south_america()`. 151 | - Checking of intersections of linestrings has currently been disabled: 152 | the current implementation is too strict and resulted in too many false 153 | positives. 154 | 155 | .. _Keep a Changelog: https://keepachangelog.com/en/1.0.0/ 156 | .. _Semantic Versioning: https://semver.org/spec/v2.0.0.html 157 | -------------------------------------------------------------------------------- /docs/api/index.rst: -------------------------------------------------------------------------------- 1 | .. currentmodule:: pandamesh 2 | 3 | .. _api: 4 | 5 | API Reference 6 | ============= 7 | 8 | This page provides an auto-generated summary of pandamesh's API. 9 | 10 | .. 
toctree:: 11 | :maxdepth: 1 12 | 13 | changelog 14 | 15 | Preprocessing 16 | ------------- 17 | 18 | .. autosummary:: 19 | :toctree: api/ 20 | 21 | Preprocessor 22 | Preprocessor.unify_polygons 23 | Preprocessor.merge_polygons 24 | Preprocessor.clip_lines 25 | Preprocessor.unify_lines 26 | Preprocessor.interpolate_lines_to_points 27 | Preprocessor.snap_points 28 | Preprocessor.clip_points 29 | Preprocessor.to_geodataframe 30 | find_edge_intersections 31 | find_proximate_perimeter_points 32 | 33 | Triangle 34 | -------- 35 | 36 | .. autosummary:: 37 | :toctree: api/ 38 | 39 | TriangleMesher 40 | TriangleMesher.generate 41 | TriangleMesher.generate_geodataframe 42 | TriangleMesher.generate_ugrid 43 | TriangleMesher.minimum_angle 44 | TriangleMesher.conforming_delaunay 45 | TriangleMesher.suppress_exact_arithmetic 46 | TriangleMesher.maximum_steiner_points 47 | TriangleMesher.delaunay_algorithm 48 | TriangleMesher.consistency_check 49 | 50 | Triangle Enumerators 51 | -------------------- 52 | 53 | .. autosummary:: 54 | :toctree: api/ 55 | :template: enums.rst 56 | 57 | DelaunayAlgorithm 58 | 59 | Gmsh 60 | ---- 61 | 62 | .. autosummary:: 63 | :toctree: api/ 64 | 65 | GmshMesher 66 | GmshMesher.generate 67 | GmshMesher.generate_geodataframe 68 | GmshMesher.generate_ugrid 69 | GmshMesher.mesh_algorithm 70 | GmshMesher.recombine_all 71 | GmshMesher.mesh_size_extend_from_boundary 72 | GmshMesher.mesh_size_from_points 73 | GmshMesher.mesh_size_from_curvature 74 | GmshMesher.field_combination 75 | GmshMesher.subdivision_algorithm 76 | GmshMesher.general_verbosity 77 | GmshMesher.add_matheval_distance_field 78 | GmshMesher.add_threshold_distance_field 79 | GmshMesher.add_structured_field 80 | GmshMesher.add_structured_field_from_dataarray 81 | GmshMesher.fields 82 | GmshMesher.clear_fields 83 | GmshMesher.write 84 | GmshMesher.finalize 85 | GmshMesher.finalize_gmsh 86 | gmsh_env 87 | 88 | Gmsh Enumerators 89 | ---------------- 90 | 91 | .. 
autosummary:: 92 | :toctree: api/ 93 | :template: enums.rst 94 | 95 | FieldCombination 96 | GeneralVerbosity 97 | GmshMesher 98 | MeshAlgorithm 99 | SubdivisionAlgorithm 100 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -- Path setup -------------------------------------------------------------- 2 | 3 | # If extensions (or modules to document with autodoc) are in another directory, 4 | # add these directories to sys.path here. If the directory is relative to the 5 | # documentation root, use os.path.abspath to make it absolute, like shown here. 6 | # 7 | # import os 8 | # import sys 9 | # sys.path.insert(0, os.path.abspath('.')) 10 | 11 | # import pandamesh 12 | 13 | # -- Project information ----------------------------------------------------- 14 | 15 | project = "pandamesh" 16 | copyright = "Deltares" 17 | author = "Deltares" 18 | 19 | # The full version, including alpha/beta/rc tags 20 | import pandamesh 21 | 22 | version = pandamesh.__version__ 23 | 24 | 25 | # -- General configuration --------------------------------------------------- 26 | 27 | # Add any Sphinx extension module names here, as strings. They can be 28 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 29 | # ones. 
30 | extensions = [ 31 | "sphinx.ext.autodoc", 32 | "sphinx.ext.autosummary", 33 | "sphinx.ext.viewcode", 34 | "sphinx.ext.todo", 35 | "sphinx.ext.napoleon", 36 | "sphinx_gallery.gen_gallery", 37 | ] 38 | 39 | sphinx_gallery_conf = { 40 | "examples_dirs": [ 41 | "../examples", 42 | ], # path to your example scripts 43 | "gallery_dirs": [ 44 | "examples", 45 | ], # path to where to save gallery generated output 46 | "filename_pattern": ".py", 47 | "abort_on_example_error": True, 48 | "download_all_examples": False, 49 | "within_subsection_order": "sphinx_gallery.sorting.FileNameSortKey", 50 | } 51 | 52 | # The suffix(es) of source filenames. 53 | # You can specify multiple suffix as a list of string: 54 | # 55 | # source_suffix = ['.rst', '.md'] 56 | source_suffix = ".rst" 57 | 58 | # The master toctree document. 59 | master_doc = "index" 60 | 61 | # The language for content autogenerated by Sphinx. Refer to documentation 62 | # for a list of supported languages. 63 | # 64 | # This is also used if you do content translation via gettext catalogs. 65 | # Usually you set "language" from the command line for these cases. 66 | language = "en" 67 | 68 | # List of patterns, relative to source directory, that match files and 69 | # directories to ignore when looking for source files. 70 | # This pattern also affects html_static_path and html_extra_path . 71 | templates_path = ["_templates"] 72 | exclude_patterns = ["_build", "_templates", "Thumbs.db", ".DS_Store"] 73 | 74 | # The name of the Pygments (syntax highlighting) style to use. 75 | pygments_style = "sphinx" 76 | 77 | # -- Options for HTML output ------------------------------------------------- 78 | 79 | # The theme to use for HTML and HTML Help pages. See the documentation for 80 | # a list of builtin themes. 81 | # 82 | html_theme = "pydata_sphinx_theme" 83 | 84 | # Add any paths that contain custom static files (such as style sheets) here, 85 | # relative to this directory. 
They are copied after the builtin static files, 86 | # so a file named "default.css" will overwrite the builtin "default.css". 87 | html_static_path = ["_static"] 88 | html_css_files = ["theme-deltares.css"] 89 | html_theme_options = { 90 | "show_nav_level": 2, 91 | "navbar_align": "content", 92 | "use_edit_page_button": False, 93 | "icon_links": [ 94 | { 95 | "name": "GitHub", 96 | "url": "https://github.com/Deltares/pandamesh", # required 97 | "icon": "https://upload.wikimedia.org/wikipedia/commons/9/91/Octicons-mark-github.svg", 98 | "type": "url", 99 | }, 100 | { 101 | "name": "Deltares", 102 | "url": "https://www.deltares.nl/en/", 103 | "icon": "_static/deltares-blue.svg", 104 | "type": "local", 105 | }, 106 | ], 107 | "logo": { 108 | "text": "pandamesh", 109 | "image_light": "pandamesh-logo.svg", 110 | "image_dark": "pandamesh-logo.svg", 111 | }, 112 | } 113 | 114 | # -- Extension configuration ------------------------------------------------- 115 | 116 | # extension sphinx.ext.todo 117 | # If true, `todo` and `todoList` produce output, else they produce nothing. 118 | todo_include_todos = True 119 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Pandamesh 2 | ========= 3 | 4 | This package translates geospatial vector data (points, lines, or polygons) to 5 | unstructured meshes. 6 | 7 | .. code:: python 8 | 9 | import pandamesh as pm 10 | 11 | # Get some sample data in geopandas form. 12 | south_america = pm.data.south_america() 13 | 14 | # Explode any multi-polygon, and project it to UTM20. 15 | south_america = south_america.explode().reset_index().to_crs(epsg=32620) 16 | 17 | # Set a maximum cell size of 500 km and generate a mesh. 18 | south_america["cellsize"] = 500_000.0 19 | mesher = pm.TriangleMesher(south_america) 20 | vertices, faces = mesher.generate() 21 | 22 | .. 
image:: https://raw.githubusercontent.com/Deltares/pandamesh/main/docs/_static/pandamesh-demo.png 23 | :target: https://github.com/deltares/pandamesh 24 | 25 | The package converts geospatial data, presented as 26 | `geopandas`_ `GeoDataFrames`_, to unstructured meshes using the open source 27 | high quality mesh generators: 28 | 29 | * Christophe Geuzaine and Jean-François Remacle's `Gmsh`_ 30 | * Jonathan Shewchuk's `Triangle`_ 31 | 32 | utilizing the respective Python API's, available at: 33 | 34 | * https://pypi.org/project/gmsh/ 35 | * https://pypi.org/project/triangle/ 36 | 37 | For completeness, the source code of both projects can be found at: 38 | 39 | * https://gitlab.onelab.info/gmsh/gmsh, under ``api/gmsh.py`` 40 | * https://github.com/drufat/triangle 41 | 42 | These APIs are wrapped in two lightweight classes: ``pandamesh.TriangleMesher`` 43 | and ``pandamesh.GmshMesher``. Both are initialized with a GeoDataFrame defining 44 | the geometry features of the mesh. During initialization, geometries are 45 | checked for overlaps and intersections, as the mesh generators cannot deal with 46 | these. Generated meshes are returned as two numpy arrays: the coordinates of 47 | the vertices, and the connectivity of the mesh faces to these vertices (as is 48 | `usual`_ for many unstructured grid representations). 49 | 50 | GeoPandas is not suited for geometries that "wrap around" the world. 51 | Consequently, this package cannot generate meshes for e.g. a sphere. 52 | 53 | 54 | Installation 55 | ------------ 56 | 57 | .. code:: console 58 | 59 | pip install pandamesh 60 | 61 | Other projects 62 | -------------- 63 | 64 | Pandamesh has been developed because none of the existing packages provide a 65 | straightforward scripting based approach to converting 2D vector geometries to 66 | 2D unstructured grids. 67 | 68 | Examples of other packages which work with unstructured meshes are listed below. 69 | 70 | See also `this list`_ for many other mesh generation tools. 
71 | 72 | pygmsh 73 | ****** 74 | 75 | The `pygmsh Python package`_ provides useful abstractions from Gmsh's own 76 | Python interface so you can create complex geometries more easily. It also 77 | provides tools for 3D operations (e.g. extrusions). 78 | 79 | qgis-gmsh 80 | ********* 81 | 82 | qgis-gmsh generates geometry input files for the GMSH mesh generator and 83 | converts the Gmsh mesh files to shapefiles that can be imported into QGIS. 84 | 85 | * Lambrechts, J., Comblen, R., Legat, V., Geuzaine, C., & Remacle, J. F. (2008). 86 | Multiscale mesh generation on the sphere. Ocean Dynamics, 58(5-6), 461-473. 87 | * Remacle, J. F., & Lambrechts, J. (2018). Fast and robust mesh generation on 88 | the sphere—Application to coastal domains. Computer-Aided Design, 103, 14-23. 89 | https://doi.org/10.1016/j.cad.2018.03.002 90 | 91 | Source: https://github.com/ccorail/qgis-gmsh 92 | 93 | Shingle 94 | ******* 95 | 96 | Shingle provides generalised self-consistent and automated domain 97 | discretisation for multi-scale geophysical models. 98 | 99 | * Candy, A. S., & Pietrzak, J. D. (2018). Shingle 2.0: generalising 100 | self-consistent and automated domain discretisation for multi-scale 101 | geophysical models. Geoscientific Model Development, 11(1), 213-234. 102 | https://doi.org/10.5194/gmd-11-213-2018 103 | 104 | Source: https://github.com/shingleproject/Shingle 105 | 106 | Website: http://shingleproject.org/index_shingle1.0.html 107 | 108 | .. _geopandas: https://geopandas.org/ 109 | .. _GeoDataFrames: https://geopandas.org/en/stable/docs/reference/api/geopandas.GeoDataFrame.html 110 | .. _Gmsh: https://gmsh.info/ 111 | .. _Triangle: https://www.cs.cmu.edu/~quake/triangle.html 112 | .. _usual: https://ugrid-conventions.github.io/ugrid-conventions/ 113 | .. _pygmsh Python package: https://github.com/nschloe/pygmsh 114 | .. _this list: https://github.com/nschloe/awesome-scientific-computing#meshing 115 | 116 | 117 | ..
toctree:: 118 | :titlesonly: 119 | :hidden: 120 | 121 | examples/index 122 | api/index 123 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.https://www.sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /examples/01_triangle-basic.py: -------------------------------------------------------------------------------- 1 | """ 2 | Basic Triangle Example 3 | ====================== 4 | 5 | In this example we'll create some basic geometries and turn them into meshes. 6 | to illustrate some of the mesh generation features that Triangle provides in 7 | combination with polygon, point, and linestring geometries represented by 8 | geopandas. 
9 | """ 10 | # %% 11 | import geopandas as gpd 12 | import matplotlib.pyplot as plt 13 | import numpy as np 14 | import shapely.geometry as sg 15 | 16 | import pandamesh as pm 17 | 18 | # %% 19 | # A simple rectangular mesh 20 | # ------------------------- 21 | # 22 | # The most simple example is perhaps a rectangle. We'll create a vector 23 | # geometry, store this in a geodataframe, and associate a cell size. 24 | 25 | polygon = sg.Polygon( 26 | [ 27 | [0.0, 0.0], 28 | [10.0, 0.0], 29 | [10.0, 10.0], 30 | [0.0, 10.0], 31 | ] 32 | ) 33 | gdf = gpd.GeoDataFrame(geometry=[polygon]) 34 | gdf["cellsize"] = 2.0 35 | 36 | # %% 37 | # We'll use this polygon to generate a mesh. We start by initializing a 38 | # TriangleMesher, which is a simple wrapper around the Python bindings to the 39 | # Triangle C-library. This wrapper extracts the coordinates and presents them 40 | # in the appropriate manner for triangle. 41 | 42 | mesher = pm.TriangleMesher(gdf) 43 | vertices, triangles = mesher.generate() 44 | pm.plot(vertices, triangles) 45 | 46 | # %% 47 | # Defaults 48 | # -------- 49 | # 50 | # The TriangleMesher class is initialized with a number of default parameters: 51 | 52 | print(mesher) 53 | 54 | # %% 55 | # We can change a parameter, and see what effects this has on the mesh: 56 | 57 | mesher.conforming_delaunay = False 58 | vertices, triangles = mesher.generate() 59 | pm.plot(vertices, triangles) 60 | 61 | # %% 62 | # To generate a mesh with smaller cell sizes, we adjust the geodataframe, and 63 | # recreate the mesher. 64 | 65 | gdf["cellsize"] = 1.0 66 | mesher = pm.TriangleMesher(gdf) 67 | vertices, triangles = mesher.generate() 68 | pm.plot(vertices, triangles) 69 | # %% 70 | # Multiple cell size zones 71 | # ------------------------ 72 | # 73 | # Multiple zones of cell sizes are supported, as every polygon can be associated 74 | # with a cell size in the geodataframe. 
75 | 76 | polygon2 = sg.Polygon( 77 | [ 78 | [10.0, 0.0], 79 | [20.0, 0.0], 80 | [20.0, 10.0], 81 | [10.0, 10.0], 82 | ] 83 | ) 84 | gdf = gpd.GeoDataFrame(geometry=[polygon, polygon2]) 85 | gdf["cellsize"] = [2.0, 1.0] 86 | 87 | mesher = pm.TriangleMesher(gdf) 88 | vertices, triangles = mesher.generate() 89 | pm.plot(vertices, triangles) 90 | # %% 91 | # Polygons with holes ("donut" geometries) 92 | # ---------------------------------------- 93 | # 94 | # Holes in polygons work as expected: 95 | 96 | outer = [(0.0, 0.0), (10.0, 0.0), (10.0, 10.0), (0.0, 10.0)] 97 | inner = [(3.0, 3.0), (7.0, 3.0), (7.0, 7.0), (3.0, 7.0)] 98 | 99 | donut = sg.Polygon(shell=outer, holes=[inner]) 100 | gdf = gpd.GeoDataFrame(geometry=[donut]) 101 | gdf["cellsize"] = [2.0] 102 | 103 | mesher = pm.TriangleMesher(gdf) 104 | vertices, triangles = mesher.generate() 105 | pm.plot(vertices, triangles) 106 | 107 | # %% 108 | # Local refinement 109 | # ---------------- 110 | # 111 | # To do local refinement, we need to ensure there is no overlap between the 112 | # polygons. The coordinates of the hole of the outer polygon should match 113 | # exactly with the coordinates of the exterior boundary of the inner polygon. 114 | 115 | refined = sg.Polygon(inner) 116 | 117 | gdf = gpd.GeoDataFrame(geometry=[donut, refined]) 118 | gdf["cellsize"] = [2.0, 0.5] 119 | 120 | mesher = pm.TriangleMesher(gdf) 121 | vertices, triangles = mesher.generate() 122 | pm.plot(vertices, triangles) 123 | 124 | # %% 125 | # Force points into the triangulation 126 | # ----------------------------------- 127 | # 128 | # We may also force points into the triangulation, by adding points to the 129 | # geodataframe. Let's assume we'd like to a series of points at x=1.0, at a 130 | # distance of 0.5. 
131 | 132 | y = np.arange(0.5, 10.0, 0.5) 133 | x = np.full(y.size, 1.0) 134 | points = gpd.points_from_xy(x, y) 135 | 136 | gdf = gpd.GeoDataFrame(geometry=[donut, refined, *points]) 137 | gdf["cellsize"] = [2.0, 0.5] + (len(points) * [np.nan]) 138 | gdf.plot(facecolor="none") 139 | 140 | # %% 141 | # We can now see the points forced in the triangulation, by plotting the 142 | # contents of the geodataframe on top of the generated mesh: 143 | 144 | mesher = pm.TriangleMesher(gdf) 145 | vertices, triangles = mesher.generate() 146 | 147 | fig, ax = plt.subplots() 148 | pm.plot(vertices, triangles, ax=ax) 149 | gdf.plot(facecolor="none", edgecolor="red", ax=ax) 150 | # %% 151 | # Force linestrings into the triangulation 152 | # ---------------------------------------- 153 | # 154 | # We may do the same with linestrings. Here, we will add a vertical line at 155 | # x = 9.0. 156 | 157 | line = sg.LineString( 158 | [ 159 | [9.0, 2.0], 160 | [9.0, 8.0], 161 | ] 162 | ) 163 | gdf = gpd.GeoDataFrame(geometry=[donut, refined, line, *points]) 164 | gdf["cellsize"] = [2.0, 0.5, np.nan] + (len(points) * [np.nan]) 165 | 166 | mesher = pm.TriangleMesher(gdf) 167 | vertices, triangles = mesher.generate() 168 | 169 | fig, ax = plt.subplots() 170 | pm.plot(vertices, triangles, ax=ax) 171 | gdf.plot(facecolor="none", edgecolor="red", ax=ax) 172 | 173 | # %% 174 | # Specify cell size along line string 175 | # ----------------------------------- 176 | # 177 | # Finally, we may also specify the cell size along the line. 
178 | 179 | line = sg.LineString([(2.0, 8.0), (8.0, 2.0)]) 180 | gdf = gpd.GeoDataFrame(geometry=[polygon, line]) 181 | gdf["cellsize"] = [2.0, 0.5] 182 | 183 | fig, ax = plt.subplots() 184 | 185 | mesher = pm.TriangleMesher(gdf) 186 | vertices, triangles = mesher.generate() 187 | pm.plot(vertices, triangles, ax=ax) 188 | gdf.plot(facecolor="none", edgecolor="red", ax=ax) 189 | 190 | # %% 191 | # Conclusion 192 | # ---------- 193 | # 194 | # In real use, the vector geometries will be more complex, and not based on 195 | # just a few coordinate pairs. Such cases are presented in the other examples, 196 | # but the same principles apply: we may use polygons with associated cell 197 | # sizes, and linestrings and points to steer the triangulation. 198 | -------------------------------------------------------------------------------- /examples/02_gmsh-basic.py: -------------------------------------------------------------------------------- 1 | """ 2 | Basic Gmsh Example 3 | ================== 4 | 5 | In this example we'll create some basic geometries and turn them into meshes. 6 | to illustrate some of the mesh generation features that Gmsh provides in 7 | combination with polygon, point, and linestring geometries represented by 8 | geopandas. 9 | 10 | The :py:class:`GmshMesher` supports the geometry show in the basic Triangle 11 | example and has a number of additional features. 12 | """ 13 | # %% 14 | import geopandas as gpd 15 | import matplotlib.pyplot as plt 16 | import numpy as np 17 | import shapely.geometry as sg 18 | 19 | import pandamesh as pm 20 | 21 | # sphinx_gallery_start_ignore 22 | pm.GmshMesher.finalize() 23 | # sphinx_gallery_end_ignore 24 | 25 | # %% 26 | # A simple rectangular mesh 27 | # ------------------------- 28 | # 29 | # The most simple example is perhaps a rectangle. We'll create a vector 30 | # geometry, store this in a geodataframe, and associate a cell size. 
31 | 32 | polygon = sg.Polygon( 33 | [ 34 | [0.0, 0.0], 35 | [10.0, 0.0], 36 | [10.0, 10.0], 37 | [0.0, 10.0], 38 | ] 39 | ) 40 | gdf = gpd.GeoDataFrame(geometry=[polygon]) 41 | gdf["cellsize"] = 2.0 42 | 43 | # %% 44 | # We'll use this polygon to generate a mesh. We start by initializing a 45 | # TriangleMesher, which is a simple wrapper around the Python bindings to the 46 | # Gmsh C++-library. This wrapper extracts the coordinates and presents them 47 | # in the appropriate manner for Gmsh. 48 | 49 | mesher = pm.GmshMesher(gdf) 50 | vertices, triangles = mesher.generate() 51 | pm.plot(vertices, triangles) 52 | 53 | # %% 54 | # Before we can instantiate another GmshMesher, we need to ``finalize`` the old 55 | # one. 56 | 57 | mesher.finalize() 58 | 59 | # %% 60 | # As the name suggests, Triangle only generates triangular meshes. Gmsh is 61 | # capable of generating quadrilateral-dominant meshes, and has a lot more bells 62 | # and whistles for defining cellsizes. 63 | 64 | line = sg.LineString([(2.0, 8.0), (8.0, 2.0)]) 65 | gdf = gpd.GeoDataFrame(geometry=[polygon, line]) 66 | gdf["cellsize"] = [2.0, 0.5] 67 | 68 | fig, (ax0, ax1) = plt.subplots(ncols=2) 69 | 70 | mesher = pm.TriangleMesher(gdf) 71 | vertices, triangles = mesher.generate() 72 | pm.plot(vertices, triangles, ax=ax0) 73 | 74 | mesher = pm.GmshMesher(gdf) 75 | vertices, triangles = mesher.generate() 76 | pm.plot(vertices, triangles, ax=ax1) 77 | 78 | # %% 79 | # Gmsh allows for specifying cell sizes in a more flexible way. Triangle (left) 80 | # only supports polygons (regions) with fixed cell sizes and explicitly placed 81 | # vertices. Gmsh is capable of forcing refinement in a larger zone around 82 | # features as is visible around the diagonal (right). 
83 | # 84 | # Defaults 85 | # -------- 86 | # 87 | # The GmshMesher class is initialized with a number of default parameters: 88 | 89 | print(mesher) 90 | 91 | mesher.finalize() 92 | 93 | # %% 94 | # The parameters of Gmsh differ from Triangle, but they work the same: they can 95 | # be altered after initialization to control the triangulation. 96 | # 97 | # Forcing points, lines, local refinement 98 | # --------------------------------------- 99 | # 100 | # We can force points and lines into the triangulation: 101 | 102 | outer = [(0.0, 0.0), (10.0, 0.0), (10.0, 10.0), (0.0, 10.0)] 103 | inner = [(3.0, 3.0), (7.0, 3.0), (7.0, 7.0), (3.0, 7.0)] 104 | donut = sg.Polygon(shell=outer, holes=[inner]) 105 | refined = sg.Polygon(inner) 106 | 107 | y = np.arange(0.5, 10.0, 0.5) 108 | x = np.full(y.size, 1.0) 109 | points = gpd.points_from_xy(x, y) 110 | 111 | line = sg.LineString( 112 | [ 113 | [9.0, 2.0], 114 | [9.0, 8.0], 115 | ] 116 | ) 117 | 118 | gdf = gpd.GeoDataFrame(geometry=[donut, refined, line, *points]) 119 | gdf["cellsize"] = [2.0, 0.5, 2.0] + (len(points) * [2.0]) 120 | 121 | mesher = pm.GmshMesher(gdf) 122 | vertices, triangles = mesher.generate() 123 | mesher.finalize() 124 | 125 | fig, ax = plt.subplots() 126 | pm.plot(vertices, triangles, ax=ax) 127 | gdf.plot(facecolor="none", edgecolor="red", ax=ax) 128 | 129 | 130 | # Quadrilateral meshes 131 | # -------------------- 132 | # 133 | # One of the features of Gmsh is that it is also capable of generating 134 | # quadrilateral (dominant) meshes, by recombining triangles. 
We can achieve 135 | # this by changing a parameter on the mesher: 136 | 137 | gdf = gpd.GeoDataFrame(geometry=[polygon]) 138 | gdf["cellsize"] = 2.0 139 | mesher = pm.GmshMesher(gdf) 140 | mesher.recombine_all = True 141 | vertices, faces = mesher.generate() 142 | 143 | pm.plot(vertices, faces) 144 | 145 | # %% 146 | # Writing to file 147 | # --------------- 148 | # It's also possible to use the Python bindings to write a Gmsh ``.msh`` file. 149 | # This file can be opened using the Gmsh GUI to e.g. inspect the generated 150 | # mesh. 151 | 152 | mesher.write("my-mesh.msh") 153 | 154 | # %% 155 | # Conclusion 156 | # ---------- 157 | # 158 | # In real use, the vector geometries will be more complex, and not based on 159 | # just a few coordinate pairs. Such cases are presented in the other examples, 160 | # but the same principles apply: we may use polygons, linestrings and points 161 | # with associated cell sizes to steer the triangulation; unlike Triangle, 162 | # for Gmsh cell sizes can associated to linestrings and points, not just 163 | # polygons. 164 | 165 | # %% 166 | -------------------------------------------------------------------------------- /examples/03_gmsh-fields.py: -------------------------------------------------------------------------------- 1 | """ 2 | Gmsh Fields Example 3 | =================== 4 | 5 | Gmsh supports so called "fields" to guide the cell sizes of the generated 6 | meshes. These fields are separate from the geometrical constraints: for 7 | example, a field point does not end up in the generated mesh, but influences 8 | the cell size in its surrounding. 
9 | 10 | These field geometries can be added via: 11 | 12 | * :meth:`pandamesh.GmshMesher.add_threshold_distance_field()` 13 | * :meth:`pandamesh.GmshMesher.add_matheval_distance_field()` 14 | * :meth:`pandamesh.GmshMesher.add_structured_field()` 15 | * :meth:`pandamesh.GmshMesher.add_structured_field_from_dataarray()`, 16 | 17 | The examples below demonstrate how to set up these distance fields for meshing. 18 | """ 19 | # %% 20 | import geopandas as gpd 21 | import matplotlib.pyplot as plt 22 | import numpy as np 23 | import shapely.geometry as sg 24 | 25 | import pandamesh as pm 26 | 27 | # sphinx_gallery_start_ignore 28 | pm.GmshMesher.finalize() 29 | # sphinx_gallery_end_ignore 30 | 31 | # %% 32 | # Point fields 33 | # ------------ 34 | # 35 | # We'll start again with simple rectangular example. 36 | 37 | polygon = sg.Polygon( 38 | [ 39 | [0.0, 0.0], 40 | [10.0, 0.0], 41 | [10.0, 10.0], 42 | [0.0, 10.0], 43 | ] 44 | ) 45 | point = sg.Point([4.0, 4.0]) 46 | gdf = gpd.GeoDataFrame(geometry=[polygon]) 47 | gdf["cellsize"] = 5.0 48 | 49 | mesher = pm.GmshMesher(gdf, shift_origin=False) 50 | mesher.mesh_size_extend_from_boundary = False 51 | mesher.mesh_size_from_curvature = False 52 | mesher.mesh_size_from_points = False 53 | 54 | pm.plot(*mesher.generate()) 55 | 56 | # %% 57 | # Threshold distance fields 58 | # ------------------------- 59 | # 60 | # Gmsh supports changing cell sizes gradually, for example as a function of 61 | # distance to a feature. 
We can add a point, and connect a distance threshold 62 | # field to it: 63 | 64 | point = sg.Point([4.0, 4.0]) 65 | field = gpd.GeoDataFrame(geometry=[point]) 66 | field["dist_min"] = 2.0 67 | field["dist_max"] = 4.0 68 | field["size_min"] = 0.5 69 | field["size_max"] = 2.5 70 | field["spacing"] = np.nan 71 | mesher.add_threshold_distance_field(field) 72 | 73 | vertices, faces = mesher.generate() 74 | pm.plot(vertices, faces) 75 | 76 | # %% 77 | # Within the ``dist_min`` of the point, all cell sizes have size of at most 78 | # ``size_min``. This changes linearly until ``dist_max`` is reached, at which point 79 | # the cell sizes become ``size_max``. 80 | # 81 | # Fields can be removed via ``.clear_fields()``: 82 | 83 | mesher.clear_fields() 84 | vertices, faces = mesher.generate() 85 | pm.plot(vertices, faces) 86 | 87 | # %% 88 | # Gmsh only measures distances to point. The ``spacing`` is used to interpolate 89 | # points along lines: 90 | 91 | mesher.clear_fields() 92 | 93 | line = sg.LineString( 94 | [ 95 | [3.0, -3.0], 96 | [3.0, 13.0], 97 | ] 98 | ) 99 | field = gpd.GeoDataFrame(geometry=[line]) 100 | field["dist_min"] = 2.0 101 | field["dist_max"] = 4.0 102 | field["size_min"] = 0.5 103 | field["size_max"] = 2.5 104 | field["spacing"] = 2.0 105 | mesher.add_threshold_distance_field(field) 106 | 107 | vertices, faces = mesher.generate() 108 | pm.plot(vertices, faces) 109 | 110 | # %% 111 | # Note that unlike the mesher input geometries, these geometries may fall 112 | # outside the meshing domain: they only "radiate" a cell size. 113 | # 114 | # Polygons can also be used as field geometries. 
Distances are measured from 115 | # internal and external boundaries: 116 | 117 | mesher.clear_fields() 118 | 119 | square = sg.Polygon( 120 | [ 121 | [3.0, 3.0], 122 | [7.0, 3.0], 123 | [7.0, 7.0], 124 | [3.0, 7.0], 125 | ] 126 | ) 127 | field = gpd.GeoDataFrame(geometry=[square]) 128 | field["dist_min"] = 0.5 129 | field["dist_max"] = 1.5 130 | field["size_min"] = 0.3 131 | field["size_max"] = 2.5 132 | field["spacing"] = 1.0 133 | mesher.add_threshold_distance_field(field) 134 | 135 | vertices, faces = mesher.generate() 136 | pm.plot(vertices, faces) 137 | 138 | # %% 139 | # MathEval distance fields 140 | # ------------------------ 141 | # 142 | # Gmsh also supports arbitrary mathematical functions. With Pandamesh, these 143 | # can be easily combined to specify cell size a function to some boundary. For 144 | # example, we can specify cell size as quadratically growing with the distance 145 | # from the left boundary: 146 | 147 | mesher.clear_fields() 148 | 149 | line = sg.LineString( 150 | [ 151 | [0.0, 0.0], 152 | [0.0, 10.0], 153 | ] 154 | ) 155 | field = gpd.GeoDataFrame(geometry=[line]) 156 | field["function"] = "distance^2 + 0.3" 157 | field["spacing"] = 1.0 158 | mesher.add_matheval_distance_field(field) 159 | 160 | vertices, faces = mesher.generate() 161 | pm.plot(vertices, faces) 162 | 163 | # %% 164 | # Note that we should take care to specify a function which is always larger 165 | # than zero in the meshing domain. 166 | # 167 | # Unlike input geometries, fields can be added in a piece by piece manner. The 168 | # distance is always relative to the feature of the geometry in the 169 | # GeoDataFrame row. 
170 | 171 | second_field = gpd.GeoDataFrame(geometry=[sg.Point([5.0, 5.0])]) 172 | second_field["function"] = "max(1/(distance^2), 2.0)" 173 | second_field["spacing"] = np.nan 174 | mesher.add_matheval_distance_field(second_field) 175 | 176 | vertices, faces = mesher.generate() 177 | pm.plot(vertices, faces) 178 | 179 | # %% 180 | # Structured fields 181 | # ----------------- 182 | # 183 | # In some cases, the generated cell size should depend on some physical 184 | # properties of the domain. In geospatial applications, such properties are 185 | # often represented as raster data. These data can be used to guide mesh 186 | # generation as a structured grid. The cell size is prescribed at the grid 187 | # points, and interpolated between. 188 | # 189 | # In the example below, we generate 3 by 3 grid of cell sizes, with small cell 190 | # sizes in the lower left corner, and large cell sizes in the upper right: 191 | 192 | mesher.clear_fields() 193 | 194 | y, x = np.meshgrid([1.0, 5.0, 9.0], [1.0, 5.0, 9.0], indexing="ij") 195 | distance_from_origin = np.sqrt((x * x + y * y)) 196 | cellsize = np.log(distance_from_origin / distance_from_origin.min()) + 0.5 197 | mesher.add_structured_field( 198 | cellsize=cellsize, 199 | xmin=x.min(), 200 | ymin=y.min(), 201 | dx=1.0, 202 | dy=1.0, 203 | ) 204 | vertices, faces = mesher.generate() 205 | 206 | fig, ax = plt.subplots() 207 | pm.plot(vertices, faces, ax=ax) 208 | ax.scatter(x, y) 209 | 210 | # %% 211 | # DataArray structured fields 212 | # --------------------------- 213 | # 214 | # These structured fields can also be provided as xarray DataArrays: 215 | 216 | mesher.clear_fields() 217 | 218 | import xarray as xr 219 | 220 | x = np.arange(1.0, 10.0) 221 | y = np.arange(1.0, 10.0) 222 | da = xr.DataArray(np.ones((y.size, x.size)), coords={"y": y, "x": x}, dims=("y", "x")) 223 | 224 | mesher.add_structured_field_from_dataarray(da) 225 | vertices, faces = mesher.generate() 226 | pm.plot(vertices, faces) 227 | 228 | # %% 229 
| # This is arguably the most flexible way of configuring cell sizes, since we 230 | # can easily modify the DataArray values. Note that like the MathEval 231 | # specification, we need to take care to ensure values remain > 0. 232 | 233 | mesher.clear_fields() 234 | 235 | cos_da = da * np.cos(da["x"]) + 1.1 236 | mesher.add_structured_field_from_dataarray(cos_da) 237 | vertices, faces = mesher.generate() 238 | pm.plot(vertices, faces) 239 | # %% 240 | -------------------------------------------------------------------------------- /examples/04_triangle-geospatial.py: -------------------------------------------------------------------------------- 1 | """ 2 | Geospatial Triangle Example 3 | =========================== 4 | 5 | In this example we'll illustrate how to generate a mesh from a "real-world" 6 | geospatial vector dataset. 7 | """ 8 | # %% 9 | import geopandas as gpd 10 | import matplotlib.pyplot as plt 11 | import pandas as pd 12 | import shapely.geometry as sg 13 | 14 | import pandamesh as pm 15 | 16 | # %% 17 | # Overlap 18 | # ------- 19 | # 20 | # We will get the data of a GeoJSON file describing the provinces of the 21 | # Netherlands, and select only the name and geometry columns. We'll set the 22 | # coordinate reference system to the Dutch national standard (EPSG:28992). 23 | # Finally we set the name column to be used as index, so we can select 24 | # provinces on name. 25 | 26 | provinces = pm.data.provinces_nl().loc[:, ["name", "geometry"]] 27 | provinces = provinces.to_crs("epsg:28992") 28 | provinces.index = provinces["name"] 29 | gdf = provinces.copy() 30 | 31 | # %% 32 | # The mesh generation software cannot deal with overlap of polygons. To get rid 33 | # of overlap, we can use the spatial functionality that geopandas provides. 34 | # Let's check the polygons for overlap first. 
35 | 36 | overlap = gdf.overlay(gdf, how="intersection", keep_geom_type=True) 37 | overlap = overlap.loc[overlap["name_1"] != overlap["name_2"]] 38 | 39 | fig, ax = plt.subplots() 40 | gdf.plot(ax=ax) 41 | overlap.plot(edgecolor="red", ax=ax) 42 | 43 | # %% 44 | # Clean-up 45 | # -------- 46 | # 47 | # There are many small overlaps visible at the province borders. 48 | # 49 | # We can generate a consistent polygon using a unary union. 50 | 51 | union = sg.Polygon(gdf.unary_union) 52 | union_gdf = gpd.GeoDataFrame(geometry=[union]) 53 | union_gdf["cellsize"] = 10_000.0 54 | 55 | # %% 56 | # Unfortunately, the province boundaries of this dataset do not align neatly and 57 | # there are a number of small holes present. Some of these holes are not formed 58 | # by inconsistencies, but by a small number of Belgian exclaves, 59 | # `Baarle-Hertog`_. 60 | # 61 | # Simplify 62 | # -------- 63 | # 64 | # We'll ignore the subtleties of international law for now and use geopandas to 65 | # remove all blemishes by: 66 | # 67 | # * squeezing out the holes with ``.buffer`` 68 | # * dissolving the buffered polygons into a single polygon with ``.dissolve`` 69 | # * simplifying the dissolved polygon to avoid over-refinement with ``.simplify`` 70 | # 71 | # This creates a clean, and simpler, geometry. 72 | 73 | simplified = gdf.copy() 74 | simplified.geometry = simplified.geometry.buffer(500.0) 75 | simplified["dissolve_column"] = 0 76 | simplified = simplified.dissolve(by="dissolve_column") 77 | simplified.geometry = simplified.geometry.simplify(5_000.0) 78 | simplified["cellsize"] = 10_000.0 79 | 80 | simplified.plot() 81 | 82 | # %% 83 | # Using this clean geometry, we can generate an unstructured grid with a fairly 84 | # constant cell size. 85 | 86 | mesher = pm.TriangleMesher(simplified) 87 | vertices, triangles = mesher.generate() 88 | pm.plot(vertices, triangles) 89 | 90 | # %% 91 | # For real work, buffering and simplifying will likely not suffice.
92 | 93 | # See the preprocessing example for an overview of common issues and how to 94 | # apply pandamesh's Preprocessor class to resolve them. 95 | # 96 | # Local refinement 97 | # ---------------- 98 | # 99 | # To set a zone of refinement, we can define an additional polygon. We need to 100 | # ensure that no overlap occurs in the following steps: 101 | # 102 | # * select the geometry of a single province; 103 | # * simplify its geometry to an appropriate level of detail; 104 | # * specify a smaller cell size; 105 | # * remove this province from the enveloping polygon; 106 | # * collect the two polygons in a single geodataframe. 107 | 108 | utrecht = gdf.loc[["Utrecht"]] 109 | utrecht.geometry = utrecht.geometry.simplify(2_500.0) 110 | utrecht["cellsize"] = 5000.0 111 | 112 | envelope = simplified.overlay(utrecht, how="difference") 113 | refined = pd.concat([envelope, utrecht]) 114 | refined.index = [0, 1] 115 | refined.plot(column="name") 116 | 117 | # %% 118 | # This results in a mesh with a smaller cell size in the province of Utrecht. 119 | 120 | mesher = pm.TriangleMesher(refined) 121 | vertices, triangles = mesher.generate() 122 | pm.plot(vertices, triangles) 123 | 124 | # %% 125 | # Conclusion 126 | # ---------- 127 | # 128 | # This example provides a taste of how to convert a geospatial vector dataset 129 | # into an unstructured grid with a locally refined part. Real-world data 130 | # generally come with their own idiosyncrasies and inconsistencies. Depending 131 | # on the nature of the necessary fixes, they can be solved with geopandas 132 | # functionality, but sometimes manual editing is required. Fortunately, 133 | # geopandas provides easy input and output for many file formats, which can be 134 | # opened by e.g. QGIS. 135 | # 136 | # ..
_Baarle-Hertog: https://en.wikipedia.org/wiki/Baarle-Hertog 137 | -------------------------------------------------------------------------------- /examples/05_preprocessing.py: -------------------------------------------------------------------------------- 1 | """ 2 | Preprocessing 3 | ============= 4 | 5 | Raw geospatial vector data is often not ready to use directly in mesh 6 | generation: 7 | 8 | * Polygon data often do not form a valid planar partition: polygons are 9 | overlapping, or neighboring polygons have small gaps between them. 10 | * Polygon boundaries or linestring segments intersect each other. 11 | * Points may be located on polygon boundaries or lines. Since floating point 12 | numbers are not exact, points seemingly located on a line are computationally 13 | just left or just right of the line and form an extremely thin triangle. 14 | * Points may be located extremely close together, thereby generating tiny 15 | triangles. 16 | 17 | Such problems either lead to a generated mesh with extremely small elements, or 18 | worse, they lead to a crash of the meshing program. Pandamesh provides a 19 | ``Preprocessor`` class to assist with cleaning up some common faults. 20 | 21 | This example will illustrate some common problems and how to resolve them. 22 | """ 23 | # %% 24 | import geopandas as gpd 25 | import matplotlib.pyplot as plt 26 | import numpy as np 27 | import shapely 28 | import shapely.geometry as sg 29 | 30 | import pandamesh as pm 31 | 32 | # sphinx_gallery_start_ignore 33 | pm.GmshMesher.finalize() 34 | # sphinx_gallery_end_ignore 35 | 36 | # %% 37 | # Polygons 38 | # -------- 39 | # 40 | # When generating a mesh, we often have a general area which may be meshed 41 | # coarsely and an area of interest, which should be meshed more finely. 42 | # Generally, the fine inner zone is located within the coarse outer zone, but 43 | # this requires a hole in the outer zone that exactly matches up with the 44 | # exterior of the inner zone. 
45 | 46 | outer = sg.Polygon( 47 | [ 48 | [0.0, 0.0], 49 | [10.0, 0.0], 50 | [10.0, 10.0], 51 | [0.0, 10.0], 52 | ] 53 | ) 54 | inner = sg.Polygon( 55 | [ 56 | [5.0, 2.0], 57 | [8.0, 5.0], 58 | [5.0, 8.0], 59 | [2.0, 5.0], 60 | ] 61 | ) 62 | 63 | gdf = gpd.GeoDataFrame(geometry=[outer, inner]) 64 | gdf["cellsize"] = [2.0, 1.0] 65 | 66 | fig, (ax0, ax1) = plt.subplots(ncols=2, sharex=True, sharey=True) 67 | gdf.iloc[[0]].plot(ax=ax0) 68 | gdf.iloc[[1]].plot(ax=ax1) 69 | 70 | # %% 71 | # In this case, we have two conflicting specified cell sizes in the inner 72 | # square. We can resolve this as follows: 73 | 74 | resolved = ( 75 | pm.Preprocessor(geometry=gdf.geometry, values=gdf.cellsize) 76 | .unify_polygons() 77 | .to_geodataframe() 78 | ).rename(columns={"values": "cellsize"}) 79 | 80 | # %% 81 | # Note that the Preprocessor supports method chaining, allowing you to flexibly 82 | # execute a set of operations. 83 | # 84 | # The resulting geodataframe's geometries are valid planar partition: 85 | 86 | fig, (ax0, ax1) = plt.subplots(ncols=2, sharex=True, sharey=True) 87 | resolved.iloc[[0]].plot(ax=ax0) 88 | resolved.iloc[[1]].plot(ax=ax1) 89 | 90 | # %% 91 | # And we can use it directly to generate a mesh: 92 | 93 | vertices, faces = pm.TriangleMesher(resolved).generate() 94 | pm.plot(vertices, faces) 95 | 96 | # %% 97 | # Alternatively, multiple polygons with the same cell size specification might 98 | # be overlapping 99 | 100 | inner0 = shapely.affinity.translate(inner, xoff=-1.0) 101 | inner1 = shapely.affinity.translate(inner, xoff=1.0) 102 | gdf = gpd.GeoDataFrame(geometry=[outer, inner0, inner1]) 103 | gdf["cellsize"] = [2.0, 1.0, 1.0] 104 | 105 | fig, ax = plt.subplots() 106 | gdf.plot(ax=ax, facecolor="none") 107 | # %% 108 | # These will also be resolved by ``.unify_polygons``. 
109 | 110 | resolved = ( 111 | pm.Preprocessor(geometry=gdf.geometry, values=gdf.cellsize) 112 | .unify_polygons() 113 | .to_geodataframe() 114 | ).rename(columns={"values": "cellsize"}) 115 | 116 | vertices, faces = pm.TriangleMesher(resolved).generate() 117 | 118 | fig, ax = plt.subplots() 119 | pm.plot(vertices, faces, ax=ax) 120 | resolved.plot(ax=ax, facecolor="none", edgecolor="red") 121 | 122 | # %% 123 | # Note, however, that the internal boundaries of the inner polygons are forced 124 | # into the triangulation. We can rid of these by calling ``.merge_polygons``: 125 | 126 | resolved = ( 127 | pm.Preprocessor(geometry=gdf.geometry, values=gdf.cellsize) 128 | .unify_polygons() 129 | .merge_polygons() 130 | .to_geodataframe() 131 | ).rename(columns={"values": "cellsize"}) 132 | 133 | vertices, faces = pm.TriangleMesher(resolved).generate() 134 | 135 | fig, ax = plt.subplots() 136 | pm.plot(vertices, faces, ax=ax) 137 | resolved.plot(ax=ax, facecolor="none", edgecolor="red") 138 | 139 | # %% 140 | # An alternative problem is when polygons are touching, but do not actually 141 | # share vertices along the boundary. 142 | 143 | first = sg.Polygon( 144 | [ 145 | [0.0, 0.0], 146 | [10.0, 0.0], 147 | [10.0, 10.0], 148 | [0.0, 10.0], 149 | ] 150 | ) 151 | second = sg.Polygon( 152 | [ 153 | [10.0, 2.0], 154 | [18.0, 2.0], 155 | [18.0, 8.0], 156 | [10.0, 8.0], 157 | ] 158 | ) 159 | 160 | gdf = gpd.GeoDataFrame(geometry=[first, second]) 161 | gdf["cellsize"] = [4.0, 2.0] 162 | 163 | vertices, faces = pm.GmshMesher(gdf, intersecting_edges="warn").generate(finalize=True) 164 | pm.plot(vertices, faces) 165 | 166 | # %% 167 | # At x=10.0, the generated triangles are disconnected. 
168 | # 169 | # This is caused by the the fact that the polygons do not share an edge: 170 | # 171 | # * The polygon on the left has an edge from (10.0, 0.0) to (10.0, 10.0) 172 | # * The polygon on the right has an edge from (10.0, 2.0) to (10.0, 8.0) 173 | # 174 | # In fact, the vertices of the right polygon are intersecting the (edge) of the 175 | # left polygon. We can identify these intersections with 176 | # :func:`pandamesh.find_edge_intersections`: 177 | 178 | intersections = pm.find_edge_intersections(gdf.geometry) 179 | 180 | fig, ax = plt.subplots() 181 | pm.plot(vertices, faces, ax=ax) 182 | intersections.plot(ax=ax) 183 | 184 | # %% 185 | # Calling ``.unify_polygons()`` ensures that the vertices of touching polygons 186 | # are inserted, such that the polygons share an edge. 187 | 188 | resolved = ( 189 | pm.Preprocessor(geometry=gdf.geometry, values=gdf.cellsize) 190 | .unify_polygons() 191 | .to_geodataframe() 192 | ).rename(columns={"values": "cellsize"}) 193 | 194 | vertices, faces = pm.TriangleMesher(resolved).generate() 195 | polygon0_coords = shapely.get_coordinates(resolved.geometry[0]) 196 | 197 | fig, ax = plt.subplots() 198 | pm.plot(vertices, faces, ax=ax) 199 | ax.scatter(*polygon0_coords.T) 200 | 201 | # %% 202 | # Lines 203 | # ----- 204 | # 205 | # Lines may only be only partially present, or present in holes: 206 | 207 | donut = sg.Polygon( 208 | [ 209 | [0.0, 0.0], 210 | [10.0, 0.0], 211 | [10.0, 10.0], 212 | [0.0, 10.0], 213 | ], 214 | holes=[ 215 | [ 216 | [2.0, 5.0], 217 | [5.0, 8.0], 218 | [8.0, 5.0], 219 | [5.0, 2.0], 220 | ] 221 | ], 222 | ) 223 | line0 = shapely.LineString( 224 | [ 225 | [-2.0, 0.0], 226 | [12.0, 10.0], 227 | ] 228 | ) 229 | line1 = shapely.LineString( 230 | [ 231 | [5.5, 9.0], 232 | [9.0, 5.5], 233 | ] 234 | ) 235 | 236 | gdf = gpd.GeoDataFrame(geometry=[donut, line0, line1]) 237 | gdf["cellsize"] = [2.0, 1.0, 1.0] 238 | gdf.plot(edgecolor="k") 239 | 240 | # %% 241 | # We can identify these problematic 
intersections again using
# :func:`pandamesh.find_edge_intersections`:

intersections = pm.find_edge_intersections(gdf.geometry)
fig, ax = plt.subplots()
gdf.plot(ax=ax, facecolor="none")
intersections.plot(ax=ax)

# %%
# A first step is to remove line segments that do not fall in any polygon:

resolved = (
    pm.Preprocessor(geometry=gdf.geometry, values=gdf.cellsize)
    .clip_lines()
    .to_geodataframe()
).rename(columns={"values": "cellsize"})
resolved.plot(edgecolor="k")

# %%
# However, this doesn't create suitable input for meshing. The ``GmshMesher``
# appears to hang on this input, and Triangle generates a grid with very small
# triangles. Pandamesh errors on these intersections by default, but we may
# proceed:

vertices, faces = pm.TriangleMesher(resolved, intersecting_edges="warn").generate()
pm.plot(vertices, faces)

# %%
# A better approach here is to ensure all intersections are present in all
# linework:
#
# * First we clip.
# * Then we call ``unify_lines`` to ensure that the intersection of line0 and
#   line1 at (7.625 6.875) is represented.
# * Next we call ``unify_polygons``. This ensures the intersections of the lines
#   with the polygon exterior are represented as well.
# * The result of ``unify_polygons`` is that the line splits the polygon in two
#   parts. These are merged back together with ``merge_polygons``.
279 | # 280 | # If we plot the vertices of the resolved polygon, we see that the intersection 281 | # vertices have been inserted into the polygon boundaries, and that the tiny 282 | # triangles around the line intersection have disappeared: 283 | 284 | resolved = ( 285 | pm.Preprocessor(geometry=gdf.geometry, values=gdf.cellsize) 286 | .clip_lines() 287 | .unify_lines() 288 | .unify_polygons() 289 | .merge_polygons() 290 | .to_geodataframe() 291 | ).rename(columns={"values": "cellsize"}) 292 | 293 | vertices, faces = pm.GmshMesher(resolved).generate(finalize=True) 294 | polygon0_coords = shapely.get_coordinates(resolved.geometry[0]) 295 | 296 | fig, ax = plt.subplots() 297 | pm.plot(vertices, faces, ax=ax) 298 | ax.scatter(*polygon0_coords.T) 299 | 300 | # %% 301 | # In some cases, having line segments terminate exactly on polygon boundaries 302 | # still causes trouble. We may also ensure that lines are some distance removed 303 | # from any polygon boundary by providing a distance to ``clip_lines``: 304 | 305 | resolved = ( 306 | pm.Preprocessor(geometry=gdf.geometry, values=gdf.cellsize) 307 | .unify_lines() 308 | .clip_lines(distance=0.5) 309 | .to_geodataframe() 310 | ).rename(columns={"values": "cellsize"}) 311 | 312 | vertices, faces = pm.GmshMesher(resolved).generate(finalize=True) 313 | polygon0_coords = shapely.get_coordinates(resolved.geometry[0]) 314 | 315 | fig, ax = plt.subplots() 316 | pm.plot(vertices, faces, ax=ax) 317 | resolved.plot(facecolor="none", edgecolor="red", ax=ax) 318 | ax.scatter(*polygon0_coords.T) 319 | 320 | # %% 321 | # Another pragmatic approach is to convert any line into interpolated points. 322 | # Points cannot intersect each other, which sidesteps a large number of problems. 
323 | 324 | resolved = ( 325 | pm.Preprocessor(geometry=gdf.geometry, values=gdf.cellsize) 326 | .interpolate_lines_to_points(distance=0.25) 327 | .clip_points() 328 | .to_geodataframe() 329 | ).rename(columns={"values": "cellsize"}) 330 | 331 | vertices, faces = pm.GmshMesher(resolved).generate(finalize=True) 332 | 333 | fig, ax = plt.subplots() 334 | pm.plot(vertices, faces, ax=ax) 335 | resolved.plot(facecolor="none", edgecolor="red", ax=ax) 336 | 337 | # %% 338 | # Points 339 | # ------ 340 | # 341 | # Note that the start and end points of the lines are still on, or very near 342 | # the polygon edges. 343 | # 344 | # We can remove those points by providing a distance to ``clip_points``. 345 | 346 | resolved = ( 347 | pm.Preprocessor(geometry=gdf.geometry, values=gdf.cellsize) 348 | .interpolate_lines_to_points(distance=0.25) 349 | .clip_points(distance=0.5) 350 | .to_geodataframe() 351 | ).rename(columns={"values": "cellsize"}) 352 | 353 | vertices, faces = pm.GmshMesher(resolved).generate(finalize=True) 354 | 355 | fig, ax = plt.subplots() 356 | pm.plot(vertices, faces, ax=ax) 357 | resolved.plot(facecolor="none", edgecolor="red", ax=ax) 358 | 359 | # %% 360 | # A problem with points is that they may be very close together, thereby 361 | # generating very small triangles. 
Let's generate 200 random points to illustrate: 362 | 363 | rng = np.random.default_rng() 364 | points = gpd.points_from_xy(*rng.random((2, 200)) * 10.0) 365 | gdf = gpd.GeoDataFrame(geometry=np.concatenate([[donut], points])) 366 | gdf["cellsize"] = 2.0 367 | 368 | resolved = ( 369 | pm.Preprocessor(geometry=gdf.geometry, values=gdf.cellsize) 370 | .clip_points(distance=0.5) 371 | .to_geodataframe() 372 | ).rename(columns={"values": "cellsize"}) 373 | 374 | vertices, faces = pm.GmshMesher(resolved).generate(finalize=True) 375 | pm.plot(vertices, faces) 376 | 377 | # %% 378 | # We can solve this by snapping points together that are located some distance 379 | # from each other: 380 | 381 | resolved = ( 382 | pm.Preprocessor(geometry=gdf.geometry, values=gdf.cellsize) 383 | .clip_points(distance=0.5) 384 | .snap_points(distance=0.5) 385 | .to_geodataframe() 386 | ).rename(columns={"values": "cellsize"}) 387 | 388 | vertices, faces = pm.GmshMesher(resolved).generate(finalize=True) 389 | pm.plot(vertices, faces) 390 | 391 | # %% 392 | # Flexibility and composability 393 | # ----------------------------- 394 | # 395 | # The Preprocessor class in Pandamesh is designed with flexibility and 396 | # composability in mind through method chaining. By combining various 397 | # preprocessing steps in any order, you can address a wide range of geometric 398 | # issues. For instance, you might start by unifying polygons, then clip lines, 399 | # interpolate them to points, and finally snap those points together. 400 | # 401 | # The steps required depend on the nature of geometrical input, and may require 402 | # experimenting with various methods. The intermediate output can be checked 403 | # and visualized at any moments, by calling ``to_geodataframe``. 
For example, 404 | # to check the intermediate result after clipping but prior to snapping: 405 | 406 | check = ( 407 | pm.Preprocessor(geometry=gdf.geometry, values=gdf.cellsize) 408 | .clip_points(distance=0.5) 409 | .to_geodataframe() 410 | ) 411 | 412 | check.plot(facecolor="none") 413 | 414 | # %% 415 | # This also makes it easy to apply the preprocessor in steps. Some steps may be 416 | # relatively costly, such as unifying a large number of detailed polygons. The 417 | # intermediate result can be stored as e.g. a GeoPackage. Then, in a separate 418 | # processing step, the intermediate result can be read again, and other 419 | # processing steps (such as filtering points) can be applied. 420 | -------------------------------------------------------------------------------- /examples/README.rst: -------------------------------------------------------------------------------- 1 | Examples 2 | ======== 3 | 4 | These examples illustrate the basic steps to create meshes with Triangle and 5 | Gmsh. 6 | 7 | Every example can be downloaded as a Python script or a Jupyter Notebook. 
8 | -------------------------------------------------------------------------------- /pandamesh/__init__.py: -------------------------------------------------------------------------------- 1 | from pandamesh import data 2 | from pandamesh.common import find_edge_intersections, find_proximate_perimeter_points 3 | from pandamesh.gmsh_enums import ( 4 | FieldCombination, 5 | GeneralVerbosity, 6 | MeshAlgorithm, 7 | SubdivisionAlgorithm, 8 | ) 9 | from pandamesh.gmsh_mesher import GmshMesher, gmsh_env 10 | from pandamesh.plot import plot 11 | from pandamesh.preprocessor import Preprocessor 12 | from pandamesh.triangle_enums import DelaunayAlgorithm 13 | from pandamesh.triangle_mesher import TriangleMesher 14 | 15 | __version__ = "0.2.3" 16 | 17 | 18 | __all__ = ( 19 | "data", 20 | "FieldCombination", 21 | "GeneralVerbosity", 22 | "GmshMesher", 23 | "MeshAlgorithm", 24 | "SubdivisionAlgorithm", 25 | "gmsh_env", 26 | "plot", 27 | "DelaunayAlgorithm", 28 | "TriangleMesher", 29 | "Preprocessor", 30 | "find_edge_intersections", 31 | "find_proximate_perimeter_points", 32 | ) 33 | -------------------------------------------------------------------------------- /pandamesh/data/__init__.py: -------------------------------------------------------------------------------- 1 | from pandamesh.data.sample_data import provinces_nl, south_america 2 | 3 | __all__ = ("provinces_nl", "south_america") 4 | -------------------------------------------------------------------------------- /pandamesh/data/sample_data.py: -------------------------------------------------------------------------------- 1 | """Functions to load sample data.""" 2 | import geopandas as gpd 3 | import pooch 4 | 5 | REGISTRY = pooch.create( 6 | path=pooch.os_cache("pandamesh"), 7 | base_url="https://raw.githubusercontent.com/Deltares/pandamesh/main/data/", 8 | version=None, 9 | version_dev="main", 10 | env="PANDAMESH_DATA_DIR", 11 | registry={ 12 | "provinces-nl.geojson": 
"7539318974d1d78f35e4c2987287aa81f5ff505f444a2e0f340d804f57c0f8e3", 13 | "south-america.geojson": "337746351d15a83d5d41f1cecd30aa40b1698eb7587a4e412c511af89f82e49c", 14 | }, 15 | ) 16 | 17 | 18 | def provinces_nl(): 19 | """Return the provinces (including water bodies) of the Netherlands as a GeoDataframe.""" 20 | fname = REGISTRY.fetch("provinces-nl.geojson") 21 | return gpd.read_file(fname) 22 | 23 | 24 | def south_america(): 25 | fname = REGISTRY.fetch("south-america.geojson") 26 | return gpd.read_file(fname) 27 | -------------------------------------------------------------------------------- /pandamesh/enum_base.py: -------------------------------------------------------------------------------- 1 | from enum import Enum, EnumMeta 2 | from typing import Any, Type, TypeVar, Union, cast 3 | 4 | E = TypeVar("E", bound="FlexibleEnum") 5 | 6 | 7 | def _show_options(options: Type[E]) -> str: 8 | return "\n * ".join(map(str, options.__members__)) 9 | 10 | 11 | class AttributeErrorMeta(EnumMeta): 12 | def __getattr__(cls, name: str) -> Any: 13 | try: 14 | return cls.__members__[name] 15 | except KeyError: 16 | raise AttributeError( 17 | f"{name} is not a valid {cls.__name__}. " 18 | f"Valid options are:\n * {_show_options(cls)}" 19 | ) 20 | 21 | 22 | class FlexibleEnum(Enum, metaclass=AttributeErrorMeta): 23 | @classmethod 24 | def parse(cls: Type[E], value: str) -> E: 25 | try: 26 | return cls.__members__[value] 27 | except KeyError: 28 | raise ValueError( 29 | # Use __repr__() so strings are shown with quotes. 30 | f"{value.__repr__()} is not a valid {cls.__name__}. 
" 31 | f"Valid options are:\n * {_show_options(cls)}" 32 | ) 33 | 34 | @classmethod 35 | def from_value(cls: Type[E], value: Union[E, str]) -> E: 36 | if isinstance(value, cls): 37 | return value 38 | else: 39 | value = cast(str, value) 40 | return cls.parse(value) 41 | -------------------------------------------------------------------------------- /pandamesh/gmsh_enums.py: -------------------------------------------------------------------------------- 1 | from pandamesh.enum_base import FlexibleEnum 2 | 3 | 4 | class MeshAlgorithm(FlexibleEnum): 5 | """ 6 | Gmsh meshing algorithm. Each algorithm has its own advantages and 7 | disadvantages. 8 | 9 | For all 2D unstructured algorithms a Delaunay mesh that contains all 10 | the points of the 1D mesh is initially constructed using a 11 | divide-and-conquer algorithm. Missing edges are recovered using edge 12 | swaps. After this initial step several algorithms can be applied to 13 | generate the final mesh: 14 | 15 | * The MeshAdapt algorithm is based on local mesh modifications. This 16 | technique makes use of edge swaps, splits, and collapses: long edges 17 | are split, short edges are collapsed, and edges are swapped if a 18 | better geometrical configuration is obtained. 19 | * The Delaunay algorithm is inspired by the work of the GAMMA team at 20 | INRIA. New points are inserted sequentially at the circumcenter of 21 | the element that has the largest adimensional circumradius. The mesh 22 | is then reconnected using an anisotropic Delaunay criterion. 23 | * The Frontal-Delaunay algorithm is inspired by the work of S. Rebay. 24 | * Other experimental algorithms with specific features are also 25 | available. In particular, Frontal-Delaunay for Quads is a variant of 26 | the Frontal-Delaunay algorithm aiming at generating right-angle 27 | triangles suitable for recombination; and BAMG allows to generate 28 | anisotropic triangulations. 
29 | 30 | For very complex curved surfaces the MeshAdapt algorithm is the most robust. 31 | When high element quality is important, the Frontal-Delaunay algorithm should 32 | be tried. For very large meshes of plane surfaces the Delaunay algorithm is 33 | the fastest; it usually also handles complex mesh size fields better than the 34 | Frontal-Delaunay. When the Delaunay or Frontal-Delaunay algorithms fail, 35 | MeshAdapt is automatically triggered. The Automatic algorithm uses 36 | Delaunay for plane surfaces and MeshAdapt for all other surfaces. 37 | """ 38 | 39 | MESH_ADAPT = 1 40 | """ 41 | Local mesh modifications using edge swaps, splits, and collapses. Robust 42 | for complex curved surfaces. 43 | """ 44 | 45 | AUTOMATIC = 2 46 | """ 47 | Uses Delaunay for plane surfaces and MeshAdapt for all other surfaces. 48 | """ 49 | 50 | INITIAL_MESH_ONLY = 3 51 | """Generates only the initial Delaunay triangulation.""" 52 | 53 | FRONTAL_DELAUNAY = 5 54 | """Good for high element quality.""" 55 | 56 | BAMG = 7 57 | """Experimental algorithm for generating anisotropic triangulations.""" 58 | 59 | FRONTAL_DELAUNAY_FOR_QUADS = 8 60 | """ 61 | Variant of Frontal-Delaunay aiming to generate right-angle triangles 62 | suitable for recombination. 63 | """ 64 | 65 | PACKING_OF_PARALLELLOGRAMS = 9 66 | """Experimental algorithm for parallelogram-based mesh generation.""" 67 | 68 | QUASI_STRUCTURED_QUAD = 11 69 | """ 70 | Combines an initial unstructured quad mesh with topological improvements 71 | guided by cross fields to produce quasi-structured meshes with few 72 | irregular vertices. 73 | """ 74 | 75 | 76 | class SubdivisionAlgorithm(FlexibleEnum): 77 | """ 78 | Controls how Gmsh recombines triangles to form quads. 79 | 80 | The default recombination algorithm might leave some triangles in the mesh, 81 | if recombining all the triangles leads to badly shaped quads. 
In such 82 | cases, to generate full-quad meshes, you can either subdivide the resulting 83 | hybrid mesh (ALL_QUADRANGLES), or use the full-quad recombination 84 | algorithm, which will automatically perform a coarser mesh followed by 85 | recombination, smoothing and subdivision. 86 | """ 87 | 88 | NONE = 0 89 | """ 90 | No subdivision is applied. The mesh remains as is after the initial 91 | recombination process, potentially leaving some triangles in the mesh. 92 | """ 93 | ALL_QUADRANGLES = 1 94 | """ 95 | Subdivides the mesh to convert all elements into quadrangles. This method 96 | ensures a full-quad mesh by subdividing any remaining triangles after the 97 | initial recombination process. 98 | """ 99 | BARYCENTRIC = 3 100 | """ 101 | Applies barycentric subdivision to the mesh. This method subdivides each 102 | element by connecting its barycenter to the midpoints of its edges, 103 | resulting in a refined mesh with increased element count. 104 | """ 105 | 106 | 107 | class FieldCombination(FlexibleEnum): 108 | """ 109 | Controls how cell size fields are combined in Gmsh when they are found at 110 | the same location. 111 | """ 112 | 113 | MIN = "Min" 114 | """Use the minimum size.""" 115 | MAX = "Max" 116 | """Use the maximum size.""" 117 | 118 | 119 | class GeneralVerbosity(FlexibleEnum): 120 | """Gmsh level of information printed.""" 121 | 122 | SILENT = 0 123 | """No output is printed. All messages are suppressed.""" 124 | 125 | ERRORS = 1 126 | """ 127 | Only error messages are printed. This level is useful when you want to be 128 | alerted only to critical issues that prevent correct execution. 129 | """ 130 | 131 | WARNINGS = 2 132 | """ 133 | Error and warning messages are printed. This level adds important 134 | cautionary information that doesn't necessarily prevent execution but might 135 | affect results. 136 | """ 137 | 138 | DIRECT = 3 139 | """ 140 | Errors, warnings, and direct output from Gmsh commands are printed. 
def write_structured_field_file(
    path: Union[Path, str],
    cellsize: FloatArray,
    xmin: float,
    ymin: float,
    dx: float,
    dy: float,
) -> None:
    """
    Write a binary structured 2D gmsh field file.

    The header consists of the origin (x, y, z), the spacing (dx, dy, dz),
    and the number of samples per dimension; the cell size values follow in
    column-major order.

    Note: make sure the signs of ``dx`` and ``dy`` match the orientation of
    the data in ``cellsize``. Geospatial rasters typically have a positive
    value for dx and negative for dy (x coordinate is ascending; y coordinate
    is descending). Data will be flipped around the respective axis for a
    negative dx or dy.

    Parameters
    ----------
    path: str or pathlib.Path
    cellsize: 2D np.ndarray of floats
        Dimension order is (y, x): y differs along the rows, x along the
        columns.
    xmin: float
    ymin: float
    dx: float
    dy: float

    Returns
    -------
    None
        Writes a structured gmsh field file.
    """
    if cellsize.ndim != 2:
        raise ValueError(
            f"`cellsize` must be 2D. Received an array of shape: {cellsize.shape}"
        )
    nrow, ncol = cellsize.shape

    # A negative spacing means the data runs opposite to the gmsh convention:
    # mirror the data along that axis and continue with a positive spacing.
    if dy < 0.0:
        cellsize = np.flipud(cellsize)
        dy = -dy
    if dx < 0.0:
        cellsize = np.fliplr(cellsize)
        dx = -dx

    with open(path, "wb") as f:
        # Origin and spacing; the third (z) dimension is degenerate.
        f.write(struct.pack("3d", xmin, ymin, 0.0))
        f.write(struct.pack("3d", dx, dy, 1.0))
        f.write(struct.pack("3i", nrow, ncol, 1))
        # Transpose so the values land on disk in column-major (x-fastest) order.
        cellsize.T.tofile(f)
    return
@dataclass
class StructuredField(GmshField):
    """
    Gmsh "Structured" mesh size field backed by a binary field file.

    On construction, the cell size raster is written to a ``.dat`` file in
    ``tmpdir`` (via ``write_structured_field_file``) and registered with gmsh
    as a Structured field reading that file.

    Parameters
    ----------
    tmpdir: tempfile.TemporaryDirectory
        Directory receiving the field file. Presumably it must stay alive
        until gmsh has finished meshing — TODO confirm against the mesher.
    cellsize: np.ndarray of floats
        2D raster of desired cell sizes, dimension order (y, x). All values
        must be > 0.
    xmin, ymin: float
        Origin of the raster.
    dx, dy: float
        Cell spacing; may be negative for descending coordinates (handled by
        ``write_structured_field_file``).
    outside_value: float, optional
        Cell size used by gmsh outside the raster extent. If None, gmsh is
        told not to apply an outside value at all.
    """

    tmpdir: tempfile.TemporaryDirectory
    cellsize: FloatArray
    xmin: float
    ymin: float
    dx: float
    dy: float
    outside_value: Optional[float] = None
    # The following are set in __post_init__, not by the caller:
    id: int = field(init=False)
    path: Path = field(init=False)
    set_outside_value: bool = field(init=False)

    def __post_init__(self):
        min_value = self.cellsize.min()
        if not (min_value > 0):  # will also catch NaN
            raise ValueError(f"Minimum cellsize must be > 0, received: {min_value}")

        if self.outside_value is not None:
            self.set_outside_value = True
            # NOTE(review): self-assignment is a no-op, kept as-is.
            self.outside_value = self.outside_value
        else:
            self.set_outside_value = False
            # Placeholder only: gmsh ignores OutsideValue when
            # SetOutsideValue is 0 (False).
            self.outside_value = -1.0

        self.id = gmsh.model.mesh.field.add("Structured")
        # Include the field id in the file name so multiple structured fields
        # sharing one tmpdir do not overwrite each other's data.
        self.path = Path(self.tmpdir.name) / f"structured_field_{self.id}.dat"
        write_structured_field_file(
            self.path, self.cellsize, self.xmin, self.ymin, self.dx, self.dy
        )
        gmsh.model.mesh.field.setNumber(self.id, "TextFormat", 0)  # binary
        gmsh.model.mesh.field.setString(self.id, "FileName", str(self.path))
        gmsh.model.mesh.field.setNumber(
            self.id, "SetOutsideValue", self.set_outside_value
        )
        gmsh.model.mesh.field.setNumber(self.id, "OutsideValue", self.outside_value)
def add_linestrings(
    features: List[LineStringInfo], tags: IntArray
) -> Tuple[IntArray, IntArray]:
    """
    Add every linestring to the gmsh geometry as a chain of line segments.

    Parameters
    ----------
    features: list of LineStringInfo
        Offset (``index``) and vertex count (``size``) of each linestring in
        ``tags``, plus the id of the polygon it is embedded in.
    tags: np.ndarray of int
        Gmsh point tags of all vertices; each feature indexes a contiguous
        slice of this array.

    Returns
    -------
    line_indices: np.ndarray of int
        Gmsh tag of every created line segment.
    embedded_in: np.ndarray of int
        For every segment, the id of the polygon it must be embedded in.
    """
    # A linestring of n vertices consists of n - 1 line segments.
    n_lines = sum(info.size - 1 for info in features)
    line_indices = np.empty(n_lines, dtype=np.int64)
    # NOTE(review): LineStringInfo.embedded_in may be None per its type hint;
    # assigning None into an int64 array would raise. Presumably callers only
    # pass linestrings that fall within a polygon — confirm against embed_where.
    embedded_in = np.empty(n_lines, dtype=np.int64)
    i = 0
    for info in features:
        point_tags = tags[info.index : info.index + info.size]
        # Walk consecutive vertex pairs, adding one gmsh line per pair.
        first = point_tags[0]
        for second in point_tags[1:]:
            line_index = gmsh.model.geo.addLine(first, second)
            line_indices[i] = line_index
            embedded_in[i] = info.embedded_in
            first = second
            i += 1
    return line_indices, embedded_in
def add_polygons(
    features: List[PolygonInfo], tags: IntArray
) -> Tuple[List[int], List[int]]:
    """
    Add polygon surfaces (exterior ring plus holes) to the gmsh geometry.

    Parameters
    ----------
    features: list of PolygonInfo
        Offsets and sizes of the exterior and interior rings in ``tags``,
        plus the gmsh plane tag to assign to each polygon.
    tags: np.ndarray of int
        Gmsh point tags of all vertices.

    Returns
    -------
    curve_loop_tags: list of int
        Gmsh tags of all curve loops created, across every polygon.
    plane_tags: list of int
        Gmsh tags of the created plane surfaces.
    """
    # Accumulate across all polygons. The previous implementation returned
    # the loop-local list of the *last* iteration only, and raised NameError
    # when ``features`` was empty.
    curve_loop_tags = []
    plane_tags = []
    for info in features:
        # Add the exterior loop first
        loop_tags = [add_curve_loop(tags[info.index : info.index + info.size])]
        # Now add holes
        for start, size in zip(info.interior_indices, info.interior_sizes):
            loop_tags.append(add_curve_loop(tags[start : start + size]))
        # Gmsh interprets the first loop as the outer boundary, the rest as holes.
        plane_tag = gmsh.model.geo.addPlaneSurface(loop_tags, tag=info.polygon_id)
        plane_tags.append(plane_tag)
        curve_loop_tags.extend(loop_tags)
    return curve_loop_tags, plane_tags
def embed_where(gdf: gpd.GeoDataFrame, polygons: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
    """
    Select the features of ``gdf`` that fall within ``polygons`` and record
    which polygon each one should be embedded in.

    Uses an inner spatial join with the "within" predicate, so features that
    lie outside every polygon are dropped. Where both the feature and its
    enclosing polygon specify a cell size, the smaller of the two wins.

    Parameters
    ----------
    gdf: gpd.GeoDataFrame
        Points or linestrings carrying a "cellsize" column.
    polygons: gpd.GeoDataFrame
        Polygons carrying "cellsize" and "__polygon_id" columns.

    Returns
    -------
    gpd.GeoDataFrame
        Columns: "cellsize", "__polygon_id", "geometry".
    """
    tmp = gpd.sjoin(gdf, polygons, predicate="within", how="inner")
    # sjoin suffixes the clashing "cellsize" columns; take the minimum of both.
    tmp["cellsize"] = tmp[["cellsize_left", "cellsize_right"]].min(axis=1)
    return tmp[["cellsize", "__polygon_id", "geometry"]]
177 | linestrings = embed_where(linestrings, polygons) 178 | embedded_points = embed_where(points, polygons) 179 | 180 | # Collect all coordinates, and store the length and type of every element 181 | index, poly_vertices, poly_cellsizes, polygon_features = collect_polygons( 182 | polygons, index=0 183 | ) 184 | index, line_vertices, line_cellsizes, linestring_features = collect_linestrings( 185 | linestrings, index 186 | ) 187 | vertices = np.concatenate(poly_vertices + line_vertices) 188 | cellsizes = np.concatenate(poly_cellsizes + line_cellsizes) 189 | 190 | # Get the unique vertices, and generate the array of indices pointing to 191 | # them for every feature 192 | vertices, indices = np.unique( 193 | vertices.reshape(-1).view(coord_dtype), return_inverse=True 194 | ) 195 | vertex_tags = np.arange(1, len(vertices) + 1) 196 | tags = vertex_tags[indices] 197 | # Get the smallest cellsize per vertex 198 | cellsizes = pd.Series(cellsizes).groupby(tags).min().to_numpy() 199 | 200 | # Add all unique vertices. This includes vertices for linestrings and polygons. 
201 | add_vertices(vertices, cellsizes, vertex_tags) 202 | # Add all geometries to gmsh 203 | add_polygons(polygon_features, tags) 204 | linestring_indices, linestring_embedded = add_linestrings(linestring_features, tags) 205 | gmsh.model.geo.synchronize() 206 | 207 | # Now embed the points and linestrings in the polygons 208 | for polygon_id, embed_indices in pd.Series(linestring_indices).groupby( 209 | linestring_embedded 210 | ): 211 | gmsh.model.mesh.embed(LINE_DIM, embed_indices, PLANE_DIM, polygon_id) 212 | 213 | if len(embedded_points) > 0: 214 | point_indices, point_embedded = add_points(embedded_points) 215 | gmsh.model.geo.synchronize() 216 | for polygon_id, embed_indices in pd.Series(point_indices).groupby( 217 | point_embedded 218 | ): 219 | gmsh.model.mesh.embed(POINT_DIM, embed_indices, PLANE_DIM, polygon_id) 220 | 221 | gmsh.model.geo.synchronize() 222 | return 223 | 224 | 225 | def add_distance_points(points: gpd.GeoSeries) -> IntArray: 226 | indices = np.empty(len(points), dtype=np.int64) 227 | for i, (x, y) in enumerate(shapely.get_coordinates(points)): 228 | indices[i] = gmsh.model.geo.addPoint(x, y, Z_DEFAULT) 229 | return indices 230 | 231 | 232 | def add_distance_linestring( 233 | linestring: sg.LineString, 234 | distance: float, 235 | ) -> IntArray: 236 | # We could add the line as a Gmsh curve, but Gmsh samples along the line 237 | # anyway, and the number of points is configured through a discrete 238 | # sampling value, rather than some spacing distance. So we might as well 239 | # sample ourselves. 
def add_distance_geometry(geometry: gpd.GeoSeries, spacing: FloatArray) -> IntArray:
    """
    Add Gmsh points sampled along the given geometry and return their tags.

    Points, lines, and polygons are handled in that order; empty categories
    are skipped.
    """
    polygons, lines, points = separate_geometry(geometry)
    collected = []
    # Preserve insertion order: points first, then lines, then polygons.
    for adder, data in (
        (add_distance_points, points),
        (lambda geom: add_distance_linestrings(geom, spacing), lines),
        (lambda geom: add_distance_polygons(geom, spacing), polygons),
    ):
        if len(data) > 0:
            collected.append(adder(data))
    gmsh.model.geo.synchronize()
    return np.concatenate(collected)
class MatrixCSR(NamedTuple):
    """
    Compressed Sparse Row matrix. The row indices are compressed; all values
    must therefore be sorted by row number. More or less matches the
    scipy.sparse.csr_matrix.

    NamedTuple for easy ingestion by numba.

    Parameters
    ----------
    data: np.ndarray of floats
        The values of the matrix.
    indices: np.ndarray of integers
        The column numbers of the CSR format.
    indptr: np.ndarray of integers
        The row index CSR pointer array.
        Values for row i (target index i) are stored in:
        indices[indptr[i]: indptr[i + 1]]
    n: int
        The number of rows.
    m: int
        The number of columns.
    nnz: int
        The number of non-zero values.
    """

    data: FloatArray
    indices: IntArray
    indptr: IntArray
    n: int
    m: int
    nnz: int

    @staticmethod
    def from_triplet(
        row: IntArray, col: IntArray, data: np.ndarray, n: int, m: int
    ) -> "MatrixCSR":
        """Construct a MatrixCSR from COO triplets, already sorted by row."""
        # Per-row counts, accumulated into the CSR row pointer. Assumes row
        # values fall in [0, n) — see the class docstring's sortedness note.
        i = np.cumsum(np.bincount(row, minlength=n))
        indptr = np.empty(i.size + 1, dtype=int)
        indptr[0] = 0
        indptr[1:] = i
        return MatrixCSR(
            data=data,
            indices=col,
            indptr=indptr,
            n=n,
            m=m,
            nnz=data.size,
        )
def snap_nodes(points: GeometryArray, max_distance: float) -> IntArray:
    """
    Snap neighboring vertices together that are located within a maximum
    snapping distance from each other.

    If vertices are located within a maximum distance, some of them are snapped
    to their neighbors ("targets"), thereby guaranteeing a minimum distance
    between nodes in the result. The determination of whether a point becomes a
    target itself or gets snapped to another point is primarily based on the
    order in which points are processed and their spatial relationships.

    Parameters
    ----------
    points: GeometryArray of points
        The vertices to snap.
    max_distance: float
        Vertices within this distance of each other are candidates for
        snapping.

    Returns
    -------
    index: 1D array of ints of size M
        Which nodes to preserve (M <= number of input points).
    """
    # First, find all the points that lie within max_distance of each other
    A = distance_matrix(points, max_distance)
    # A row with more than one entry means at least one neighbor besides the
    # point itself: a snap candidate.
    should_snap = np.diff(A.indptr) > 1  # equal to: .getnnz(axis=1) > 1
    if should_snap.any():
        index = np.arange(A.n)
        visited = _snap_to_nearest(
            A=A,
            snap_candidates=index[should_snap],
            max_distance=max_distance,
        )
        targets = visited < 0  # i.e. still UNVISITED or TARGET valued.
        visited[targets] = index[targets]
        deduplicated = np.unique(visited)
        return deduplicated
    else:
        return np.arange(A.n)
def add_linestrings(linestrings: gpd.GeoSeries) -> Tuple[FloatArray, IntArray]:
    """
    Convert linestrings to a vertex array plus a segment array connecting
    consecutive vertices within each feature.
    """
    if len(linestrings) == 0:
        return np.empty((0, 2), dtype=np.float64), np.empty((0, 2), dtype=np.int32)

    geometry = linestrings.geometry.to_numpy()
    total = shapely.get_num_coordinates(geometry).sum()
    vertices, feature_index = shapely.get_coordinates(geometry, return_index=True)

    # Candidate segments connect every vertex to the next one.
    numbering = np.arange(total)
    candidates = np.column_stack((numbering[:-1], numbering[1:])).astype(np.int32)
    # Drop the candidates that would bridge two different features.
    within_feature = np.diff(feature_index) == 0
    return vertices, candidates[within_feature]
def add_points(points: gpd.GeoDataFrame) -> FloatArray:
    """Return the xy coordinates of a point geodataframe as an (n, 2) array."""
    if len(points) == 0:
        return np.empty((0, 2), dtype=np.float64)
    xy = np.empty((len(points), 2), dtype=np.float64)
    xy[:, 0] = points.geometry.x
    xy[:, 1] = points.geometry.y
    return xy
def _polygon_polygon_difference(
    a: gpd.GeoDataFrame, b: gpd.GeoDataFrame
) -> gpd.GeoDataFrame:
    """Subtract the union of b's geometry from a, dropping emptied rows."""
    merged_b = shapely.union_all(b["geometry"])
    result = a.copy()
    result["geometry"] = shapely.difference(result["geometry"], merged_b)
    nonempty = result.area > 0
    return result.loc[nonempty].copy()
def unique_vertices_and_segments(vertices, segments):
    """
    Deduplicate vertices and update the segment connectivity accordingly.

    Repeated vertices (e.g. the closing vertex of LinearRings) would make
    Triangle segfault, so every vertex must occur exactly once. Segments are
    renumbered to refer to the deduplicated vertices, and duplicated segments
    are removed as well.
    """
    deduplicated, inverse = np.unique(vertices, return_inverse=True, axis=0)
    # np.unique's inverse maps every original vertex to its deduplicated row.
    renumbered = inverse.ravel()[segments]
    return deduplicated, np.unique(renumbered, axis=0)
The segments of linestrings 30 | will be directly forced into the triangulation. Points can also be forced 31 | into the triangulation. The cell size values associated with these 32 | geometries willl not be used. 33 | 34 | Triangle cannot automatically resolve overlapping polygons, or points 35 | located exactly on segments. During initialization, the geometries of 36 | the geodataframe are checked: 37 | 38 | * Polygons should not have any overlap with each other. 39 | * Linestrings should not intersect each other, unless the intersection 40 | vertex is present in both. 41 | * Every linestring should be fully contained by a single polygon; 42 | a linestring may not intersect two or more polygons. 43 | * Linestrings and points should not "touch" / be located on 44 | polygon borders. 45 | * Holes in polygons are fully supported, but they must not contain 46 | any linestrings or points. 47 | 48 | If such cases are detected, the initialization will error: use the 49 | :class:`pandamesh.Preprocessor` to clean up geometries beforehand. 50 | 51 | For more details on Triangle, see: 52 | https://www.cs.cmu.edu/~quake/triangle.defs.html 53 | 54 | Parameters 55 | ---------- 56 | gdf: gpd.GeoDataFrame 57 | GeoDataFrame containing the vector geometry. Must contain a "cellsize" 58 | column. 59 | shift_origin: bool, optional, default is True. 60 | If True, temporarily shifts the coordinate system origin to the centroid 61 | of the geometry's bounding box during mesh generation. This helps mitigate 62 | floating-point precision issues. The resulting mesh vertices are 63 | automatically translated back to the original coordinate system. 64 | intersecting_edges: str, optional, default is "error" 65 | String indicating how to report unresolved line segment intersections: 66 | 67 | * "ignore": skip check. 68 | * "warning": emit a warning. 69 | * "error": raise a ValueError. 70 | 71 | minimum_perimeter_spacing: float, default is 1.0e-3. 
    def __init__(
        self,
        gdf: gpd.GeoDataFrame,
        shift_origin: bool = True,
        intersecting_edges="error",
        minimum_perimeter_spacing=1.0e-3,
    ) -> None:
        # Validate input: required columns, integer and duplicate-free index.
        check_geodataframe(gdf, {"geometry", "cellsize"}, check_index=True)
        # Optionally translate coordinates towards the origin to reduce
        # floating point precision issues; offsets are stored so generate()
        # can translate the results back.
        gdf, self._xoff, self._yoff = central_origin(gdf, shift_origin)
        # Split the input into polygons, linestrings, and points, checking
        # for unresolved intersections and degenerate perimeter spacing.
        polygons, linestrings, points = separate_geodataframe(
            gdf, intersecting_edges, minimum_perimeter_spacing
        )
        # Convert to the flat arrays that Triangle consumes.
        self.vertices, self.segments, self.regions = collect_geometry(
            polygons, linestrings, points
        )
        self.holes = polygon_holes(polygons)

        # Set default values for meshing parameters
        self.minimum_angle = 20.0
        self.conforming_delaunay = True
        self.suppress_exact_arithmetic = False
        self.maximum_steiner_points = None
        self.delaunay_algorithm = DelaunayAlgorithm.DIVIDE_AND_CONQUER
        self.consistency_check = False
110 | 111 | See: 112 | https://www.cs.cmu.edu/~quake/triangle.q.html 113 | """ 114 | return self._minimum_angle 115 | 116 | @minimum_angle.setter 117 | def minimum_angle(self, value: float): 118 | if not isinstance(value, float): 119 | raise TypeError("minimum angle must be a float") 120 | if value >= 34.0 or value <= 0.0: 121 | raise ValueError("minimum_angle should fall in the interval: (0.0, 34.0)") 122 | self._minimum_angle = value 123 | 124 | @property 125 | def conforming_delaunay(self) -> bool: 126 | """ 127 | Conforming Delaunay: use this switch if you want all triangles in the 128 | mesh to be Delaunay, and not just constrained Delaunay; or if you want 129 | to ensure that all Voronoi vertices lie within the triangulation. 130 | """ 131 | return self._conforming_delaunay 132 | 133 | @conforming_delaunay.setter 134 | def conforming_delaunay(self, value: bool): 135 | if not isinstance(value, bool): 136 | raise TypeError("conforming_delaunay must be a bool") 137 | self._conforming_delaunay = value 138 | 139 | @property 140 | def suppress_exact_arithmetic(self) -> bool: 141 | """ 142 | Suppresses exact arithmetic. 
143 | 144 | See: 145 | https://www.cs.cmu.edu/~quake/triangle.exact.html 146 | """ 147 | return self._suppress_exact_arithmetic 148 | 149 | @suppress_exact_arithmetic.setter 150 | def suppress_exact_arithmetic(self, value: bool): 151 | if not isinstance(value, bool): 152 | raise TypeError("suppress_exact_arithmetic must be a bool") 153 | self._suppress_exact_arithmetic = value 154 | 155 | @property 156 | def maximum_steiner_points(self) -> int: 157 | """ 158 | Specifies the maximum number of added Steiner points 159 | 160 | See: 161 | https://www.cs.cmu.edu/~quake/triangle.S.html 162 | """ 163 | return self._maximum_steiner_points 164 | 165 | @maximum_steiner_points.setter 166 | def maximum_steiner_points(self, value: Union[int, None]): 167 | if not isinstance(value, (int, type(None))): 168 | raise TypeError("maximum_steiner_points must be an int or None") 169 | self._maximum_steiner_points = value 170 | 171 | @property 172 | def delaunay_algorithm(self) -> DelaunayAlgorithm: 173 | """ 174 | Sets the Delaunay algorithm. Can be set to one of: 175 | :py:class:`pandamesh.DelaunayAlgorithm`: 176 | 177 | .. code:: 178 | 179 | DIVIDE_AND_CONQUER = "" 180 | INCREMENTAL = "i" 181 | SWEEPLINE = "F" 182 | 183 | """ 184 | return self._delaunay_algorithm 185 | 186 | @delaunay_algorithm.setter 187 | def delaunay_algorithm(self, value: Union[DelaunayAlgorithm, str]): 188 | value = DelaunayAlgorithm.from_value(value) 189 | self._delaunay_algorithm = value 190 | 191 | @property 192 | def consistency_check(self) -> bool: 193 | """ 194 | Check the consistency of the final mesh. Uses exact arithmetic for 195 | checking, even if ``suppress_exact_arithmetic`` is set to ``False``. 196 | Useful if you suspect Triangle is buggy. 
197 | """ 198 | return self._consistency_check 199 | 200 | @consistency_check.setter 201 | def consistency_check(self, value: bool): 202 | if not isinstance(value, int): 203 | raise TypeError("consistency_check must be a bool") 204 | self._consistency_check = value 205 | 206 | def generate(self) -> Tuple[FloatArray, IntArray]: 207 | """ 208 | Generate a mesh of triangles. 209 | 210 | Returns 211 | ------- 212 | vertices: np.ndarray of floats with shape ``(n_vertex, 2)`` 213 | triangles: np.ndarray of integers with shape ``(n_triangle, 3)`` 214 | """ 215 | options = ( 216 | "p" 217 | f"q{self._minimum_angle}" 218 | "a" 219 | f"{'D' if self._conforming_delaunay else ''}" 220 | f"{'X' if self._suppress_exact_arithmetic else ''}" 221 | f"{'S' + str(self._maximum_steiner_points) if self._maximum_steiner_points is not None else ''}" 222 | f"{self._delaunay_algorithm.value}" 223 | f"{'C' if self.consistency_check else ''}" 224 | ) 225 | 226 | tri = {"vertices": self.vertices, "segments": self.segments} 227 | if self.holes is not None: 228 | tri["holes"] = self.holes 229 | if len(self.regions) > 0: 230 | tri["regions"] = self.regions 231 | 232 | result = triangle.triangulate(tri=tri, opts=options) 233 | vertices = result["vertices"] 234 | vertices[:, 0] += self._xoff 235 | vertices[:, 1] += self._yoff 236 | return vertices, result["triangles"] 237 | 238 | def generate_geodataframe(self) -> gpd.GeoDataFrame: 239 | """ 240 | Generate a mesh and return it as a geopandas GeoDataFrame. 241 | 242 | Returns 243 | ------- 244 | mesh: geopandas.GeoDataFrame 245 | """ 246 | return to_geodataframe(*self.generate()) 247 | 248 | def generate_ugrid(self) -> "xugrid.Ugrid2d": # type: ignore # noqa pragma: no cover 249 | """ 250 | Generate a mesh and return it as an xugrid Ugrid2d. 
251 | 252 | Returns 253 | ------- 254 | mesh: xugrid.Ugrid2d 255 | """ 256 | return to_ugrid(*self.generate()) 257 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "pandamesh" 7 | description = "From geodataframe to mesh" 8 | license = { text = "MIT" } 9 | readme = { file = "README.rst", content-type = "text/x-rst" } 10 | dynamic = ["version"] 11 | maintainers = [{ name = "Huite Bootsma", email = "huite.bootsma@deltares.nl" }] 12 | requires-python = ">=3.9" 13 | dependencies = [ 14 | 'geopandas', 15 | 'pooch', 16 | 'triangle', 17 | 'shapely >= 2.0', 18 | ] 19 | classifiers = [ 20 | 'Development Status :: 4 - Beta', 21 | 'Intended Audience :: Science/Research', 22 | 'License :: OSI Approved :: MIT License', 23 | 'Operating System :: OS Independent', 24 | 'Programming Language :: Python', 25 | 'Programming Language :: Python :: 3', 26 | 'Programming Language :: Python :: 3.9', 27 | 'Programming Language :: Python :: 3.10', 28 | 'Programming Language :: Python :: 3.11', 29 | 'Programming Language :: Python :: 3.12', 30 | 'Programming Language :: Python :: 3.13', 31 | 'Programming Language :: Python :: Implementation :: CPython', 32 | 'Topic :: Scientific/Engineering', 33 | ] 34 | keywords = ['mesh', 'geopandas', 'unstructured grid'] 35 | 36 | [project.urls] 37 | Home = "https://github.com/deltares/pandamesh" 38 | Code = "https://github.com/deltares/pandamesh" 39 | Issues = "https://github.com/deltares/pandamesh/issues" 40 | 41 | [project.optional-dependencies] 42 | all = [ 43 | 'geopandas', 44 | 'gmsh', 45 | 'pooch', 46 | 'triangle', 47 | 'shapely >= 2.0', 48 | 'matplotlib', 49 | 'xarray', 50 | ] 51 | 52 | [tool.hatch.version] 53 | path = "pandamesh/__init__.py" 54 | 55 | [tool.hatch.build.targets.sdist] 56 | only-include = 
["pandamesh", "tests"] 57 | 58 | [tool.isort] 59 | profile = "black" 60 | 61 | [tool.coverage.report] 62 | exclude_lines = [ 63 | "pragma: no cover", 64 | "@abc.abstractmethod", 65 | ] 66 | 67 | [tool.pixi.project] 68 | channels = ["conda-forge"] 69 | platforms = ["win-64", "linux-64", "osx-64", "osx-arm64"] 70 | 71 | [tool.pixi.pypi-dependencies] 72 | pandamesh = { path = ".", editable = true } 73 | 74 | [tool.pixi.dependencies] 75 | python-build = "*" 76 | geopandas = "*" 77 | python-gmsh = "*" 78 | matplotlib = "*" 79 | numpy = "*" 80 | pooch = "*" 81 | pip = "*" 82 | pre-commit = "*" 83 | pydata-sphinx-theme = "*" 84 | py-triangle = "*" 85 | pytest = "*" 86 | pytest-cov = "*" 87 | shapely = ">=2.0" 88 | sphinx = "*" 89 | sphinx-gallery = "*" 90 | hatchling = "*" 91 | jupyter = "*" 92 | twine = "*" 93 | xarray = "*" 94 | 95 | [tool.pixi.tasks] 96 | pre-commit = "pre-commit run --all-files" 97 | test = "pytest --cov=pandamesh --cov-report xml --cov-report term" 98 | docs = "sphinx-build docs docs/_build" 99 | all = { depends-on = ["pre-commit", "test", "docs"]} 100 | pypi-publish = { cmd = "rm --recursive --force dist && python -m build && twine check dist/* && twine upload dist/*" } 101 | 102 | [tool.pixi.feature.py312.dependencies] 103 | python = "3.12.*" 104 | 105 | [tool.pixi.feature.py311.dependencies] 106 | python = "3.11.*" 107 | 108 | [tool.pixi.feature.py310.dependencies] 109 | python = "3.10.*" 110 | 111 | [tool.pixi.feature.py309.dependencies] 112 | python = "3.09.*" 113 | 114 | [tool.pixi.feature.py313.dependencies] 115 | python = "3.13.*" 116 | 117 | [tool.pixi.environments] 118 | default = { features = ["py312"], solve-group = "py312" } 119 | py312 = { features = ["py312"], solve-group = "py312" } 120 | py311 = ["py311"] 121 | py310 = ["py310"] 122 | py309 = ["py309"] 123 | py313 = ["py313"] 124 | 125 | 126 | [tool.ruff.lint] 127 | # See https://docs.astral.sh/ruff/rules/ 128 | select = ["C4", "D2", "D3", "D4", "E", "F", "I", "NPY", "PD"] 129 | 
ignore = [ 130 | "D202", 131 | "D205", 132 | "D206", 133 | "D400", 134 | "D404", 135 | "E402", 136 | "E501", 137 | "E703", 138 | "PD002", 139 | "PD901", 140 | "PD003", 141 | "PD004", 142 | "PD011", 143 | ] 144 | ignore-init-module-imports = true 145 | 146 | [tool.ruff.lint.pydocstyle] 147 | convention = "numpy" 148 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # import for pytest-cov 2 | -------------------------------------------------------------------------------- /tests/test_common.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | import geopandas as gpd 4 | import numpy as np 5 | import pytest 6 | import shapely 7 | import shapely.geometry as sg 8 | 9 | from pandamesh import common 10 | 11 | a = sg.Polygon( 12 | [ 13 | (0.0, 0.0), 14 | (1.0, 0.0), 15 | (1.0, 1.0), 16 | (0.0, 1.0), 17 | ] 18 | ) 19 | b = sg.Polygon( 20 | [ 21 | (0.5, 0.0), 22 | (1.5, 0.0), 23 | (1.5, 1.0), 24 | (0.5, 1.0), 25 | ] 26 | ) 27 | c = sg.Polygon( 28 | [ 29 | (1.0, 0.0), 30 | (2.0, 0.0), 31 | (2.0, 1.0), 32 | (1.0, 1.0), 33 | ] 34 | ) 35 | d = sg.Polygon( 36 | [ 37 | (2.0, 0.0), 38 | (3.0, 0.0), 39 | (3.0, 1.0), 40 | (2.0, 1.0), 41 | ] 42 | ) 43 | # Bowtie: 44 | e = sg.Polygon( 45 | [ 46 | (3.0, 0.0), 47 | (4.0, 1.0), 48 | (3.0, 1.0), 49 | (4.0, 0.0), 50 | ] 51 | ) 52 | # Almost dangling edge 53 | f = sg.Polygon( 54 | [ 55 | (0.0, 0.0), 56 | (1.0, 0.0), 57 | (2.0, 0.0), 58 | (1.0, 0.0005), 59 | (1.0, 1.0), 60 | (0.0, 1.0), 61 | ] 62 | ) 63 | 64 | La = sg.LineString( 65 | [ 66 | (0.25, 0.25), 67 | (0.75, 0.75), 68 | ] 69 | ) 70 | Lb = sg.LineString( 71 | [ 72 | (0.25, 0.75), 73 | (0.75, 0.25), 74 | ] 75 | ) 76 | Lc = sg.LineString( 77 | [ 78 | (2.25, 0.25), 79 | (2.75, 0.75), 80 | ] 81 | ) 82 | Ld = sg.LineString( 83 | [ 84 | (0.6, 0.5), 85 | (1.5, 0.5), 86 | ] 87 | ) 88 | # Bowtie 89 | Le 
def test_flatten():
    """common.flatten concatenates a list of lists into a single flat list."""
    cases = [
        ([[]], []),
        ([[1]], [1]),
        ([[1], [2, 3]], [1, 2, 3]),
    ]
    for nested, expected in cases:
        assert common.flatten(nested) == expected
def test_check_lines():
    """common.check_lines rejects complex (self-intersecting) linestrings."""
    # Le is the bowtie-shaped linestring and therefore complex.
    complex_series = gpd.GeoSeries(data=[La, Lb, Lc, Le], index=[0, 1, 2, 3])
    with pytest.raises(ValueError, match="1 cases of complex lines detected"):
        common.check_lines(complex_series)

    # Simple (non self-intersecting) lines pass without raising.
    simple_series = gpd.GeoSeries(data=[La, Lc], index=[0, 1])
    common.check_lines(simple_series)
214 | ], 215 | [ 216 | [5.0, 0.0], 217 | [5.0, 10.0], 218 | ], 219 | [ 220 | [0.0, -5.0], 221 | [10.0, -5.0], 222 | ], 223 | ] 224 | ) 225 | i = np.array([0, 0, 1, 1, 2]) 226 | j = np.array([0, 1, 1, 2, 2]) 227 | 228 | _i, _j, actual = common.compute_intersections(segments, i, j) 229 | assert np.array_equal(_i, [0]) 230 | assert np.array_equal(_j, [1]) 231 | assert np.array_equal(actual, [[5.0, 5.0]]) 232 | 233 | # No intersections 234 | _i, _j, actual = common.compute_intersections(segments, i, i) 235 | assert np.array_equal(_i, []) 236 | assert np.array_equal(_j, []) 237 | assert np.array_equal(actual, np.array([]).reshape((-1, 2))) 238 | 239 | i = np.array([0]) 240 | j = np.array([2]) 241 | _i, _j, actual = common.compute_intersections(segments, i, j) 242 | assert np.array_equal(_i, []) 243 | assert np.array_equal(_j, []) 244 | assert np.array_equal(actual, np.array([]).reshape((-1, 2))) 245 | 246 | # Parallel 247 | segments = shapely.linestrings( 248 | [ 249 | [ 250 | [0.0, 5.0], 251 | [10.0, 5.0], 252 | ], 253 | [ 254 | [3.0, 5.0], 255 | [13.0, 5.0], 256 | ], 257 | ] 258 | ) 259 | i = np.array([0]) 260 | j = np.array([1]) 261 | _i, _j, actual = common.compute_intersections(segments, i, j) 262 | assert np.array_equal(_i, [0, 0]) 263 | assert np.array_equal(_j, [1, 1]) 264 | expected = np.array( 265 | [ 266 | [3.0, 5.0], 267 | [10.0, 5.0], 268 | ] 269 | ) 270 | assert np.array_equal(actual, expected) 271 | 272 | 273 | def test_check_polygons(): 274 | polygons = gpd.GeoSeries(data=[a, b, c, d, e], index=[0, 1, 2, 3, 4]) 275 | with pytest.raises(ValueError, match="1 cases of complex polygon detected"): 276 | common.check_polygons(polygons, 1.0e-3) 277 | 278 | polygons = gpd.GeoSeries(data=[a, b, c, d], index=[0, 1, 2, 3]) 279 | with pytest.raises(ValueError, match="2 cases of intersecting polygon detected"): 280 | common.check_polygons(polygons, 1.0e-3) 281 | 282 | polygons = gpd.GeoSeries(data=[f], index=[0]) 283 | with pytest.raises( 284 | ValueError, match="1 
def test_separate_geometry():
    """common.separate_geometry splits geometry by type, rejecting multi-parts."""
    unsupported = np.array([sg.MultiPolygon([a, d])])
    with pytest.raises(
        TypeError, match="GeoDataFrame contains unsupported geometry types"
    ):
        common.separate_geometry(unsupported)

    mixed = np.array([a, c, La, Lc, pa, Ra, d])
    polygons, lines, points = common.separate_geometry(mixed)
    # Input order is preserved within each class; linear rings are grouped
    # with the linestrings.
    assert all(polygons == [a, c, d])
    assert all(lines == [La, Lc, Ra])
    assert all(points == [pa])
def test_grouper():
    """common.Grouper yields each left value with its grouped right values."""
    left = np.array([1, 2, 3, 4])
    left_index = np.array([0, 0, 1, 1])
    right = np.array([10, 20, 30, 40, 50])
    right_index = np.array([0, 1, 2, 3])

    # Index arrays of mismatched length are rejected up front.
    with pytest.raises(ValueError, match="All arrays must be of the same length"):
        grouper = common.Grouper(left, [0, 0, 1], right, right_index)

    grouper = common.Grouper(left, left_index, right, right_index)

    # Manual iteration: first group.
    value, members = next(grouper)
    assert value == 1
    assert np.array_equal(members, np.array([10, 20]))

    # Second group.
    value, members = next(grouper)
    assert value == 2
    assert np.array_equal(members, np.array([30, 40]))

    # The same pairs come back when consumed as an iterator.
    grouper = common.Grouper(left, left_index, right, right_index)
    pairs = list(grouper)
    assert pairs[0][0] == 1
    assert np.array_equal(pairs[0][1], np.array([10, 20]))
    assert pairs[1][0] == 2
    assert np.array_equal(pairs[1][1], np.array([30, 40]))
def test_to_geodataframe():
    """common.to_geodataframe converts a vertex/face mesh into polygons.

    Covers pure-triangle input and mixed triangle/quad input, where faces
    are padded with -1 for missing nodes.
    """
    # Two triangles sharing the diagonal edge (1, 2) of the unit square.
    vertices = np.array([[0, 0], [1, 0], [0, 1], [1, 1]])
    faces = np.array([[0, 1, 2], [1, 3, 2]])

    gdf = common.to_geodataframe(vertices, faces)
    assert isinstance(gdf, gpd.GeoDataFrame)
    # BUG FIX: was `assert len(gdf == 2)`, which asserts on the length of an
    # elementwise comparison result (always truthy) rather than on the row
    # count. The intent — as in the identical check further down — is:
    assert len(gdf) == 2

    # Check the coordinates of the first triangle
    assert np.allclose(
        np.array(gdf.geometry[0].exterior.coords),
        np.array([(0, 0), (1, 0), (0, 1), (0, 0)]),
    )

    # Check the coordinates of the second triangle
    assert np.allclose(
        np.array(gdf.geometry[1].exterior.coords),
        np.array([(1, 0), (1, 1), (0, 1), (1, 0)]),
    )

    # Define vertices
    vertices = np.array([[0, 0], [1, 0], [0, 1], [1, 1], [2, 0.5]], dtype=float)

    # Define faces (triangle: 0,1,2 and quadrangle: 1,3,4,2); -1 pads the
    # triangle row to the quad width.
    faces = np.array([[0, 1, 2, -1], [1, 3, 4, 2]], dtype=int)

    # Call the function
    gdf = common.to_geodataframe(vertices, faces)

    # Assertions
    assert isinstance(gdf, gpd.GeoDataFrame)
    assert len(gdf) == 2  # Two polygons (one triangle, one quadrangle)
common.find_edge_intersections(gdf) 471 | 472 | actual = common.find_edge_intersections(gdf.geometry) 473 | assert len(actual) == 0 474 | 475 | gdf = gpd.GeoDataFrame(geometry=[self.donut, self.line0]) 476 | actual = common.find_edge_intersections(gdf.geometry) 477 | assert len(actual) == 4 478 | 479 | gdf = gpd.GeoDataFrame(geometry=[self.donut, self.line1]) 480 | actual = common.find_edge_intersections(gdf.geometry) 481 | assert len(actual) == 0 482 | 483 | def test_check_linework(self): 484 | gdf = gpd.GeoDataFrame(geometry=[self.donut, self.line1]) 485 | polygons, lines, _ = common.separate_geometry(gdf.geometry) 486 | common.check_linework(polygons, lines, "error") 487 | 488 | gdf = gpd.GeoDataFrame(geometry=[self.donut, self.line0]) 489 | polygons, lines, _ = common.separate_geometry(gdf.geometry) 490 | with pytest.raises(ValueError): 491 | common.check_linework(polygons, lines, "error") 492 | 493 | with pytest.warns(UserWarning): 494 | common.check_linework(polygons, lines, "warn") 495 | 496 | 497 | class TestProximatePoints: 498 | @pytest.fixture(autouse=True) 499 | def setup(self): 500 | self.shell0 = [ 501 | [0.0, 0.0], 502 | [5.0, 0.0], 503 | [15.0, 0.0], 504 | [10.0, 0.1], 505 | [10.0, 9.9], 506 | [13.0, 10.0], 507 | [10.0, 10.0], 508 | [5.0, 10.0], 509 | [0.0, 10.0], 510 | ] 511 | self.shell1 = [ 512 | [20.0, 0.0], 513 | [30.0, 0.0], 514 | [30.0, 5.0], 515 | [25.0, 5.0], 516 | [30.0, 5.1], 517 | [30.0, 10.0], 518 | [20.0, 10.0], 519 | ] 520 | self.shell2 = [ 521 | [40.0, 0.0], 522 | [50.0, 0.0], 523 | [50.0, 10.0], 524 | [40.0, 10.0], 525 | ] 526 | self.hole2 = [ 527 | [42.0, 2.0], 528 | [42.0, 8.0], 529 | [45.0, 8.0], 530 | [45.0, 7.0], 531 | [45.1, 8.0], 532 | [48.0, 8.0], 533 | [48.0, 2.0], 534 | ] 535 | self.shell3 = [ 536 | [60.0, 0.0], 537 | [70.0, 0.0], 538 | [73.0, 0.0], 539 | [73.0, 0.01], 540 | [70.0, 0.01], 541 | [70.0, 10.0], 542 | [60.0, 10.0], 543 | ] 544 | 545 | # Order of vertices shouldn't matter, since we're calling shapely.normalize 
546 | @pytest.mark.parametrize("flip", [True, False]) 547 | def test_find_proximate_points(self, flip: bool): 548 | def construct_polygon(shell, hole=None): 549 | if flip: 550 | shell = reversed(shell) 551 | if hole is not None: 552 | hole = reversed(hole) 553 | return sg.Polygon(shell=shell, holes=[hole]) 554 | 555 | poly0 = construct_polygon(self.shell0) 556 | poly1 = construct_polygon(self.shell1) 557 | poly2 = construct_polygon(self.shell2, self.hole2) 558 | poly3 = construct_polygon(self.shell3) 559 | geometry = gpd.GeoSeries([poly0, poly1, poly2, poly3]) 560 | 561 | # Default tolerance of 0.001, no problems 562 | faulty = common.find_proximate_perimeter_points(geometry) 563 | assert isinstance(faulty, gpd.GeoSeries) 564 | assert len(faulty) == 0 565 | 566 | faulty = common.find_proximate_perimeter_points(geometry, 0.5) 567 | expected = np.array( 568 | [ 569 | [13.0, 10.0], 570 | [15.0, 0.0], 571 | [73.0, 0.01], 572 | [73.0, 0.0], 573 | [45.0, 7.0], 574 | ] 575 | ) 576 | assert isinstance(faulty, gpd.GeoSeries) 577 | assert len(faulty) == 5 578 | actual = shapely.get_coordinates(faulty) 579 | assert np.array_equal(actual, expected) 580 | 581 | def test_find_proximate_points_hourglass(self): 582 | # The gap here is only 1e-3 wide, but because there are several 583 | # vertices between, it is not detected. Ideally we would find these 584 | # cases as well. 
def test_show_options():
    # BUG FIX: the comparison result was previously discarded (no `assert`),
    # so this test could never fail. The expected string is derived from the
    # option listing embedded in FlexibleEnum error messages (see
    # test_check_options, which expects "Valid options are:\n * RED\n"
    # " * GREEN\n * BLUE"): a leading newline plus one " * NAME" per member.
    # NOTE(review): confirm exact whitespace against _show_options itself.
    expected = "\n * RED\n * GREEN\n * BLUE"
    assert _show_options(Color) == expected
Valid options are:\n * RED\n * GREEN\n * BLUE" 32 | ) 33 | with pytest.raises(ValueError, match=re.escape(expected)): 34 | Color.from_value("YELLOW") 35 | 36 | expected = expected.replace("'YELLOW'", "YELLOW") 37 | with pytest.raises(AttributeError, match=re.escape(expected)): 38 | Color.YELLOW 39 | 40 | expected = expected.replace("YELLOW", "0") 41 | with pytest.raises(ValueError, match=re.escape(expected)): 42 | Color.from_value(0) 43 | -------------------------------------------------------------------------------- /tests/test_fields.py: -------------------------------------------------------------------------------- 1 | import re 2 | import struct 3 | from pathlib import Path 4 | from typing import NamedTuple 5 | 6 | import geopandas as gpd 7 | import numpy as np 8 | import pytest 9 | import shapely.geometry as sg 10 | import xarray as xr 11 | 12 | from pandamesh import gmsh_fields as gf 13 | from pandamesh.gmsh_mesher import GmshMesher 14 | 15 | 16 | @pytest.fixture(scope="function") 17 | def gdf(): 18 | polygon = sg.Polygon( 19 | [ 20 | [0.0, 0.0], 21 | [10.0, 0.0], 22 | [10.0, 10.0], 23 | [0.0, 10.0], 24 | ] 25 | ) 26 | return gpd.GeoDataFrame(geometry=[polygon], data={"cellsize": [10.0]}) 27 | 28 | 29 | def assert_refinement(mesh: gpd.GeoDataFrame): 30 | assert (mesh.area.max() / mesh.area.min()) > 5 31 | 32 | 33 | class StructuredFieldContent(NamedTuple): 34 | cellsize: np.ndarray 35 | xmin: float 36 | ymin: float 37 | dx: float 38 | dy: float 39 | 40 | 41 | def read_structured_field_file( 42 | path: Path 43 | ) -> tuple[np.ndarray, float, float, float, float]: 44 | with path.open("rb") as f: 45 | xmin, ymin, _ = struct.unpack("3d", f.read(24)) # 3 doubles, 8 bytes each 46 | dx, dy, _ = struct.unpack("3d", f.read(24)) # 3 doubles, 8 bytes each 47 | nrow, ncol, _ = struct.unpack("3i", f.read(12)) # 3 integers, 4 bytes each 48 | cellsize_data = f.read() # Read the rest of the file 49 | 50 | cellsize = np.frombuffer(cellsize_data, 
dtype=np.float64).reshape((ncol, nrow)) 51 | return StructuredFieldContent(cellsize, xmin, ymin, dx, dy) 52 | 53 | 54 | def test_write_structured_field_file(tmp_path): 55 | cellsize = np.arange(6.0).reshape(2, 3) 56 | xmin = 5.0 57 | ymin = 15.0 58 | path = tmp_path / "a.dat" 59 | with pytest.raises(ValueError, match=re.escape("`cellsize` must be 2D")): 60 | gf.write_structured_field_file(path, cellsize[0], xmin, ymin, 1.0, 1.0) 61 | 62 | gf.write_structured_field_file(path, cellsize, xmin, ymin, 1.0, 1.0) 63 | assert path.exists() 64 | back = read_structured_field_file(path) 65 | assert back.dx == 1.0 66 | assert back.dy == 1.0 67 | assert back.xmin == 5.0 68 | assert back.ymin == 15.0 69 | # Gmsh expects it in column major order 70 | assert np.array_equal(back.cellsize, cellsize.T) 71 | 72 | path = tmp_path / "b.dat" 73 | gf.write_structured_field_file(path, cellsize, xmin, ymin, 1.0, -1.0) 74 | back = read_structured_field_file(path) 75 | assert back.dy == 1.0 76 | assert np.array_equal(back.cellsize[0, :], [3, 0]) 77 | assert np.array_equal(back.cellsize[1, :], [4, 1]) 78 | assert np.array_equal(back.cellsize[2, :], [5, 2]) 79 | 80 | path = tmp_path / "c.dat" 81 | gf.write_structured_field_file(path, cellsize, xmin, ymin, -1.0, 1.0) 82 | back = read_structured_field_file(path) 83 | assert back.dx == 1.0 84 | assert np.array_equal(back.cellsize[0, :], [2, 5]) 85 | assert np.array_equal(back.cellsize[1, :], [1, 4]) 86 | assert np.array_equal(back.cellsize[2, :], [0, 3]) 87 | 88 | 89 | def test_math_eval_field(gdf): 90 | mesher = GmshMesher._force_init(gdf) 91 | 92 | with pytest.raises(TypeError): 93 | mesher.add_matheval_distance_field(1) 94 | 95 | field = gpd.GeoDataFrame( 96 | geometry=[sg.Point([5.0, 5.0])], 97 | ) 98 | # missing columns 99 | with pytest.raises(ValueError): 100 | mesher.add_matheval_distance_field(field) 101 | 102 | field["spacing"] = np.nan 103 | field["function"] = "dist + 0.1" 104 | 105 | with pytest.raises(ValueError, match="distance not 
in MathEval field function"): 106 | mesher.add_matheval_distance_field(field) 107 | 108 | field["function"] = "max(distance, 0.5)" 109 | mesher.add_matheval_distance_field(field) 110 | mesh = mesher.generate_geodataframe() 111 | 112 | field = mesher.fields[0] 113 | assert isinstance(field, gf.MathEvalField) 114 | 115 | # Test whether anything has been refined. 116 | assert_refinement(mesh) 117 | 118 | # Assert combination field has been generated 119 | assert mesher._combination_field is not None 120 | assert isinstance(mesher._combination_field, gf.CombinationField) 121 | 122 | mesher.clear_fields() 123 | assert len(mesher.fields) == 0 124 | assert mesher._combination_field is None 125 | 126 | 127 | def test_threshold_field(gdf): 128 | mesher = GmshMesher._force_init(gdf) 129 | 130 | with pytest.raises(TypeError): 131 | mesher.add_threshold_distance_field(1) 132 | 133 | field = gpd.GeoDataFrame( 134 | geometry=[sg.Point([5.0, 5.0])], 135 | ) 136 | # missing columns 137 | with pytest.raises(ValueError): 138 | mesher.add_threshold_distance_field(field) 139 | 140 | field["dist_min"] = 1.0 141 | field["dist_max"] = 2.5 142 | field["size_min"] = 0.2 143 | field["size_max"] = 2.5 144 | field["spacing"] = np.nan 145 | 146 | mesher.add_threshold_distance_field(field) 147 | mesh = mesher.generate_geodataframe() 148 | 149 | # Test whether anything has been refined. 
def test_line_field(gdf):
    """A threshold distance field around a line refines nearby cells."""
    guide_line = sg.LineString(
        [
            [3.0, -3.0],
            [3.0, 13.0],
        ]
    )
    spec = gpd.GeoDataFrame(geometry=[guide_line])
    spec["dist_min"] = 2.0
    spec["dist_max"] = 4.0
    spec["size_min"] = 0.5
    spec["size_max"] = 2.5
    spec["spacing"] = 2.0

    mesher = GmshMesher._force_init(gdf)
    mesher.add_threshold_distance_field(spec)

    registered = mesher.fields[0]
    assert isinstance(registered, gf.ThresholdField)

    mesh = mesher.generate_geodataframe()
    assert_refinement(mesh)
pytest.raises(ValueError, match=r"Minimum cellsize must be > 0, received:"): 224 | mesher.add_structured_field( 225 | cellsize=cellsize * -1, 226 | xmin=x.min(), 227 | ymin=y.min(), 228 | dx=1.0, 229 | dy=1.0, 230 | ) 231 | 232 | mesher.add_structured_field( 233 | cellsize=cellsize, 234 | xmin=x.min(), 235 | ymin=y.min(), 236 | dx=1.0, 237 | dy=1.0, 238 | ) 239 | 240 | field = mesher.fields[0] 241 | assert isinstance(field, gf.StructuredField) 242 | assert not field.set_outside_value 243 | assert field.outside_value == -1.0 244 | assert isinstance(field.path, Path) 245 | assert field.path.exists() 246 | 247 | mesh = mesher.generate_geodataframe() 248 | assert_refinement(mesh) 249 | 250 | mesher.clear_fields() 251 | mesher.add_structured_field( 252 | cellsize=cellsize, 253 | xmin=x.min(), 254 | ymin=y.min(), 255 | dx=1.0, 256 | dy=1.0, 257 | outside_value=100.0, 258 | ) 259 | field = mesher.fields[0] 260 | assert isinstance(field, gf.StructuredField) 261 | assert field.set_outside_value 262 | assert field.outside_value == 100.0 263 | assert isinstance(field.path, Path) 264 | assert field.path.exists() 265 | 266 | 267 | def test_add_structured_field_from_dataarray(gdf): 268 | mesher = GmshMesher._force_init(gdf) 269 | 270 | with pytest.raises( 271 | TypeError, match="da must be xr.DataArray, received instead: int" 272 | ): 273 | mesher.add_structured_field_from_dataarray(1) 274 | 275 | x = np.arange(1.0, 10.0) 276 | y = np.arange(1.0, 10.0) 277 | da = xr.DataArray( 278 | np.ones((y.size, x.size)), coords={"y": y, "x": x}, dims=("y", "x") 279 | ) 280 | 281 | with pytest.raises(ValueError, match=re.escape('Dimensions must be ("y", "x")')): 282 | mesher.add_structured_field_from_dataarray(da.transpose()) 283 | 284 | with pytest.raises(ValueError, match=r"Minimum cellsize must be > 0, received:"): 285 | mesher.add_structured_field_from_dataarray(da * -1) 286 | 287 | da_x = da.copy() 288 | da_x["x"] = da_x["x"] ** 2 289 | 290 | with pytest.raises(ValueError, match="da is 
not equidistant along x"): 291 | mesher.add_structured_field_from_dataarray(da_x) 292 | 293 | da_y = da.copy() 294 | da_y["y"] = da_y["y"] ** 2 295 | 296 | with pytest.raises(ValueError, match="da is not equidistant along y"): 297 | mesher.add_structured_field_from_dataarray(da_y) 298 | -------------------------------------------------------------------------------- /tests/test_gmsh_geometry.py: -------------------------------------------------------------------------------- 1 | import geopandas as gpd 2 | import numpy as np 3 | import pytest 4 | import shapely.geometry as sg 5 | 6 | from pandamesh import gmsh_geometry as gg 7 | from pandamesh.gmsh_mesher import gmsh_env 8 | 9 | 10 | class TestGmshGeometry: 11 | @pytest.fixture(autouse=True) 12 | def setup(self): 13 | self.outer_coords = np.array( 14 | [(0.0, 0.0), (10.0, 0.0), (10.0, 10.0), (0.0, 10.0)] 15 | ) 16 | self.inner_coords = np.array([(3.0, 3.0), (7.0, 3.0), (7.0, 7.0), (3.0, 7.0)]) 17 | self.line_coords = np.array([(2.0, 8.0), (8.0, 2.0)]) 18 | self.line = sg.LineString(self.line_coords) 19 | self.polygon = sg.Polygon(self.outer_coords) 20 | self.donut = sg.Polygon(self.outer_coords, holes=[self.inner_coords]) 21 | self.refined = sg.Polygon(self.inner_coords) 22 | y = np.arange(0.5, 10.0, 0.5) 23 | x = np.full(y.size, 1.0) 24 | self.points_embed = gpd.points_from_xy(x, y) 25 | self.line_embed = sg.LineString( 26 | [ 27 | [9.0, 2.0], 28 | [9.0, 8.0], 29 | ] 30 | ) 31 | self.polygons = gpd.GeoDataFrame( 32 | {"cellsize": [1.0], "__polygon_id": [1]}, geometry=[self.donut] 33 | ) 34 | 35 | def test_polygon_info(self): 36 | info, vertices, cellsizes, index = gg.polygon_info(self.polygon, 1.0, 0, 0) 37 | expected_info = gg.PolygonInfo(0, 4, [], [], 0) 38 | assert np.allclose(vertices, self.outer_coords) 39 | assert info == expected_info 40 | assert np.allclose(cellsizes, 1.0) 41 | assert index == 4 42 | 43 | info, vertices, cellsizes, index = gg.polygon_info(self.donut, 1.0, 0, 0) 44 | expected_info = 
gg.PolygonInfo(0, 4, [4], [4], 0) 45 | assert np.allclose(vertices, [self.outer_coords, self.inner_coords]) 46 | assert info == expected_info 47 | assert np.allclose(cellsizes, 1.0) 48 | assert index == 8 49 | 50 | def test_linestring_info(self): 51 | info, vertices, cellsizes, index = gg.linestring_info(self.line, 1.0, 0, 0) 52 | expected_info = gg.LineStringInfo(0, 2, 0) 53 | assert np.allclose(vertices, self.line_coords) 54 | assert info == expected_info 55 | assert np.allclose(cellsizes, 1.0) 56 | assert index == 2 57 | 58 | def test_add_vertices(self): 59 | with gmsh_env(): 60 | gg.add_vertices( 61 | [(0.0, 0.0), (1.0, 1.0)], 62 | [1.0, 1.0], 63 | [1, 2], 64 | ) 65 | 66 | def test_add_linestrings(self): 67 | info = gg.LineStringInfo(0, 2, 0) 68 | with gmsh_env(): 69 | gg.add_vertices( 70 | [(0.0, 0.0), (1.0, 1.0)], 71 | [1.0, 1.0], 72 | [0, 1], 73 | ) 74 | gg.add_linestrings([info], [0, 1]) 75 | 76 | def test_add_curve_loop(self): 77 | with gmsh_env(): 78 | gg.add_vertices( 79 | [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)], 80 | [1.0, 1.0, 1.0, 1.0], 81 | [0, 1, 2, 3], 82 | ) 83 | curve_loop_tag = gg.add_curve_loop([0, 1, 2, 3]) 84 | assert curve_loop_tag == 1 85 | 86 | def test_add_polygons(self): 87 | info = gg.PolygonInfo(0, 4, [4], [4], 0) 88 | vertices = np.vstack([self.inner_coords, self.outer_coords]) 89 | cellsizes = np.full(vertices.size, 1.0) 90 | tags = np.arange(vertices.size) 91 | with gmsh_env(): 92 | gg.add_vertices(vertices, cellsizes, tags) 93 | loop_tags, plane_tags = gg.add_polygons([info], tags) 94 | assert loop_tags == [1, 2] 95 | assert plane_tags == [0] 96 | 97 | def test_add_points(self): 98 | x = np.arange(0.5, 10.0, 0.5) 99 | y = np.full(x.size, 1.0) 100 | ids = np.arange(x.size) + 1 101 | gdf = gpd.GeoDataFrame(geometry=gpd.points_from_xy(x, y)) 102 | gdf["__polygon_id"] = ids 103 | gdf["cellsize"] = 1.0 104 | with gmsh_env(): 105 | indices, embedded_in = gg.add_points(gdf) 106 | assert np.array_equal(indices, ids) 107 | assert 
np.array_equal(embedded_in, ids) 108 | 109 | def test_collect_polygons(self): 110 | gdf = gpd.GeoDataFrame(geometry=[self.polygon]) 111 | gdf["cellsize"] = 1.0 112 | gdf["__polygon_id"] = 1 113 | index, vertices, cellsizes, features = gg.collect_polygons(gdf, 0) 114 | assert index == 4 115 | assert np.allclose(vertices, self.outer_coords) 116 | assert np.allclose(cellsizes, 1.0) 117 | assert features == [gg.PolygonInfo(0, 4, [], [], 1)] 118 | 119 | def test_collect_linestrings(self): 120 | gdf = gpd.GeoDataFrame(geometry=[self.line]) 121 | gdf["cellsize"] = 1.0 122 | gdf["__polygon_id"] = 1 123 | index, vertices, cellsizes, features = gg.collect_linestrings(gdf, 0) 124 | assert index == 2 125 | assert np.allclose(vertices, self.line_coords) 126 | assert np.allclose(cellsizes, 1.0) 127 | assert features == [gg.LineStringInfo(0, 2, 1)] 128 | 129 | def test_collect_points(self): 130 | x = np.arange(0.5, 10.0, 0.5) 131 | y = np.full(x.size, 1.0) 132 | gdf = gpd.GeoDataFrame(geometry=gpd.points_from_xy(x, y)) 133 | xy = gg.collect_points(gdf) 134 | assert np.allclose(xy, np.column_stack([x, y])) 135 | 136 | def test_embed_where_linestring(self): 137 | line_gdf = gpd.GeoDataFrame({"cellsize": [0.5]}, geometry=[self.line_embed]) 138 | actual = gg.embed_where(line_gdf, self.polygons) 139 | assert np.allclose(actual["cellsize"], 0.5) 140 | assert np.allclose(actual["__polygon_id"], 1) 141 | assert actual.geometry.iloc[0] == self.line_embed 142 | 143 | def test_embed_where_points(self): 144 | points_gdf = gpd.GeoDataFrame(geometry=self.points_embed) 145 | points_gdf["cellsize"] = 0.25 146 | actual = gg.embed_where(points_gdf, self.polygons) 147 | assert np.allclose(actual["cellsize"], 0.25) 148 | assert np.allclose(actual["__polygon_id"], 1) 149 | assert (actual.geometry.to_numpy() == self.points_embed).all() 150 | 151 | def test_add_geometry(self): 152 | line_gdf = gpd.GeoDataFrame({"cellsize": [0.5]}, geometry=[self.line_embed]) 153 | points_gdf = 
gpd.GeoDataFrame(geometry=self.points_embed) 154 | points_gdf["cellsize"] = 0.25 155 | with gmsh_env(): 156 | gg.add_geometry(self.polygons, line_gdf, points_gdf) 157 | 158 | def test_add_distance_points(self): 159 | with gmsh_env(): 160 | indices = gg.add_distance_points(self.points_embed) 161 | assert np.array_equal(indices, np.arange(1, 20)) 162 | 163 | def test_add_distance_linestring(self): 164 | with gmsh_env(): 165 | indices = gg.add_distance_linestring(self.line, distance=20) 166 | assert np.array_equal(indices, [1]) 167 | 168 | with gmsh_env(): 169 | indices = gg.add_distance_linestring(self.line, distance=5) 170 | assert np.array_equal(indices, [1, 2]) 171 | 172 | with gmsh_env(): 173 | indices = gg.add_distance_linestring(self.line, distance=3) 174 | assert np.array_equal(indices, [1, 2, 3]) 175 | 176 | def test_add_distance_linestrings(self): 177 | lines = gpd.GeoSeries([self.line, self.line_embed]) 178 | with gmsh_env(): 179 | indices = gg.add_distance_linestrings(lines, spacing=np.array([20, 20])) 180 | assert np.array_equal(indices, [1, 2]) 181 | 182 | with gmsh_env(): 183 | indices = gg.add_distance_linestrings(lines, spacing=np.array([3, 3])) 184 | assert np.array_equal(indices, [1, 2, 3, 4, 5]) 185 | 186 | with gmsh_env(): 187 | indices = gg.add_distance_linestrings(lines, spacing=np.array([20, 3])) 188 | assert np.array_equal(indices, [1, 2, 3]) 189 | 190 | def test_add_distance_polygons(self): 191 | polygons = gpd.GeoSeries([self.donut, self.refined]) 192 | with gmsh_env(): 193 | indices = gg.add_distance_polygons(polygons, spacing=np.array([100, 100])) 194 | # Exterior, interior, exterior 195 | assert np.array_equal(indices, [1, 2, 3]) 196 | 197 | with gmsh_env(): 198 | indices = gg.add_distance_polygons(polygons, spacing=np.array([3, 3])) 199 | # Exterior, interior, exterior 200 | assert np.array_equal(indices, np.arange(1, 27)) 201 | 202 | def test_add_distance_geometry(self): 203 | geometry = gpd.GeoSeries([sg.MultiPolygon([self.donut, 
self.refined])]) 204 | with pytest.raises(TypeError, match="Geometry should be one of"): 205 | gg.add_distance_geometry(geometry, np.array([1.0])) 206 | 207 | # One point on each edge 208 | geometry = gpd.GeoSeries([self.polygon]) 209 | with gmsh_env(): 210 | indices = gg.add_distance_geometry(geometry, np.array([10.0])) 211 | assert np.array_equal(indices, [1, 2, 3, 4]) 212 | 213 | # Three points 214 | geometry = gpd.GeoSeries([self.line]) 215 | with gmsh_env(): 216 | indices = gg.add_distance_geometry(geometry, np.array([3.0])) 217 | assert np.array_equal(indices, [1, 2, 3]) 218 | 219 | # Twenty points, one to one 220 | geometry = gpd.GeoSeries(self.points_embed) 221 | with gmsh_env(): 222 | indices = gg.add_distance_geometry(geometry, np.array([np.nan])) 223 | assert np.array_equal(indices, np.arange(1, 20)) 224 | 225 | # All together 226 | geometry = gpd.GeoSeries( 227 | np.concatenate([[self.polygon], [self.line], self.points_embed]) 228 | ) 229 | with gmsh_env(): 230 | indices = gg.add_distance_geometry(geometry, np.array([10.0, 3.0, np.nan])) 231 | assert np.array_equal(indices, np.arange(1, 25)) 232 | -------------------------------------------------------------------------------- /tests/test_meshers.py: -------------------------------------------------------------------------------- 1 | import geopandas as gpd 2 | import numpy as np 3 | import pytest 4 | import shapely.geometry as sg 5 | 6 | import pandamesh as pm 7 | 8 | outer_coords = np.array([(0.0, 0.0), (10.0, 0.0), (10.0, 10.0), (0.0, 10.0)]) 9 | inner_coords = np.array([(3.0, 3.0), (7.0, 3.0), (7.0, 7.0), (3.0, 7.0)]) 10 | line_coords = np.array([(2.0, 8.0), (8.0, 2.0)]) 11 | inner = sg.LinearRing(inner_coords) 12 | outer = sg.LinearRing(outer_coords) 13 | line = sg.LineString(line_coords) 14 | polygon = sg.Polygon(outer) 15 | donut = sg.Polygon(outer, holes=[inner]) 16 | 17 | other_inner_coords = np.array([(3.0, 4.0), (7.0, 4.0), (7.0, 6.0), (3.0, 6.0)]) 18 | other_inner = 
def area(vertices, triangles):
    """
    Return the area of every triangle in the mesh (helper for these tests).

    Uses half the absolute 2D cross product of two edge vectors per triangle.
    """
    corners = vertices[triangles]
    edge1 = corners[:, 1] - corners[:, 0]
    edge2 = corners[:, 2] - corners[:, 0]
    cross = edge1[:, 0] * edge2[:, 1] - edge1[:, 1] * edge2[:, 0]
    return 0.5 * np.abs(cross)
@pytest.mark.parametrize("generate", [triangle_generate, gmsh_generate])
@pytest.mark.parametrize("shift", [False, True])
def test_adjacent_donut(generate, shift):
    # Build a second donut translated 10 units to the right of the first,
    # so the two share an edge at x=10.
    offset = np.array([10.0, 0.0])
    inner2 = sg.LinearRing(inner_coords + offset)
    outer2 = sg.LinearRing(outer_coords + offset)
    donut2 = sg.Polygon(outer2, holes=[inner2])

    gdf = gpd.GeoDataFrame(geometry=[donut, donut2])
    gdf["cellsize"] = [1.0, 0.5]
    vertices, triangles = generate(gdf, shift)
    assert np.allclose(area(vertices, triangles).sum(), 2 * donut.area)
    assert np.allclose(bounds(vertices), gdf.total_bounds)

    # With a line at y=8.0 and points in the left polygon, at y=2.0
    line1 = sg.LineString([(0.25, 8.0), (9.75, 8.0)])
    line2 = sg.LineString([(10.25, 8.0), (19.75, 8.0)])
    x = np.arange(0.25, 10.0, 0.25)
    points = gpd.points_from_xy(x=x, y=np.full(x.size, 2.0))
    gdf = gpd.GeoDataFrame(geometry=[donut, donut2, line1, line2, *points])
    gdf["cellsize"] = 1.0

    vertices, triangles = generate(gdf, shift)
    assert np.allclose(area(vertices, triangles).sum(), 2 * donut.area)
    assert np.allclose(bounds(vertices), gdf.total_bounds)
def test_gmsh_write(tmp_path):
    # Writing the mesh should produce a .msh file on disk.
    gdf = gpd.GeoDataFrame(geometry=[donut])
    gdf["cellsize"] = 1.0
    target = tmp_path / "a.msh"
    mesher = pm.GmshMesher._force_init(gdf)
    mesher.write(target)
    assert target.exists()
@pytest.mark.parametrize("read_config_files", [True, False])
@pytest.mark.parametrize("interruptible", [True, False])
def test_gmsh_initialization_kwargs(read_config_files, interruptible):
    # The gmsh initialization keyword arguments must not affect the result:
    # the meshed area should equal the donut's area in every combination.
    gdf = gpd.GeoDataFrame(geometry=[donut])
    gdf["cellsize"] = 1.0
    mesher = pm.GmshMesher._force_init(
        gdf, read_config_files=read_config_files, interruptible=interruptible
    )
    vertices, triangles = mesher.generate()
    assert np.allclose(area(vertices, triangles).sum(), donut.area)
def test_plot_optional_args(mesh):
    # Exercise plot() with every optional argument supplied.
    vertices, faces = mesh
    _, ax = plt.subplots()
    style = {
        "facecolors": "none",
        "edgecolors": "blue",
        "linestyles": "dashed",
    }
    pm.plot(vertices, faces, fill_value=-1, ax=ax, **style)
def test_collect_interiors():
    """Only the donut has a hole, so exactly one interior ring is collected."""
    polygons = np.array([donut, outer])
    interiors = pr.collect_interiors(polygons)
    # Bug fix: was `len(interiors == 1)`, which measured the length of a
    # boolean comparison array and was truthy for any non-empty result.
    assert len(interiors) == 1
    # Type id 2 is a LinearRing/LineString in shapely's GEOS type ids.
    assert (shapely.get_type_id(interiors) == 2).all()
def test_merge_polyons():
    """Touching polygons fuse into one; disjoint polygons stay separate."""
    # `first` and `second` share the edge at x=10, so they merge into one.
    polygons = np.array([first, second])
    merged = pr.merge_polygons(polygons, None)
    # Bug fix: was `len(merged == 1)`, which only checked non-emptiness.
    assert len(merged) == 1
    # Type id 3 is a Polygon in shapely's GEOS type ids.
    assert (shapely.get_type_id(merged) == 3).all()

    # `inner` and `second` are disjoint, so both survive the merge.
    polygons = np.array([inner, second])
    merged = pr.merge_polygons(polygons, None)
    # Bug fix: was `len(merged == 2)` (same vacuous pattern).
    assert len(merged) == 2
    assert (shapely.get_type_id(merged) == 3).all()
test_value_equality(p.__dict__, new) 209 | 210 | def test_to_geodataframe(self): 211 | gdf = self.p.to_geodataframe() 212 | assert isinstance(gdf, gpd.GeoDataFrame) 213 | assert set(gdf.columns) == {"geometry", "indexer"} 214 | assert np.array_equal(gdf["indexer"], [0, 1, 2, 3, 4, 5, 6, 7]) 215 | 216 | gdf = self.vp.to_geodataframe() 217 | assert isinstance(gdf, gpd.GeoDataFrame) 218 | assert set(gdf.columns) == {"geometry", "indexer", "values"} 219 | assert np.array_equal(gdf["values"], self.values) 220 | 221 | def test_merge_polygons(self): 222 | merged = self.p.merge_polygons() 223 | assert isinstance(merged, pr.Preprocessor) 224 | assert len(merged.polygons == self.p.polygons) 225 | assert (shapely.get_type_id(merged.polygons) == 3).all() 226 | 227 | merged = self.vp.merge_polygons() 228 | assert len(merged.polygons) == 2 229 | assert np.array_equal(merged.polygon_indexer, [1, 2]) 230 | assert (shapely.get_type_id(merged.polygons) == 3).all() 231 | 232 | def test_unify_polygons(self): 233 | unified = self.p.unify_polygons() 234 | assert isinstance(unified, pr.Preprocessor) 235 | assert len(unified.polygons) == 5 236 | assert (shapely.get_type_id(unified.polygons) == 3).all() 237 | assert np.array_equal(unified.polygon_indexer, [0, 0, 2, 0, 0]) 238 | 239 | # Outer and second have same values, so they are merged. 
240 | unified = self.vp.unify_polygons().merge_polygons() 241 | assert len(unified.polygons) == 2 242 | assert (shapely.get_type_id(unified.polygons) == 3).all() 243 | assert np.array_equal(unified.polygon_indexer, [1, 2]) 244 | 245 | def test_clip_lines(self): 246 | clipped = self.p.clip_lines() 247 | assert isinstance(clipped, pr.Preprocessor) 248 | assert len(clipped.lines) == 2 249 | 250 | expected = [ 251 | sg.LineString([[0.0, 5.0], [6.0, 5.0]]), 252 | sg.LineString([[5.0, 0.0], [5.0, 6.0]]), 253 | ] 254 | assert all(clipped.lines == expected) 255 | assert np.array_equal(clipped.line_indexer, [3, 4]) 256 | 257 | p = pr.Preprocessor(geometry=[donut, first_line, second_line]) 258 | clipped = p.clip_lines(distance=0.5) 259 | expected = [ 260 | sg.LineString([[0.5, 5.0], [1.5, 5.0]]), 261 | sg.LineString([[5.0, 0.5], [5.0, 1.5]]), 262 | ] 263 | assert all(clipped.lines == expected) 264 | assert np.array_equal(clipped.line_indexer, [1, 2]) 265 | 266 | def test_unify_lines(self): 267 | unified = self.p.unify_lines() 268 | assert isinstance(unified, pr.Preprocessor) 269 | assert len(unified.lines) == 4 270 | expected = [ 271 | sg.LineString([[-5.0, 5.0], [5.0, 5.0]]), 272 | sg.LineString([[5.0, 5.0], [6.0, 5.0]]), 273 | sg.LineString([[5.0, -5.0], [5.0, 5.0]]), 274 | sg.LineString([[5.0, 5.0], [5.0, 6.0]]), 275 | ] 276 | assert all(unified.lines == expected) 277 | assert np.array_equal(unified.line_indexer, [3, 3, 4, 4]) 278 | 279 | def test_clip_points(self): 280 | clipped = self.p.clip_points() 281 | assert isinstance(clipped, pr.Preprocessor) 282 | assert len(clipped.points) == 2 283 | assert np.array_equal(clipped.point_indexer, [5, 6]) 284 | 285 | clipped = self.p.clip_points(distance=0.5) 286 | assert isinstance(clipped, pr.Preprocessor) 287 | assert len(clipped.points) == 1 288 | assert np.array_equal(clipped.point_indexer, [6]) 289 | 290 | def test_interpolate_lines_to_points(self): 291 | with pytest.raises(ValueError, match="If values_as_distance is 
False"): 292 | self.p.interpolate_lines_to_points(distance=None, values_as_distance=False) 293 | 294 | interpolated = self.p.interpolate_lines_to_points(distance=1.0) 295 | assert isinstance(interpolated, pr.Preprocessor) 296 | assert len(interpolated.points) == 27 297 | assert np.array_equal( 298 | interpolated.point_indexer, [5, 6, 7] + [3] * 12 + [4] * 12 299 | ) 300 | 301 | interpolated = self.vp.interpolate_lines_to_points(values_as_distance=True) 302 | assert isinstance(interpolated, pr.Preprocessor) 303 | assert len(interpolated.points) == 49 304 | assert np.array_equal( 305 | interpolated.point_indexer, [1, 1, 1] + [0] * 23 + [0] * 23 306 | ) 307 | 308 | def test_snap_points(self): 309 | snapped = self.p.snap_points(0.05) 310 | assert isinstance(snapped, pr.Preprocessor) 311 | assert len(snapped.points) == 3 312 | assert np.array_equal(snapped.point_indexer, [5, 6, 7]) 313 | 314 | snapped = self.p.snap_points(2.0) 315 | assert isinstance(snapped, pr.Preprocessor) 316 | assert len(snapped.points) == 2 317 | assert np.array_equal(snapped.point_indexer, [5, 7]) 318 | 319 | def test_empty_ops(self): 320 | p = pr.Preprocessor(geometry=[outer]) 321 | assert isinstance(p.clip_points(), pr.Preprocessor) 322 | assert isinstance(p.snap_points(1.0), pr.Preprocessor) 323 | assert isinstance(p.clip_lines(), pr.Preprocessor) 324 | assert isinstance(p.unify_lines(), pr.Preprocessor) 325 | assert isinstance(p.interpolate_lines_to_points(), pr.Preprocessor) 326 | assert isinstance(p.unify_polygons(), pr.Preprocessor) 327 | assert isinstance(p.merge_polygons(), pr.Preprocessor) 328 | assert isinstance(p.to_geodataframe(), gpd.GeoDataFrame) 329 | -------------------------------------------------------------------------------- /tests/test_snapping.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import numpy as np 4 | import pytest 5 | import shapely 6 | 7 | from pandamesh.snapping import MatrixCSR, columns_and_values, 
def test_snap__three_points_horizontal():
    # Three collinear points spaced one unit apart on the x-axis.
    points = shapely.points(np.array([0.0, 1.0, 2.0]), np.zeros(3))

    # Tolerance below the spacing: nothing snaps.
    assert np.array_equal(snap_nodes(points, 0.1), [0, 1, 2])
    # Tolerance equal to the spacing: the middle point is absorbed.
    assert np.array_equal(snap_nodes(points, 1.0), [0, 2])
    # Tolerance spanning everything: only the first point remains.
    assert np.array_equal(snap_nodes(points, 2.0), [0])
def area(vertices, triangles):
    """
    Return the area of every triangle in the mesh (helper for these tests).

    Computed as half the magnitude of the 2D cross product of the two edge
    vectors emanating from the first corner of each triangle.
    """
    tri = vertices[triangles]
    a, b, c = tri[:, 0], tri[:, 1], tri[:, 2]
    ab = b - a
    ac = c - a
    return 0.5 * np.abs(ab[:, 0] * ac[:, 1] - ab[:, 1] * ac[:, 0])
def test_polygon_holes():
    # A plain polygon exposes no holes.
    no_hole = gpd.GeoDataFrame(geometry=[sg.Polygon(outer)])
    assert tg.polygon_holes(no_hole) is None

    # A donut contributes exactly one hole.
    with_hole = gpd.GeoDataFrame(geometry=[donut])
    assert len(tg.polygon_holes(with_hole)) == 1

    # The refined polygon fills the donut's hole, so none remain.
    filled = gpd.GeoDataFrame(geometry=[donut, refined])
    assert tg.polygon_holes(filled) is None
138 | inner = [ 139 | [4.0, 6.0], 140 | [6.0, 6.0], 141 | [6.0, 4.0], 142 | [4.0, 4.0], 143 | [4.0, 6.0], 144 | ] 145 | polygon = sg.Polygon(shell=outer_coords, holes=[inner]) 146 | polygons = gpd.GeoDataFrame(geometry=[polygon]) 147 | polygons["cellsize"] = 1.0 148 | linestrings = gpd.GeoDataFrame(geometry=[ring]) 149 | 150 | new_polygons = tg.convert_linestring_rings(polygons, linestrings) 151 | assert isinstance(new_polygons, gpd.GeoDataFrame) 152 | assert np.allclose(new_polygons["cellsize"], 1.0) 153 | assert np.allclose(new_polygons.area, [84.0, 12.0]) 154 | 155 | 156 | def test_convert_ring_linestring__nested_ring(): 157 | # This third case has the ring inside of a nested polygon. 158 | nested_ring = [ 159 | [4.0, 6.0], 160 | [6.0, 6.0], 161 | [6.0, 4.0], 162 | [4.0, 4.0], 163 | [4.0, 6.0], 164 | ] 165 | polygon = sg.Polygon(shell=outer_coords, holes=[inner]) 166 | inner_polygon = sg.Polygon(shell=inner_coords) 167 | polygons = gpd.GeoDataFrame(geometry=[polygon, inner_polygon]) 168 | polygons["cellsize"] = 1.0 169 | linestrings = gpd.GeoDataFrame(geometry=[sg.LineString(nested_ring)]) 170 | 171 | new_polygons = tg.convert_linestring_rings(polygons, linestrings) 172 | assert isinstance(new_polygons, gpd.GeoDataFrame) 173 | assert np.allclose(new_polygons["cellsize"], 1.0) 174 | assert np.allclose(new_polygons.area, [84.0, 12.0, 4.0]) 175 | 176 | 177 | def test_segmentize_linestrings(): 178 | gdf = gpd.GeoDataFrame() 179 | actual = tg.segmentize_linestrings(gdf) 180 | assert actual is gdf 181 | 182 | gdf = gpd.GeoDataFrame( 183 | geometry=[ 184 | sg.LineString([[0.0, 0.0], [10.0, 0.0]]), 185 | sg.LineString([[0.0, 5.0], [10.0, 5.0]]), 186 | sg.LineString([[0.0, 10.0], [10.0, 10.0]]), 187 | ], 188 | data={"cellsize": [np.nan, 1.0, 0.5]}, 189 | ) 190 | actual = tg.segmentize_linestrings(gdf) 191 | _, index = shapely.get_coordinates(actual.geometry, return_index=True) 192 | _, counts = np.unique(index, return_counts=True) 193 | assert np.array_equal(counts, [2, 
11, 21]) 194 | --------------------------------------------------------------------------------