├── xugrid ├── ugrid │ ├── __init__.py │ ├── selection_utils.py │ └── polygonize.py ├── plot │ └── __init__.py ├── data │ ├── __init__.py │ ├── registry.txt │ ├── sample_data.py │ └── synthetic.py ├── regrid │ ├── network.py │ ├── utils.py │ ├── gridder.py │ ├── nanpercentile.py │ ├── unstructured.py │ ├── reduce.py │ └── overlap_1d.py ├── meshkernel_utils.py ├── constants.py ├── __init__.py └── core │ ├── utils.py │ ├── sparse.py │ ├── common.py │ └── accessorbase.py ├── MANIFEST.in ├── .codecov.yml ├── .gitattributes ├── data ├── ADH_SanDiego.nc ├── elevation_nl.nc ├── README.md ├── examples │ ├── provinces_nl.py │ ├── xoxo.py │ ├── elevation_nl.py │ ├── disk.py │ ├── adh_san_diego.py │ ├── README.rst │ └── network.py ├── hydamo_profiles.csv ├── xoxo_triangles.txt ├── xoxo_vertices.txt └── hydamo_objects.csv ├── docs ├── _static │ ├── xugrid-demo.png │ ├── theme-deltares.css │ ├── deltares-blue.svg │ ├── deltares-white.svg │ └── enabling-delta-life.svg ├── dev_docs.rst ├── clean.py ├── Makefile ├── user_guide.rst ├── make.bat ├── index.rst ├── conf.py └── terminology.rst ├── examples ├── README.rst ├── partitioning.py ├── quick_overview.py ├── plotting.py ├── overlap_regridder.py ├── network_gridder.py ├── regridder_overview.py └── selection.py ├── .github ├── dependabot.yml └── workflows │ ├── pixi_auto_update.yml │ └── ci.yml ├── .pre-commit-config.yaml ├── examples-dev └── README.rst ├── .zenodo.json ├── tests ├── __init__.py ├── test_data.py ├── test_core_utils.py ├── test_polygonize.py ├── conftest.py ├── test_meshkernel_utils.py ├── test_regrid │ ├── test_utils.py │ ├── test_unstructured.py │ ├── test_overlap_1d.py │ ├── test_reduce.py │ └── test_network_gridder.py ├── test_interpolate.py ├── test_sparse.py └── test_burn.py ├── LICENSE ├── .gitignore ├── README.rst └── pyproject.toml /xugrid/ugrid/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include xugrid/data/registry.txt 2 | -------------------------------------------------------------------------------- /.codecov.yml: -------------------------------------------------------------------------------- 1 | comment: false 2 | github_checks: 3 | annotations: false 4 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # GitHub syntax highlighting 2 | pixi.lock linguist-language=YAML 3 | 4 | -------------------------------------------------------------------------------- /data/ADH_SanDiego.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/xugrid/main/data/ADH_SanDiego.nc -------------------------------------------------------------------------------- /data/elevation_nl.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/xugrid/main/data/elevation_nl.nc -------------------------------------------------------------------------------- /docs/_static/xugrid-demo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/xugrid/main/docs/_static/xugrid-demo.png -------------------------------------------------------------------------------- /data/README.md: -------------------------------------------------------------------------------- 1 | # Sample data sets 2 | 3 | These files are used as sample data in Xugrid and are downloaded by 4 | `xugrid.data` functions. 
5 | -------------------------------------------------------------------------------- /examples/README.rst: -------------------------------------------------------------------------------- 1 | .. examples-index: 2 | 3 | Examples 4 | ======== 5 | 6 | The examples in this gallery demonstrate the functionality of the ``xugrid`` 7 | package. Every example can be downloaded as either a Python script or a Jupyter 8 | notebook for interactive exploration. -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 2 | version: 2 3 | updates: 4 | - package-ecosystem: "github-actions" 5 | directory: "/" # Location of package manifests 6 | schedule: 7 | interval: "weekly" 8 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/astral-sh/ruff-pre-commit 3 | # Ruff version. 4 | rev: v0.1.5 5 | hooks: 6 | # Run the linter. 7 | - id: ruff 8 | args: [--fix, --exit-non-zero-on-fix] 9 | # Run the formatter. 10 | - id: ruff-format -------------------------------------------------------------------------------- /data/examples/provinces_nl.py: -------------------------------------------------------------------------------- 1 | """ 2 | Provinces NL 3 | ============ 4 | 5 | This is a small vector dataset containing polygons of the provinces of the 6 | Netherlands, including water, presented as geopandas GeoDataFrame. 
7 | """ 8 | 9 | import xugrid 10 | 11 | gdf = xugrid.data.provinces_nl() 12 | gdf.plot() 13 | -------------------------------------------------------------------------------- /examples-dev/README.rst: -------------------------------------------------------------------------------- 1 | Developer Examples 2 | ================== 3 | 4 | The examples in this gallery demonstrate more low-level aspects and trade-offs 5 | of the ``xugrid`` package, and provide visual examples to help understanding. 6 | Every example can be downloaded as either a Python script or a Jupyter notebook 7 | for interactive exploration. 8 | -------------------------------------------------------------------------------- /.zenodo.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Xugrid", 3 | "description": "Xarray and unstructured grids", 4 | "upload_type": "software", 5 | "creators": [ 6 | { 7 | "affiliation": "Deltares", 8 | "name": "Bootsma, Huite", 9 | "orcid": "0009-0000-2214-1742" 10 | } 11 | ], 12 | "access_right": "open", 13 | "license": "MIT" 14 | } 15 | -------------------------------------------------------------------------------- /xugrid/plot/__init__.py: -------------------------------------------------------------------------------- 1 | from xugrid.plot.plot import ( 2 | contour, 3 | contourf, 4 | imshow, 5 | line, 6 | pcolormesh, 7 | scatter, 8 | surface, 9 | tripcolor, 10 | ) 11 | 12 | __all__ = ( 13 | "contour", 14 | "contourf", 15 | "imshow", 16 | "line", 17 | "pcolormesh", 18 | "scatter", 19 | "surface", 20 | "tripcolor", 21 | ) 22 | -------------------------------------------------------------------------------- /docs/dev_docs.rst: -------------------------------------------------------------------------------- 1 | Development 2 | =========== 3 | 4 | The examples in this gallery demonstrate more low-level aspects and trade-offs 5 | of the ``xugrid`` package, and provide visual examples to help understanding. 
6 | Every example can be downloaded as either a Python script or a Jupyter notebook 7 | for interactive exploration. 8 | 9 | .. toctree:: 10 | :maxdepth: 2 11 | 12 | examples-dev/voronoi.rst 13 | -------------------------------------------------------------------------------- /xugrid/data/__init__.py: -------------------------------------------------------------------------------- 1 | from xugrid.data.sample_data import ( 2 | adh_san_diego, 3 | elevation_nl, 4 | hydamo_network, 5 | provinces_nl, 6 | xoxo, 7 | ) 8 | from xugrid.data.synthetic import disk, generate_disk 9 | 10 | __all__ = ( 11 | "adh_san_diego", 12 | "elevation_nl", 13 | "provinces_nl", 14 | "xoxo", 15 | "disk", 16 | "generate_disk", 17 | "hydamo_network", 18 | ) 19 | -------------------------------------------------------------------------------- /docs/_static/theme-deltares.css: -------------------------------------------------------------------------------- 1 | /* enlarge deltares & github icon size; only works with local/url svg files; not with fa icons */ 2 | img.icon-link-image { 3 | height: 2.5em !important; 4 | } 5 | 6 | [data-theme="dark"] img.icon-link-image[src*="deltares-blue.svg"] { 7 | filter: brightness(0) saturate(100%) invert(100%); 8 | } 9 | [data-theme="dark"] img.icon-link-image[src*="Octicons-mark-github.svg"] { 10 | filter: brightness(0) saturate(100%) invert(100%); 11 | } -------------------------------------------------------------------------------- /docs/clean.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | 4 | 5 | def remove_dir_content(path: str) -> None: 6 | for root, dirs, files in os.walk(path): 7 | for f in files: 8 | os.unlink(os.path.join(root, f)) 9 | for d in dirs: 10 | shutil.rmtree(os.path.join(root, d)) 11 | 12 | 13 | remove_dir_content("api") 14 | remove_dir_content("examples") 15 | remove_dir_content("examples-dev") 16 | remove_dir_content("sample_data") 17 | remove_dir_content("_build") 
18 | -------------------------------------------------------------------------------- /data/examples/xoxo.py: -------------------------------------------------------------------------------- 1 | """ 2 | Xoxo 3 | ==== 4 | 5 | This is a small unstructured grid consisting of two unconnected parts. This is 6 | downloaded via `xugrid.data.xoxo()`. The topology data is downloaded to a local 7 | directory if it's not there already. 8 | """ 9 | 10 | import matplotlib.pyplot as plt 11 | 12 | import xugrid 13 | 14 | grid = xugrid.data.xoxo() 15 | 16 | fig, ax = plt.subplots() 17 | xugrid.plot.line(grid, ax=ax, color="#bd0d1f") 18 | ax.set_xlim([0.0, 100.0]) 19 | ax.set_ylim([0.0, 85.0]) 20 | ax.set_aspect(1) 21 | -------------------------------------------------------------------------------- /data/examples/elevation_nl.py: -------------------------------------------------------------------------------- 1 | """ 2 | Elevation NL 3 | ============ 4 | 5 | This is a small dataset containing a triangulation of a digital elevation model 6 | of the Netherlands. 7 | """ 8 | 9 | import matplotlib.pyplot as plt 10 | 11 | import xugrid 12 | 13 | uda = xugrid.data.elevation_nl() 14 | 15 | section_y = 475_000.0 16 | section = uda.ugrid.sel(y=section_y) 17 | 18 | fig, (ax0, ax1) = plt.subplots(figsize=(22.6, 10), ncols=2) 19 | uda.ugrid.plot(ax=ax0, vmin=-20, vmax=90, cmap="terrain") 20 | ax0.axhline(y=section_y, color="red") 21 | section.plot(ax=ax1, x="mesh2d_x") 22 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # __init__.py for pytest-cov 2 | import pytest 3 | 4 | 5 | def _importorskip(modname): 6 | try: 7 | import meshkernel 8 | 9 | # If the DLL/SO fails to load / be found, still skip. 
10 | try: 11 | meshkernel.MeshKernel() 12 | has = True 13 | except OSError: 14 | has = False 15 | except ImportError: 16 | has = False 17 | func = pytest.mark.skipif(not has, reason=f"requires {modname}") 18 | return has, func 19 | 20 | 21 | has_meshkernel, requires_meshkernel = _importorskip("meshkernel") 22 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = imod 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /xugrid/data/registry.txt: -------------------------------------------------------------------------------- 1 | xoxo_vertices.txt 71daf767e15ea7c7e2d1d872cd8cc97aa2b1e509cf00bbc1f6f0273f5daaf62f 2 | xoxo_triangles.txt 129168cd60e944f03dfff5320365eb73321038aa872b5460576f5d064bba372b 3 | ADH_SanDiego.nc c09eb694a0951f5d718179ebb167d1e7c2fdb8234ba8a5bb588dae3a3610b76a 4 | elevation_nl.nc b6e33c3c08d0cc762c28113096a857221d3ae57a899eaf394856aa448bfe9d12 5 | provinces-nl.geojson 7539318974d1d78f35e4c2987287aa81f5ff505f444a2e0f340d804f57c0f8e3 6 | hydamo_objects.csv 96a7ed8358f8e49d05630230fc14579813513f534ed3ecd9abd2d5abacb6a1c4 7 | hydamo_points.csv 7a52d217aff7ad47757ebbb390a385fbeedd605b3bf20efa0d903478ee5de983 8 | hydamo_profiles.csv 4b3fec1b2cf22fcc36412b2def0456364fce6dcb0a7754bd88ea42f11a4fa65d -------------------------------------------------------------------------------- /data/examples/disk.py: -------------------------------------------------------------------------------- 1 | """ 2 | Disk 3 | ==== 4 | 5 | This is a small synthetic unstructured XugridDataset with topology in the shape 6 | of a disk. It contains data on the nodes, faces, and edges. 
7 | """ 8 | 9 | # %% 10 | import matplotlib.pyplot as plt 11 | 12 | import xugrid 13 | 14 | uds = xugrid.data.disk() 15 | 16 | fig, axes = plt.subplots(nrows=1, ncols=3, sharex=True, sharey=True, figsize=(15, 5)) 17 | axes = axes.ravel() 18 | for ax in axes: 19 | ax.set_aspect(1) 20 | 21 | uds["node_z"].ugrid.plot(ax=axes[0], add_colorbar=False, cmap="terrain") 22 | uds["face_z"].ugrid.plot(ax=axes[1], add_colorbar=False, cmap="terrain") 23 | uds["edge_z"].ugrid.plot(ax=axes[2], add_colorbar=False, cmap="terrain") 24 | 25 | # %% 26 | -------------------------------------------------------------------------------- /data/examples/adh_san_diego.py: -------------------------------------------------------------------------------- 1 | """ 2 | ADH San Diego 3 | ============= 4 | 5 | This is small dataset containing the output of a hydraulic simulation. 6 | 7 | It contains a static dataset (bed elevation) and a time varying dataset 8 | (water depth). 9 | """ 10 | 11 | import matplotlib.pyplot as plt 12 | import numpy as np 13 | 14 | import xugrid 15 | 16 | uds = xugrid.data.adh_san_diego() 17 | 18 | fig, (ax0, ax1) = plt.subplots( 19 | nrows=1, ncols=2, sharex=True, sharey=True, figsize=(13, 5) 20 | ) 21 | uds["elevation"].ugrid.plot( 22 | ax=ax0, add_colorbar=False, cmap="RdBu", levels=np.arange(-75.0, 0.0) 23 | ) 24 | uds["depth"].isel(time=0).ugrid.plot( 25 | ax=ax1, add_colorbar=False, cmap="viridis", levels=np.arange(20.0) 26 | ) 27 | -------------------------------------------------------------------------------- /docs/user_guide.rst: -------------------------------------------------------------------------------- 1 | User Guide 2 | ========== 3 | 4 | This user guide shows an overview of the concepts and features of xugrid. 5 | 6 | It is primarily example based, and should give you an idea how to accomplish 7 | common tasks. 8 | 9 | Information on specific methods and classes can be found in the API Reference. 10 | 11 | .. 
toctree:: 12 | :maxdepth: 2 13 | 14 | terminology.rst 15 | examples/quick_overview.rst 16 | examples/plotting.rst 17 | examples/selection.rst 18 | examples/regridder_overview.rst 19 | examples/overlap_regridder.rst 20 | examples/vector_conversion.rst 21 | examples/connectivity.rst 22 | examples/partitioning.rst 23 | examples/hydamo_network.rst 24 | examples/network_gridder.rst 25 | sample_data/index.rst 26 | -------------------------------------------------------------------------------- /data/examples/README.rst: -------------------------------------------------------------------------------- 1 | .. sample-data: 2 | 3 | Sample Data 4 | =========== 5 | 6 | Xugrid provides some sample data and ways of generating synthetic data through 7 | the :mod:`xugrid.data` module. 8 | 9 | Where are my data files? 10 | ------------------------ 11 | 12 | The sample data files are downloaded automatically by :mod:`pooch` the first 13 | time you load them. The files are saved to the default cache location on your 14 | operating system. The location varies depending on your system and 15 | configuration. We provide the :func:`xugrid.data.locate` function if you 16 | need to find the data storage location on your system. 17 | 18 | You can change the base data directory by setting the ``XUGRID_DATA_DIR`` 19 | environment variable to the desired path. 20 | 21 | Available datasets 22 | ------------------ 23 | 24 | These are the datasets currently available: 25 | -------------------------------------------------------------------------------- /data/examples/network.py: -------------------------------------------------------------------------------- 1 | """ 2 | HyDAMO Network 3 | ============== 4 | 5 | This is small dataset containing elevation points, cross-section profiles 6 | and center lines for a small stream network. 7 | 8 | It is simplified HyDAMO data, the standardized data model for surface 9 | water in the Netherlands. 
10 | """ 11 | 12 | import matplotlib.patches as patches 13 | import matplotlib.pyplot as plt 14 | 15 | import xugrid 16 | 17 | lines, profiles, points = xugrid.data.hydamo_network() 18 | 19 | xy = (140_270.0, 393_140.0) 20 | dx = dy = 100.0 21 | 22 | fig, (ax0, ax1) = plt.subplots(ncols=2, figsize=(10, 5)) 23 | lines.plot(ax=ax0) 24 | profiles.plot(ax=ax0, color="red") 25 | ax0.add_patch(patches.Rectangle(xy, dx, dy, fill=False)) 26 | 27 | lines.plot(ax=ax1, column="type") 28 | profiles.plot(ax=ax1, color="red") 29 | points.plot(ax=ax1, color="black") 30 | ax1.set_xlim(xy[0], xy[0] + dx) 31 | ax1.set_ylim(xy[1], xy[1] + dy) 32 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | set SPHINXPROJ=xugrid 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 20 | echo.installed, then set the SPHINXBUILD environment variable to point 21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 22 | echo.may add the Sphinx directory to PATH. 23 | echo. 
def get_sorted_section_coords(
    s: FloatArray, xy: FloatArray, dim: str, index: IntArray, name: str
) -> Tuple[dict, IntArray]:
    """Sort sampled section points by distance along the section.

    Parameters
    ----------
    s: FloatArray
        Distance of each sample along the section.
    xy: FloatArray of shape (n, 2)
        Coordinates of the samples.
    dim: str
        Name of the dimension the coordinates are associated with.
    index: IntArray
        Indices of the sampled topology elements.
    name: str
        Prefix for the generated coordinate names.

    Returns
    -------
    coords: dict
        Maps f"{name}_x", f"{name}_y", f"{name}_s" to (dim, values)
        tuples, sorted by distance along the section.
    index: IntArray
        The element indices in the same sorted order.
    """
    order = np.argsort(s)
    coords = {
        f"{name}_x": (dim, xy[order, 0]),
        f"{name}_y": (dim, xy[order, 1]),
        f"{name}_s": (dim, s[order]),
    }
    return coords, index[order]


def section_coordinates_1d(
    edges: FloatArray, xy: FloatArray, dim: str, index: IntArray, name: str
) -> Tuple[dict, IntArray]:
    """Compute sorted section coordinates for points on a 1D topology.

    Returns (coords, index): see ``get_sorted_section_coords``.
    NOTE: the return order is (dict, IntArray); earlier annotations had
    this backwards.
    """
    # Distance of every sample to the start of the section line.
    s = np.linalg.norm(xy - edges[0, 0], axis=1)
    return get_sorted_section_coords(s, xy, dim, index, name)


def section_coordinates_2d(
    edges: FloatArray, xy: FloatArray, dim: str, index: IntArray, name: str
) -> Tuple[dict, IntArray]:
    """Compute sorted section coordinates for edges on a 2D topology.

    Uses the midpoint of each intersected edge as the sample location.
    Returns (coords, index): see ``get_sorted_section_coords``.
    """
    # TODO: add boundaries xy[:, 0] and xy[:, 1]
    xy_mid = 0.5 * (xy[:, 0, :] + xy[:, 1, :])
    return section_coordinates_1d(edges, xy_mid, dim, index, name)
def either_string_or_enum(value: Union[str, IntEnum], enum_class: EnumMeta) -> IntEnum:
    """Convert a string to a member of ``enum_class`` if needed, and validate.

    Parameters
    ----------
    value: str or IntEnum
        Option name (matched case-insensitively) or an enum member.
    enum_class: EnumMeta
        The enum to look the name up in.

    Returns
    -------
    IntEnum
        The matching enum member.

    Raises
    ------
    ValueError
        If the string does not name a member of ``enum_class``.
    TypeError
        If ``value`` is neither a str nor a member of ``enum_class``.
    """
    if isinstance(value, str):
        name = value.upper()
        enum_dict = dict(enum_class.__members__)
        try:
            value = enum_dict[name]
        except KeyError as err:
            valid_options = ", ".join(enum_dict.keys()).lower()
            # Chain the KeyError so the original lookup failure stays
            # visible in the traceback (flake8-bugbear B904).
            raise ValueError(
                f"Invalid option: {value}. Valid options are: {valid_options}"
            ) from err
    elif not isinstance(value, enum_class):
        raise TypeError(
            f"Option should be one of {enum_class}, received: {type(value)}"
        )
    return value
def create_linear_index(arrays, dims):
    """Combine per-dimension index arrays into flat indices for ``dims``.

    Forms the full cartesian product of ``arrays`` (one index array per
    dimension) and converts each combination into a linear index into an
    array of shape ``dims``.
    """
    meshgrids = [a.ravel() for a in np.meshgrid(*arrays, indexing="ij")]
    return np.ravel_multi_index(meshgrids, dims)


def create_weights(arrays):
    """Combine per-dimension weight arrays into flat weights.

    The weight of each combination is the product of its per-dimension
    weights; combinations are ordered like ``create_linear_index``.
    """
    meshgrids = np.meshgrid(*arrays, indexing="ij")
    # In-place multiply is safe here: np.meshgrid returns fresh copies.
    weight = meshgrids[0]
    for dim_weight in meshgrids[1:]:
        weight *= dim_weight
    return weight.ravel()


def broadcast(
    source_shape,
    target_shape,
    source_indices,
    target_indices,
    weights,
):
    """
    Combine per-dimension regridding indices and weights into the flat
    (linear index) form used by the regridders.

    Parameters
    ----------
    source_shape: tuple of int
    target_shape: tuple of int
    source_indices: tuple of numpy arrays of int
    target_indices: tuple of numpy arrays of int
    weights: tuple of numpy arrays of floats

    Returns
    -------
    source_index, target_index, weights
        Flat numpy arrays, one entry per cartesian combination.
    """
    source_index = create_linear_index(source_indices, source_shape)
    target_index = create_linear_index(target_indices, target_shape)
    weights = create_weights(weights)
    return source_index, target_index, weights


def alt_cumsum(a):
    """
    Alternative cumsum, always starts at 0 and omits the last value of the
    regular cumsum.
    """
    out = np.empty(a.size, a.dtype)
    # Guard against zero-size input: writing out[0] on an empty array
    # would raise IndexError.
    if a.size > 0:
        out[0] = 0
        np.cumsum(a[:-1], out=out[1:])
    return out
12 | # FloatArray = np.ndarray[FloatDType] 13 | # IntArray = np.ndarray[IntDType] 14 | # BoolArray = np.ndarray[np.bool_] 15 | 16 | FloatArray = np.ndarray 17 | IntArray = np.ndarray 18 | BoolArray = np.ndarray 19 | # Pygeos collections: 20 | PointArray = np.ndarray 21 | LineArray = np.ndarray 22 | PolygonArray = np.ndarray 23 | SparseMatrix = Union[coo_matrix, csr_matrix] 24 | 25 | # Internally we always use a fill value of -1. This ensures we can always index 26 | # with the fill value as well, since any array will have at least size 1. 27 | FILL_VALUE = -1 28 | 29 | 30 | class Point(NamedTuple): 31 | x: float 32 | y: float 33 | 34 | 35 | class Vector(NamedTuple): 36 | x: float 37 | y: float 38 | 39 | 40 | # Spatial coordinate epsilon for floating point comparison 41 | # Assuming world coordinates in meters: 40 000 m along equator: 42 | # 40 000 000 = 4e7 mm 43 | # np.spacing(4e7) == 7.45E-9 ~= 1E-8 44 | X_EPSILON = 1.0e-8 45 | X_OFFSET = 1.0e-8 46 | T_OFFSET = 1.0e-6 47 | 48 | 49 | class MissingOptionalModule: 50 | """Presents a clear error for optional modules.""" 51 | 52 | def __init__(self, name): 53 | self.name = name 54 | 55 | def __getattr__(self, name): 56 | raise ImportError(f"{self.name} is required for this functionality") 57 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Xugrid 2 | ====== 3 | 4 | .. image:: https://img.shields.io/github/actions/workflow/status/deltares/xugrid/ci.yml?style=flat-square 5 | :target: https://github.com/deltares/xugrid/actions?query=workflows%3Aci 6 | .. image:: https://img.shields.io/codecov/c/github/deltares/xugrid.svg?style=flat-square 7 | :target: https://app.codecov.io/gh/deltares/xugrid 8 | .. 
image:: https://img.shields.io/badge/code%20style-black-000000.svg?style=flat-square 9 | :target: https://github.com/psf/black 10 | 11 | **This is a work in progress.** 12 | 13 | Xarray extension to work with 2D unstructured grids, for data and topology 14 | stored according to `UGRID conventions 15 | `_. 16 | 17 | Processing structured data with xarray is convenient and efficient. The goal 18 | of Xugrid is to extend this ease to unstructured grids. 19 | 20 | .. code:: python 21 | 22 | import matplotlib.pyplot as plt 23 | import xugrid 24 | 25 | # Get some sample data as a xugrid UgridDataArray 26 | uda = xugrid.data.elevation_nl() 27 | 28 | # Get a cross-section 29 | section_y = 475_000.0 30 | section = uda.ugrid.sel(y=section_y) 31 | 32 | # Plot unstructured grid and cross section 33 | fig, (ax0, ax1) = plt.subplots(figsize=(22.6, 10), ncols=2) 34 | uda.ugrid.plot(ax=ax0, vmin=-20, vmax=90, cmap="terrain") 35 | ax0.axhline(y=section_y, color="red") 36 | section.plot(ax=ax1, x="mesh2d_face_x") 37 | 38 | .. image:: _static/xugrid-demo.png 39 | 40 | Installation 41 | ------------ 42 | 43 | .. code:: console 44 | 45 | pip install xugrid 46 | 47 | .. toctree:: 48 | :titlesonly: 49 | :hidden: 50 | 51 | user_guide 52 | api 53 | dev_docs 54 | -------------------------------------------------------------------------------- /docs/_static/deltares-blue.svg: -------------------------------------------------------------------------------- 1 | Artboard 1 -------------------------------------------------------------------------------- /tests/test_core_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copied from xarray.tests.test_utils.py 3 | 4 | The reason is that the content of xarray.core.utils are all private methods. 5 | Hence, Xarray provides no guarantees on breaking changes. 
def grid1d(dataset=None, indexes=None, crs=None, attrs=None):
    """Build a minimal three-node, two-edge Ugrid1d for the tests below."""
    node_coords = np.array(
        [
            [0.0, 0.0],
            [1.0, 1.0],
            [2.0, 2.0],
        ]
    )
    return xugrid.Ugrid1d(
        node_x=node_coords[:, 0],
        node_y=node_coords[:, 1],
        fill_value=-1,
        edge_node_connectivity=np.array([[0, 1], [1, 2]]),
        dataset=dataset,
        indexes=indexes,
        crs=crs,
        attrs=attrs,
    )


def test_either_dict_or_kwargs():
    # Positional dictionary alone is returned as-is.
    assert either_dict_or_kwargs({"a": 1}, None, "foo") == {"a": 1}

    # Keyword dictionary alone is returned as-is.
    assert either_dict_or_kwargs(None, {"a": 1}, "foo") == {"a": 1}

    # Supplying both forms is ambiguous and must raise.
    with pytest.raises(ValueError, match=r"foo"):
        either_dict_or_kwargs({"a": 1}, {"a": 1}, "foo")


def test_unique_grids():
    grid = grid1d()
    grid2 = grid1d()
    grid_different = grid1d()
    # Mutate the attrs so this grid no longer compares equal to the others.
    grid_different._attrs["something"] = "different"

    assert len(unique_grids([grid, grid2, grid_different])) == 2
    assert len(unique_grids([grid, grid2])) == 1
    assert len(unique_grids([grid, grid_different])) == 2
# Define the first vertex of every face, v. 17 | v = (np.add.outer(np.arange(nx), nx * np.arange(ny)) + np.arange(ny)).T.ravel() 18 | faces = np.column_stack((v, v + 1, v + nx + 2, v + nx + 1)) 19 | return xu.Ugrid2d(node_x, node_y, -1, faces) 20 | 21 | 22 | def test_polygonize__errors(grid): 23 | uda = xu.UgridDataArray( 24 | xr.DataArray(np.ones(grid.n_edge), dims=[grid.edge_dimension]), grid=grid 25 | ) 26 | with pytest.raises(ValueError, match="Cannot polygonize non-face dimension"): 27 | xu.polygonize(uda) 28 | 29 | uda = xu.UgridDataArray( 30 | xr.DataArray(np.ones((3, grid.n_face)), dims=["layer", grid.face_dimension]), 31 | grid=grid, 32 | ) 33 | with pytest.raises(ValueError, match="Cannot polygonize non-face dimension"): 34 | xu.polygonize(uda) 35 | 36 | 37 | def test_polygonize(grid): 38 | a = np.array([0, 0, 0, 1, 1, 1, 0, 0, 0]) 39 | uda = xu.UgridDataArray(xr.DataArray(a, dims=grid.face_dimension), grid) 40 | actual = xu.polygonize(uda) 41 | assert isinstance(actual, gpd.GeoDataFrame) 42 | assert len(actual) == 3 43 | 44 | # With a hole in the 1-valued polygon. 
45 | a = np.array([1, 1, 1, 1, 0, 1, 1, 1, 1]) 46 | uda = xu.UgridDataArray(xr.DataArray(a, dims=grid.face_dimension), grid) 47 | actual = xu.polygonize(uda) 48 | assert isinstance(actual, gpd.GeoDataFrame) 49 | assert len(actual) == 2 50 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | # Skip Tcl failures 2 | import matplotlib 3 | 4 | matplotlib.use("Agg", force=True) 5 | 6 | from tests.fixtures.fixture_regridder import ( 7 | disk, 8 | disk_layered, 9 | expected_results_centroid, 10 | expected_results_linear, 11 | expected_results_overlap, 12 | grid_data_a, 13 | grid_data_a_1d, 14 | grid_data_a_2d, 15 | grid_data_a_layered, 16 | grid_data_a_layered_1d, 17 | grid_data_a_layered_2d, 18 | grid_data_b, 19 | grid_data_b_1d, 20 | grid_data_b_2d, 21 | grid_data_b_flipped_1d, 22 | grid_data_c, 23 | grid_data_c_1d, 24 | grid_data_c_2d, 25 | grid_data_d, 26 | grid_data_d_1d, 27 | grid_data_dask_expected, 28 | grid_data_dask_expected_layered, 29 | grid_data_dask_source, 30 | grid_data_dask_source_layered, 31 | grid_data_dask_target, 32 | grid_data_e, 33 | grid_data_e_1d, 34 | quads_0_25, 35 | quads_1, 36 | quads_structured, 37 | ) 38 | 39 | __all__ = ( 40 | "disk", 41 | "disk_layered", 42 | "expected_results_centroid", 43 | "expected_results_linear", 44 | "expected_results_overlap", 45 | "grid_data_a", 46 | "grid_data_a_1d", 47 | "grid_data_a_2d", 48 | "grid_data_a_layered", 49 | "grid_data_a_layered_1d", 50 | "grid_data_a_layered_2d", 51 | "grid_data_b", 52 | "grid_data_b_1d", 53 | "grid_data_b_2d", 54 | "grid_data_b_flipped_1d", 55 | "grid_data_c", 56 | "grid_data_c_1d", 57 | "grid_data_c_2d", 58 | "grid_data_d", 59 | "grid_data_d_1d", 60 | "grid_data_dask_expected", 61 | "grid_data_dask_expected_layered", 62 | "grid_data_dask_source", 63 | "grid_data_dask_source_layered", 64 | "grid_data_dask_target", 65 | "grid_data_e", 66 | 
"grid_data_e_1d", 67 | "quads_0_25", 68 | "quads_1", 69 | "quads_structured", 70 | ) 71 | -------------------------------------------------------------------------------- /docs/_static/deltares-white.svg: -------------------------------------------------------------------------------- 1 | Artboard 1 -------------------------------------------------------------------------------- /tests/test_meshkernel_utils.py: -------------------------------------------------------------------------------- 1 | from enum import IntEnum 2 | from unittest.mock import MagicMock 3 | 4 | import pytest 5 | import shapely.geometry as sg 6 | 7 | from xugrid import meshkernel_utils as mku 8 | from xugrid.constants import MissingOptionalModule 9 | 10 | from . import requires_meshkernel 11 | 12 | try: 13 | import meshkernel as mk 14 | 15 | except ImportError: 16 | mk = MagicMock() 17 | mk.RefinementType = IntEnum( 18 | "RefinementType", ["WAVE_COURANT", "REFINEMENT_LEVELS", "RIDGE_DETECTION"] 19 | ) 20 | 21 | 22 | class Dummy(IntEnum): 23 | A = 1 24 | B = 2 25 | C = 3 26 | 27 | 28 | def test_either_string_or_enum(): 29 | assert ( 30 | mku.either_string_or_enum("wave_courant", mk.RefinementType) 31 | == mk.RefinementType.WAVE_COURANT 32 | ) 33 | assert ( 34 | mku.either_string_or_enum("WAVE_COURANT", mk.RefinementType) 35 | == mk.RefinementType.WAVE_COURANT 36 | ) 37 | assert ( 38 | mku.either_string_or_enum("refinement_levels", mk.RefinementType) 39 | == mk.RefinementType.REFINEMENT_LEVELS 40 | ) 41 | with pytest.raises(ValueError, match="Invalid option"): 42 | mku.either_string_or_enum("none", mk.RefinementType) 43 | with pytest.raises(TypeError, match="Option should be one of"): 44 | mku.either_string_or_enum(Dummy.A, mk.RefinementType) 45 | 46 | 47 | @requires_meshkernel 48 | def test_to_geometry_list(): 49 | polygon = sg.Polygon( 50 | [ 51 | [0.0, 0.0], 52 | [1.0, 0.0], 53 | [1.0, 1.0], 54 | ] 55 | ) 56 | actual = mku.to_geometry_list(polygon) 57 | assert isinstance(actual, 
mk.GeometryList) 58 | 59 | 60 | def test_missing_optional_module(): 61 | abc = MissingOptionalModule("abc") 62 | with pytest.raises(ImportError, match="abc is required for this functionality"): 63 | abc.attr 64 | -------------------------------------------------------------------------------- /tests/test_regrid/test_utils.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from xugrid.regrid import utils 4 | 5 | 6 | def test_create_linear_index(): 7 | index_a = [0, 0, 1] 8 | index_b = [0] 9 | shape = (2, 1) 10 | actual = utils.create_linear_index((index_a, index_b), shape) 11 | expected = np.array([0, 0, 1]) 12 | assert np.array_equal(actual, expected) 13 | 14 | index_a = [0, 0, 1, 1] 15 | index_b = [0, 1, 2] 16 | shape = (2, 3) 17 | actual = utils.create_linear_index((index_a, index_b), shape) 18 | expected = np.array([0, 1, 2, 0, 1, 2, 3, 4, 5, 3, 4, 5]) 19 | assert np.array_equal(actual, expected) 20 | 21 | 22 | def test_create_weights(): 23 | weights_a = [0.25, 0.25, 0.25, 0.25] 24 | weights_b = [0.5, 0.5] 25 | actual = utils.create_weights((weights_a, weights_b)) 26 | expected = np.full(8, 0.125) 27 | assert np.allclose(actual, expected) 28 | 29 | 30 | def test_broadcast(): 31 | source_shape = (3, 2) 32 | target_shape = (6, 4) 33 | index_src_a = [0, 0, 1, 1] 34 | index_src_b = [0, 1] 35 | index_tgt_a = [2, 2, 3, 3] 36 | index_tgt_b = [2, 3] 37 | weights_a = [0.5, 0.5, 0.5, 0.5] 38 | weights_b = [0.5, 0.5] 39 | actual_src, actual_tgt, actual_weights = utils.broadcast( 40 | source_shape, 41 | target_shape, 42 | (index_src_a, index_src_b), 43 | (index_tgt_a, index_tgt_b), 44 | (weights_a, weights_b), 45 | ) 46 | expected_src = np.array([0, 1, 0, 1, 2, 3, 2, 3]) 47 | expected_tgt = np.array([10, 11, 10, 11, 14, 15, 14, 15]) 48 | expected_weights = np.full(8, 0.25) 49 | assert np.array_equal(actual_src, expected_src) 50 | assert np.array_equal(actual_tgt, expected_tgt) 51 | assert 
np.allclose(actual_weights, expected_weights) 52 | 53 | 54 | def test_alt_cumsum(): 55 | a = np.ones(5) 56 | assert np.array_equal(utils.alt_cumsum(a), np.arange(5)) 57 | 58 | a = np.array([1, 3, 4]) 59 | assert np.array_equal(utils.alt_cumsum(a), [0, 1, 4]) 60 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # file based on github/gitignore 2 | 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | *.py[cod] 6 | *$py.class 7 | 8 | # C extensions 9 | *.so 10 | 11 | # Distribution / packaging 12 | .Python 13 | build/ 14 | develop-eggs/ 15 | dist/ 16 | downloads/ 17 | eggs/ 18 | .eggs/ 19 | lib/ 20 | lib64/ 21 | parts/ 22 | sdist/ 23 | var/ 24 | wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | .pytest_cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | .hypothesis/ 51 | 52 | # Translations 53 | *.mo 54 | *.pot 55 | 56 | # Django stuff: 57 | *.log 58 | .static_storage/ 59 | .media/ 60 | local_settings.py 61 | 62 | # Flask stuff: 63 | instance/ 64 | .webassets-cache 65 | 66 | # Scrapy stuff: 67 | .scrapy 68 | 69 | # Sphinx documentation 70 | docs/_build/ 71 | docs/api 72 | docs/examples 73 | docs/examples-dev 74 | docs/sample_data 75 | docs/sg_execution_times.rst 76 | 77 | # PyBuilder 78 | target/ 79 | 80 | # Jupyter Notebook 81 | .ipynb_checkpoints 82 | 83 | # pyenv 84 | .python-version 85 | 86 | # celery beat schedule file 87 | celerybeat-schedule 88 | 89 | # SageMath parsed files 90 | *.sage.py 91 | 92 | # Environments 93 | .env 94 | .venv 95 | env/ 96 | venv/ 97 | ENV/ 98 | env.bak/ 99 | venv.bak/ 100 | 101 | # Spyder project settings 102 | .spyderproject 103 | .spyproject 104 | 105 | # VScode 106 | .vscode 107 | 108 | # PyCharm 109 | .idea 110 | 111 | # Rope project settings 112 | .ropeproject 113 | 114 | # mkdocs documentation 115 | /site 116 | 117 | # mypy 118 | .mypy_cache/ 119 | 120 | # pixi environments 121 | .pixi 122 | *.egg-info 123 | 124 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | branches: 9 | - main 10 | 11 | concurrency: 12 | group: ${{ github.workflow }}-${{ github.ref }} 13 | cancel-in-progress: true 14 | 15 | jobs: 16 | lint: 17 | runs-on: ubuntu-latest 18 | steps: 19 | - name: Check out repo 20 | uses: actions/checkout@v6 21 | - name: Set up Python 22 | uses: actions/setup-python@v6 
23 | - name: Run pre-commit 24 | uses: pre-commit/action@v3.0.1 25 | test: 26 | name: ${{ matrix.pixi-environment }} - ${{ matrix.os }} 27 | runs-on: ${{ matrix.os }} 28 | strategy: 29 | fail-fast: false 30 | matrix: 31 | os: 32 | - ubuntu-latest 33 | - macOS-latest 34 | - windows-latest 35 | pixi-environment: 36 | - py313 37 | - py312 38 | - py311 39 | - py310 40 | steps: 41 | - name: Check out repo 42 | uses: actions/checkout@v6 43 | - name: Setup Pixi 44 | uses: prefix-dev/setup-pixi@v0.9.3 45 | with: 46 | manifest-path: pyproject.toml 47 | - name: Run Tests 48 | run: pixi run --environment ${{ matrix.pixi-environment }} test 49 | 50 | build: 51 | runs-on: ubuntu-latest 52 | steps: 53 | - name: Check out repo 54 | uses: actions/checkout@v6 55 | - name: Setup Pixi 56 | uses: prefix-dev/setup-pixi@v0.9.3 57 | with: 58 | manifest-path: pyproject.toml 59 | - name: Run Tests 60 | run: pixi run test 61 | - name: Publish Code Coverage 62 | uses: codecov/codecov-action@v5 63 | with: 64 | token: ${{ secrets.CODECOV_TOKEN }} 65 | fail_ci_if_error: false 66 | - name: Build Docs 67 | run: pixi run docs 68 | - name: Deploy to Github Pages 69 | if: github.ref == 'refs/heads/main' 70 | uses: peaceiris/actions-gh-pages@v4 71 | with: 72 | github_token: ${{ secrets.GITHUB_TOKEN }} 73 | publish_dir: ./docs/_build 74 | -------------------------------------------------------------------------------- /xugrid/__init__.py: -------------------------------------------------------------------------------- 1 | from xugrid import data 2 | from xugrid.core.common import ( 3 | concat, 4 | full_like, 5 | load_dataarray, 6 | load_dataset, 7 | merge, 8 | ones_like, 9 | open_dataarray, 10 | open_dataset, 11 | open_mfdataset, 12 | open_zarr, 13 | zeros_like, 14 | ) 15 | from xugrid.core.dataarray_accessor import UgridDataArrayAccessor 16 | from xugrid.core.dataset_accessor import UgridDatasetAccessor 17 | from xugrid.core.wrap import UgridDataArray, UgridDataset 18 | from xugrid.plot import plot 
19 | from xugrid.regrid.gridder import NetworkGridder 20 | from xugrid.regrid.regridder import ( 21 | BarycentricInterpolator, 22 | CentroidLocatorRegridder, 23 | OverlapRegridder, 24 | RelativeOverlapRegridder, 25 | ) 26 | from xugrid.ugrid.burn import burn_vector_geometry, earcut_triangulate_polygons 27 | from xugrid.ugrid.conventions import UgridRolesAccessor 28 | from xugrid.ugrid.partitioning import merge_partitions 29 | from xugrid.ugrid.polygonize import polygonize 30 | from xugrid.ugrid.snapping import ( 31 | create_snap_to_grid_dataframe, 32 | snap_nodes, 33 | snap_to_grid, 34 | ) 35 | from xugrid.ugrid.ugrid1d import Ugrid1d 36 | from xugrid.ugrid.ugrid2d import Ugrid2d 37 | 38 | __version__ = "0.14.3" 39 | 40 | __all__ = ( 41 | "data", 42 | "concat", 43 | "full_like", 44 | "load_dataarray", 45 | "load_dataset", 46 | "merge", 47 | "ones_like", 48 | "open_dataarray", 49 | "open_dataset", 50 | "open_mfdataset", 51 | "open_zarr", 52 | "zeros_like", 53 | "UgridDataArrayAccessor", 54 | "UgridDatasetAccessor", 55 | "UgridDataArray", 56 | "UgridDataset", 57 | "plot", 58 | "BarycentricInterpolator", 59 | "CentroidLocatorRegridder", 60 | "OverlapRegridder", 61 | "RelativeOverlapRegridder", 62 | "burn_vector_geometry", 63 | "earcut_triangulate_polygons", 64 | "NetworkGridder", 65 | "UgridRolesAccessor", 66 | "merge_partitions", 67 | "polygonize", 68 | "snap_nodes", 69 | "snap_to_grid", 70 | "create_snap_to_grid_dataframe", 71 | "Ugrid1d", 72 | "Ugrid2d", 73 | ) 74 | -------------------------------------------------------------------------------- /tests/test_interpolate.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | from scipy import sparse 4 | 5 | from xugrid.ugrid import interpolate 6 | 7 | 8 | def test_ilu0(): 9 | # Create a 1D Laplace problem: 10 | # 11 | # * Dirichlet boundary left and right of 1.0. 12 | # * Constant inflow 0.001 everywhere else. 
13 | # 14 | n = 1000 15 | d = np.ones(n) 16 | A = sparse.diags((-d[:-1], 2 * d, -d[:-1]), (-1, 0, 1)).tocsr() 17 | A[0, 0] = 1.0 18 | A[0, 1] = 0.0 19 | A[-1, -1] = 1.0 20 | A[-1, -2] = 0.0 21 | b = np.full(n, 0.001) 22 | b[0] = 1.0 23 | b[-1] = 1.0 24 | M = interpolate.ILU0Preconditioner.from_csr_matrix(A) 25 | _, info_cg = sparse.linalg.cg(A, b, maxiter=10) 26 | x_pcg, info_pcg = sparse.linalg.cg(A, b, maxiter=10, M=M) 27 | x_direct = sparse.linalg.spsolve(A, b) 28 | assert info_cg != 0 # cg does not converge 29 | assert info_pcg == 0 # preconditioned cg does converge 30 | assert np.allclose(x_pcg, x_direct) # answer matches direct solve 31 | 32 | 33 | def test_laplace_interpolate(): 34 | i = np.array([0, 1, 1, 2, 2, 3, 3]) 35 | j = np.array([1, 0, 2, 1, 3, 2, 4]) 36 | coo_content = (j, (i, j)) 37 | data = np.array([1.0, np.nan, np.nan, np.nan, 5.0]) 38 | labels = np.zeros_like(data, dtype=int) 39 | with pytest.raises(ValueError, match="connectivity is not a square matrix"): 40 | con = sparse.coo_matrix(coo_content, shape=(4, 5)).tocsr() 41 | interpolate.laplace_interpolate(data, con, labels, use_weights=False) 42 | 43 | i = np.array([0, 1, 1, 2, 2, 3, 3, 4]) 44 | j = np.array([1, 0, 2, 1, 3, 2, 4, 3]) 45 | coo_content = (j, (i, j)) 46 | data = np.array([1.0, np.nan, np.nan, np.nan, 5.0]) 47 | expected = np.arange(1.0, 6.0) 48 | con = sparse.coo_matrix(coo_content, shape=(5, 5)).tocsr() 49 | actual = interpolate.laplace_interpolate( 50 | data, con, labels, use_weights=False, direct_solve=True 51 | ) 52 | assert np.allclose(actual, expected) 53 | 54 | actual = interpolate.laplace_interpolate( 55 | data, con, labels, use_weights=False, direct_solve=False 56 | ) 57 | assert np.allclose(actual, expected) 58 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Xugrid 2 | ====== 3 | 4 | .. 
image:: https://img.shields.io/github/actions/workflow/status/deltares/xugrid/ci.yml 5 | :target: https://github.com/deltares/xugrid/actions?query=workflows%3Aci 6 | .. image:: https://img.shields.io/codecov/c/github/deltares/xugrid.svg 7 | :target: https://app.codecov.io/gh/deltares/xugrid 8 | .. image:: https://img.shields.io/badge/code%20style-black-000000.svg 9 | :target: https://github.com/psf/black 10 | .. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.10534099.svg 11 | :target: https://doi.org/10.5281/zenodo.10534099 12 | 13 | **This is a work in progress.** `See documentation `_. 14 | 15 | Xarray extension to work with 2D unstructured grids, for data and topology 16 | stored according to `UGRID conventions 17 | `_. 18 | 19 | Processing structured data with xarray is convenient and efficient. The goal of 20 | Xugrid is to extend this ease to unstructured grids. 21 | 22 | .. code:: python 23 | 24 | import matplotlib.pyplot as plt 25 | import xugrid 26 | 27 | # Get some sample data as a xugrid UgridDataArray 28 | uda = xugrid.data.elevation_nl() 29 | 30 | # Get a cross-section 31 | section_y = 475_000.0 32 | section = uda.ugrid.sel(y=section_y) 33 | 34 | # Plot unstructured grid and cross section 35 | fig, (ax0, ax1) = plt.subplots(figsize=(22.6, 10), ncols=2) 36 | uda.ugrid.plot(ax=ax0, vmin=-20, vmax=90, cmap="terrain") 37 | ax0.axhline(y=section_y, color="red") 38 | section.plot(ax=ax1, x="mesh2d_face_x") 39 | 40 | .. image:: https://raw.githubusercontent.com/Deltares/xugrid/main/docs/_static/xugrid-demo.png 41 | :target: https://github.com/deltares/xugrid 42 | 43 | Installation 44 | ------------ 45 | 46 | Install via conda from the conda-forge channel: 47 | 48 | .. code:: console 49 | 50 | conda install -c conda-forge xugrid 51 | 52 | Or from the Python Package Index: 53 | 54 | .. code:: console 55 | 56 | pip install xugrid 57 | 58 | Documentation 59 | ------------- 60 | 61 | .. 
image:: https://img.shields.io/github/actions/workflow/status/deltares/xugrid/ci.yml?style=flat-square 62 | :target: https://deltares.github.io/xugrid/ 63 | -------------------------------------------------------------------------------- /tests/test_regrid/test_unstructured.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | 4 | import xugrid 5 | from xugrid.regrid.unstructured import UnstructuredGrid2d 6 | 7 | 8 | @pytest.fixture(scope="function") 9 | def circle(): 10 | return UnstructuredGrid2d(xugrid.data.disk().grid) 11 | 12 | 13 | def test_init(): 14 | uds = xugrid.data.disk() 15 | assert isinstance(UnstructuredGrid2d(uds), UnstructuredGrid2d) 16 | assert isinstance(UnstructuredGrid2d(uds["face_z"]), UnstructuredGrid2d) 17 | assert isinstance(UnstructuredGrid2d(uds.ugrid.grid), UnstructuredGrid2d) 18 | with pytest.raises(TypeError): 19 | UnstructuredGrid2d(1) 20 | 21 | 22 | def test_grid_properties(circle): 23 | assert circle.dims == ("mesh2d_nFaces",) 24 | assert circle.shape == (384,) 25 | assert circle.size == 384 26 | assert isinstance(circle.area, np.ndarray) 27 | assert circle.area.size == 384 28 | 29 | 30 | @pytest.mark.parametrize("relative", [True, False]) 31 | def test_overlap(circle, relative): 32 | source, target, weights = circle.overlap(other=circle, relative=relative) 33 | valid = weights > 1.0e-5 34 | source = source[valid] 35 | target = target[valid] 36 | weights = weights[valid] 37 | sorter = np.argsort(source) 38 | assert np.array_equal(source[sorter], np.arange(circle.size)) 39 | assert np.array_equal(target[sorter], np.arange(circle.size)) 40 | if relative: 41 | assert np.allclose(weights[sorter], np.ones(circle.size)) 42 | else: 43 | assert np.allclose(weights[sorter], circle.area) 44 | 45 | 46 | def test_locate_centroids(circle): 47 | source, target, weights = circle.locate_centroids(circle) 48 | sorter = np.argsort(source) 49 | assert 
"""
Copied from xarray.core.utils.py

The reason is that the content of xarray.core.utils are all private methods.
Hence, Xarray provides no guarantees on breaking changes.

Xarray is licensed under Apache License 2.0:
https://github.com/pydata/xarray/blob/main/LICENSE
"""

from __future__ import annotations

from collections.abc import Hashable, Mapping
from typing import TYPE_CHECKING, Any, TypeVar, cast

if TYPE_CHECKING:
    # UgridType is only used in annotations; with postponed evaluation
    # (PEP 563, enabled by the __future__ import above) the import is not
    # needed at runtime, and guarding it avoids a runtime import of the
    # ugrid subpackage (and any potential import cycle).
    from xugrid.ugrid.ugridbase import UgridType

T = TypeVar("T")


def is_dict_like(value: Any) -> bool:
    """Return True when *value* supports mapping-style access."""
    return hasattr(value, "keys") and hasattr(value, "__getitem__")


def either_dict_or_kwargs(
    pos_kwargs: Mapping[Any, T] | None,
    kw_kwargs: Mapping[str, T],
    func_name: str,
) -> Mapping[Hashable, T]:
    """
    Resolve arguments passed either as a positional dict or as keywords.

    Parameters
    ----------
    pos_kwargs:
        Mapping passed positionally, or None.
    kw_kwargs:
        Mapping collected from **kwargs.
    func_name:
        Name of the calling method, used in error messages.

    Returns
    -------
    The single mapping that was provided.

    Raises
    ------
    ValueError
        If pos_kwargs is not dict-like, or if both forms are supplied.
    """
    if pos_kwargs is None or pos_kwargs == {}:
        # Need an explicit cast to appease mypy due to invariance; see
        # https://github.com/python/mypy/issues/6228
        return cast(Mapping[Hashable, T], kw_kwargs)

    if not is_dict_like(pos_kwargs):
        raise ValueError(f"the first argument to .{func_name} must be a dictionary")
    if kw_kwargs:
        raise ValueError(
            f"cannot specify both keyword and positional arguments to .{func_name}"
        )
    return pos_kwargs


# EDIT: copied and simplified.
class UncachedAccessor:
    """Acts like a property, but on both classes and class instances

    This class is necessary because some tools (e.g. pydoc and sphinx)
    inspect classes for which property returns itself and not the
    accessor.
    """

    def __init__(self, accessor: type) -> None:
        self._accessor = accessor

    def __get__(self, obj: None | object, cls) -> Any:
        if obj is None:
            # Accessed on the class: return the accessor type itself.
            return self._accessor

        return self._accessor(obj)  # type: ignore # assume it is a valid accessor!


def unique_grids(grids: list[UgridType]) -> list[UgridType]:
    """Return the subset of *grids* with pairwise-unequal topologies.

    Uniqueness is decided by each grid's ``equals`` method; the first
    occurrence of every distinct grid is kept, in order.
    """
    uniques: list[UgridType] = []
    for grid in grids:
        for unique in uniques:
            if grid.equals(unique):
                break
        else:
            uniques.append(grid)
    return uniques
27 | 28 | Parameters 29 | ---------- 30 | grid: Ugrid1d 31 | The grid to be used for the regridding. 32 | """ 33 | 34 | _JIT_FUNCTIONS = { 35 | k: make_regrid(f) for k, f in reduce.ABSOLUTE_OVERLAP_METHODS.items() 36 | } 37 | 38 | def __init__( 39 | self, 40 | source: "xugrid.Ugrid1d", 41 | target: "xugrid.Ugrid2d", 42 | method: Union[str, Callable] = "mean", 43 | ): 44 | self._source = Network1d(source) 45 | self._target = setup_grid(target) 46 | self._weights = None 47 | self._compute_weights(self._source, self._target, relative=False) 48 | self._setup_regrid(method) 49 | return 50 | 51 | @property 52 | def weights(self): 53 | return self.to_dataset() 54 | 55 | @weights.setter 56 | def weights(self, weights: MatrixCSR): 57 | if not isinstance(weights, MatrixCSR): 58 | raise TypeError(f"Expected MatrixCSR, received: {type(weights).__name__}") 59 | self._weights = weights 60 | return 61 | 62 | @classmethod 63 | def _weights_from_dataset(cls, dataset: xr.Dataset) -> MatrixCSR: 64 | return cls._csr_from_dataset(dataset) 65 | 66 | def _compute_weights(self, source, target, relative: bool) -> None: 67 | source, target = convert_to_match(source, target) 68 | source_index, target_index, weight_values = target.intersection_length( 69 | source, relative=relative 70 | ) 71 | self._weights = MatrixCSR.from_triplet( 72 | target_index, source_index, weight_values, n=target.size, m=source.size 73 | ) 74 | return 75 | 76 | @classmethod 77 | def from_weights( 78 | cls, 79 | weights: xr.Dataset, 80 | target: Union["xugrid.Ugrid2d", xr.DataArray, xr.Dataset], 81 | method: Union[str, Callable] = "mean", 82 | ): 83 | instance = super().from_weights(weights, target) 84 | instance._setup_regrid(method) 85 | return instance 86 | -------------------------------------------------------------------------------- /xugrid/regrid/nanpercentile.py: -------------------------------------------------------------------------------- 1 | """ 2 | Numba percentile methods allocate continuously on the 
heap. 3 | 4 | This has significant overhead when calling the reduction method millions of 5 | times -- as happens when regridding. 6 | 7 | This is a simplified port of the percentile helpers: 8 | # https://github.com/numba/numba/blob/0441bb17c7820efc2eba4fd141b68dac2afa4740/numba/np/arraymath.py#L1595 9 | """ 10 | 11 | import numba as nb 12 | import numpy as np 13 | 14 | 15 | @nb.njit(inline="always") 16 | def nan_le(a, b) -> bool: 17 | # Nan-aware < 18 | if np.isnan(a): 19 | return False 20 | elif np.isnan(b): 21 | return True 22 | else: 23 | return a < b 24 | 25 | 26 | @nb.njit 27 | def _partition(A, low, high): 28 | mid = (low + high) >> 1 29 | # NOTE: the pattern of swaps below for the pivot choice and the 30 | # partitioning gives good results (i.e. regular O(n log n)) 31 | # on sorted, reverse-sorted, and uniform arrays. Subtle changes 32 | # risk breaking this property. 33 | 34 | # Use median of three {low, middle, high} as the pivot 35 | if nan_le(A[mid], A[low]): 36 | A[low], A[mid] = A[mid], A[low] 37 | if nan_le(A[high], A[mid]): 38 | A[high], A[mid] = A[mid], A[high] 39 | if nan_le(A[mid], A[low]): 40 | A[low], A[mid] = A[mid], A[low] 41 | pivot = A[mid] 42 | 43 | A[high], A[mid] = A[mid], A[high] 44 | i = low 45 | j = high - 1 46 | while True: 47 | while i < high and nan_le(A[i], pivot): 48 | i += 1 49 | while j >= low and nan_le(pivot, A[j]): 50 | j -= 1 51 | if i >= j: 52 | break 53 | A[i], A[j] = A[j], A[i] 54 | i += 1 55 | j -= 1 56 | # Put the pivot back in its final place (all items before `i` 57 | # are smaller than the pivot, all items at/after `i` are larger) 58 | A[i], A[high] = A[high], A[i] 59 | return i 60 | 61 | 62 | @nb.njit 63 | def _select(arry, k, low, high): 64 | """Select the k'th smallest element in array[low:high + 1].""" 65 | i = _partition(arry, low, high) 66 | while i != k: 67 | if i < k: 68 | low = i + 1 69 | i = _partition(arry, low, high) 70 | else: 71 | high = i - 1 72 | i = _partition(arry, low, high) 73 | return arry[k] 
74 | 75 | 76 | @nb.njit 77 | def _select_two(arry, k, low, high): 78 | """ 79 | Select the k'th and k+1'th smallest elements in array[low:high + 1]. 80 | 81 | This is significantly faster than doing two independent selections 82 | for k and k+1. 83 | """ 84 | while True: 85 | assert high > low # by construction 86 | i = _partition(arry, low, high) 87 | if i < k: 88 | low = i + 1 89 | elif i > k + 1: 90 | high = i - 1 91 | elif i == k: 92 | _select(arry, k + 1, i + 1, high) 93 | break 94 | else: # i == k + 1 95 | _select(arry, k, low, i - 1) 96 | break 97 | 98 | return arry[k], arry[k + 1] 99 | -------------------------------------------------------------------------------- /xugrid/data/sample_data.py: -------------------------------------------------------------------------------- 1 | import importlib 2 | 3 | import numpy as np 4 | import pooch 5 | import xarray as xr 6 | 7 | import xugrid 8 | 9 | REGISTRY = pooch.create( 10 | path=pooch.os_cache("xugrid"), 11 | base_url="https://github.com/deltares/xugrid/raw/main/data/", 12 | version=None, 13 | version_dev="main", 14 | env="XUGRID_DATA_DIR", 15 | ) 16 | REGISTRY.load_registry(importlib.resources.files("xugrid.data") / "registry.txt") 17 | 18 | 19 | def xoxo(): 20 | """Fetch a simple two part synthetic unstructured grid topology.""" 21 | fname_vertices = REGISTRY.fetch("xoxo_vertices.txt") 22 | fname_triangles = REGISTRY.fetch("xoxo_triangles.txt") 23 | vertices = np.loadtxt(fname_vertices, dtype=float) 24 | triangles = np.loadtxt(fname_triangles, dtype=int) 25 | grid = xugrid.Ugrid2d( 26 | node_x=vertices[:, 0], 27 | node_y=vertices[:, 1], 28 | fill_value=-1, 29 | face_node_connectivity=triangles, 30 | ) 31 | return grid 32 | 33 | 34 | def adh_san_diego(xarray=False): 35 | """Fetch time varying output of a hydraulic simulation.""" 36 | fname = REGISTRY.fetch("ADH_SanDiego.nc") 37 | ds = xr.open_dataset(fname) 38 | ds["node_x"].attrs["standard_name"] = "projection_x_coordinate" 39 | 
ds["node_y"].attrs["standard_name"] = "projection_y_coordinate" 40 | if xarray: 41 | return ds 42 | else: 43 | grid = xugrid.Ugrid2d.from_dataset(ds) 44 | return xugrid.UgridDataset(ds, grid) 45 | 46 | 47 | def elevation_nl(xarray=False): 48 | """Fetch surface elevation dataset for the Netherlands.""" 49 | fname = REGISTRY.fetch("elevation_nl.nc") 50 | ds = xr.open_dataset(fname) 51 | ds["mesh2d_node_x"].attrs["standard_name"] = "projection_x_coordinate" 52 | ds["mesh2d_node_y"].attrs["standard_name"] = "projection_y_coordinate" 53 | ds["mesh2d_face_x"].attrs["standard_name"] = "projection_x_coordinate" 54 | ds["mesh2d_face_y"].attrs["standard_name"] = "projection_y_coordinate" 55 | if xarray: 56 | return ds 57 | else: 58 | grid = xugrid.Ugrid2d.from_dataset(ds) 59 | return xugrid.UgridDataArray(ds["elevation"], grid) 60 | 61 | 62 | def provinces_nl(): 63 | """Fetch provinces polygons for the Netherlands.""" 64 | import geopandas as gpd 65 | 66 | fname = REGISTRY.fetch("provinces-nl.geojson") 67 | gdf = gpd.read_file(fname) 68 | return gdf 69 | 70 | 71 | def hydamo_network(): 72 | """Fetch some surface water data for the Netherlands.""" 73 | import geopandas as gpd 74 | import pandas as pd 75 | 76 | def read_wkt_csv(path): 77 | df = pd.read_csv(path) 78 | return gpd.GeoDataFrame( 79 | data=df, 80 | geometry=gpd.GeoSeries.from_wkt(df["geometry"]), 81 | ) 82 | 83 | objects_fname = REGISTRY.fetch("hydamo_objects.csv") 84 | points_fname = REGISTRY.fetch("hydamo_points.csv") 85 | profiles_fname = REGISTRY.fetch("hydamo_profiles.csv") 86 | 87 | return ( 88 | read_wkt_csv(objects_fname), 89 | read_wkt_csv(points_fname), 90 | read_wkt_csv(profiles_fname), 91 | ) 92 | -------------------------------------------------------------------------------- /tests/test_regrid/test_overlap_1d.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from xugrid.regrid import overlap_1d 4 | 5 | 6 | def test_minmax(): 7 | assert 
overlap_1d.minmax(-1.0, 0.0, 2.0) == 0.0 8 | assert overlap_1d.minmax(3.0, 0.0, 2.0) == 2.0 9 | assert overlap_1d.minmax(1.0, 0.0, 2.0) == 1.0 10 | 11 | 12 | def test_find_indices(): 13 | a = np.arange(0.0, 11.0)[np.newaxis, :] 14 | b = np.arange(0.0, 12.5, 2.5)[np.newaxis, :] 15 | source_index = np.array([0]) 16 | target_index = np.array([0]) 17 | 18 | lower = overlap_1d.find_lower_indices(a, b, source_index, target_index) 19 | upper = overlap_1d.find_upper_indices(a, b, source_index, target_index) 20 | assert np.array_equal(lower, [[0, 2, 5, 7, 9]]) 21 | assert np.array_equal(upper, [[1, 4, 6, 9, 11]]) 22 | 23 | 24 | def test_vectorized_overlap(): 25 | bounds_a = np.array( 26 | [ 27 | [0.0, 3.0], 28 | [0.0, 3.0], 29 | ] 30 | ) 31 | bounds_b = np.array( 32 | [ 33 | [1.0, 2.0], 34 | [1.0, 2.0], 35 | ] 36 | ) 37 | actual = overlap_1d.vectorized_overlap(bounds_a, bounds_b) 38 | assert np.array_equal(actual, np.array([1.0, 1.0])) 39 | 40 | 41 | def test_overlap_1d(): 42 | source_bounds = np.array( 43 | [ 44 | [0.0, 1.0], 45 | [2.0, 3.0], 46 | [np.nan, np.nan], 47 | [5.0, 6.0], 48 | ] 49 | ) 50 | target_bounds = np.array( 51 | [ 52 | [0.0, 10.0], 53 | [10.0, 20.0], 54 | ] 55 | ) 56 | source, target, overlap = overlap_1d.overlap_1d(source_bounds, target_bounds) 57 | assert np.array_equal(source, [0, 1, 3]) 58 | assert np.array_equal(target, [0, 0, 0]) 59 | assert np.allclose(overlap, [1.0, 1.0, 1.0]) 60 | 61 | target_bounds = np.array( 62 | [ 63 | [0.0, 2.5], 64 | [np.nan, np.nan], 65 | ] 66 | ) 67 | source, target, overlap = overlap_1d.overlap_1d(source_bounds, target_bounds) 68 | assert np.array_equal(source, [0, 1]) 69 | assert np.array_equal(target, [0, 0]) 70 | assert np.allclose(overlap, [1.0, 0.5]) 71 | 72 | 73 | def test_overlap_1d_nd(): 74 | source_bounds = np.array( 75 | [ 76 | [ 77 | [0.0, 1.0], 78 | [2.0, 3.0], 79 | [np.nan, np.nan], 80 | [5.0, 6.0], 81 | ] 82 | ] 83 | ) 84 | target_bounds = np.array( 85 | [ 86 | [ 87 | [0.0, 10.0], 88 | [10.0, 20.0], 89 | 
], 90 | [ 91 | [0.0, 2.5], 92 | [np.nan, np.nan], 93 | ], 94 | ] 95 | ) 96 | source_index = np.array([0, 0]) 97 | target_index = np.array([0, 1]) 98 | source, target, overlap = overlap_1d.overlap_1d_nd( 99 | source_bounds, target_bounds, source_index, target_index 100 | ) 101 | assert np.array_equal(source, [0, 1, 3, 0, 1]) 102 | assert np.array_equal(target, [0, 0, 0, 2, 2]) 103 | assert np.allclose(overlap, [1.0, 1.0, 1.0, 1.0, 0.5]) 104 | -------------------------------------------------------------------------------- /data/hydamo_profiles.csv: -------------------------------------------------------------------------------- 1 | ,geometry,line_id 2 | 0,"LINESTRING (140258.605 393046.526000001, 140276.320999999 393039.037)",0 3 | 1,"LINESTRING (140269.848200001 393065.001499999, 140281.513799999 393059.564599998)",1 4 | 2,"LINESTRING (140269.300999999 393067.519000001, 140274.785399999 393065.1789)",2 5 | 3,"LINESTRING (140297.544 393137.182, 140314.335000001 393129.506999999)",3 6 | 4,"LINESTRING (140309.962200001 393157.749000002, 140322.340500001 393152.129500002)",4 7 | 5,"LINESTRING (139965.783 393165.548, 139979.182 393145.252999999)",5 8 | 6,"LINESTRING (140309.719999999 393158.515999999, 140315.209199999 393156.215599999)",6 9 | 7,"LINESTRING (139971.581 393162.131000001, 139972.259599999 393161.369600002)",7 10 | 8,"LINESTRING (139971.460499998 393165.747299999, 139983.532699998 393156.958299998)",8 11 | 9,"LINESTRING (140331.522 393211.059, 140335.370999999 393223.112)",9 12 | 10,"LINESTRING (140322.135899998 393215.3268, 140327.049899999 393228.992199998)",10 13 | 11,"LINESTRING (140320.046 393216.201000001, 140321.820500001 393221.702399999)",11 14 | 12,"LINESTRING (140024.504999999 393240.816, 140042.307999998 393225.980999999)",12 15 | 13,"LINESTRING (140028.535100002 393238.764600001, 140037.391800001 393231.950300001)",13 16 | 14,"LINESTRING (140032.616999999 393238.192000002, 140033.260299999 393237.6774)",14 17 | 15,"LINESTRING (140303.524 
393302.662999999, 140320.692000002 393302.984999999)",15 18 | 16,"LINESTRING (140091.625 393312.785, 140092.287700001 393312.227400001)",16 19 | 17,"LINESTRING (140090.846700002 393317.065099999, 140103.908500001 393307.517499998)",17 20 | 18,"LINESTRING (140087.43 393319.477000002, 140103.640000001 393305.421)",18 21 | 19,"LINESTRING (140304.467999998 393331.368000001, 140310.462299999 393331.267000001)",19 22 | 20,"LINESTRING (140305.993999999 393342.1875, 140320.190200001 393342.611299999)",20 23 | 21,"LINESTRING (140150.346999999 393387.217, 140152.3145 393385.624499999)",21 24 | 22,"LINESTRING (140147.3301 393389.952199999, 140160.845699999 393380.436099999)",22 25 | 23,"LINESTRING (140150.495999999 393397.188999999, 140166.125 393384.502)",23 26 | 24,"LINESTRING (140301.717999998 393402.738000002, 140324.749000002 393403.010000002)",24 27 | 25,"LINESTRING (140304.787 393420.401999999, 140310.358100001 393420.5647)",25 28 | 26,"LINESTRING (140304.856899999 393426.191500001, 140321.3433 393426.447299998)",26 29 | 27,"LINESTRING (140213.827 393449.063000001, 140214.519299999 393447.3292)",27 30 | 28,"LINESTRING (140212.843800001 393453.9921, 140218.9936 393439.571400002)",28 31 | 29,"LINESTRING (140227.434999999 393461.969999999, 140235.155999999 393442.241)",29 32 | 30,"LINESTRING (140293.619600002 393482.844900001, 140299.335099999 393468.523600001)",30 33 | 31,"LINESTRING (140301.221000001 393482.127, 140301.6072 393480.537000001)",31 34 | 32,"LINESTRING (140302.385000002 393489.783, 140308.344000001 393469.203000002)",32 35 | 33,"LINESTRING (140301.522 393506.197000001, 140325.386999998 393505.931000002)",33 36 | 34,"LINESTRING (140305.031399999 393508.694600001, 140320.487199999 393509.102200002)",34 37 | 35,"LINESTRING (140304.710999999 393510.166000001, 140311.720600002 393510.2929)",35 38 | 36,"LINESTRING (140305.844700001 393593.798900001, 140321.200599998 393594.9954)",36 39 | 37,"LINESTRING (140305.943 393596.884, 140311.3596 393597.0233)",37 40 | 
38,"LINESTRING (140301.032000002 393598.237, 140324.136 393598.592)",38 41 | -------------------------------------------------------------------------------- /docs/_static/enabling-delta-life.svg: -------------------------------------------------------------------------------- 1 | Artboard 1 -------------------------------------------------------------------------------- /xugrid/data/synthetic.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import xarray as xr 3 | 4 | import xugrid 5 | 6 | 7 | def transform(vertices, minx, maxx, miny): 8 | """ 9 | Transform vertices to fit within minx to maxx. 10 | 11 | Maintains x:y aspect ratio. 12 | """ 13 | x, y = vertices.T 14 | xmin = x.min() 15 | xmax = x.max() 16 | ymin = y.min() 17 | ymax = y.max() 18 | dx = xmax - xmin 19 | dy = ymax - ymin 20 | new_dx = maxx - minx 21 | new_dy = dy / dx * new_dx 22 | x = (x - xmin) * new_dx / dx + minx 23 | y = (y - ymin) * new_dy / dy + miny 24 | return np.column_stack([x, y]) 25 | 26 | 27 | def generate_disk(partitions: int, depth: int): 28 | """ 29 | Generate a triangular mesh for the unit circle. 30 | 31 | Parameters 32 | ---------- 33 | partitions: int 34 | Number of triangles around the origin. 35 | depth: int 36 | Number of "layers" of triangles around the origin. 
37 | 38 | Returns 39 | ------- 40 | vertices: np.ndarray of floats with shape ``(n_vertex, 2)`` 41 | triangles: np.ndarray of integers with shape ``(n_triangle, 3)`` 42 | """ 43 | import matplotlib.tri 44 | 45 | if partitions < 3: 46 | raise ValueError("partitions should be >= 3") 47 | 48 | N = depth + 1 49 | n_per_level = partitions * np.arange(N) 50 | n_per_level[0] = 1 51 | 52 | delta_angle = (2 * np.pi) / np.repeat(n_per_level, n_per_level) 53 | index = np.repeat(np.insert(n_per_level.cumsum()[:-1], 0, 0), n_per_level) 54 | angles = delta_angle.cumsum() 55 | angles = angles - angles[index] + 0.5 * np.pi 56 | radii = np.repeat(np.linspace(0.0, 1.0, N), n_per_level) 57 | 58 | x = np.cos(angles) * radii 59 | y = np.sin(angles) * radii 60 | triang = matplotlib.tri.Triangulation(x, y) 61 | return np.column_stack((x, y)), triang.triangles 62 | 63 | 64 | def disk(): 65 | def function_z(x, y): 66 | """ 67 | Generate a somewhat interesting surface. 68 | 69 | See: https://matplotlib.org/stable/gallery/images_contours_and_fields/tricontour_smooth_user.html 70 | """ 71 | r1 = np.sqrt((0.5 - x) ** 2 + (0.5 - y) ** 2) 72 | theta1 = np.arctan2(0.5 - x, 0.5 - y) 73 | r2 = np.sqrt((-x - 0.2) ** 2 + (-y - 0.2) ** 2) 74 | theta2 = np.arctan2(-x - 0.2, -y - 0.2) 75 | z = -( 76 | 2 * (np.exp((r1 / 10) ** 2) - 1) * 30.0 * np.cos(7.0 * theta1) 77 | + (np.exp((r2 / 10) ** 2) - 1) * 30.0 * np.cos(11.0 * theta2) 78 | + 0.7 * (x**2 + y**2) 79 | ) 80 | zmin = z.min() 81 | zmax = z.max() 82 | return (zmax - z) / (zmax - zmin) * 10.0 83 | 84 | vertices, triangles = generate_disk(6, 8) 85 | vertices = transform(vertices, 0.0, 10.0, 0.0) 86 | grid = xugrid.Ugrid2d( 87 | node_x=vertices[:, 0], 88 | node_y=vertices[:, 1], 89 | fill_value=-1, 90 | face_node_connectivity=triangles, 91 | ) 92 | 93 | ds = xr.Dataset() 94 | ds["node_z"] = xr.DataArray( 95 | data=function_z(*grid.node_coordinates.T), 96 | dims=[grid.node_dimension], 97 | ) 98 | ds["face_z"] = xr.DataArray( 99 | 
data=function_z(*grid.face_coordinates.T), 100 | dims=[grid.face_dimension], 101 | ) 102 | ds["edge_z"] = xr.DataArray( 103 | data=function_z(*grid.edge_coordinates.T), 104 | dims=[grid.edge_dimension], 105 | ) 106 | return xugrid.UgridDataset(ds, grid) 107 | -------------------------------------------------------------------------------- /tests/test_sparse.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from xugrid.core import sparse 7 | 8 | 9 | def numba_enabled() -> bool: 10 | return os.environ.get("NUMBA_DISABLE_JIT") != "1" 11 | 12 | 13 | @pytest.fixture(scope="function") 14 | def coo_matrix(): 15 | source_index = np.arange(10) 16 | target_index = np.repeat(np.arange(5), 2) 17 | weights = np.full(10, 0.5) 18 | return sparse.MatrixCOO.from_triplet(target_index, source_index, weights) 19 | 20 | 21 | @pytest.fixture(scope="function") 22 | def csr_matrix(): 23 | source_index = np.arange(10) 24 | target_index = np.repeat(np.arange(5), 2) 25 | weights = np.full(10, 0.5) 26 | return sparse.MatrixCSR.from_triplet(target_index, source_index, weights) 27 | 28 | 29 | def test_weight_matrix_coo(coo_matrix): 30 | assert isinstance(coo_matrix, sparse.MatrixCOO) 31 | assert np.allclose(coo_matrix.data, np.full(10, 0.5)) 32 | assert np.array_equal(coo_matrix.row, [0, 0, 1, 1, 2, 2, 3, 3, 4, 4]) 33 | assert np.array_equal(coo_matrix.col, np.arange(10)) 34 | assert coo_matrix.nnz == 10 35 | 36 | 37 | def test_weight_matrix_csr(csr_matrix): 38 | assert isinstance(csr_matrix, sparse.MatrixCSR) 39 | assert np.allclose(csr_matrix.data, np.full(10, 0.5)) 40 | assert np.array_equal(csr_matrix.indices, np.arange(10)) 41 | assert np.array_equal(csr_matrix.indptr, [0, 2, 4, 6, 8, 10]) 42 | assert csr_matrix.n == 5 43 | assert csr_matrix.nnz == 10 44 | 45 | 46 | @pytest.mark.skipif( 47 | numba_enabled(), 48 | reason="Numba cannot convert native range_state_int64 to Python object.", 49 
| ) 50 | def test_nzrange(csr_matrix): 51 | # These functions work fine if called inside of other numba functions when 52 | # numba is enabled. 53 | i = sparse.nzrange(csr_matrix, 0) 54 | assert np.array_equal(i, range(0, 2)) 55 | i = sparse.nzrange(csr_matrix, 1) 56 | assert np.array_equal(i, range(2, 4)) 57 | 58 | 59 | @pytest.mark.skipif( 60 | numba_enabled(), 61 | reason="Function returns a slice object; python and no-python slices don't mix.", 62 | ) 63 | def test_row_slice(csr_matrix): 64 | # These functions work fine if called inside of other numba functions when 65 | # numba is enabled. 66 | assert sparse.row_slice(csr_matrix, 0) == slice(0, 2, None) 67 | 68 | 69 | @pytest.mark.skipif( 70 | numba_enabled(), 71 | reason="Function returns a zip object; python and no-python zips don't mix.", 72 | ) 73 | def test_columns_and_values(csr_matrix): 74 | # These functions work fine if called inside of other numba functions when 75 | # numba is enabled. 76 | zipped = sparse.columns_and_values(csr_matrix, sparse.row_slice(csr_matrix, 0)) 77 | result = list(zipped) 78 | assert result == [(0, 0.5), (1, 0.5)] 79 | 80 | 81 | def test_coo_to_csr(coo_matrix): 82 | csr_matrix = coo_matrix.to_csr() 83 | assert isinstance(csr_matrix, sparse.MatrixCSR) 84 | assert np.allclose(csr_matrix.data, np.full(10, 0.5)) 85 | assert np.array_equal(csr_matrix.indices, np.arange(10)) 86 | assert np.array_equal(csr_matrix.indptr, [0, 2, 4, 6, 8, 10]) 87 | assert csr_matrix.n == 5 88 | assert csr_matrix.nnz == 10 89 | 90 | 91 | def test_csr_to_coo(csr_matrix): 92 | coo_matrix = csr_matrix.to_coo() 93 | assert isinstance(coo_matrix, sparse.MatrixCOO) 94 | assert np.allclose(coo_matrix.data, np.full(10, 0.5)) 95 | assert np.array_equal(coo_matrix.row, [0, 0, 1, 1, 2, 2, 3, 3, 4, 4]) 96 | assert np.array_equal(coo_matrix.col, np.arange(10)) 97 | assert coo_matrix.nnz == 10 98 | 99 | 100 | def test_shape(): 101 | source_index = np.arange(10) 102 | target_index = np.repeat(np.arange(5), 2) 103 | 
weights = np.full(10, 0.5) 104 | matrix = sparse.MatrixCSR.from_triplet(target_index, source_index, weights, n=20) 105 | assert matrix.n == 20 106 | assert matrix.m == 10 107 | matrix = sparse.MatrixCSR.from_triplet(target_index, source_index, weights, m=20) 108 | assert matrix.n == 5 109 | assert matrix.m == 20 110 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "xugrid" 7 | description = "Xarray extension for unstructured grids" 8 | readme = { file = "README.rst", content-type = "text/x-rst" } 9 | maintainers = [{ name = "Huite Bootsma", email = "huite.bootsma@deltares.nl" }] 10 | requires-python = ">=3.10" 11 | dependencies = [ 12 | 'pandas', 13 | 'numba', 14 | 'numba_celltree>=0.4.1', 15 | 'dask>=2025.1.0', 16 | 'numpy', 17 | 'pooch', 18 | 'scipy', 19 | 'xarray', 20 | ] 21 | dynamic = ["version"] 22 | classifiers = [ 23 | 'Development Status :: 3 - Alpha', 24 | 'Intended Audience :: Science/Research', 25 | 'License :: OSI Approved :: MIT License', 26 | 'Programming Language :: Python', 27 | 'Operating System :: OS Independent', 28 | 'Programming Language :: Python :: 3', 29 | 'Programming Language :: Python :: 3.10', 30 | 'Programming Language :: Python :: 3.11', 31 | 'Programming Language :: Python :: 3.12', 32 | 'Programming Language :: Python :: 3.13', 33 | 'Programming Language :: Python :: Implementation :: CPython', 34 | 'Topic :: Scientific/Engineering', 35 | ] 36 | keywords = ['mesh', 'ugrid', 'unstructured grid', 'xarray'] 37 | license = { text = "MIT" } 38 | 39 | [project.urls] 40 | Home = "https://github.com/deltares/xugrid" 41 | Code = "https://github.com/deltares/xugrid" 42 | Issues = "https://github.com/deltares/xugrid/issues" 43 | 44 | [project.optional-dependencies] 45 | all = [ 46 | 'geopandas', 47 | 
'mapbox_earcut', 48 | 'matplotlib', 49 | 'meshkernel >= 3.0.0', 50 | 'netcdf4', 51 | 'pooch', 52 | 'shapely >= 2.0', 53 | 'pyproj', 54 | 'zarr', 55 | ] 56 | 57 | [tool.hatch.version] 58 | path = "xugrid/__init__.py" 59 | 60 | [tool.hatch.build.targets.sdist] 61 | only-include = ["xugrid", "tests"] 62 | 63 | [tool.isort] 64 | profile = "black" 65 | 66 | [tool.coverage.report] 67 | exclude_lines = [ 68 | "pragma: no cover", 69 | "@abc.abstractmethod", 70 | ] 71 | 72 | [tool.pixi.project] 73 | channels = ["conda-forge"] 74 | platforms = ["win-64", "linux-64", "osx-64", "osx-arm64"] 75 | 76 | [tool.pixi.pypi-dependencies] 77 | xugrid = { path = ".", editable = true } 78 | 79 | [tool.pixi.dependencies] 80 | dask = ">=2025.1.0" 81 | geopandas = "*" 82 | mapbox_earcut = "*" 83 | matplotlib-base = "*" 84 | netcdf4 = "*" 85 | numba_celltree = ">=0.4.1" 86 | numpy = "*" 87 | pip = "*" 88 | pooch = "*" 89 | pre-commit = "*" 90 | pydata-sphinx-theme = "*" 91 | pymetis = "*" 92 | pyproj = "*" 93 | pytest = "*" 94 | pytest-cases = "*" 95 | pytest-cov = "*" 96 | python = ">=3.10" 97 | ruff = "*" 98 | shapely = ">=2.0" 99 | scipy = "*" 100 | sphinx = "*" 101 | sphinx-gallery = "*" 102 | xarray = "*" 103 | zarr = "*" 104 | ipykernel = "*" # So we can run examples 105 | twine = "*" 106 | python-build = "*" 107 | 108 | [tool.pixi.tasks] 109 | install-pre-commit = "pre-commit install" 110 | pre-commit = "pre-commit run --all-files" 111 | test = "NUMBA_DISABLE_JIT=1 pytest --cov=xugrid --cov-report xml --cov-report term" 112 | docs = "NUMBA_DISABLE_JIT=1 sphinx-build docs docs/_build" 113 | all = { depends-on = ["pre-commit", "test", "docs"]} 114 | pypi-publish = "rm --recursive --force dist && python -m build && twine check dist/* && twine upload dist/*" 115 | 116 | [tool.pixi.feature.py312.dependencies] 117 | python = "3.12.*" 118 | 119 | [tool.pixi.feature.py313.dependencies] 120 | python = "3.13.*" 121 | 122 | [tool.pixi.feature.py311.dependencies] 123 | python = "3.11.*" 124 | 125 
| [tool.pixi.feature.py310.dependencies] 126 | python = "3.10.*" 127 | 128 | [tool.pixi.environments] 129 | default = { features = ["py312"], solve-group = "py312" } 130 | py313 = { features = ["py313"], solve-group = "py313" } 131 | py312 = { features = ["py312"], solve-group = "py312" } 132 | py311 = ["py311"] 133 | py310 = ["py310"] 134 | 135 | [tool.ruff.lint] 136 | # See https://docs.astral.sh/ruff/rules/ 137 | select = ["C4", "D2", "D3", "D4", "E", "F", "I", "NPY", "PD"] 138 | ignore = [ 139 | "D202", 140 | "D205", 141 | "D206", 142 | "D400", 143 | "D404", 144 | "E402", 145 | "E501", 146 | "E703", 147 | "PD002", 148 | "PD901", 149 | "PD003", 150 | "PD004", 151 | ] 152 | ignore-init-module-imports = true 153 | 154 | [tool.ruff.lint.pydocstyle] 155 | convention = "numpy" 156 | -------------------------------------------------------------------------------- /data/xoxo_triangles.txt: -------------------------------------------------------------------------------- 1 | 123 85 122 2 | 61 94 110 3 | 9 10 2 4 | 83 100 119 5 | 81 37 99 6 | 111 112 95 7 | 89 85 99 8 | 3 1 8 9 | 12 137 8 10 | 89 26 106 11 | 20 105 106 12 | 8 1 6 13 | 1 0 6 14 | 14 82 137 15 | 147 10 9 16 | 128 129 151 17 | 160 148 159 18 | 93 108 109 19 | 151 129 28 20 | 139 108 93 21 | 124 94 57 22 | 122 85 92 23 | 33 84 36 24 | 121 105 20 25 | 171 170 22 26 | 142 86 38 27 | 105 121 160 28 | 37 32 99 29 | 34 151 28 30 | 4 137 82 31 | 81 98 76 32 | 146 147 17 33 | 65 110 60 34 | 14 18 82 35 | 130 114 95 36 | 4 3 137 37 | 174 96 140 38 | 5 161 162 39 | 172 18 170 40 | 85 98 99 41 | 83 86 100 42 | 75 80 96 43 | 16 80 19 44 | 13 96 80 45 | 7 140 11 46 | 186 120 88 47 | 21 19 80 48 | 83 120 101 49 | 174 75 96 50 | 79 104 100 51 | 150 127 149 52 | 48 108 139 53 | 13 11 96 54 | 118 126 44 55 | 29 26 89 56 | 35 101 30 57 | 92 128 122 58 | 102 83 87 59 | 27 88 102 60 | 104 79 155 61 | 35 38 86 62 | 84 90 74 63 | 101 35 86 64 | 33 90 84 65 | 98 81 99 66 | 21 80 103 67 | 13 80 16 68 | 53 51 138 69 | 98 85 123 70 | 
99 32 29 71 | 39 37 81 72 | 29 89 99 73 | 130 125 185 74 | 42 163 179 75 | 123 122 178 76 | 169 168 116 77 | 142 41 157 78 | 70 72 73 79 | 62 111 58 80 | 126 47 44 81 | 166 163 165 82 | 186 30 101 83 | 115 117 81 84 | 31 27 87 85 | 111 114 58 86 | 41 45 157 87 | 126 77 127 88 | 20 106 26 89 | 134 143 64 90 | 106 92 89 91 | 91 92 105 92 | 68 134 64 93 | 81 76 115 94 | 24 88 27 95 | 132 59 68 96 | 134 55 149 97 | 53 124 57 98 | 84 74 136 99 | 68 59 134 100 | 70 66 133 101 | 97 118 117 102 | 111 62 112 103 | 92 106 105 104 | 161 4 82 105 | 139 45 48 106 | 81 117 39 107 | 48 52 108 108 | 127 77 145 109 | 75 103 80 110 | 79 142 157 111 | 120 102 88 112 | 87 90 31 113 | 84 43 40 114 | 46 43 136 115 | 11 140 96 116 | 103 88 24 117 | 136 135 46 118 | 186 103 75 119 | 56 94 109 120 | 33 31 90 121 | 65 67 110 122 | 101 120 186 123 | 85 89 92 124 | 61 57 94 125 | 115 116 97 126 | 66 70 73 127 | 110 94 60 128 | 67 65 69 129 | 90 87 119 130 | 53 138 124 131 | 41 142 38 132 | 61 110 67 133 | 71 67 69 134 | 24 21 103 135 | 60 94 56 136 | 154 46 135 137 | 104 90 119 138 | 52 109 108 139 | 86 83 101 140 | 74 90 104 141 | 121 20 15 142 | 84 40 36 143 | 52 56 109 144 | 27 102 87 145 | 62 63 112 146 | 114 113 58 147 | 63 143 112 148 | 112 143 144 149 | 125 130 145 150 | 114 111 95 151 | 113 114 185 152 | 116 115 76 153 | 115 97 117 154 | 164 123 178 155 | 125 168 169 156 | 97 167 118 157 | 39 117 44 158 | 118 77 126 159 | 44 117 118 160 | 83 119 87 161 | 104 119 100 162 | 102 120 83 163 | 88 103 186 164 | 15 146 121 165 | 17 148 146 166 | 128 92 91 167 | 98 123 76 168 | 107 178 122 169 | 107 151 180 170 | 93 109 124 171 | 124 109 94 172 | 167 125 145 173 | 166 169 116 174 | 144 127 145 175 | 50 47 126 176 | 149 127 78 177 | 50 126 127 178 | 128 91 129 179 | 107 122 128 180 | 91 160 159 181 | 152 28 129 182 | 130 95 145 183 | 185 125 169 184 | 175 176 177 185 | 162 141 5 186 | 132 133 66 187 | 133 132 68 188 | 55 134 153 189 | 143 134 78 190 | 155 74 104 191 | 135 156 138 192 | 84 136 
43 193 | 136 74 155 194 | 8 137 3 195 | 14 137 12 196 | 135 138 51 197 | 124 138 93 198 | 93 156 139 199 | 156 155 139 200 | 7 141 140 201 | 131 173 174 202 | 5 141 7 203 | 131 140 141 204 | 79 100 142 205 | 142 100 86 206 | 64 143 63 207 | 144 143 78 208 | 127 144 78 209 | 95 112 144 210 | 77 167 145 211 | 95 144 145 212 | 146 15 10 213 | 158 23 159 214 | 158 159 148 215 | 10 147 146 216 | 121 146 148 217 | 134 149 78 218 | 127 150 50 219 | 180 151 34 220 | 149 55 150 221 | 128 151 107 222 | 155 79 157 223 | 159 152 129 224 | 134 59 153 225 | 135 51 154 226 | 136 155 135 227 | 138 156 93 228 | 155 156 135 229 | 45 139 157 230 | 155 157 139 231 | 152 159 23 232 | 148 17 158 233 | 91 159 129 234 | 105 160 91 235 | 121 148 160 236 | 172 162 82 237 | 162 161 82 238 | 141 162 131 239 | 165 181 49 240 | 123 164 76 241 | 42 182 163 242 | 116 76 164 243 | 184 169 183 244 | 116 164 166 245 | 163 166 164 246 | 166 165 183 247 | 118 167 77 248 | 168 97 116 249 | 125 167 168 250 | 97 168 167 251 | 49 183 165 252 | 169 184 185 253 | 175 171 22 254 | 171 172 170 255 | 131 162 172 256 | 172 82 18 257 | 131 172 171 258 | 131 171 175 259 | 177 186 187 260 | 131 174 140 261 | 174 173 187 262 | 177 176 25 263 | 131 175 173 264 | 177 25 186 265 | 175 22 176 266 | 173 175 177 267 | 180 178 107 268 | 164 178 163 269 | 34 179 180 270 | 163 178 180 271 | 163 180 179 272 | 42 181 182 273 | 182 165 163 274 | 165 182 181 275 | 184 54 185 276 | 166 183 169 277 | 113 185 54 278 | 183 49 184 279 | 130 185 114 280 | 186 75 187 281 | 186 25 30 282 | 174 187 75 283 | 177 187 173 284 | -------------------------------------------------------------------------------- /xugrid/core/sparse.py: -------------------------------------------------------------------------------- 1 | """ 2 | Custom Sparse Matrix utilities. 3 | 4 | Numba cannot deal with scipy.sparse objects directly. 
The data 5 | structures are mostly a collection of numpy arrays, which can 6 | be neatly represented by (typed) namedtuples, which numba accepts. 7 | """ 8 | 9 | from typing import NamedTuple 10 | 11 | import numba 12 | import numpy as np 13 | from scipy import sparse 14 | 15 | from xugrid.constants import FloatArray, IntArray, IntDType 16 | 17 | 18 | class MatrixCOO(NamedTuple): 19 | """ 20 | A sparse matrix in COOrdinate format, also known as the "ijv" or "triplet" 21 | format. More or less matches the scipy.sparse.coo_matrix. 22 | 23 | NamedTuple for easy ingestion by numba. 24 | 25 | Parameters 26 | ---------- 27 | data: np.ndarray of floats 28 | The regridding weights. 29 | row: np.ndarray of integers 30 | The linear index into the target grid. 31 | col: np.ndarray of integers 32 | The linear index into the source grid. 33 | n: int 34 | The number of rows. 35 | m: int 36 | The number of columns. 37 | nnz: int 38 | The number of non-zero values. 39 | """ 40 | 41 | data: FloatArray 42 | row: IntArray 43 | col: IntArray 44 | n: int 45 | m: int 46 | nnz: int 47 | 48 | @staticmethod 49 | def from_triplet(row, col, data, n=None, m=None) -> "MatrixCOO": 50 | if n is None: 51 | n = row.max() + 1 52 | if m is None: 53 | m = col.max() + 1 54 | nnz = data.size 55 | return MatrixCOO(data, row, col, n, m, nnz) 56 | 57 | def to_csr(self) -> "MatrixCSR": 58 | """ 59 | Convert COO matrix to CSR matrix. 60 | 61 | Assumes the COO matrix indices are already sorted by row number! 62 | """ 63 | i = np.cumsum(np.bincount(self.row, minlength=self.n)) 64 | indptr = np.empty(i.size + 1, dtype=IntDType) 65 | indptr[0] = 0 66 | indptr[1:] = i 67 | return MatrixCSR( 68 | self.data, 69 | self.col, 70 | indptr, 71 | self.n, 72 | self.m, 73 | self.nnz, 74 | ) 75 | 76 | 77 | class MatrixCSR(NamedTuple): 78 | """ 79 | Compressed Sparse Row matrix. The row indices are compressed; all values 80 | must therefore be sorted by row number. More or less matches the 81 | scipy.sparse.csr_matrix.
 82 | 83 | NamedTuple for easy ingestion by numba. 84 | 85 | Parameters 86 | ---------- 87 | data: np.ndarray of floats 88 | The values of the matrix. 89 | indices: np.ndarray of integers 90 | The column numbers of the CSR format. 91 | indptr: np.ndarray of integers 92 | The row index CSR pointer array. 93 | Values for row i (target index i) are stored in: 94 | indices[indptr[i]: indptr[i + 1]] 95 | n: int 96 | The number of rows. 97 | m: int 98 | The number of columns. 99 | nnz: int 100 | The number of non-zero values. 101 | """ 102 | 103 | data: FloatArray 104 | indices: IntArray 105 | indptr: IntArray 106 | n: int 107 | m: int 108 | nnz: int 109 | 110 | @staticmethod 111 | def from_csr_matrix(A: sparse.csr_matrix) -> "MatrixCSR": 112 | n, m = A.shape 113 | return MatrixCSR(A.data, A.indices, A.indptr, n, m, A.nnz) 114 | 115 | @staticmethod 116 | def from_triplet( 117 | row, 118 | col, 119 | data, 120 | n=None, 121 | m=None, 122 | ) -> "MatrixCSR": 123 | return MatrixCOO.from_triplet(row, col, data, n, m).to_csr() 124 | 125 | def to_coo(self) -> MatrixCOO: 126 | """ 127 | Convert CSR matrix to COO matrix. 128 | 129 | Expand the indptr to full row numbers.
130 | """ 131 | n_repeat = np.diff(self.indptr) 132 | row = np.repeat(np.arange(self.n), n_repeat) 133 | return MatrixCOO(self.data, row, self.indices, self.n, self.m, self.nnz) 134 | 135 | 136 | @numba.njit(inline="always") 137 | def nzrange(A: MatrixCSR, row: int) -> range: 138 | """Return the non-zero indices of a single row.""" 139 | start = A.indptr[row] 140 | end = A.indptr[row + 1] 141 | return range(start, end) 142 | 143 | 144 | @numba.njit(inline="always") 145 | def row_slice(A, row: int) -> slice: 146 | """Return the indices or data slice of a single row.""" 147 | start = A.indptr[row] 148 | end = A.indptr[row + 1] 149 | return slice(start, end) 150 | 151 | 152 | @numba.njit(inline="always") 153 | def columns_and_values(A, slice): 154 | return zip(A.indices[slice], A.data[slice]) 155 | -------------------------------------------------------------------------------- /data/xoxo_vertices.txt: -------------------------------------------------------------------------------- 1 | 5.000e+00 2.083e+01 2 | 6.690e+00 2.429e+01 3 | 7.102e+00 1.836e+01 4 | 8.092e+00 2.908e+01 5 | 8.834e+00 3.551e+01 6 | 9.988e+00 4.334e+01 7 | 1.015e+01 2.157e+01 8 | 1.188e+01 5.043e+01 9 | 1.311e+01 2.303e+01 10 | 1.362e+01 1.588e+01 11 | 1.444e+01 1.976e+01 12 | 1.494e+01 5.802e+01 13 | 1.601e+01 2.504e+01 14 | 1.757e+01 6.288e+01 15 | 2.000e+01 2.795e+01 16 | 2.029e+01 2.231e+01 17 | 2.112e+01 6.667e+01 18 | 2.194e+01 1.102e+01 19 | 2.351e+01 3.155e+01 20 | 2.483e+01 6.849e+01 21 | 2.763e+01 2.685e+01 22 | 2.846e+01 6.997e+01 23 | 2.895e+01 3.988e+01 24 | 3.002e+01 7.144e+00 25 | 3.274e+01 6.972e+01 26 | 3.439e+01 4.771e+01 27 | 3.472e+01 3.089e+01 28 | 3.777e+01 6.956e+01 29 | 3.802e+01 5.000e+00 30 | 3.876e+01 3.336e+01 31 | 4.019e+01 4.903e+01 32 | 4.140e+01 7.220e+01 33 | 4.316e+01 3.468e+01 34 | 4.429e+01 7.566e+01 35 | 4.544e+01 5.000e+00 36 | 4.712e+01 5.028e+01 37 | 4.783e+01 7.863e+01 38 | 4.841e+01 3.584e+01 39 | 5.097e+01 5.340e+01 40 | 5.229e+01 3.683e+01 41 | 5.270e+01 
7.995e+01 42 | 5.511e+01 5.612e+01 43 | 5.525e+01 6.484e+00 44 | 5.616e+01 7.995e+01 45 | 5.682e+01 3.963e+01 46 | 5.896e+01 5.727e+01 47 | 6.226e+01 7.822e+01 48 | 6.276e+01 4.433e+01 49 | 6.540e+01 5.678e+01 50 | 6.622e+01 1.044e+01 51 | 6.630e+01 4.565e+01 52 | 6.845e+01 7.500e+01 53 | 7.166e+01 5.628e+01 54 | 7.397e+01 7.170e+01 55 | 7.488e+01 1.514e+01 56 | 7.686e+01 4.870e+01 57 | 7.773e+01 5.578e+01 58 | 7.949e+01 6.807e+01 59 | 8.312e+01 2.240e+01 60 | 8.365e+01 5.234e+01 61 | 8.439e+01 5.673e+01 62 | 8.494e+01 6.519e+01 63 | 8.626e+01 2.784e+01 64 | 8.865e+01 3.666e+01 65 | 8.898e+01 4.103e+01 66 | 8.941e+01 5.873e+01 67 | 8.953e+01 5.606e+01 68 | 9.071e+01 6.362e+01 69 | 9.112e+01 4.721e+01 70 | 9.217e+01 6.090e+01 71 | 9.351e+01 5.505e+01 72 | 9.458e+01 6.354e+01 73 | 9.500e+01 6.032e+01 74 | 9.148e+01 5.758e+01 75 | 5.443e+01 7.062e+01 76 | 2.786e+01 5.759e+01 77 | 5.301e+01 2.589e+01 78 | 6.808e+01 3.544e+01 79 | 7.955e+01 3.954e+01 80 | 5.503e+01 6.338e+01 81 | 2.533e+01 6.275e+01 82 | 5.163e+01 3.129e+01 83 | 1.653e+01 3.485e+01 84 | 4.247e+01 5.993e+01 85 | 5.146e+01 7.489e+01 86 | 4.359e+01 2.525e+01 87 | 4.594e+01 5.565e+01 88 | 4.302e+01 6.616e+01 89 | 3.502e+01 6.225e+01 90 | 3.919e+01 2.812e+01 91 | 4.828e+01 6.952e+01 92 | 3.529e+01 1.734e+01 93 | 3.851e+01 2.227e+01 94 | 6.916e+01 6.454e+01 95 | 7.964e+01 6.178e+01 96 | 7.643e+01 2.987e+01 97 | 2.139e+01 5.763e+01 98 | 6.053e+01 3.069e+01 99 | 4.817e+01 2.753e+01 100 | 4.408e+01 3.000e+01 101 | 4.879e+01 6.152e+01 102 | 4.038e+01 5.468e+01 103 | 3.944e+01 6.333e+01 104 | 3.014e+01 6.190e+01 105 | 5.195e+01 6.677e+01 106 | 3.233e+01 2.279e+01 107 | 3.503e+01 2.639e+01 108 | 4.468e+01 1.472e+01 109 | 6.880e+01 5.992e+01 110 | 7.416e+01 6.120e+01 111 | 8.388e+01 6.101e+01 112 | 8.101e+01 2.724e+01 113 | 8.295e+01 3.347e+01 114 | 7.900e+01 1.877e+01 115 | 7.606e+01 2.394e+01 116 | 5.598e+01 2.953e+01 117 | 6.003e+01 2.319e+01 118 | 5.713e+01 3.452e+01 119 | 6.253e+01 3.589e+01 120 | 4.719e+01 
6.543e+01 121 | 3.799e+01 5.867e+01 122 | 2.639e+01 2.066e+01 123 | 4.331e+01 1.990e+01 124 | 4.890e+01 2.171e+01 125 | 7.405e+01 6.645e+01 126 | 6.874e+01 2.737e+01 127 | 6.566e+01 4.003e+01 128 | 7.264e+01 4.078e+01 129 | 4.005e+01 1.627e+01 130 | 3.639e+01 1.111e+01 131 | 7.308e+01 2.710e+01 132 | 1.999e+01 4.446e+01 133 | 8.659e+01 5.420e+01 134 | 9.232e+01 5.113e+01 135 | 8.476e+01 4.596e+01 136 | 6.217e+01 7.048e+01 137 | 5.799e+01 7.478e+01 138 | 1.240e+01 3.184e+01 139 | 6.908e+01 6.979e+01 140 | 6.263e+01 6.291e+01 141 | 1.784e+01 5.244e+01 142 | 1.612e+01 4.770e+01 143 | 5.273e+01 5.972e+01 144 | 8.410e+01 3.810e+01 145 | 7.802e+01 3.470e+01 146 | 7.295e+01 3.369e+01 147 | 2.207e+01 1.681e+01 148 | 1.778e+01 1.345e+01 149 | 2.824e+01 1.378e+01 150 | 7.672e+01 4.369e+01 151 | 7.158e+01 4.717e+01 152 | 4.173e+01 9.261e+00 153 | 3.402e+01 6.072e+00 154 | 8.025e+01 5.052e+01 155 | 6.535e+01 7.661e+01 156 | 5.824e+01 6.729e+01 157 | 6.511e+01 6.686e+01 158 | 5.873e+01 6.144e+01 159 | 2.598e+01 9.081e+00 160 | 3.191e+01 1.121e+01 161 | 3.049e+01 1.807e+01 162 | 9.411e+00 3.942e+01 163 | 1.518e+01 4.057e+01 164 | 5.298e+01 1.448e+01 165 | 5.464e+01 2.018e+01 166 | 6.050e+01 1.539e+01 167 | 5.872e+01 1.920e+01 168 | 6.550e+01 3.117e+01 169 | 6.382e+01 2.646e+01 170 | 6.770e+01 1.926e+01 171 | 2.623e+01 3.571e+01 172 | 2.358e+01 4.042e+01 173 | 1.934e+01 3.854e+01 174 | 2.533e+01 5.018e+01 175 | 2.218e+01 5.328e+01 176 | 2.674e+01 4.489e+01 177 | 3.167e+01 4.379e+01 178 | 2.975e+01 4.853e+01 179 | 4.890e+01 1.694e+01 180 | 5.035e+01 5.742e+00 181 | 4.719e+01 1.003e+01 182 | 6.074e+01 8.463e+00 183 | 5.665e+01 1.120e+01 184 | 6.528e+01 1.513e+01 185 | 7.055e+01 1.279e+01 186 | 7.283e+01 1.989e+01 187 | 3.336e+01 5.412e+01 188 | 2.888e+01 5.310e+01 189 | -------------------------------------------------------------------------------- /data/hydamo_objects.csv: -------------------------------------------------------------------------------- 1 | ,geometry,type 2 | 
0,"LINESTRING Z (140248.2638934682 393000 99, 140275.435199998 393062.397599999 99, 140316.294500001 393154.874299999 99, 140342.512499999 393214.639899999 99, 140342.670400001 393216.418200001 99, 140333.2227 393219.416299999 99, 140324.620700002 393222.236900002 99, 140313.096999999 393225.987100001 99, 140313.085999999 393225.991999999 99)",primary 3 | 1,"LINESTRING Z (140034.773699999 393053.626499999 99, 140008.798 393095.008000001 99, 139979.511999998 393141.831 99, 139973.864500001 393150.348400001 99, 139971.7841 393153.500799999 99, 139971.193 393154.837000001 99)",secondary 4 | 2,"LINESTRING Z (139971.193 393154.837000001 99, 139972.094999999 393155.728 99, 139977.019200001 393161.700399999 99, 139987.521899998 393174.8814 99)",primary 5 | 3,"LINESTRING Z (140417.693999998 393241.623 99, 140418.789000001 393213.677000001 99, 140418.105 393203.677000001 99, 140412.351 393201.074000001 99, 140407.419 393202.170000002 99, 140382.624000002 393211.348000001 99, 140365.09 393218.881999999 99, 140355.226 393222.033 99, 140345.363000002 393225.184 99, 140321.0845 393232.7115 99, 140313.850299999 393235.721299998 99, 140313.055 393236.274999999 99)",secondary 6 | 4,"LINESTRING Z (139990.815200001 393172.8094 99, 139990.6545 393172.929099999 99, 139988.721999999 393174.242199998 99, 139987.521899998 393174.8814 99)",secondary 7 | 5,"LINESTRING Z (140313.085999999 393225.991999999 99, 140313.1578 393226.3321 99, 140313.201499999 393235.670899998 99, 140313.055300001 393236.274900001 99)",primary 8 | 6,"LINESTRING Z (140288.923 393236.280000001 99, 140301.313499998 393230.917300001 99, 140307.857000001 393228.1087 99, 140312.762200002 393226.244800001 99, 140313.085999999 393225.991999999 99)",secondary 9 | 7,"LINESTRING Z (140062.956 393420.105 99, 140038.329999998 393388.405000001 99, 140022.741999999 393368.625 99, 140021.824999999 393368.625 99, 140017.239999998 393372.293000001 99, 140015.537 393372.293000001 99, 139975.061000001 393321.861000001 99, 
139960.914000001 393304.831999999 99, 139958.425000001 393301.949999999 99, 139946.243000001 393287.541000001 99, 139942.182 393283.219000001 99, 139935.107999999 393277.585999999 99, 139913.364 393259.640000001 99, 139908.124000002 393255.840999998 99, 139907.862 393253.482999999 99, 139913.232999999 393245.493000001 99, 139941.134 393201.741999999 99, 139956.590999998 393177.508000001 99, 139966.546 393161.265000001 99, 139968.331599999 393158.7775 99, 139970.375399999 393155.708500002 99, 139971.193 393154.837000001 99)",secondary 10 | 8,"LINESTRING Z (139987.521899998 393174.8814 99, 140034.6657 393234.047800001 99, 140091.780000001 393305.390999999 99, 140094.973000001 393309.967999999 99, 140097.024700001 393312.549199998 99, 140149.602000002 393378.147999998 99, 140154.559 393384.862399999 99, 140167.522 393400.642000001 99, 140195.730999999 393436.289999999 99, 140195.8378 393436.806400001 99, 140203.280400001 393440.604699999 99, 140203.302999999 393440.960999999 99)",primary 11 | 9,"LINESTRING Z (140313.055300001 393236.274900001 99, 140313.056000002 393236.476 99, 140313.057700001 393342.398400001 99, 140313.159000002 393370.576000001 99, 140313.184599999 393371.4844 99, 140313.158500001 393388.725699998 99, 140313.138 393389.752 99, 140313.2674 393426.322000001 99, 140313.364599999 393480.583000001 99)",primary 12 | 10,"LINESTRING Z (140208.749000002 393401.039000001 99, 140199.276000001 393429.844999999 99, 140196.759 393434.942000002 99, 140195.730999999 393436.289999999 99)",secondary 13 | 11,"LINESTRING Z (140214.669 393409.649999999 99, 140210.094000001 393422.298999999 99, 140209.017999999 393426.603999998 99, 140205.515799999 393436.484499998 99, 140204.0669 393439.897300001 99, 140203.302999999 393440.960999999 99)",secondary 14 | 12,"LINESTRING Z (140203.302999999 393440.960999999 99, 140205.960999999 393441.662 99, 140206.477699999 393441.9124 99, 140212.3255 393444.189599998 99, 140212.877 393444.392999999 99, 140216.3675 393445.729400001 99, 
140296.239999998 393476.278999999 99, 140308.156 393480.289999999 99, 140313.364599999 393480.583000001 99)",primary 15 | 13,"LINESTRING Z (140136.627999999 393526.712000001 99, 140144.701000001 393515.41 99, 140161.386 393490.651999999 99, 140170.805 393475.581999999 99, 140185.605 393454.859999999 99, 140191.0913 393446.287300002 99, 140195.592 393437.254500002 99, 140195.8378 393436.806400001 99)",secondary 16 | 14,"LINESTRING Z (140102.451000001 393564.656 99, 140134.206 393541.243999999 99, 140138.780999999 393536.938000001 99, 140154.120000001 393517.293000001 99, 140161.923999999 393505.453000002 99, 140183.453000002 393473.429000001 99, 140190.988000002 393460.780999999 99, 140200.904100001 393445.054099999 99, 140202.6941 393441.763300002 99, 140203.302999999 393440.960999999 99)",secondary 17 | 15,"LINESTRING Z (140313.364599999 393480.583000001 99, 140313.457800001 393508.9168 99, 140313.490699999 393594.394699998 99, 140313.42264932147 393600 99)",primary 18 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration file for the Sphinx documentation builder. 4 | # 5 | # This file does only contain a selection of the most common options. For a 6 | # full list see the documentation: 7 | # http://www.sphinx-doc.org/en/stable/config 8 | 9 | # -- Path setup -------------------------------------------------------------- 10 | 11 | # If extensions (or modules to document with autodoc) are in another directory, 12 | # add these directories to sys.path here. If the directory is relative to the 13 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
14 | # 15 | 16 | import xugrid 17 | 18 | # -- Project information ----------------------------------------------------- 19 | 20 | project = "Xugrid" 21 | copyright = "Deltares" 22 | author = "Huite Bootsma" 23 | 24 | # The short X.Y version. 25 | version = xugrid.__version__.split("+")[0] 26 | # The full version, including alpha/beta/rc tags. 27 | release = xugrid.__version__ 28 | 29 | # -- General configuration --------------------------------------------------- 30 | 31 | # If your documentation needs a minimal Sphinx version, state it here. 32 | # 33 | # needs_sphinx = '1.0' 34 | 35 | # Add any Sphinx extension module names here, as strings. They can be 36 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 37 | # ones. 38 | extensions = [ 39 | "sphinx.ext.autodoc", 40 | "sphinx.ext.autosummary", 41 | "sphinx.ext.viewcode", 42 | "sphinx.ext.todo", 43 | "sphinx.ext.napoleon", 44 | "sphinx_gallery.gen_gallery", 45 | ] 46 | 47 | sphinx_gallery_conf = { 48 | "examples_dirs": [ 49 | "../examples", 50 | "../examples-dev", 51 | "../data/examples", 52 | ], # path to your example scripts 53 | "gallery_dirs": [ 54 | "examples", 55 | "examples-dev", 56 | "sample_data", 57 | ], # path to where to save gallery generated output 58 | "filename_pattern": ".py", 59 | "abort_on_example_error": True, 60 | "download_all_examples": False, 61 | } 62 | 63 | # Add any paths that contain templates here, relative to this directory. 64 | templates_path = ["_templates"] 65 | 66 | # The suffix(es) of source filenames. 67 | # You can specify multiple suffix as a list of string: 68 | # 69 | # source_suffix = ['.rst', '.md'] 70 | source_suffix = ".rst" 71 | 72 | # The master toctree document. 73 | master_doc = "index" 74 | 75 | # The language for content autogenerated by Sphinx. Refer to documentation 76 | # for a list of supported languages. 77 | # 78 | # This is also used if you do content translation via gettext catalogs. 
79 | # Usually you set "language" from the command line for these cases. 80 | language = "en" 81 | 82 | # List of patterns, relative to source directory, that match files and 83 | # directories to ignore when looking for source files. 84 | # This pattern also affects html_static_path and html_extra_path . 85 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 86 | 87 | # The name of the Pygments (syntax highlighting) style to use. 88 | pygments_style = "sphinx" 89 | 90 | 91 | # -- Options for HTML output ------------------------------------------------- 92 | 93 | # The theme to use for HTML and HTML Help pages. See the documentation for 94 | # a list of builtin themes. 95 | # 96 | html_theme = "pydata_sphinx_theme" 97 | 98 | # Add any paths that contain custom static files (such as style sheets) here, 99 | # relative to this directory. They are copied after the builtin static files, 100 | # so a file named "default.css" will overwrite the builtin "default.css". 101 | html_static_path = ["_static"] 102 | html_css_files = ["theme-deltares.css"] 103 | 104 | # Theme options are theme-specific and customize the look and feel of a theme 105 | # further. For a list of options available for each theme, see the 106 | # documentation. 
107 | # 108 | html_theme_options = { 109 | "navbar_align": "content", 110 | "icon_links": [ 111 | { 112 | "name": "GitHub", 113 | "url": "https://github.com/Deltares/xugrid", # required 114 | "icon": "https://upload.wikimedia.org/wikipedia/commons/9/91/Octicons-mark-github.svg", 115 | "type": "url", 116 | }, 117 | { 118 | "name": "Deltares", 119 | "url": "https://deltares.nl/en/", 120 | "icon": "_static/deltares-blue.svg", 121 | "type": "local", 122 | }, 123 | ], 124 | "logo": { 125 | "text": "Xugrid", 126 | "image_light": "xugrid-logo.svg", 127 | "image_dark": "xugrid-logo.svg", 128 | }, 129 | } 130 | 131 | # -- Extension configuration ------------------------------------------------- 132 | 133 | # extension sphinx.ext.todo 134 | # If true, `todo` and `todoList` produce output, else they produce nothing. 135 | todo_include_todos = True 136 | -------------------------------------------------------------------------------- /xugrid/core/common.py: -------------------------------------------------------------------------------- 1 | from functools import wraps 2 | 3 | import xarray as xr 4 | 5 | from xugrid.core.utils import unique_grids 6 | from xugrid.core.wrap import UgridDataArray, UgridDataset 7 | 8 | DATAARRAY_NAME = "__xarray_dataarray_name__" 9 | DATAARRAY_VARIABLE = "__xarray_dataarray_variable__" 10 | 11 | 12 | def _dataset_helper(ds: xr.Dataset): 13 | n_topology = len(ds.ugrid_roles.topology) 14 | if n_topology == 0: 15 | raise ValueError( 16 | "The file or object does not contain UGRID conventions data. " 17 | "One or more UGRID topologies are required. Perhaps you wrote " 18 | "the file using `data.to_netcdf()` instead of `data.ugrid.to_netcdf()`?" 
19 | ) 20 | return UgridDataset(ds) 21 | 22 | 23 | def open_dataset(*args, **kwargs): 24 | ds = xr.open_dataset(*args, **kwargs) 25 | return _dataset_helper(ds) 26 | 27 | 28 | def load_dataset(*args, **kwargs): 29 | ds = xr.load_dataset(*args, **kwargs) 30 | return _dataset_helper(ds) 31 | 32 | 33 | def _dataarray_helper(ds: xr.Dataset): 34 | dataset = _dataset_helper(ds) 35 | if len(dataset.data_vars) != 1: 36 | raise ValueError( 37 | "The file or object contains more than one data " 38 | "variable. Please read with xarray.open_dataset and " 39 | "then select the variable you want." 40 | ) 41 | else: 42 | (data_array,) = dataset.data_vars.values() 43 | 44 | data_array.set_close(dataset._close) 45 | 46 | # Reset names if they were changed during saving 47 | # to ensure that we can 'roundtrip' perfectly 48 | if DATAARRAY_NAME in dataset.attrs: 49 | data_array.name = dataset.attrs[DATAARRAY_NAME] 50 | del dataset.attrs[DATAARRAY_NAME] 51 | 52 | if data_array.name == DATAARRAY_VARIABLE: 53 | data_array.name = None 54 | 55 | return UgridDataArray(data_array, dataset.grid) 56 | 57 | 58 | def load_dataarray(*args, **kwargs): 59 | ds = xr.load_dataset(*args, **kwargs) 60 | return _dataarray_helper(ds) 61 | 62 | 63 | def open_dataarray(*args, **kwargs): 64 | ds = xr.open_dataset(*args, **kwargs) 65 | return _dataarray_helper(ds) 66 | 67 | 68 | def open_mfdataset(*args, **kwargs): 69 | if "data_vars" in kwargs: 70 | raise ValueError("data_vars kwargs is not supported in xugrid.open_mfdataset") 71 | kwargs["data_vars"] = "minimal" 72 | ds = xr.open_mfdataset(*args, **kwargs) 73 | return UgridDataset(ds) 74 | 75 | 76 | def open_zarr(*args, **kwargs): 77 | ds = xr.open_zarr(*args, **kwargs) 78 | return UgridDataset(ds) 79 | 80 | 81 | load_dataset.__doc__ = xr.load_dataset.__doc__ 82 | open_dataset.__doc__ = xr.open_dataset.__doc__ 83 | load_dataarray.__doc__ = xr.load_dataarray.__doc__ 84 | open_dataarray.__doc__ = xr.open_dataarray.__doc__ 85 | open_mfdataset.__doc__ = 
xr.open_mfdataset.__doc__ 86 | open_zarr.__doc__ = xr.open_zarr.__doc__ 87 | 88 | 89 | # Other utilities 90 | # --------------- 91 | 92 | 93 | def wrap_func_like(func): 94 | @wraps(func) 95 | def _like(other, *args, **kwargs): 96 | obj = func(other.obj, *args, **kwargs) 97 | if isinstance(obj, xr.DataArray): 98 | return type(other)(obj, other.grid) 99 | elif isinstance(obj, xr.Dataset): 100 | return type(other)(obj, other.grids) 101 | else: 102 | raise TypeError( 103 | f"Expected Dataset or DataArray, received {type(other).__name__}" 104 | ) 105 | 106 | _like.__doc__ = func.__doc__ 107 | return _like 108 | 109 | 110 | def wrap_func_objects(func): 111 | @wraps(func) 112 | def _f(objects, *args, **kwargs): 113 | grids = [] 114 | bare_objs = [] 115 | for obj in objects: 116 | if isinstance(obj, UgridDataArray): 117 | grids.append(obj.grid) 118 | elif isinstance(obj, UgridDataset): 119 | grids.extend(obj.grids) 120 | else: 121 | raise TypeError( 122 | "Can only concatenate xugrid UgridDataset and UgridDataArray " 123 | f"objects, got {type(obj).__name__}" 124 | ) 125 | 126 | bare_objs.append(obj.obj) 127 | 128 | grids = unique_grids(grids) 129 | result = func(bare_objs, *args, **kwargs) 130 | if isinstance(result, xr.DataArray): 131 | if len(grids) > 1: 132 | raise ValueError("All UgridDataArrays must have the same grid") 133 | return UgridDataArray(result, next(iter(grids))) 134 | else: 135 | return UgridDataset(result, grids) 136 | 137 | _f.__doc__ = func.__doc__ 138 | return _f 139 | 140 | 141 | full_like = wrap_func_like(xr.full_like) 142 | zeros_like = wrap_func_like(xr.zeros_like) 143 | ones_like = wrap_func_like(xr.ones_like) 144 | 145 | concat = wrap_func_objects(xr.concat) 146 | merge = wrap_func_objects(xr.merge) 147 | -------------------------------------------------------------------------------- /examples/partitioning.py: -------------------------------------------------------------------------------- 1 | """ 2 | Partitioning 3 | ============ 4 | 5 | Grid 
partitioning, or domain decomposition, is an important step in setting up 6 | parallellized simulations. Xugrid provides utilities for partitioning a grid 7 | and its associated data, and for merging partitions back into a single whole. 8 | """ 9 | # %% 10 | 11 | import matplotlib.pyplot as plt 12 | import numpy as np 13 | 14 | import xugrid as xu 15 | 16 | # %% 17 | # Create partitions 18 | # ----------------- 19 | # 20 | # Xugrid wraps the well known `METIS library`_ via the `pymetis bindings`_. 21 | # METIS is generally used to partition a grid in such a manner that 22 | # communication between parallel processes is minimized. 23 | # 24 | # We'll demonstrate the functionality by diving the elevation example 25 | # into several parts. 26 | 27 | uda = xu.data.elevation_nl() 28 | uda.ugrid.plot(vmin=-20, vmax=90, cmap="terrain") 29 | 30 | # %% 31 | # Let's start by dividing the grid into four parts: 32 | 33 | partitions = uda.ugrid.partition(n_part=4) 34 | 35 | fig, axes = plt.subplots(2, 2, figsize=(12.6, 10)) 36 | for partition, ax in zip(partitions, axes.ravel()): 37 | partition.ugrid.plot(ax=ax, vmin=-20, vmax=90, cmap="terrain") 38 | 39 | # %% 40 | # Partition the grid 41 | # ------------------ 42 | # 43 | # Calling ``.partition`` on a UgridDataArray or UgridDataset will automatically 44 | # partition the grid topology, select all associated data, and create a new 45 | # UgridDataArray or UgridDataset for each partition. 46 | # 47 | # However, in some case, we might prefer to pre-compute the labels, and then 48 | # apply them multiple datasets. To do so, we compute the partition labels from 49 | # the grid. ``label_partitions`` returns a UgridDataArray, with every cell given 50 | # its partition label number. 51 | # 52 | # We can easily plot this data to visualize the partitions: 53 | 54 | labels = uda.ugrid.grid.label_partitions(n_part=12) 55 | labels.ugrid.plot() 56 | 57 | # %% 58 | # Not quite the twelve provinces of the Netherlands! 
59 | # 60 | # However, we may use the labels to partition the data nonetheless: 61 | 62 | partitions = uda.ugrid.partition_by_label(labels) 63 | 64 | fig, axes = plt.subplots(4, 3, figsize=(15, 15)) 65 | for partition, ax in zip(partitions, axes.ravel()): 66 | partition.ugrid.plot(ax=ax, vmin=-20, vmax=90, cmap="terrain") 67 | 68 | # %% 69 | # Since the labels are an ordinary UgridDataArray, we can easily store them in 70 | # a netCDF file and re-use them in another part of a workflow. 71 | # 72 | # Merging partitions 73 | # ------------------ 74 | # 75 | # Generally, after partitioning the data we write it as model input and run a 76 | # model in parallel. Many model codes produce output per process. Xugrid can 77 | # merge these partitions back into one whole for post-processing: 78 | 79 | merged = xu.merge_partitions(partitions)["elevation"] 80 | 81 | merged.ugrid.plot(vmin=-20, vmax=90, cmap="terrain") 82 | 83 | # %% 84 | # Partitioning grids without data 85 | # ------------------------------- 86 | # 87 | # Of course, we can also partition the grid topology without any associated 88 | # data: 89 | 90 | grid = uda.ugrid.grid 91 | grid_parts = grid.partition(n_part=4) 92 | 93 | fig, axes = plt.subplots(2, 2, figsize=(12.6, 10)) 94 | for part, ax in zip(grid_parts, axes.ravel()): 95 | part.plot(ax=ax) 96 | 97 | # %% 98 | # ... and merge them back into one whole: 99 | 100 | merged_grid, _ = xu.Ugrid2d.merge_partitions(grid_parts) 101 | merged_grid.plot() 102 | 103 | # %% 104 | # Preserving order 105 | # ---------------- 106 | # 107 | # Note that partioning and merging does not preserve order! 108 | 109 | uda == merged 110 | 111 | # %% 112 | # The topology is equivalent, but the nodes, edges, and faces are in a 113 | # different order. This is because ``merge_partitions`` simply concatenates the 114 | # partitions. 115 | # 116 | # The easiest way to restore the order is by providing an example of the 117 | # original topology. 
``reindex_like`` looks at the coordinates of both 118 | # (equivalent!) grids and automatically determines how to reorder: 119 | 120 | reordered = merged.ugrid.reindex_like(uda) 121 | uda == reordered 122 | 123 | # %% 124 | # Alternatively, we can also assign IDs, carry these along, and use these to 125 | # reorder the data after merging. 126 | 127 | uds = xu.UgridDataset(grids=[uda.ugrid.grid]) 128 | uds["elevation"] = uda 129 | uds["cell_id"] = ("mesh2d_nFaces", np.arange(len(uda))) 130 | 131 | partitions = uds.ugrid.partition(n_part=4) 132 | merged = xu.merge_partitions(partitions) 133 | order = np.argsort(merged["cell_id"].values) 134 | reordered = merged.isel(mesh2d_nFaces=order) 135 | 136 | uds["elevation"] == reordered["elevation"] 137 | 138 | # %% 139 | # This is required if results are compared with the input, or with results 140 | # stemming from another partitioning, e.g. one with a different number of 141 | # partitions. 142 | # 143 | # .. _METIS library: https://github.com/KarypisLab/METIS 144 | # .. 
_pymetis bindings: https://github.com/inducer/pymetis 145 | -------------------------------------------------------------------------------- /xugrid/core/accessorbase.py: -------------------------------------------------------------------------------- 1 | import abc 2 | from typing import Tuple, Union 3 | 4 | import numpy as np 5 | import xarray as xr 6 | 7 | import xugrid 8 | 9 | 10 | class AbstractUgridAccessor(abc.ABC): 11 | @abc.abstractmethod 12 | def to_dataset(self): 13 | pass 14 | 15 | @abc.abstractmethod 16 | def assign_node_coords(self): 17 | pass 18 | 19 | @abc.abstractmethod 20 | def set_node_coords(self): 21 | pass 22 | 23 | @property 24 | @abc.abstractmethod 25 | def crs(self): 26 | pass 27 | 28 | @abc.abstractmethod 29 | def set_crs(self): 30 | pass 31 | 32 | @abc.abstractmethod 33 | def to_crs(self): 34 | pass 35 | 36 | @abc.abstractmethod 37 | def sel(self): 38 | pass 39 | 40 | @abc.abstractmethod 41 | def sel_points(self, x, y, out_of_bounds, fill_value): 42 | pass 43 | 44 | @abc.abstractmethod 45 | def intersect_line(self): 46 | pass 47 | 48 | @abc.abstractmethod 49 | def intersect_linestring(self): 50 | pass 51 | 52 | @property 53 | @abc.abstractmethod 54 | def bounds(self): 55 | pass 56 | 57 | @property 58 | @abc.abstractmethod 59 | def total_bounds(self): 60 | pass 61 | 62 | @property 63 | @abc.abstractmethod 64 | def name(self): 65 | pass 66 | 67 | @property 68 | @abc.abstractmethod 69 | def names(self): 70 | pass 71 | 72 | @property 73 | @abc.abstractmethod 74 | def topology(self): 75 | pass 76 | 77 | @staticmethod 78 | def _raster_xy(bounds: Tuple[float, float, float, float], resolution: float): 79 | xmin, ymin, xmax, ymax = bounds 80 | d = abs(resolution) 81 | xmin = np.floor(xmin / d) * d 82 | xmax = np.ceil(xmax / d) * d 83 | ymin = np.floor(ymin / d) * d 84 | ymax = np.ceil(ymax / d) * d 85 | x = np.arange(xmin + 0.5 * d, xmax, d) 86 | y = np.arange(ymax - 0.5 * d, ymin, -d) 87 | return x, y 88 | 89 | def _raster(self, x, y, index) -> 
xr.DataArray: 90 | index = index.ravel() 91 | indexer = xr.DataArray( 92 | data=index.reshape(y.size, x.size), 93 | coords={"y": y, "x": x}, 94 | dims=["y", "x"], 95 | ) 96 | out = self.obj.isel({self.grid.face_dimension: indexer}).where(indexer != -1) 97 | return out 98 | 99 | def clip_box( 100 | self, 101 | xmin: float, 102 | ymin: float, 103 | xmax: float, 104 | ymax: float, 105 | ): 106 | """ 107 | Clip the DataArray or Dataset by a bounding box. 108 | 109 | Parameters 110 | ---------- 111 | xmin: float 112 | ymin: float 113 | xmax: float 114 | ymax: float 115 | 116 | ------- 117 | clipped: 118 | xugrid.UgridDataArray or xugrid.UgridDataset 119 | """ 120 | return self.sel(x=slice(xmin, xmax), y=slice(ymin, ymax)) 121 | 122 | def partition_by_label( 123 | self, labels: np.ndarray 124 | ) -> Union["xugrid.UgridDataArray", "xugrid.UgridDataset"]: 125 | """ 126 | Partition a grid by labels. 127 | 128 | Parameters 129 | ---------- 130 | labels: np.ndarray of integers labeling each face. 131 | 132 | Returns 133 | ------- 134 | partitioned: list of partitions 135 | """ 136 | from xugrid.ugrid import partitioning 137 | 138 | return partitioning.partition_by_label(self.grid, self.obj, labels) 139 | 140 | def partition( 141 | self, n_part: int 142 | ) -> Union["xugrid.UgridDataArray", "xugrid.UgridDataset"]: 143 | """ 144 | Partition a grid into a given number of parts. 145 | 146 | Parameters 147 | ---------- 148 | n_part: integer 149 | The number of parts to partition the mesh. 150 | 151 | Returns 152 | ------- 153 | partitioned: list of partitions 154 | """ 155 | labels = self.grid.label_partitions(n_part) 156 | return self.partition_by_label(labels) 157 | 158 | def to_netcdf(self, *args, **kwargs): 159 | """ 160 | Write dataset contents to a UGRID compliant netCDF file. 161 | 162 | This function wraps :py:meth:`xr.Dataset.to_netcdf`; it adds the UGRID 163 | variables and coordinates to a standard xarray Dataset, then writes the 164 | result to a netCDF. 
165 | 166 | All arguments are forwarded to :py:meth:`xr.Dataset.to_netcdf`. 167 | """ 168 | self.to_dataset().to_netcdf(*args, **kwargs) 169 | 170 | def to_zarr(self, *args, **kwargs): 171 | """ 172 | Write dataset contents to a UGRID compliant Zarr file. 173 | 174 | This function wraps :py:meth:`xr.Dataset.to_zarr`; it adds the UGRID 175 | variables and coordinates to a standard xarray Dataset, then writes the 176 | result to a Zarr file. 177 | 178 | All arguments are forwarded to :py:meth:`xr.Dataset.to_zarr`. 179 | """ 180 | self.to_dataset().to_zarr(*args, **kwargs) 181 | -------------------------------------------------------------------------------- /xugrid/ugrid/polygonize.py: -------------------------------------------------------------------------------- 1 | from typing import Tuple 2 | 3 | import numpy as np 4 | from scipy import sparse 5 | 6 | from xugrid.constants import FILL_VALUE, IntArray 7 | 8 | 9 | def _bbox_area(bounds): 10 | return (bounds[2] - bounds[0]) * (bounds[3] - bounds[1]) 11 | 12 | 13 | def _classify( 14 | i: IntArray, j: IntArray, face_values: np.ndarray 15 | ) -> Tuple[int, IntArray]: 16 | """ 17 | Find out how many discrete polygons are created. Identify the connectivity, 18 | such that we can select a single polygon afterwards. 19 | 20 | Parameters 21 | ---------- 22 | i: np.ndarray of int 23 | First face of the edge. 24 | j: np.ndarray of int 25 | Second face of the edge. 26 | face_values: np.ndarray 27 | 28 | Returns 29 | ------- 30 | n_polygon: int 31 | polygon_id: np.ndarray of int 32 | """ 33 | # Every face connects up to two faces. vi holds the values of the first face, 34 | # vj holds the value of the second face. 35 | vi = face_values[i] 36 | vj = face_values[j] 37 | n = face_values.size 38 | # For labelling, only those parts of the mesh that have the same value 39 | # should be connected with each other. 40 | # Since we dropped NaN values before, we needn't worry about those. 
41 | is_connection = (i != FILL_VALUE) & (j != FILL_VALUE) & (vi == vj) 42 | i = i[is_connection] 43 | j = j[is_connection] 44 | ij = np.concatenate([i, j]) 45 | ji = np.concatenate([j, i]) 46 | coo_content = (ji, (ij, ji)) 47 | # Make sure to explicitly set the matrix shape: otherwise, isolated 48 | # elements witout any connection might disappear, and connected_components 49 | # will not return a value for every face. 50 | coo_matrix = sparse.coo_matrix(coo_content, shape=(n, n)) 51 | # We can classify the grid faces using this (reduced) connectivity 52 | return sparse.csgraph.connected_components(coo_matrix) 53 | 54 | 55 | def polygonize(uda: "UgridDataArray") -> "gpd.GeoDataFrame": # type: ignore # noqa 56 | """ 57 | Polygonize a UgridDataArray. 58 | 59 | This function creates vector polygons for all connected regions of cells 60 | (faces) in the Ugrid2d topology sharing a common value. 61 | 62 | The produced polygon edges will follow exactly the cell boundaries. When 63 | the data consists of many unique values (e.g. unbinned elevation data), the 64 | result will essentially be one polygon per face. In such cases, it is much 65 | more efficient to use ``xugrid.UgridDataArray.to_geodataframe``, which 66 | directly converts every cell to a polygon. This function is meant for data 67 | with relatively few unique values such as classification results. 68 | 69 | Parameters 70 | ---------- 71 | uda: UgridDataArray 72 | The DataArray should only contain the face dimension. Additional 73 | dimensions, such as time, are not allowed. 74 | 75 | Returns 76 | ------- 77 | polygonized: GeoDataFrame 78 | """ 79 | 80 | import geopandas as gpd 81 | import shapely 82 | 83 | facedim = uda.ugrid.grid.face_dimension 84 | if uda.dims != (facedim,): 85 | raise ValueError( 86 | "Cannot polygonize non-face dimensions. Expected only" 87 | f"({facedim},), but received {uda.dims}." 88 | ) 89 | 90 | # First remove the NaN values. These will not be polygonized anyway. 
91 | dropped = uda.dropna(dim=uda.ugrid.grid.face_dimension) 92 | face_values = dropped.to_numpy() 93 | grid = dropped.ugrid.grid 94 | i, j = grid.edge_face_connectivity.T 95 | n_polygon, polygon_id = _classify(i, j, face_values) 96 | 97 | # Now we identify for each label the subset of edges. These are the 98 | # "exterior" edges: either the exterior edge of the mesh identified by a 99 | # fill value, or by being connected to a cell with a different value. 100 | coordinates = grid.node_coordinates 101 | data_i = face_values[i] 102 | vi = polygon_id[i] 103 | vj = polygon_id[j] 104 | # Ensure that no result thas has been created by indexing with the 105 | # fill_value remains. Since polygon_id starts counting a 0, we may use -1. 106 | vi[i == FILL_VALUE] = FILL_VALUE 107 | vj[j == FILL_VALUE] = FILL_VALUE 108 | boundary = vi != vj 109 | 110 | polygons = [] 111 | values = [] 112 | for label in range(n_polygon): 113 | keep = ((vi == label) | (vj == label)) & boundary 114 | # The result of shapely polygonize is always a GeometryCollection. 115 | # Holes are included twice: once as holes in the largest body, and once 116 | # more as polygons on their own. We are interested in the largest 117 | # polygon, which we identify through its bounding box. 118 | edges = grid.edge_node_connectivity[keep] 119 | collection = shapely.polygonize(shapely.linestrings(coordinates[edges])) 120 | polygon = max(collection.geoms, key=lambda x: _bbox_area(x.bounds)) 121 | # Find the first True value in keep, use that to fetch the polygon 122 | # value. 
123 | value = data_i[keep.argmax()] 124 | polygons.append(polygon) 125 | values.append(value) 126 | 127 | return gpd.GeoDataFrame({"values": values}, geometry=polygons) 128 | -------------------------------------------------------------------------------- /examples/quick_overview.py: -------------------------------------------------------------------------------- 1 | """ 2 | Quick overview 3 | ============== 4 | 5 | Here are a number of quick examples of how to get started with xugrid. More 6 | detailed explanation can be found in the rest of the documentation. 7 | 8 | We'll start by importing a few essential packages. 9 | """ 10 | # %% 11 | 12 | import numpy as np 13 | import xarray as xr 14 | 15 | import xugrid as xu 16 | 17 | # %% 18 | # Create a UgridDataArray 19 | # ----------------------- 20 | # 21 | # There are three ways to create a UgridDataArray: 22 | # 23 | # * From an xarray Dataset containing the grid topology stored according to the 24 | # UGRID conventions. 25 | # * From a xugrid Ugrid object and an xarray DataArray containing the data. 26 | # * From a UGRID netCDF file, via :py:func:`xugrid.open_dataset`. 27 | # 28 | # 29 | # From xarray Dataset 30 | # ~~~~~~~~~~~~~~~~~~~ 31 | # 32 | # xugrid will automatically find the UGRID topological variables, and separate 33 | # them from the main data variables. 34 | # 35 | # Details on the required variables can be found in the `UGRID conventions`_. 
36 | # For 1D and 2D UGRID topologies, the required variables are: 37 | # 38 | # * x-coordinates of the nodes 39 | # * y-coordinates of the nodes 40 | # * edge node connectivity (1D) or face node connectivity (2D) 41 | # * a "dummy" variable storing the names of the above variables in its 42 | # attributes 43 | # 44 | # We'll start by fetching a dataset: 45 | 46 | ds = xu.data.adh_san_diego(xarray=True) 47 | ds 48 | 49 | # %% 50 | # There are a number of topology coordinates and variables: ``node_x`` and 51 | # ``node_y``, ``mesh2d`` and ``face_node_connectivity``. The dummy variable 52 | # is ``mesh2d`` contains only a 0 for data; its attributes contain a mapping of 53 | # UGRID roles to dataset variables. 54 | # 55 | # We can convert this dataset to a UgridDataset which will automatically 56 | # separate the variables: 57 | 58 | uds = xu.UgridDataset(ds) 59 | uds 60 | 61 | # %% 62 | # We can then grab one of the data variables as usual for xarray: 63 | 64 | elev = uds["elevation"] 65 | elev 66 | 67 | # %% 68 | # From Ugrid and DataArray 69 | # ~~~~~~~~~~~~~~~~~~~~~~~~ 70 | # 71 | # Alternatively, we can build a Ugrid topology object first from vertices and 72 | # connectivity numpy arrays, for example when using the topology data generated 73 | # by a mesh generator (at which stage there is no data asssociated with the 74 | # nodes, edges, or faces). 
75 | # 76 | # There are many ways to construct such arrays, typically via mesh generators 77 | # or Delaunay triangulation, but we will construct two simple triangles and 78 | # some data by hand here: 79 | 80 | nodes = np.array([[0, 0], [0, 1.1], [1, 0], [1, 1]]) 81 | faces = np.array([[2, 3, 0], [3, 1, 0]]) 82 | fill_value = -1 83 | 84 | grid = xu.Ugrid2d(nodes[:, 0], nodes[:, 1], fill_value, faces) 85 | da = xr.DataArray( 86 | data=[1.0, 2.0], 87 | dims=[grid.face_dimension], 88 | ) 89 | uda = xu.UgridDataArray(da, grid) 90 | uda 91 | 92 | # %% 93 | # From netCDF file 94 | # ~~~~~~~~~~~~~~~~ 95 | # 96 | # :py:func:`xugrid.open_dataset` is demonstrated in the last section of this 97 | # guide. Internally, it opens the netCDF as a regular dataset, then converts it 98 | # as seen in the first example. 99 | # 100 | # Plotting 101 | # -------- 102 | 103 | elev.ugrid.plot(cmap="viridis") 104 | 105 | # %% 106 | # Data selection 107 | # -------------- 108 | # 109 | # A UgridDataArray behaves identical to an xarray DataArray: 110 | 111 | whole = xu.data.disk()["face_z"] 112 | 113 | # %% 114 | # To select based on the topology, use the ``.ugrid`` attribute: 115 | 116 | subset = whole.ugrid.sel(y=slice(5.0, None)) 117 | subset.ugrid.plot() 118 | 119 | # %% 120 | # .. note:: 121 | # 122 | # ``ugrid.sel()`` currently only supports data on the faces for 2D 123 | # topologies, and data on edges for 1D topologies. More flexibility 124 | # may be added. 125 | # 126 | # Computation 127 | # ----------- 128 | # 129 | # Computation on DataArrays is unchanged from xarray: 130 | 131 | uda + 10.0 132 | 133 | # %% 134 | # Geopandas 135 | # --------- 136 | # 137 | # Xugrid objects provide a number of conversion functions from and to geopandas 138 | # GeoDataFrames using :py:meth:`xugrid.UgridDataset.from_geodataframe`. Note 139 | # that storing large grids as GeoDataFrames can be very inefficient. 
140 | 141 | gdf = uda.ugrid.to_geodataframe(name="test") 142 | gdf 143 | 144 | # %% 145 | # Conversion from Geopandas is easy too: 146 | 147 | xu.UgridDataset.from_geodataframe(gdf) 148 | 149 | # %% 150 | # XugridDatasets 151 | # -------------- 152 | # 153 | # Like an Xarray Dataset, a UgridDataset is a dict-like container of 154 | # UgridDataArrays. It is required that they share the same grid topology; 155 | # but the individual DataArrays may be located on different aspects of the 156 | # grid (nodes, faces, edges). 157 | 158 | xu.data.disk() 159 | 160 | # %% 161 | # A UgridDataset may be initialized without data variables, but this requires 162 | # a grid object: 163 | 164 | new_uds = xu.UgridDataset(grids=uds.ugrid.grids) 165 | new_uds 166 | 167 | # %% 168 | # We can then add variables one-by-one, as we might with an xarray Dataset: 169 | 170 | new_uds["elevation"] = elev 171 | new_uds 172 | 173 | # %% 174 | # Write netCDF files 175 | # ------------------ 176 | # 177 | # Once again like xarray, NetCDF is the recommended file format for xugrid 178 | # objects. Xugrid automatically stores the grid topology according to the UGRID 179 | # conventions and merges it with the main dataset containing the data variables 180 | # before writing. 181 | 182 | uds.ugrid.to_netcdf("example-ugrid.nc") 183 | xu.open_dataset("example-ugrid.nc") 184 | 185 | # %% 186 | # .. 
_UGRID Conventions: https://ugrid-conventions.github.io/ugrid-conventions 187 | -------------------------------------------------------------------------------- /tests/test_regrid/test_reduce.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | 4 | from xugrid.regrid import reduce 5 | 6 | 7 | def forward_args(): 8 | values = np.array([0.0, 1.0, 2.0, np.nan]) 9 | weights = np.array([0.5, 0.5, 0.5, 0.5]) 10 | work = np.empty_like(weights) 11 | return values, weights, work 12 | 13 | 14 | def reverse_args(): 15 | values, weights, work = forward_args() 16 | return np.flip(values), weights, work 17 | 18 | 19 | @pytest.mark.parametrize("args", [forward_args(), reverse_args()]) 20 | def test_mean(args): 21 | actual = reduce.mean(*args) 22 | assert np.allclose(actual, 1.0) 23 | 24 | 25 | @pytest.mark.parametrize("args", [forward_args(), reverse_args()]) 26 | def test_harmonic_mean(args): 27 | actual = reduce.harmonic_mean(*args) 28 | assert np.allclose(actual, 1.0 / (0.5 / 1.0 + 0.5 / 2.0)) 29 | 30 | 31 | @pytest.mark.parametrize("args", [forward_args(), reverse_args()]) 32 | def test_geometric_mean(args): 33 | actual = reduce.geometric_mean(*args) 34 | assert np.allclose(actual, np.sqrt(1.0 * 2.0)) 35 | 36 | 37 | @pytest.mark.parametrize("args", [forward_args(), reverse_args()]) 38 | def test_sum(args): 39 | actual = reduce.sum(*args) 40 | assert np.allclose(actual, 3.0) 41 | 42 | 43 | @pytest.mark.parametrize("args", [forward_args(), reverse_args()]) 44 | def test_minimum(args): 45 | actual = reduce.minimum(*args) 46 | assert np.allclose(actual, 0.0) 47 | 48 | 49 | @pytest.mark.parametrize("args", [forward_args(), reverse_args()]) 50 | def test_maximum(args): 51 | actual = reduce.maximum(*args) 52 | assert np.allclose(actual, 2.0) 53 | 54 | 55 | @pytest.mark.parametrize("args", [forward_args(), reverse_args()]) 56 | def test_mode(args): 57 | actual = reduce.mode(*args) 58 | # We have tied 
frequency (all weights 0.5). In this case we return the 59 | # highest value. 60 | assert np.allclose(actual, 2.0) 61 | 62 | 63 | @pytest.mark.parametrize("args", [forward_args(), reverse_args()]) 64 | def test_median(args): 65 | actual = reduce.median(*args) 66 | assert np.allclose(actual, 1.0) 67 | 68 | 69 | @pytest.mark.parametrize("args", [forward_args(), reverse_args()]) 70 | def test_conductance(args): 71 | actual = reduce.conductance(*args) 72 | assert np.allclose(actual, 1.5) 73 | 74 | 75 | @pytest.mark.parametrize("args", [forward_args(), reverse_args()]) 76 | def test_max_overlap(args): 77 | actual = reduce.max_overlap(*args) 78 | # We have tied overlap (all 0.5). In this case we return the highest value. 79 | assert np.allclose(actual, 2.0) 80 | 81 | 82 | def test_max_overlap_extra(): 83 | values = np.array([0.0, 1.0, 2.0, np.nan]) 84 | weights = np.array([0.5, 1.5, 0.5, 2.5]) 85 | workspace = np.empty_like(weights) 86 | args = (values, weights, workspace) 87 | actual = reduce.max_overlap(*args) 88 | assert np.allclose(actual, 1.0) 89 | 90 | 91 | def test_mode_extra(): 92 | values = np.array([0.0, 1.0, 1.0, 2.0, np.nan]) 93 | weights = np.array([0.5, 0.5, 0.5, 0.5, 0.5]) 94 | workspace = np.empty_like(weights) 95 | args = (values, weights, workspace) 96 | actual = reduce.mode(*args) 97 | assert np.allclose(actual, 1.0) 98 | # The weights shouldn't be mutated! 
99 | assert np.allclose(weights, 0.5) 100 | 101 | values = np.array([1, 1, 3]) 102 | weights = np.array([1.0, 1.0, 1.0]) 103 | workspace = np.empty_like(weights) 104 | args = (values, weights, workspace) 105 | actual = reduce.mode(*args) 106 | assert np.allclose(actual, 1.0) 107 | 108 | values = np.array([4, 5, 6]) 109 | weights = np.array([0.5, 0.5, 0.5]) 110 | workspace = np.empty_like(weights) 111 | args = (values, weights, workspace) 112 | actual = reduce.mode(*args) 113 | # Returns last not-nan value 114 | assert np.allclose(actual, 6) 115 | assert np.allclose(weights, 0.5) 116 | 117 | 118 | def test_percentile(): 119 | # Simplified from: 120 | # https://github.com/numba/numba/blob/2001717f3321a5082c39c5787676320e699aed12/numba/tests/test_array_reductions.py#L396 121 | def func(x, p): 122 | p = np.atleast_1d(p) 123 | values = x.ravel() 124 | weights = np.empty_like(values) 125 | work = np.empty_like(values) 126 | return np.array([reduce.percentile(values, weights, work, pval) for pval in p]) 127 | 128 | q_upper_bound = 100.0 129 | x = np.arange(8) * 0.5 130 | np.testing.assert_equal(func(x, 0), 0.0) 131 | np.testing.assert_equal(func(x, q_upper_bound), 3.5) 132 | np.testing.assert_equal(func(x, q_upper_bound / 2), 1.75) 133 | 134 | x = np.arange(12).reshape(3, 4) 135 | q = np.array((0.25, 0.5, 1.0)) * q_upper_bound 136 | np.testing.assert_equal(func(x, q), [2.75, 5.5, 11.0]) 137 | 138 | x = np.arange(3 * 4 * 5 * 6).reshape(3, 4, 5, 6) 139 | q = np.array((0.25, 0.50)) * q_upper_bound 140 | np.testing.assert_equal(func(x, q).shape, (2,)) 141 | 142 | q = np.array((0.25, 0.50, 0.75)) * q_upper_bound 143 | np.testing.assert_equal(func(x, q).shape, (3,)) 144 | 145 | x = np.arange(12).reshape(3, 4) 146 | np.testing.assert_equal(func(x, q_upper_bound / 2), 5.5) 147 | 148 | np.testing.assert_equal(func(np.array([1, 2, 3]), 0), 1) 149 | 150 | a = np.array([2, 3, 4, 1]) 151 | func(a, [q_upper_bound / 2]) 152 | np.testing.assert_equal(a, np.array([2, 3, 4, 1])) 153 | 154 
| 155 | METHODS = [ 156 | reduce.mean, 157 | reduce.harmonic_mean, 158 | reduce.geometric_mean, 159 | reduce.sum, 160 | reduce.minimum, 161 | reduce.maximum, 162 | reduce.mode, 163 | reduce.first_order_conservative, 164 | reduce.conductance, 165 | reduce.max_overlap, 166 | reduce.median, 167 | ] 168 | 169 | 170 | @pytest.mark.parametrize("f", METHODS) 171 | def test_weights_all_zeros(f): 172 | values = np.ones(5) 173 | weights = np.zeros(5) 174 | workspace = np.zeros(5) 175 | assert np.isnan(f(values, weights, workspace)) 176 | 177 | 178 | @pytest.mark.parametrize("f", METHODS) 179 | def test_values_all_nan(f): 180 | values = np.full(5, np.nan) 181 | weights = np.ones(5) 182 | workspace = np.zeros(5) 183 | assert np.isnan(f(values, weights, workspace)) 184 | -------------------------------------------------------------------------------- /docs/terminology.rst: -------------------------------------------------------------------------------- 1 | Terminology 2 | =========== 3 | 4 | This document builds on `Xarray's glossary 5 | `_. We 6 | strongly recommend reading the Xarray Terminology document before reading this 7 | document. The `UGRID Conventions 8 | `_ are also a 9 | recommended read. 10 | 11 | Grid 12 | A representation of a larger geometric domain by smaller discrete 13 | cells. "Mesh" is used somewhat interchangeably. 14 | 15 | Structured Grid 16 | Structured grids arrange cells in a simple ``(n_row, n_column)`` array. 17 | Structured grids are identified by regular connectivity: every cell has 18 | the same number of neighbors, with the exception of boundaries. Each 19 | cross-section has the same number of cells, even though the cell shape 20 | and size may differ arbitrarily (non-equidistant spacing). Cells are 21 | quadrilateral (four sides) in 2D. Cell to cell connectivity is implicit 22 | and can be directly derived from the row and column 23 | numbers. 
24 | 25 | Unstructured Grid 26 | In contrast to a structured grid, connectivity for an unstructured grid 27 | is irregular and has to be defined explicitly. The primary benefit of 28 | unstructured grids are possibilities for local refinement. Another 29 | benefit is that arbitrary geometries can be easily represented. 30 | Unstructured grids generally arrange cells in a flat (``n_cell,``) 31 | array and separate arrays are used to store the cell locations. 32 | "Unstructured mesh" or "flexible mesh" are used interchangeably. 33 | 34 | Topology 35 | In these pages, short for "grid topology". Grid topology refers to the 36 | location and connectivity of the grid cells and its constituent parts 37 | (nodes, edges). More broadly it also refers to any connectivity 38 | information with respect to a grid. 39 | 40 | UGRID 41 | `Conventions `_ 42 | for specifying the topology of unstructured grids. The focus of the 43 | UGRID conventions is environmental applications and it builds on the 44 | `Climate & Forecast (CF) Metadata Conventions 45 | `_. Data stored according to the UGRID 46 | conventions is thus nearly always written to Unidata Network Common 47 | Data Form (NetCDF) files, but the convention applies to the data and 48 | metadata: they can be written to any sufficiently rich file format 49 | (e.g. `Zarr `_). 50 | 51 | Node 52 | A point, a coordinate pair (x, y): the most basic element of the 53 | topology. "Vertex" is used interchangeably. 54 | 55 | Edge 56 | A line or curve bounded by two nodes. 57 | 58 | Face 59 | A plane or surface enclosed by a set of edges. "Cell" is used somewhat 60 | interchangeably; "polygon" also, but to a lesser degree. 61 | 62 | Sparse Array 63 | A sparse matrix or sparse array is a matrix in which most elements are 64 | zero. For efficiency reasons, sparse matrices are commonly stored in 65 | special data structures, storing only the non-zero values. 
A 66 | straightforward storage scheme is Coordinate list (COO) or triplet 67 | format: for every non-zero value, three values are stored: 68 | ``(row_index, column_index, value)``. In the Python data ecosystem, 69 | these data structures are provided by `SciPy 70 | `_; in these 71 | pages, "sparse array" or "sparse matrix" refers specifically to data 72 | stored in one of these Scipy objects. 73 | 74 | Dense Array 75 | Contrast with "sparse array": a dense array is an array in which all 76 | values -- including zeros or fill values -- are stored. In these pages, 77 | "dense array" refers to "ordinary" NumPy arrays. 78 | 79 | Adjacency List 80 | A list describing which features of the grid (e.g. faces) are 81 | associated with each other (e.g. nodes, or the neighboring faces). 82 | This can be stored as a list of lists for a grid, a rectangular array 83 | for regular connectivity, or a "ragged array" for irregular 84 | connectivity. In the UGRID conventions, both regular and irregular 85 | connectivity is stored in (dense) rectangular arrays; ragged arrays are 86 | represented by rectangular arrays partially filled with a fill value. 87 | 88 | Adjacency Matrix 89 | An alternative to adjacency lists is an adjacency matrix, which is a 90 | matrix in which the row and column numbers correspond to the element 91 | numbers and wherein the cell value contains a Boolean value denoting 92 | connectivity (``True``, ``1``) or not (``False``, ``0``); such a matrix 93 | can be efficiently stored as a sparse matrix. 94 | 95 | Face node connectivity 96 | An index array of integers. For every face, a list of index values 97 | indicating which members of the list of nodes form its (exterior) 98 | edges. According to UGRID conventions, this data is stored in a 99 | (dense) rectangular array with explicit fill values of the shape 100 | ``(n_face, n_max_nodes_per_face``). 
For a grid consisting of 101 | exclusively triangles, ``n_max_nodes_per_face == 3`` and no fill value 102 | is required; for an exclusively quadrilateral grid 103 | ``n_max_nodes_per_face == 4``; the fill value is only used for mixed 104 | grids (e.g. triangles and quandrilaterals). The numbering of the faces 105 | is implicit in the first index (row number) of the array; we would 106 | collect the index values for the first face as follows: 107 | ``face_node_connectivity[0]``. 108 | 109 | Edge node connectivity 110 | An index arrray of integers. For every edge, a list of index value 111 | indicating which two members of the list of nodes bound a curve or 112 | line. This data is stored in a (dense) rectangular array of the shape 113 | ``(n_edge, 2)``. The numbering of the edges is implicit in the first 114 | index (row number) of the array. Refer to the `UGRID Conventions 115 | `_ for an 116 | exhaustive description of connectivities. -------------------------------------------------------------------------------- /tests/test_regrid/test_network_gridder.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | import xarray as xr 4 | 5 | import xugrid as xu 6 | 7 | 8 | @pytest.fixture(scope="function") 9 | def structured_grid(): 10 | dims = ("y", "x") 11 | y = np.arange(3.5, -0.5, -1.0) 12 | x = np.arange(0.5, 4.5, 1.0) 13 | coords = {"y": y, "x": x} 14 | 15 | return xr.DataArray(np.ones((4, 4), dtype=np.int32), coords=coords, dims=dims) 16 | 17 | 18 | @pytest.fixture(scope="function") 19 | def unstructured_grid(structured_grid): 20 | return xu.UgridDataArray.from_structured2d(structured_grid) 21 | 22 | 23 | @pytest.fixture(scope="function") 24 | def network(): 25 | node_xy = np.array( 26 | [ 27 | [0.0, 0.0], 28 | [1.5, 1.5], 29 | [2.5, 1.5], 30 | [4.0, 0.0], 31 | [4.0, 3.0], 32 | ] 33 | ) 34 | edge_nodes = np.array( 35 | [ 36 | [0, 1], 37 | [1, 2], 38 | [2, 3], 39 | [2, 4], 40 | ] 41 | ) 42 | 
ugrid1d = xu.Ugrid1d(*node_xy.T, -1, edge_nodes) 43 | data = xr.DataArray( 44 | np.array([1, 2, 4, -4], dtype=float), dims=(ugrid1d.edge_dimension,) 45 | ) 46 | return xu.UgridDataArray(data, grid=ugrid1d) 47 | 48 | 49 | @pytest.fixture(scope="function") 50 | def points_to_sample(): 51 | x_loc = np.array([0.5, 1.5, 2.5, 3.5, 3.5]) 52 | y_loc = np.array([0.5, 1.5, 1.5, 2.5, 0.5]) 53 | diag = 0.5 * np.sqrt(2) 54 | expected_values = np.array( 55 | [ 56 | 1.0, 57 | (diag * 1 + 0.5 * 2) / (diag + 0.5), # 1 diagonal edge, 1 horizontal edge 58 | (0.5 * 2 + diag * -4 + diag * 4) 59 | / (2 * diag + 0.5), # 2 diagonal edge, 1 horizontal edge 60 | -4.0, 61 | 4.0, 62 | ] 63 | ) 64 | 65 | return x_loc, y_loc, expected_values 66 | 67 | 68 | def test_network_gridder_init__unstructured(network, unstructured_grid): 69 | gridder = xu.NetworkGridder(network, unstructured_grid, method="mean") 70 | 71 | assert isinstance(gridder, xu.NetworkGridder) 72 | assert gridder._source.ugrid_topology == network.grid 73 | assert gridder._target.ugrid_topology == unstructured_grid.grid 74 | assert gridder._weights.n == unstructured_grid.grid.n_face 75 | assert gridder._weights.m == network.grid.n_edge 76 | assert gridder._weights.nnz == 8 77 | 78 | 79 | def test_network_gridder_regrid__unstructured( 80 | network, unstructured_grid, points_to_sample 81 | ): 82 | gridder = xu.NetworkGridder(network, unstructured_grid, method="mean") 83 | gridded = gridder.regrid(network) 84 | 85 | assert isinstance(gridded, type(unstructured_grid)) 86 | assert gridded.shape == unstructured_grid.shape 87 | assert np.count_nonzero(np.isnan(gridded)) == 11 88 | 89 | x_loc, y_loc, expected_values = points_to_sample 90 | grid_values = gridded.ugrid.sel_points(x=x_loc, y=y_loc) 91 | 92 | np.testing.assert_allclose(grid_values, expected_values) 93 | 94 | 95 | def test_network_gridder_regrid__unstructured_transient( 96 | network, unstructured_grid, points_to_sample 97 | ): 98 | # Make transient network 99 | times = 
[np.datetime64("2022-01-01"), np.datetime64("2023-01-01")] 100 | time_multiplier = xr.DataArray([1.0, 2.0], dims="time", coords={"time": times}) 101 | network = (network * time_multiplier).transpose( 102 | "time", network.ugrid.grid.core_dimension 103 | ) 104 | 105 | gridder = xu.NetworkGridder(network, unstructured_grid, method="mean") 106 | gridded = gridder.regrid(network) 107 | 108 | assert isinstance(gridded, type(unstructured_grid)) 109 | assert np.count_nonzero(np.isnan(gridded)) == 22 110 | 111 | x_loc, y_loc, expected_values = points_to_sample 112 | grid_values_t0 = gridded.isel(time=0).ugrid.sel_points(x=x_loc, y=y_loc) 113 | grid_values_t1 = gridded.isel(time=1).ugrid.sel_points(x=x_loc, y=y_loc) 114 | 115 | np.testing.assert_allclose(grid_values_t0, expected_values) 116 | np.testing.assert_allclose(grid_values_t1, 2 * expected_values) 117 | 118 | 119 | def test_network_gridder_init__structured(network, structured_grid): 120 | gridder = xu.NetworkGridder(network, structured_grid, method="mean") 121 | 122 | assert isinstance(gridder, xu.NetworkGridder) 123 | assert gridder._source.ugrid_topology == network.grid 124 | np.testing.assert_array_equal( 125 | gridder._target.coords["x"], structured_grid.x.to_numpy() 126 | ) 127 | np.testing.assert_array_equal( 128 | gridder._target.coords["y"], structured_grid.y.to_numpy() 129 | ) 130 | assert gridder._weights.n == structured_grid.size 131 | assert gridder._weights.m == network.grid.n_edge 132 | assert gridder._weights.nnz == 8 133 | 134 | 135 | def test_network_gridder_regrid__structured(network, structured_grid, points_to_sample): 136 | gridder = xu.NetworkGridder(network, structured_grid, method="mean") 137 | gridded = gridder.regrid(network) 138 | 139 | assert isinstance(gridded, type(structured_grid)) 140 | assert gridded.shape == structured_grid.shape 141 | assert np.count_nonzero(np.isnan(gridded)) == 11 142 | 143 | x_loc, y_loc, expected_values = points_to_sample 144 | x_loc = xr.DataArray(x_loc, 
dims="points") 145 | y_loc = xr.DataArray(y_loc, dims="points") 146 | grid_values = gridded.sel(x=x_loc, y=y_loc).to_numpy() 147 | 148 | np.testing.assert_allclose(grid_values, expected_values) 149 | 150 | 151 | def test_network_gridder_regrid__structured_transient( 152 | network, structured_grid, points_to_sample 153 | ): 154 | # Make transient network 155 | times = [np.datetime64("2022-01-01"), np.datetime64("2023-01-01")] 156 | time_multiplier = xr.DataArray([1.0, 2.0], dims="time", coords={"time": times}) 157 | network = (network * time_multiplier).transpose( 158 | "time", network.ugrid.grid.core_dimension 159 | ) 160 | 161 | gridder = xu.NetworkGridder(network, structured_grid, method="mean") 162 | gridded = gridder.regrid(network) 163 | 164 | assert isinstance(gridded, type(structured_grid)) 165 | assert np.count_nonzero(np.isnan(gridded)) == 22 166 | 167 | x_loc, y_loc, expected_values = points_to_sample 168 | x_loc = xr.DataArray(x_loc, dims="points") 169 | y_loc = xr.DataArray(y_loc, dims="points") 170 | grid_values_t0 = gridded.isel(time=0).sel(x=x_loc, y=y_loc).to_numpy() 171 | grid_values_t1 = gridded.isel(time=1).sel(x=x_loc, y=y_loc).to_numpy() 172 | 173 | np.testing.assert_allclose(grid_values_t0, expected_values) 174 | np.testing.assert_allclose(grid_values_t1, 2 * expected_values) 175 | -------------------------------------------------------------------------------- /examples/plotting.py: -------------------------------------------------------------------------------- 1 | """ 2 | Plot unstructured mesh data 3 | =========================== 4 | 5 | The labels that are present in xarray's data structures allow for easy creation 6 | of informative plots: think of dates on the x-axis, or geospatial coordinates. 7 | Xarray provides a convenient way of plotting your data provided it is 8 | structured. Xugrid extends these plotting methods to easily make spatial 9 | (x-y) plots of unstructured grids. 
10 | 11 | Like Xarray's focus for plotting is the DataArray, Xugrid's focus is the 12 | UgridDataArray; like Xarray, if your (extracted) data fits into a pandas 13 | DataFrame, you're better off using pandas tools instead. 14 | 15 | As every other method in Xugrid, any logic involving the unstructured topology 16 | is accessed via the ``.ugrid`` accessor on the DataArrays and Datasets; 17 | UgridDatasets and UgridDataArrays behave the same as ordinary Xarray DataArrays 18 | and Datasets otherwise. 19 | 20 | Imports 21 | ------- 22 | 23 | The following imports suffice for the examples. 24 | """ 25 | 26 | # %% 27 | import matplotlib.pyplot as plt 28 | 29 | import xugrid 30 | 31 | # %% 32 | # We'll use a simple synthetic example. This dataset contains data for all 33 | # topological attributes of a two dimensional mesh: 34 | # 35 | # * Nodes: the coordinate pair (x, y) forming a point. 36 | # * Edges: a line or curve bounded by two nodes. 37 | # * Faces: the polygon enclosed by a set of edges. 38 | # 39 | # In this disk example, very similar data has been placed on the nodes, edges, and 40 | # faces. 41 | 42 | ds = xugrid.data.disk() 43 | ds 44 | 45 | # %% 46 | # UgridDataArray 47 | # -------------- 48 | # 49 | # Just like Xarray, we can create a plot by selecting a DataArray from the 50 | # Dataset and calling the :py:meth:`UgridDataArray.ugrid.plot()` method. 51 | 52 | uda = ds["face_z"] 53 | uda.ugrid.plot() 54 | 55 | # %% 56 | # Like Xarray, the axes and the colorbar are labeled automatically using the 57 | # available information. 58 | # 59 | # The convenience function :py:meth:`xugrid.UgridDataArray.ugrid.plot()` 60 | # dispatches on the topological dimension of the variable. In this case, the 61 | # data is associated with the face dimension of the topology. 
Data located on 62 | # the edges results in a different kind of plot: 63 | 64 | ds["edge_z"].ugrid.plot() 65 | 66 | # %% 67 | # The method called by default depends on the type of the data: 68 | # 69 | # =============== =========================== 70 | # Dimension Plotting function 71 | # =============== =========================== 72 | # Face :py:func:`xugrid.plot.pcolormesh` 73 | # Edge :py:func:`xugrid.plot.line` 74 | # Node :py:func:`xugrid.plot.tripcolor` 75 | # =============== =========================== 76 | # 77 | # We can put them side by side to illustrate the differences: 78 | 79 | fig, (ax0, ax1, ax2) = plt.subplots(ncols=3, figsize=(11, 3), sharex=True, sharey=True) 80 | ds["face_z"].ugrid.plot(ax=ax0) 81 | ds["edge_z"].ugrid.plot(ax=ax1) 82 | ds["node_z"].ugrid.plot(ax=ax2) 83 | 84 | # %% 85 | # We can also exactly control the type of plot we want. For example, to plot 86 | # filled contours for data associated with the face dimension: 87 | 88 | ds["face_z"].ugrid.plot.contourf() 89 | 90 | # %% 91 | # We can also overlay this data with the edges: 92 | 93 | fig, ax = plt.subplots() 94 | ds["face_z"].ugrid.plot.contourf() 95 | ds["face_z"].ugrid.plot.line(color="black") 96 | 97 | # %% 98 | # In general, there has to be data associated with the mesh topology before a 99 | # plot can be made. ``plot.line()`` forms an exception to this rule, as the 100 | # location of the edges is meaningful on its own: for this reason 101 | # ``plot.line()`` does not error in the example above. 102 | # 103 | # Other types of plot 104 | # ------------------- 105 | # 106 | # The available plotting methods per topology dimension are listed here. 
107 | # 108 | # For the **face** dimension: 109 | # 110 | # * :py:func:`xugrid.plot.contour` 111 | # * :py:func:`xugrid.plot.contourf` 112 | # * :py:func:`xugrid.plot.imshow` 113 | # * :py:func:`xugrid.plot.pcolormesh` 114 | # * :py:func:`xugrid.plot.scatter` 115 | # * :py:func:`xugrid.plot.surface` 116 | # 117 | # For the **edge** dimension: 118 | # 119 | # * :py:func:`xugrid.plot.line` 120 | # * :py:func:`xugrid.plot.scatter` 121 | # 122 | # For the **node** dimension: 123 | # 124 | # * :py:func:`xugrid.plot.contour` 125 | # * :py:func:`xugrid.plot.contourf` 126 | # * :py:func:`xugrid.plot.scatter` 127 | # * :py:func:`xugrid.plot.surface` 128 | # * :py:func:`xugrid.plot.tripcolor` 129 | # 130 | # All these (2D) plots are illustrated here for completeness' sake: 131 | 132 | fig, axes = plt.subplots(nrows=5, ncols=3, figsize=(10, 15)) 133 | 134 | ds["face_z"].ugrid.plot.pcolormesh(ax=axes[0, 0]) 135 | ds["face_z"].ugrid.plot.contour(ax=axes[1, 0]) 136 | ds["face_z"].ugrid.plot.contourf(ax=axes[2, 0]) 137 | ds["face_z"].ugrid.plot.imshow(ax=axes[3, 0]) 138 | ds["face_z"].ugrid.plot.scatter(ax=axes[4, 0]) 139 | 140 | ds["edge_z"].ugrid.plot.line(ax=axes[0, 1]) 141 | ds["edge_z"].ugrid.plot.scatter(ax=axes[4, 1]) 142 | 143 | ds["node_z"].ugrid.plot.tripcolor(ax=axes[0, 2]) 144 | ds["node_z"].ugrid.plot.contour(ax=axes[1, 2]) 145 | ds["node_z"].ugrid.plot.contourf(ax=axes[2, 2]) 146 | ds["node_z"].ugrid.plot.scatter(ax=axes[4, 2]) 147 | 148 | # %% 149 | # The ``surface`` methods generate 3D surface plots: 150 | 151 | fig = plt.figure(figsize=plt.figaspect(0.5)) 152 | ax0 = fig.add_subplot(1, 2, 1, projection="3d") 153 | ax1 = fig.add_subplot(1, 2, 2, projection="3d") 154 | ds["face_z"].ugrid.plot.surface(ax=ax0) 155 | ds["node_z"].ugrid.plot.surface(ax=ax1) 156 | 157 | # %% 158 | # Additional Arguments 159 | # -------------------- 160 | # 161 | # Once again like in Xarray, additional arguments are passed to the underlying 162 | # matplotlib function and the additional 
arguments supported by Xarray can be 163 | # used: 164 | 165 | ds["face_z"].ugrid.plot(cmap="RdBu", levels=8, yincrease=False) 166 | 167 | # %% 168 | # As a function 169 | # ------------- 170 | # 171 | # The plotting methods can also be called as a function, in which case they 172 | # take an xarray DataArray and a xugrid grid as arguments. 173 | 174 | grid = ds.ugrid.grids[0] 175 | da = ds.obj["face_z"] 176 | 177 | xugrid.plot.pcolormesh(grid, da) 178 | 179 | # %% 180 | # Xarray DataArray plots 181 | # ---------------------- 182 | # 183 | # As mentioned, apart from the ``.ugrid`` accessor, a UgridDataArray behaves the 184 | # same as an Xarray DataArray. To illustrate, we can select a location 185 | # somewhere in the unstructured topology, and plot the resulting timeseries: 186 | 187 | ds = xugrid.data.adh_san_diego() 188 | depth = ds["depth"] 189 | depth.isel(node=1000).plot() 190 | 191 | # %% 192 | -------------------------------------------------------------------------------- /examples/overlap_regridder.py: -------------------------------------------------------------------------------- 1 | """ 2 | OverlapRegridder 3 | ================ 4 | 5 | The overlap regridder works in two stages. First, it searches the source grid 6 | for all faces of the target grid, computes the intersections, and stores all 7 | overlaps between source and target faces. This occurs when the regridder is 8 | initialized. Second, the regridder applies the weights: it reduces the 9 | collection of overlapping faces to a single value for the target face. 10 | 11 | There are many reductions possible. The best choice generally differs based on 12 | the physical meaning of the variable, or the application. Xugrid provides a 13 | number of reductions, but it's also possible to use a custom reduction 14 | function. This is demonstrated here. 15 | 16 | We start with the same example as in the quick overview. 
17 | """ 18 | # %% 19 | 20 | import matplotlib.pyplot as plt 21 | import numpy as np 22 | 23 | import xugrid as xu 24 | 25 | # %% 26 | # We'll use a part of a triangular grid with the surface elevation (including 27 | # some bathymetry) of the Netherlands, and a coarser target grid. 28 | 29 | 30 | def create_grid(bounds, nx, ny): 31 | """Create a simple grid of triangles covering a rectangle.""" 32 | import numpy as np 33 | from matplotlib.tri import Triangulation 34 | 35 | xmin, ymin, xmax, ymax = bounds 36 | dx = (xmax - xmin) / nx 37 | dy = (ymax - ymin) / ny 38 | x = np.arange(xmin, xmax + dx, dx) 39 | y = np.arange(ymin, ymax + dy, dy) 40 | y, x = [a.ravel() for a in np.meshgrid(y, x, indexing="ij")] 41 | faces = Triangulation(x, y).triangles 42 | return xu.Ugrid2d(x, y, -1, faces) 43 | 44 | 45 | uda = xu.data.elevation_nl().ugrid.sel( 46 | x=slice(125_000, 225_000), y=slice(440_000, 500_000) 47 | ) 48 | grid = create_grid(uda.ugrid.total_bounds, nx=7, ny=6) 49 | 50 | # %% 51 | 52 | fig, ax = plt.subplots() 53 | uda.ugrid.plot(vmin=-20, vmax=90, cmap="terrain", ax=ax) 54 | grid.plot(ax=ax, color="red") 55 | 56 | # %% 57 | # Method comparison 58 | # ----------------- 59 | # 60 | # Let's compare the different reduction functions that are available in 61 | # xugrid. We'll create a regridder once for every method, and plot the results 62 | # side by side. 63 | # 64 | # .. note:: 65 | # Sum and results in much higher values. The white in the figures are high 66 | # values, not no data. In contrast, a geometric mean generally only makes 67 | # sense for physical quantities with a "true zero": surface elevation is not 68 | # such quantity, as a datum is an arbitrary level. The xugrid geometric mean 69 | # returns NaN if reducing over negative values. 
70 | 71 | functions = [ 72 | "mean", 73 | "harmonic_mean", 74 | "geometric_mean", 75 | "sum", 76 | "minimum", 77 | "maximum", 78 | "mode", 79 | "median", 80 | "max_overlap", 81 | ] 82 | 83 | fig, axes = plt.subplots(nrows=5, ncols=2, figsize=(10, 25), sharey=True, sharex=True) 84 | axes = axes.ravel() 85 | 86 | for f, ax in zip(functions, axes): 87 | regridder = xu.OverlapRegridder(source=uda, target=grid, method=f) 88 | result = regridder.regrid(uda) 89 | result.ugrid.plot(vmin=-20, vmax=90, cmap="terrain", ax=ax) 90 | ax.set_title(f) 91 | 92 | # %% 93 | # Relative overlap 94 | # ---------------- 95 | # 96 | # For some reductions, the relative degree of overlap with the original source 97 | # cell is required rather than the absolute overlap, e.g. for first-order 98 | # conservative methods, such as conductance: 99 | 100 | regridder = xu.RelativeOverlapRegridder(source=uda, target=grid, method="conductance") 101 | result = regridder.regrid(uda) 102 | result.ugrid.plot() 103 | 104 | # %% 105 | # Custom reductions 106 | # ----------------- 107 | # 108 | # It's also possible to define your own reduction methods. Such a method is 109 | # inserted during the ``.regrid`` call and compiled by `Numba`_ for performance. 110 | # 111 | # A valid reduction method must be compileable by Numba, and takes exactly three 112 | # arguments: ``values``, ``weights``, ``workspace``. 113 | # 114 | # * ``values``: is the array containing the (float) source values. 115 | # * ``weights``: contains the (float) overlap between the target face and the 116 | # source faces. The size of ``weights`` is equal to the size of ``values``. 117 | # * ``workspace``: used as a temporary workspace of floats. The size of ``work`` is 118 | # equal to the size of ``values``. (Make sure to zero it beforehand if that's 119 | # important to your reduction!) 120 | # 121 | # Xugrid regridder reduction functions are implemented in such a way. 
For a 122 | # example, an area weighted sum could be implemented as follows: 123 | 124 | 125 | def mean(values, weights, workspace): 126 | total = 0.0 127 | weight_sum = 0.0 128 | for value, weight in zip(values, weights): 129 | if ~np.isnan(value): 130 | total += value * weight 131 | weight_sum += weight 132 | if weight_sum == 0.0: 133 | return np.nan 134 | return total / weight_sum 135 | 136 | 137 | # %% 138 | # .. note:: 139 | # * Each reduction must return a single float. 140 | # * Always check for ``np.isnan(value)``: Custom reductions methods must be 141 | # able to deal with NaN values as these are commonly encountered in datasets 142 | # as a "no data value". 143 | # * If Python features are used that are unsupported by Numba, you will get 144 | # somewhat obscure errors. In such a case, ``numba.njit`` and test your 145 | # function separately with synthetic values for ``values, weights, 146 | # workspace``. 147 | # * The ``workspace`` array is provided to avoid dynamic memory allocations. 148 | # It is a an array of floats with the same size as ``values`` or 149 | # ``weights``. You may freely allocate new arrays within the reduction 150 | # function but it will impact performance. (Methods such as mode or median 151 | # require a workspace.) 152 | # * While we could have implemented a weighted mean as: 153 | # ``np.nansum(values * weights) / np.nansum(weights)``, the function above 154 | # is efficiently compiled by Numba and does not allocate temporary arrays. 155 | # 156 | # To use our custom method, we provide it at initialization of the 157 | # OverlapRegridder: 158 | 159 | regridder = xu.OverlapRegridder(uda, grid, method=mean) 160 | result = regridder.regrid(uda) 161 | result.ugrid.plot(vmin=-20, vmax=90, cmap="terrain") 162 | 163 | # %% 164 | # Not every reduction uses the ``weights`` and ``workspace`` arguments. 
For 165 | # example, a regular sum could only look at the values: 166 | 167 | 168 | def nansum(values, weights, workspace): 169 | return np.nansum(values) 170 | 171 | 172 | # %% 173 | # Custom percentiles 174 | # ------------------ 175 | # 176 | # Xugrid provides a number of predefined percentiles (5, 10, 25, 50, 75, 90, 177 | # 95). In case you need a different percentile value, you can use this utility: 178 | 179 | p333 = xu.OverlapRegridder.create_percentile_method(33.3) 180 | 181 | # %% 182 | # Then, provide it as the regridder method as above: 183 | 184 | regridder = xu.OverlapRegridder(uda, grid, method=p333) 185 | result = regridder.regrid(uda) 186 | result.ugrid.plot(vmin=-20, vmax=90, cmap="terrain") 187 | 188 | # %% 189 | # .. _Numba: https://numba.pydata.org/ 190 | -------------------------------------------------------------------------------- /examples/network_gridder.py: -------------------------------------------------------------------------------- 1 | """ 2 | Gridding Ugrid1d Network Data to 2D Grids 3 | ========================================= 4 | 5 | In this example, we demonstrate how to interpolate and grid data from a network 6 | of line elements (UGRID1D) to a 2D structured grid. We'll cover the following 7 | xugrid techniques: 8 | 9 | * Creating Ugrid1d networks from node and edge arrays 10 | * Finding intersections between 1D and 2D grid edges 11 | * Refining networks by inserting intersection points 12 | * Topology-aware Laplace interpolation along networks 13 | * Converting node data to edge data 14 | * Regridding from 1D networks to 2D grids using NetworkGridder 15 | * Visualization of mixed 1D and 2D grids 16 | """ 17 | 18 | # %% 19 | # We'll start by setting up the structured grid first and converting it to a 20 | # Ugrid2d grid. 
21 | import numpy as np 22 | import xarray as xr 23 | 24 | import xugrid as xu 25 | 26 | 27 | def make_structured_grid(nrow, ncol, dx, dy): 28 | if dy >= 0: 29 | raise ValueError("dy must be negative.") 30 | 31 | shape = nrow, ncol 32 | 33 | xmin = 0.0 34 | xmax = dx * ncol 35 | ymin = 0.0 36 | ymax = abs(dy) * nrow 37 | dims = ("y", "x") 38 | 39 | y = np.arange(ymax, ymin, dy) + 0.5 * dy 40 | x = np.arange(xmin, xmax, dx) + 0.5 * dx 41 | coords = {"y": y, "x": x} 42 | 43 | return xr.DataArray(np.ones(shape, dtype=np.int32), coords=coords, dims=dims) 44 | 45 | 46 | structured_grid = make_structured_grid(10, 10, 1.5, -1.5) 47 | uda = xu.UgridDataArray.from_structured2d(structured_grid) 48 | ugrid2d = uda.ugrid.grid 49 | 50 | uda 51 | 52 | # %% 53 | # 54 | # Next, we create a 1D network. This network consists of 5 nodes and 4 edges. At 55 | # node 2 the network forks to two branches. The data is located assigned to the 56 | # nodes. 57 | 58 | node_xy = np.array( 59 | [ 60 | [0.0, 0.0], 61 | [5.0, 5.0], 62 | [10.0, 5.0], 63 | [15.0, 0.0], 64 | [15.0, 10.0], 65 | ] 66 | ) 67 | edge_nodes = np.array( 68 | [ 69 | [0, 1], 70 | [1, 2], 71 | [2, 3], 72 | [2, 4], 73 | ] 74 | ) 75 | network = xu.Ugrid1d(*node_xy.T, -1, edge_nodes) 76 | data = xr.DataArray( 77 | np.array([1, 1.5, 2, 4, -4], dtype=float), dims=(network.node_dimension,) 78 | ) 79 | uda_1d = xu.UgridDataArray(data, grid=network) 80 | 81 | uda_1d 82 | 83 | # %% 84 | # 85 | # Let's plot the 1D network on top of the 2D grid. The 1D network is shown in 86 | # light gray, the 2D grid in dark gray. The network's nodes are colored by data 87 | # values. 88 | 89 | uda_1d.ugrid.plot(zorder=10) 90 | uda_1d.ugrid.grid.plot(color="black", alpha=0.5) 91 | ugrid2d.plot(color="gray", alpha=0.5) 92 | 93 | # %% 94 | # 95 | # Intersect edges 96 | # --------------- 97 | # 98 | # First, we need to find the intersection points between the edges of the 99 | # network and the 2D grid. 
We can do this by using the
# :meth:`xugrid.Ugrid1d.intersect_edges` method.

# Gather the (start, end) coordinate pairs of every 2D grid edge, then
# intersect them with the network's edges.
edges_coords = ugrid2d.node_coordinates[ugrid2d.edge_node_connectivity]
_, _, intersections_xy = network.intersect_edges(edges_coords)

# %%
# Let's look at the intersection points.

import matplotlib.pyplot as plt

fig, ax = plt.subplots()

ugrid2d.plot(
    ax=ax,
    color="gray",
    alpha=0.5,
)
plt.scatter(*intersections_xy.T)

# %%
# Let's take a look at the individual values. We can see that the intersection
# points are located at the vertices of the 2D grid. This introduces duplicate
# intersection points.

intersections_xy

# %%
# Because the line intersects cell vertices, we have duplicate intersection
# points. Remove them by keeping only the unique points, as duplicates are not
# allowed in the network refinement we'll do in the next step.

_intersections_xy = np.unique(intersections_xy, axis=0)

_intersections_xy

# %%
#
# Refining the network
# --------------------
#
# Let's refine the network by the intersection points. This will create new
# nodes at the intersection points and add edges between the new nodes and the
# original nodes.

refined_network, refined_node_index = network.refine_by_vertices(
    _intersections_xy, return_index=True
)

refined_network

# %%
# We'll create a new UgridDataArray with the refined network. The data will be set to
# NaN at the refined nodes. The original data will be set to the original nodes.
152 | refined_data = xr.DataArray( 153 | np.empty_like(refined_network.node_x), dims=(refined_network.node_dimension,) 154 | ) 155 | uda_1d_refined = xu.UgridDataArray(refined_data, grid=refined_network) 156 | 157 | # Set data 158 | node_dim = uda_1d.ugrid.grid.node_dimension 159 | uda_1d_refined.data[uda_1d[node_dim].data] = uda_1d.data 160 | uda_1d_refined.data[refined_node_index] = np.nan 161 | 162 | uda_1d_refined 163 | 164 | # %% 165 | # 166 | # Interpolation 167 | # ------------- 168 | # 169 | # Next, interpolate the data to the refined nodes, we can do this with a laplace 170 | # interpolation. This nicely interpolates the data along the network: Notice 171 | # that the two branches on the right-hand side are interpolated separately. 172 | 173 | uda_1d_interpolated = uda_1d_refined.ugrid.laplace_interpolate() 174 | 175 | fig, ax = plt.subplots() 176 | uda_1d_interpolated.ugrid.plot(ax=ax, zorder=10) 177 | uda_1d_interpolated.ugrid.grid.plot(ax=ax, color="black", alpha=0.5, zorder=2) 178 | ugrid2d.plot(ax=ax, color="gray", alpha=0.5, zorder=3) 179 | 180 | # %% 181 | # 182 | # The final step before we can grid the network is setting the data to edge 183 | # centroids. We do this by averaging the data at the nodes that are connected to 184 | # the edges. This is done by using the edge_node_connectivity of the network. 185 | 186 | edge_data = xr.DataArray( 187 | data=uda_1d_interpolated.data[refined_network.edge_node_connectivity].mean(axis=1), 188 | dims=(refined_network.edge_dimension,), 189 | ) 190 | uda_1d_edge = xu.UgridDataArray(edge_data, grid=refined_network) 191 | 192 | fig, ax = plt.subplots() 193 | uda_1d_edge.ugrid.plot(ax=ax, zorder=10) 194 | ugrid2d.plot(ax=ax, color="gray", alpha=0.5, zorder=3) 195 | 196 | # %% 197 | # 198 | # Gridding 199 | # -------- 200 | # 201 | # Finally, we can grid the data to the 2D grid. We can do this by using the 202 | # :class:`xugrid.regrid.gridder.NetworkGridder` class. 
This class takes the
# Ugrid1d grid as source and Ugrid2d grid as target, the method to use for
# gridding and the data to grid.

from xugrid.regrid.gridder import NetworkGridder

gridder = NetworkGridder(
    source=uda_1d_edge.ugrid.grid,
    target=ugrid2d,
    method="mean",
)

gridder

# %%
#
# Next, we can grid the data. Call the
# :meth:`xugrid.regrid.gridder.NetworkGridder.regrid` method to grid the data.

network_gridded = gridder.regrid(uda_1d_edge)

fig, ax = plt.subplots()
network_gridded.ugrid.plot(ax=ax)
uda_1d_edge.ugrid.grid.plot(ax=ax, color="black", alpha=0.5, zorder=2)
ugrid2d.plot(ax=ax, color="gray", alpha=0.5, zorder=3)

# %%

"""
Regridding overview
===================

`Regridding`_ is the process of converting gridded data from one grid to
another grid. Xugrid provides tools for 2D and 3D regridding of structured
gridded data, represented as xarray objects, as well as (`layered`_)
unstructured gridded data, represented as xugrid objects.

A number of regridding methods are provided, based on area or volume overlap,
as well as interpolation routines. It currently only supports Cartesian
coordinates. See e.g. `xESMF`_ instead for regridding with a spherical Earth
representation (note: ESMF is `not available`_ via conda-forge on Windows).

Here are a number of quick examples of how to get started with regridding.

We'll start by importing a few essential packages.
"""
# %%

import matplotlib.pyplot as plt
import xarray as xr

import xugrid as xu

# %%
# We will take a look at a sample dataset: a triangular grid with the surface
# elevation of the Netherlands.

uda = xu.data.elevation_nl()
uda.ugrid.plot(vmin=-20, vmax=90, cmap="terrain")

# %%
# Xugrid provides several "regridder" classes which can convert gridded data
# from one grid to another grid. Let's generate a very simple coarse mesh that
# covers the entire Netherlands.


def create_grid(bounds, nx, ny):
    """Create a simple grid of triangles covering a rectangle."""
    import numpy as np
    from matplotlib.tri import Triangulation

    xmin, ymin, xmax, ymax = bounds
    dx = (xmax - xmin) / nx
    dy = (ymax - ymin) / ny
    x = np.arange(xmin, xmax + dx, dx)
    y = np.arange(ymin, ymax + dy, dy)
    # Build the full set of grid corner points, then let matplotlib provide a
    # Delaunay triangulation of them.
    y, x = [a.ravel() for a in np.meshgrid(y, x, indexing="ij")]
    faces = Triangulation(x, y).triangles
    return xu.Ugrid2d(x, y, -1, faces)


grid = create_grid(uda.ugrid.total_bounds, 7, 7)

# %%
# CentroidLocatorRegridder
# ------------------------
#
# An easy way of regridding is by simply looking in which cell of the original
# the centroids of the new grid fall.

fig, ax = plt.subplots()
uda.ugrid.plot(vmin=-20, vmax=90, cmap="terrain", ax=ax)
grid.plot(ax=ax, color="red")
ax.scatter(*grid.centroids.T, color="red")

# %%
# Xugrid provides the CentroidLocatorRegridder for this:

regridder = xu.CentroidLocatorRegridder(source=uda, target=grid)
result = regridder.regrid(uda)
result.ugrid.plot(vmin=-20, vmax=90, cmap="terrain", edgecolor="red")

# %%
# OverlapRegridder
# ----------------
#
# Such a regridding is not appropriate when the new grid cells are
# so large. Let's try the OverlapRegridder instead.

regridder = xu.OverlapRegridder(source=uda, target=grid)
mean = regridder.regrid(uda)
mean.ugrid.plot(vmin=-20, vmax=90, cmap="terrain", edgecolor="red")

# %%
# By default, the OverlapRegridder computes an area weighted mean.
# Let's try again, now with the minimum:

regridder = xu.OverlapRegridder(source=uda, target=grid, method="minimum")
minimum = regridder.regrid(uda)
minimum.ugrid.plot(vmin=-20, vmax=90, cmap="terrain", edgecolor="red")

# %%
# Or the maximum:

regridder = xu.OverlapRegridder(source=uda, target=grid, method="maximum")
maximum = regridder.regrid(uda)
maximum.ugrid.plot(vmin=-20, vmax=90, cmap="terrain", edgecolor="red")

# %%
# All regridders also work for multi-dimensional data.
#
# Let's pretend our elevation dataset contains multiple layers, for example to
# denote multiple geological strata. We'll generate five layers, each with a
# thickness of 10.0 meters.

thickness = xr.DataArray(
    data=[10.0, 10.0, 10.0, 10.0, 10.0],
    coords={"layer": [1, 2, 3, 4, 5]},
    dims=["layer"],
)

# %%
# We need to make sure that the face dimension remains last, so we transpose
# the result.

bottom = (uda - thickness.cumsum("layer")).transpose()
bottom

# %%
# We can feed the result to the regridder, which will automatically regrid over
# all additional dimensions.

mean_bottom = xu.OverlapRegridder(source=bottom, target=grid).regrid(bottom)
mean_bottom

# %%
# Let's take a slice to briefly inspect our original layer bottom elevation,
# and the aggregated mean.
131 | 132 | section_y = 475_000.0 133 | section = bottom.ugrid.sel(y=section_y) 134 | section_mean = mean_bottom.ugrid.sel(y=section_y) 135 | 136 | fig, (ax0, ax1) = plt.subplots(ncols=2, figsize=(10, 5), sharex=True, sharey=True) 137 | section.plot.line(x="mesh2d_s", hue="layer", ax=ax0) 138 | section_mean.plot.line(x="mesh2d_s", hue="layer", ax=ax1) 139 | 140 | # %% 141 | # BarycentricInterpolator 142 | # ----------------------- 143 | # 144 | # All examples above show reductions: from a fine grid to a coarse grid. 145 | # However, xugrid also provides interpolation to generate smooth fine 146 | # representations of a coarse grid. 147 | # 148 | # To illustrate, we will zoom in to a part of the Netherlands. 149 | 150 | part = uda.ugrid.sel(x=slice(125_000, 225_000), y=slice(440_000, 500_000)) 151 | part.ugrid.plot(vmin=-20, vmax=90, cmap="terrain") 152 | 153 | # %% 154 | # We can clearly identify the individual triangles that form the grid. To get a 155 | # smooth presentation, we can use the BarycentricInterpolator. 156 | # 157 | # We will generate a fine grid. 158 | 159 | grid = create_grid(part.ugrid.total_bounds, nx=100, ny=100) 160 | 161 | # %% 162 | # We use the centroids of the fine grid to interpolate between the centroids of 163 | # the triangles. 164 | 165 | regridder = xu.BarycentricInterpolator(part, grid) 166 | interpolated = regridder.regrid(part) 167 | interpolated.ugrid.plot(vmin=-20, vmax=90, cmap="terrain") 168 | 169 | # %% 170 | # Arbitrary grids 171 | # --------------- 172 | # 173 | # The above examples all feature triangular source and target grids. However, 174 | # the regridders work for any collection of (convex) faces. 
175 | 176 | grid = create_grid(part.ugrid.total_bounds, nx=20, ny=15) 177 | voronoi_grid = grid.tesselate_centroidal_voronoi() 178 | 179 | regridder = xu.CentroidLocatorRegridder(part, voronoi_grid) 180 | result = regridder.regrid(part) 181 | 182 | fig, ax = plt.subplots() 183 | result.ugrid.plot(vmin=-20, vmax=90, cmap="terrain") 184 | voronoi_grid.plot(ax=ax, color="red") 185 | 186 | # %% 187 | # Re-use 188 | # ------ 189 | # 190 | # The most expensive step of the regridding process is finding and computing 191 | # overlaps. A regridder can be used repeatedly, provided the source topology 192 | # is kept the same. 193 | 194 | part_other = part - 50.0 195 | result = regridder.regrid(part_other) 196 | result.ugrid.plot(vmin=-20, vmax=90, cmap="terrain") 197 | 198 | # %% 199 | # .. _Xarray: https://docs.xarray.dev/en/stable/index.html 200 | # .. _Xugrid: https://deltares.github.io/xugrid/ 201 | # .. _Regridding: https://climatedataguide.ucar.edu/climate-tools/regridding-overview 202 | # .. _layered: https://ugrid-conventions.github.io/ugrid-conventions/#3d-layered-mesh-topology 203 | # .. _xESMF: https://xesmf.readthedocs.io/en/latest/index.html 204 | # .. 
_not available: https://github.com/conda-forge/esmf-feedstock/issues/64 205 | 206 | # %% 207 | -------------------------------------------------------------------------------- /xugrid/regrid/unstructured.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | import numba as nb 4 | import numpy as np 5 | import xarray as xr 6 | 7 | import xugrid as xu 8 | from xugrid.constants import FloatDType 9 | from xugrid.ugrid import voronoi 10 | from xugrid.ugrid.ugrid2d import Ugrid2d 11 | 12 | 13 | @nb.njit(cache=True) 14 | def replace_interpolated_weights( 15 | vertices, 16 | faces, 17 | face_index, 18 | weights, 19 | node_to_node_map, 20 | node_index_threshold, 21 | ): 22 | n, m = weights.shape 23 | for i in range(n): 24 | face = faces[face_index[i]] 25 | weights_row = weights[i] 26 | for j in range(m): 27 | p = face[j] 28 | w = weights_row[j] 29 | if (p < node_index_threshold) or (w <= 0): 30 | continue 31 | # Find the two surrounding nodes (q and r) 32 | index = p - node_index_threshold 33 | q, r = node_to_node_map[index] 34 | px, py = vertices[p] 35 | qx, qy = vertices[q] 36 | rx, ry = vertices[r] 37 | # Compute the euclidian distance to both 38 | p_q = np.sqrt((qx - px) ** 2 + (qy - py) ** 2) 39 | p_r = np.sqrt((rx - px) ** 2 + (ry - py) ** 2) 40 | total = p_q + p_r 41 | # Redistribute weight according to inverse distance. 42 | weight_q = (p_r / total) * w 43 | weight_r = (p_q / total) * w 44 | # Set weights to zero for p, and add to r and q weights. 45 | weights[i, j] = 0.0 46 | # Search for p and q 47 | for jj in range(m): 48 | node = face[jj] 49 | if node == q: 50 | weights[i, jj] += weight_q 51 | if node == r: 52 | weights[i, jj] += weight_r 53 | return 54 | 55 | 56 | class UnstructuredGrid2d: 57 | """ 58 | Stores only the grid topology. 59 | 60 | e.g. 
class UnstructuredGrid2d:
    """
    Wrap an unstructured 2D topology for use in regridding.

    Stores only the grid topology, e.g. for face -> face regridding.

    Parameters
    ----------
    obj: Ugrid2d, UgridDataArray, or UgridDataset
        Object providing the Ugrid2d topology. (The original docstring named
        this parameter ``grid``; the actual signature uses ``obj``.)
    """

    def __init__(self, obj):
        if isinstance(obj, (xu.UgridDataArray, xu.UgridDataset)):
            self.ugrid_topology = obj.grid
        elif isinstance(obj, Ugrid2d):
            self.ugrid_topology = obj
        else:
            options = {"Ugrid2d", "UgridDataArray", "UgridDataset"}
            raise TypeError(
                f"Expected one of {options}, received: {type(obj).__name__}"
            )

    @property
    def ndim(self):
        # The unstructured face data is stored along a single dimension.
        return 1

    @property
    def dims(self):
        return (self.ugrid_topology.face_dimension,)

    @property
    def shape(self):
        return (self.ugrid_topology.n_face,)

    @property
    def size(self):
        return self.ugrid_topology.n_face

    @property
    def area(self):
        return self.ugrid_topology.area

    def convert_to(self, matched_type):
        """Return self if it is an instance of ``matched_type``; raise otherwise."""
        if isinstance(self, matched_type):
            return self
        # BUG FIX: the TypeError was previously instantiated but never raised,
        # so a failed conversion silently returned None.
        raise TypeError(f"Cannot convert UnstructuredGrid2d to {matched_type.__name__}")

    def overlap(self, other, relative: bool):
        """
        Compute the area of overlap between the faces of this grid and another.

        Parameters
        ----------
        other: UnstructuredGrid2d
        relative: bool
            Whether to divide by the original area. Used for e.g.
            first-order-conservative methods.

        Returns
        -------
        source_index: 1d np.ndarray of int
        target_index: 1d np.ndarray of int
        weights: 1d np.ndarray of float
        """
        (
            target_index,
            source_index,
            weights,
        ) = self.ugrid_topology.celltree.intersect_faces(
            vertices=other.ugrid_topology.node_coordinates,
            faces=other.ugrid_topology.face_node_connectivity,
            fill_value=other.ugrid_topology.fill_value,
        )
        if relative:
            weights /= self.area[source_index]
        return source_index, target_index, weights

    def locate_centroids(self, other, tolerance: Optional[float] = None):
        """
        Find which face of this grid contains each centroid of ``other``.

        Centroids falling outside this grid (index -1) are dropped; weights
        are all 1.0 since this is pure point lookup.
        """
        tree = self.ugrid_topology.celltree
        source_index = tree.locate_points(other.ugrid_topology.centroids, tolerance)
        inside = source_index != -1
        source_index = source_index[inside]
        target_index = np.arange(other.size, dtype=source_index.dtype)[inside]
        weight_values = np.ones_like(source_index, dtype=FloatDType)
        return source_index, target_index, weight_values

    def barycentric(self, other, tolerance: Optional[float] = None):
        """
        Compute barycentric interpolation weights from this grid's face
        centroids to the centroids of ``other``, via a centroidal voronoi
        tesselation of this grid.
        """
        points = other.ugrid_topology.centroids
        grid = self.ugrid_topology

        # Create a voronoi grid to get surrounding nodes as vertices
        (
            vertices,
            faces,
            node_to_face_index,
            node_to_node_map,
        ) = voronoi.voronoi_topology(
            grid.node_face_connectivity,
            grid.node_coordinates,
            grid.centroids,
            edge_face_connectivity=grid.edge_face_connectivity,
            edge_node_connectivity=grid.edge_node_connectivity,
            add_exterior=True,
            add_vertices=True,
            skip_concave=True,
        )

        voronoi_grid = Ugrid2d(
            vertices[:, 0],
            vertices[:, 1],
            -1,
            faces,
        )
        face_index, weights = voronoi_grid.compute_barycentric_weights(
            points, tolerance
        )

        # Find which nodes are interpolated. Redistribute their weights
        # according to distance to projection vertex.
        replace_interpolated_weights(
            vertices=vertices,
            faces=faces,
            face_index=face_index,
            weights=weights,
            node_to_node_map=node_to_node_map,
            node_index_threshold=len(vertices) - len(node_to_node_map),
        )

        # Discards 0 weights and points that fall outside of the grid.
        outside = grid.locate_points(points) == -1
        weights[outside] = 0
        keep = weights.ravel() > 0
        source_index = node_to_face_index[
            voronoi_grid.face_node_connectivity[face_index]
        ].ravel()[keep]

        n_points, n_max_node = weights.shape
        target_index = np.repeat(np.arange(n_points), n_max_node)[keep]
        weights = weights.ravel()[keep]

        order = np.argsort(target_index)
        return source_index[order], target_index[order], weights[order]

    def intersection_length(self, other: "xu.regrid.network.Network1d", relative: bool):
        """
        Intersect the edges of a 1D network with this 2D grid and return the
        length of each intersected segment.

        NOTE(review): unlike ``overlap``, this returns indices in
        (target, source) order -- confirm callers expect this ordering.
        """
        (
            target_index,
            source_index,
            intersections,
        ) = self.ugrid_topology.celltree.intersect_edges(
            other.ugrid_topology.edge_node_coordinates
        )
        order = np.argsort(source_index)
        # Segment length: Euclidean norm of (end - start) per intersection.
        length = np.linalg.norm(np.diff(intersections, axis=1)[:, 0, :], axis=-1)
        if relative:
            length /= other.length[source_index]
        return target_index[order], source_index[order], length[order]

    def to_dataset(self, name: str):
        """Serialize the topology to a dataset, tagging its regrid type."""
        ds = self.ugrid_topology.rename(name).to_dataset()
        ds[name + "_type"] = xr.DataArray(-1, attrs={"type": "UnstructuredGrid2d"})
        return ds
from xugrid.regrid.nanpercentile import _select_two

# NOTE: all reductions below share the signature (values, weights, workspace):
#
# * values: float array with the source values overlapping one target cell.
# * weights: float array with the overlap (e.g. area) per source value; same
#   size as values.
# * workspace: float scratch array, same size as values, provided so the
#   (numba-compiled) regridding loop avoids dynamic allocation.
#
# Each reduction returns a single float, and NaN when every value is NaN or
# the total weight is zero. The bodies deliberately use plain loops so numba
# compiles them without temporary arrays.


def mean(values, weights, workspace):
    # Weighted arithmetic mean; NaN values are skipped.
    vsum = 0.0
    wsum = 0.0
    for v, w in zip(values, weights):
        if np.isnan(v):
            continue
        vsum += w * v
        wsum += w
    if wsum == 0:
        return np.nan
    else:
        return vsum / wsum


def harmonic_mean(values, weights, workspace):
    # Weighted harmonic mean; NaN and zero values are skipped (a zero value
    # would make the harmonic mean blow up).
    v_agg = 0.0
    w_sum = 0.0
    for v, w in zip(values, weights):
        if np.isnan(v) or v == 0:
            continue
        if w > 0:
            w_sum += w
            v_agg += w / v
    if v_agg == 0 or w_sum == 0:
        return np.nan
    else:
        return w_sum / v_agg


def geometric_mean(values, weights, workspace):
    # Weighted geometric mean via exp(sum(w * log(v))); any negative value
    # yields NaN since a real-valued geometric mean is undefined then.
    v_agg = 0.0
    w_sum = 0.0

    # Compute sum to normalize weights to avoid tiny or huge values in exp
    normsum = 0.0
    for v, w in zip(values, weights):
        normsum += w
    # Early return if no values
    if normsum == 0:
        return np.nan

    for v, w in zip(values, weights):
        w = w / normsum
        # Skip if v is NaN or 0.
        if v > 0 and w > 0:
            v_agg += w * np.log(abs(v))
            w_sum += w
        elif v < 0:
            # Computing a geometric mean of negative numbers requires a complex
            # value.
            return np.nan

    if w_sum == 0:
        return np.nan
    else:
        # w_sum is generally 1.0, but might not be if there are NaNs present!
        return np.exp((1.0 / w_sum) * v_agg)


def sum(values, weights, workspace):
    # Plain sum of non-NaN values; weights are only used to detect the
    # "no overlap at all" case (w_sum == 0 -> NaN).
    v_sum = 0.0
    w_sum = 0.0

    for v, w in zip(values, weights):
        if np.isnan(v):
            continue
        v_sum += v
        w_sum += w
    if w_sum == 0:
        return np.nan
    else:
        return v_sum


@nb.njit(inline="always")
def _minimum(values, weights):
    # Minimum of the non-NaN values; NaN if all weights are zero.
    v_min = np.inf
    w_max = 0.0
    for v, w in zip(values, weights):
        if np.isnan(v):
            continue
        v_min = min(v, v_min)
        w_max = max(w_max, w)
    if w_max == 0.0:
        return np.nan
    return v_min


def minimum(values, weights, workspace):
    return _minimum(values, weights)


@nb.njit(inline="always")
def _maximum(values, weights):
    # Maximum of the non-NaN values; NaN if all weights are zero.
    v_max = -np.inf
    w_max = 0.0
    for v, w in zip(values, weights):
        if np.isnan(v):
            continue
        v_max = max(v, v_max)
        w_max = max(w_max, w)
    if w_max == 0.0:
        return np.nan
    return v_max


def maximum(values, weights, workspace):
    return _maximum(values, weights)


def mode(values, weights, workspace):
    # Area weighted mode. We use a linear search to accumulate weights, as we
    # generally expect a relatively small number of elements in the indices and
    # weights arrays.
    # The workspace doubles as the per-value weight accumulator: duplicate
    # values add their weight to the slot of their first occurrence.
    accum = workspace
    accum[: weights.size] = weights[:]
    w_sum = 0  # counts the number of non-NaN entries seen
    w_max = 0.0
    for running_total, (v, w) in enumerate(zip(values, weights)):
        if np.isnan(v):
            continue
        w_max = max(w, w_max)
        w_sum += 1
        for j in range(running_total):  # Compare with previously found values
            if values[j] == v:  # matches previous value
                accum[j] += w  # increase previous weight sum
                break

    if w_sum == 0 or w_max == 0.0:
        # Everything skipped (all nodata), or all weights zero
        return np.nan
    else:
        # Find value with highest accumulated weight.
        # In case frequencies are equal (a tie), take the largest value.
        # This ensures the same result irrespective of value ordering.
        w_max = 0
        mode_value = values[0]
        for w_accum, v in zip(accum, values):
            if ~np.isnan(v):
                if (w_accum > w_max) or (w_accum == w_max and v > mode_value):
                    w_max = w_accum
                    mode_value = v
        return mode_value


@nb.njit
def percentile(values, weights, workspace, p):
    # Percentile (linear interpolation between closest ranks) of the non-NaN
    # values. The workspace holds the filtered values for selection.
    # This function is a simplified port of:
    # https://github.com/numba/numba/blob/0441bb17c7820efc2eba4fd141b68dac2afa4740/numba/np/arraymath.py#L1745

    # Exit early if all weights are 0.
    w_max = 0.0
    for w in weights:
        w_max = max(w, w_max)
    if w_max == 0.0:
        return np.nan

    if p == 0:
        return _minimum(values, weights)

    if p == 100:
        return _maximum(values, weights)

    # Everything should've been checked before:
    #
    # * a.dtype should be float
    # * 0 <= q <= 100.
    #
    # Filter the NaNs

    n = 0
    for v in values:
        if ~np.isnan(v):
            workspace[n] = v
            n += 1

    # Early returns
    if n == 0:
        return np.nan
    if n == 1:
        return workspace[0]

    # linear interp between closest ranks
    rank = 1 + (n - 1) * p / 100.0
    f = math.floor(rank)
    m = rank - f
    # _select_two finds the f-th and (f+1)-th smallest values without a full
    # sort (partial selection).
    lower, upper = _select_two(workspace[:n], k=int(f - 1), low=0, high=(n - 1))
    return lower * (1 - m) + upper * m


def first_order_conservative(values, weights, workspace):
    # Uses relative weights (fraction of the source cell overlapped), so this
    # conserves the integrated quantity across the regrid.
    v_agg = 0.0
    w_sum = 0.0
    for v, w in zip(values, weights):
        if np.isnan(v):
            continue
        v_agg += v * w
        w_sum += w
    if w_sum == 0:
        return np.nan
    else:
        return v_agg


# Conductance is mathematically identical to a first-order conservative sum.
conductance = first_order_conservative


def max_overlap(values, weights, workspace):
    # Value of the source cell with the largest overlap.
    w_max = 0.0
    v_max = -np.inf
    # Find value with highest overlap.
    # In case frequencies are equal (a tie), take the largest value.
    # This ensures the same result irrespective of value ordering.
    for v, w in zip(values, weights):
        if ~np.isnan(v):
            if (w > w_max) or (w == w_max and v > v_max):
                w_max = w
                v_max = v
    if w_max == 0.0:
        return np.nan
    return v_max


def create_percentile_method(p: float) -> Callable:
    """
    Return a reduction function computing the p-th percentile.

    Parameters
    ----------
    p: float
        Percentile value in the range [0, 100].

    Raises
    ------
    ValueError
        If p is outside of [0, 100].
    """
    if not (0.0 <= p <= 100.0):
        raise ValueError(f"percentile must be in the range [0, 100], received: {p}")

    def f(values, weights, workspace) -> float:
        return percentile(values, weights, workspace, p)

    return f


median = create_percentile_method(50)


# Registry of reductions operating on absolute overlap (e.g. area).
ABSOLUTE_OVERLAP_METHODS = {
    "mean": mean,
    "harmonic_mean": harmonic_mean,
    "geometric_mean": geometric_mean,
    "sum": sum,
    "minimum": minimum,
    "maximum": maximum,
    "mode": mode,
    "median": median,
    "max_overlap": max_overlap,
}
# Register commonly used percentiles under names like "p5", "p95".
for p in (5, 10, 25, 50, 75, 90, 95):
    ABSOLUTE_OVERLAP_METHODS[f"p{p}"] = create_percentile_method(p)


# Registry of reductions requiring relative overlap (fraction of source cell).
RELATIVE_OVERLAP_METHODS = {
    "conductance": conductance,
    "first_order_conservative": first_order_conservative,
}
tools for label based selection, in the form of ``.sel``
and ``.isel`` for index selection. This works well for structured data since
the orthogonality of the x and y axes is reflected in the axes of the
underlying arrays. This orthogonality does not exist for unstructured grids, as
the data for all faces cannot be stored in a two-dimensional array and is
stored in a one-dimensional array instead.

Xugrid provides tools for convenient spatial selection, primarily via the
``.ugrid.sel`` method; its behavior is comparable to xarray's ``.sel`` method.
The ``.ugrid.sel`` method should only be used for selection in the x or y
dimension. Selections along other dimensions (such as time) should be performed
by xarray's ``.sel`` instead (without the ``ugrid`` accessor).

The examples below demonstrate the various ways to select data.

Imports
-------

The following imports suffice for the examples.
"""

# %%
import matplotlib.pyplot as plt
import numpy as np
import shapely

import xugrid as xu

# %%
# We will take a look at a sample dataset: a triangular grid with the surface
# elevation of the Netherlands.

uda = xu.data.elevation_nl()
uda.ugrid.plot(vmin=-20, vmax=90, cmap="terrain")

# %%
# We will start by demonstrating the behavior of ``.ugrid.sel``. This method
# takes several types of arguments, like its xarray equivalent. The return type
# and shape of the selection operation depends on the argument given.
#
# ========== ===========
# Selection  Result type
# ========== ===========
# Subset     xugrid
# Point      xarray
# Line       xarray
# ========== ===========
#
# Grid subset selection
# ---------------------
#
# A subset of the unstructured grid is returned by using slices without a step:

subset = uda.ugrid.sel(x=slice(100_000.0, 200_000.0), y=slice(450_000.0, 550_000.0))
subset.ugrid.plot(vmin=-20, vmax=90, cmap="terrain")

# %%
# The default arguments of ``x`` and ``y`` are: ``slice(None, None)``.
# In such a case the entire grid is returned.

subset = uda.ugrid.sel()
subset.ugrid.plot(vmin=-20, vmax=90, cmap="terrain")

# %%
# .. note::
#
#   ``None`` in a Python slice can be interpreted as "from the start" or "up to
#   and including the end".
#
# This means we can easily select along a single dimension:

subset = uda.ugrid.sel(x=slice(100_000.0, 200_000.0))
subset.ugrid.plot(vmin=-20, vmax=90, cmap="terrain", aspect=1, size=5)

# %%
# Or, using ``None`` if we only care about the start:

subset = uda.ugrid.sel(x=slice(100_000.0, None))
subset.ugrid.plot(vmin=-20, vmax=90, cmap="terrain", aspect=1, size=5)

# %%
# Point selection
# ---------------
#
# Since point data can be represented as an ordinary xarray DataArray with x
# and y coordinates, all point selections result in xarray DataArrays rather
# than UgridDataArrays with an associated unstructured grid topology.
#
# We will use a utility function to show what is selected on the map:


def show_point_selection(uda, da):
    """Plot the grid, then overlay the selected points in red."""
    _, ax = plt.subplots()
    uda.ugrid.plot(ax=ax, vmin=-20, vmax=90, cmap="terrain")
    ax.scatter(da["mesh2d_x"], da["mesh2d_y"], color="red")
    ax.set_aspect(1.0)


# %%
# Two values will select a point:

da = uda.ugrid.sel(x=150_000.0, y=463_000.0)
show_point_selection(uda, da)
da

# %%
# Multiple values are broadcasted against each other ("outer indexing").
# If we select by three x values and two y values, the result is a collection
# of six points:

da = uda.ugrid.sel(x=[125_000.0, 150_000.0, 175_000.0], y=[400_000.0, 465_000.0])
show_point_selection(uda, da)
da

# %%
# To select points without broadcasting, use ``.ugrid.sel_points`` instead:

da = uda.ugrid.sel_points(
    x=[125_000.0, 150_000.0, 175_000.0], y=[400_000.0, 430_000.0, 465_000.0]
)
show_point_selection(uda, da)
da

# %%
# We can sample points along a line as well by providing slices **with** a step:

da = uda.ugrid.sel(x=slice(100_000.0, 200_000.0, 10_000.0), y=465_000.0)
show_point_selection(uda, da)
da

# %%
# Two slices with a step result in broadcasting:

da = uda.ugrid.sel(
    x=slice(100_000.0, 200_000.0, 10_000.0), y=slice(400_000.0, 500_000.0, 10_000.0)
)
show_point_selection(uda, da)
da

# %%
# As well as a slice with a step and multiple values:

da = uda.ugrid.sel(x=slice(100_000.0, 200_000.0, 10_000.0), y=[400_000.0, 430_000.0])
show_point_selection(uda, da)
da

# %%
# Line selection
# --------------
#
# Since line data can be represented as an ordinary xarray DataArray with x
# and y coordinates, all line selections result in xarray DataArrays rather
# than UgridDataArrays with an associated unstructured grid topology.
#
# Line selection is performed by finding all faces that are intersected by
# the line.
#
# We start by defining a utility to show the selection again:


def show_line_selection(uda, da, line_x=None, line_y=None):
    """Plot the grid and selected values side by side, with the line in red.

    If only one of line_x/line_y is given, the line is drawn as a horizontal
    or vertical rule; otherwise as an arbitrary polyline.
    """
    _, (ax0, ax1) = plt.subplots(ncols=2, figsize=(10, 5))
    uda.ugrid.plot(ax=ax0, vmin=-20, vmax=90, cmap="terrain")
    da.plot(ax=ax1, x="mesh2d_s")
    if line_x is None:
        ax0.axhline(line_y, color="red")
    elif line_y is None:
        ax0.axvline(line_x, color="red")
    else:
        ax0.plot(line_x, line_y, color="red")
    ax0.set_aspect(1.0)


# %%
# A single value for either x or y in ``.ugrid.sel`` will select values along a
# line:

da = uda.ugrid.sel(y=465_000.0)
show_line_selection(uda, da, line_y=465_000.0)

# %%
# Line segments that are not axis aligned can be selected with
# ``.ugrid.intersect_line``:

da = uda.ugrid.intersect_line(start=(60_000.0, 400_000.0), end=(190_000.0, 475_000.0))
show_line_selection(uda, da, (60_000.0, 190_000.0), (400_000.0, 475_000.0))

# %%
# Linestrings can be selected with ``.ugrid.intersect_linestring``:

linestring = shapely.geometry.LineString(
    [
        (60_000.0, 400_000.0),
        (190_000.0, 400_000.0),
        (120_000.0, 575_000.0),
        (250_000.0, 575_000.0),
    ]
)

da = uda.ugrid.intersect_linestring(linestring)
show_line_selection(uda, da, *shapely.get_coordinates(linestring).T)

# %%
# This will work for any type of shapely line:

ring = shapely.geometry.Point(155_000.0, 463_000).buffer(50_000.0).exterior
da = uda.ugrid.intersect_linestring(ring)
show_line_selection(uda, da, *shapely.get_coordinates(ring).T)

# %%
# Index selection
# ---------------
#
# We may also use ordinary index selection to create a subset. This does not
# require the ``.ugrid`` accessor. For example, to take only the first
# thousand faces:

subset = uda.isel(mesh2d_nFaces=np.arange(1000))
subset.ugrid.plot(vmin=-20, vmax=90, cmap="terrain", aspect=1, size=5)

# %%
# For a 2D topology, selecting faces by an index always results in a valid
# topology. However, selecting by node or edge does not give a guarantee that
# the result forms a valid 2D topology: e.g. if we only select two nodes, or
# only two edges from a face, the result cannot form a valid 2D face.
#
# To avoid generating invalid topologies, xugrid always checks whether the
# result of a selection results in a valid 2D topology and raises an error if
# the result is invalid.
#
# In general, index selection should only be performed on the "core" dimension
# of the UGRID topology. This is the edge dimension for 1D topologies, and the
# face dimension for 2D topologies.
# %%

# ==== End of examples/selection.py ====

# ==== File: xugrid/regrid/overlap_1d.py ====
import numba
import numpy as np

from xugrid.constants import IntDType
from xugrid.regrid.utils import alt_cumsum


@numba.njit(inline="always")
def minmax(v, lower, upper):
    # Clamp v to the inclusive range [lower, upper].
    return min(upper, max(lower, v))


def _find_indices(side):
    """Build a jitted bisection function for one search direction.

    Closure factory: the returned ``find_indices`` performs a NaN-tolerant,
    stateful searchsorted over rows of 2D arrays. ``side`` selects the
    comparison used in the binary search ("right" uses <=, "left" uses <),
    mirroring numpy.searchsorted's ``side`` argument.

    Raises
    ------
    ValueError
        If side is neither "left" nor "right".
    """

    @numba.njit(inline="always")
    def lt(x, y):
        return x < y

    @numba.njit(inline="always")
    def le(x, y):
        return x <= y

    # `add` shifts the found position to the lower (-1) or upper (+1)
    # neighboring index depending on the search direction.
    if side == "right":
        func = le
        add = -1
    elif side == "left":
        func = lt
        add = 1
    else:
        raise ValueError("side must be left or right")

    @numba.njit
    def searchsorted_inner(a, v, v_last, lo, hi, n):
        """
        Perform inner loop of searchsorted (i.e. a binary search).
        This is loosely based on the NumPy implementation in [1]_.

        Parameters
        ----------
        a: 1-D array_like
            The input array.
        v: array_like
            The current value to insert into `a`.
        v_last: array_like
            The previous value inserted into `a`.
        lo: int
            The initial/previous "low" value of the binary search.
        hi: int
            The initial/previous "high" value of the binary search.
        n: int
            The length of `a`.

        .. [1] https://github.com/numpy/numpy/blob/809e8d26b03f549fd0b812a17b8a166bcd966889/numpy/core/source/npysort/binsearch.cpp#L173
        """  # noqa: E501
        # Reuse the previous search window: if values arrive in ascending
        # order, only the upper bound must be reset; otherwise restart from 0.
        if v_last < v:
            hi = n
        else:
            lo = 0
            hi = hi + 1 if hi < n else n

        while hi > lo:
            mid = (lo + hi) >> 1
            if func(a[mid], v):
                # mid is too low => go up
                lo = mid + 1
            else:
                # mid is too high, or is a NaN => go down
                hi = mid
        return lo

    @numba.njit
    def preallocated_searchsorted(a, v, out, nan_helper, sorter) -> None:
        # source bounds may contain NaN values. Eliminate those here and store
        # the original positions in the sorter.
        jj = 0
        for ii, vv in enumerate(a):
            if np.isnan(vv):
                continue
            nan_helper[jj] = vv
            sorter[jj] = ii
            jj += 1

        # NOTE(review): n = jj - 1 searches only the first jj-1 compacted
        # values; presumably intentional for bounds arrays — confirm.
        n = jj - 1
        lo = 0
        hi = n
        v_last = v[0]
        for i in range(len(v)):
            v_search = v[i]
            if np.isnan(v_search):
                # NaN targets keep out[i] at its preallocated value.
                continue
            lo = searchsorted_inner(nan_helper, v_search, v_last, lo, hi, n)
            v_last = v_search
            # Map back to the original (pre-compaction) position and clamp.
            out[i] = minmax(sorter[lo] + add, 0, a.size)
        return

    @numba.njit
    def find_indices(
        source,
        target,
        source_index,
        target_index,
    ):
        """
        Find the indices of target in source. Allocate the result in slices of
        source_index and target_index.

        This is basically a workaround. Numpy searchsorted does not support an axis
        argument to search one nD array on the other.
        See: https://github.com/numpy/numpy/issues/4224

        Fortunately, numba implements searchsorted here:
        https://github.com/numba/numba/blob/f867999c7453141642ea9af21febef796da9ca93/numba/np/arraymath.py#L3647
        But it continuously allocates new result arrays.

        Parameters
        ----------
        source: np.ndarray of shape (n, m)
            All other dims flattened to dimension with size n.
        target: np.ndarray of shape (n, m)
            All other dims flattened to dimension with size n.
        source_index: np.ndarray of shape (n_index,)
        target_index: np.ndarray of shape (n_index,)
        """
        _, n = source.shape
        _, m = target.shape
        # Will contain the index along dimension m.
        indices = np.full((source_index.size, m), -1, dtype=IntDType)
        # Scratch buffers reused across rows to avoid per-row allocation.
        sorter = np.empty(n, IntDType)
        nan_helper = np.empty(n, source.dtype)
        for k, (i, j) in enumerate(zip(source_index, target_index)):
            source_i = source[i, :]
            target_j = target[j, :]
            preallocated_searchsorted(
                source_i, target_j, indices[k], nan_helper, sorter
            )
        return indices

    return find_indices


find_lower_indices = _find_indices("right")
find_upper_indices = _find_indices("left")


def vectorized_overlap(bounds_a, bounds_b):
    """
    Vectorized overlap computation.

    Compare with:

        overlap = max(0, min(a[1], b[1]) - max(a[0], b[0]))
    """
    return np.maximum(
        0.0,
        np.minimum(bounds_a[:, 1], bounds_b[:, 1])
        - np.maximum(bounds_a[:, 0], bounds_b[:, 0]),
    )


def overlap_1d_nd(
    source_bounds,
    target_bounds,
    source_index,
    target_index,
):
    """
    As this uses a binary search, bounds must be monotonic ascending.

    source_bounds and target_bounds may both contain NaN values to denote NoData.

    Parameters
    ----------
    source_bounds: np.ndarray with shape (n, source_size, 2)
    target_bounds: np.ndarray with shape (m, target_size, 2)
    source_index: np.ndarray with shape (o,)
        Used to index into n axis of source_bounds
    target_index: np.ndarray with shape (o,)
        Used to index into m axis of target_bounds

    Returns
    -------
    flat_source_index: np.ndarray of integers with shape (p,)
    flat_target_index: np.ndarray of integers with shape (p,)
    overlap: np.ndarray of floats with shape (p,)
    """
    # _d refers to the dimension of interest _nd refers to the (variable!)
    # number of other dimensions which are flattened and come before d.

    _, target_d_size, _ = target_bounds.shape

    source_lower = source_bounds[..., 0]
    source_upper = source_bounds[..., 1]
    target_lower = target_bounds[..., 0]
    target_upper = target_bounds[..., 1]

    lower_indices = find_lower_indices(
        source_lower, target_lower, source_index, target_index
    )
    upper_indices = find_upper_indices(
        source_upper, target_upper, source_index, target_index
    )

    # Per target cell: how many source cells overlap it.
    n_overlap = upper_indices - lower_indices
    n_overlap_nd = n_overlap.sum(axis=1)
    n_total = n_overlap_nd.sum()

    # Create target index
    target_index_d = np.repeat(
        np.broadcast_to(
            np.arange(target_d_size, dtype=IntDType), target_bounds.shape[:2]
        ).ravel(),
        n_overlap.ravel(),
    )

    # Create source index
    # `increment` counts 0..k-1 within each repeated group so the repeated
    # lower index fans out to lower, lower+1, ..., upper-1.
    increment = alt_cumsum(np.ones(n_total, dtype=IntDType)) - np.repeat(
        alt_cumsum(n_overlap.ravel()), n_overlap.ravel()
    )
    source_index_d = (
        np.repeat(
            lower_indices.ravel(),
            n_overlap.ravel(),
        )
        + increment
    )

    # Now turn them into a linear index.
    target_linear_index = np.ravel_multi_index(
        (np.repeat(target_index, n_overlap_nd), target_index_d),
        dims=target_bounds.shape[:2],
    )
    source_linear_index = np.ravel_multi_index(
        (np.repeat(source_index, n_overlap_nd), source_index_d),
        dims=source_bounds.shape[:2],
    )

    # Compute overlap.
    overlap = vectorized_overlap(
        source_bounds.reshape((-1, 2))[source_linear_index],
        target_bounds.reshape((-1, 2))[target_linear_index],
    )
    # Drop pairs whose intervals merely touch or are NaN-degenerate.
    valid = overlap > 0.0
    return (source_linear_index[valid], target_linear_index[valid], overlap[valid])


def overlap_1d(
    source_bounds,
    target_bounds,
):
    # 1D special case: wrap the bounds in a singleton leading axis.
    return overlap_1d_nd(
        source_bounds[np.newaxis],
        target_bounds[np.newaxis],
        np.array([0]),
        np.array([0]),
    )


# ==== File: tests/test_burn.py ====
import geopandas as gpd
import numpy as np
import pytest
import shapely
from shapely.geometry import Polygon

import xugrid as xu
from xugrid.ugrid import burn


@pytest.fixture(scope="function")
def grid():
    """Three by three squares"""
    x = np.arange(0.0, 4.0)
    y = np.arange(0.0, 4.0)
    node_y, node_x = [a.ravel() for a in np.meshgrid(y, x, indexing="ij")]
    nx = ny = 3
    # Define the first vertex of every face, v.
    v = (np.add.outer(np.arange(nx), nx * np.arange(ny)) + np.arange(ny)).T.ravel()
    # Counter-clockwise quads: v, v+1, and the two nodes one row up.
    faces = np.column_stack((v, v + 1, v + nx + 2, v + nx + 1))
    return xu.Ugrid2d(node_x, node_y, -1, faces)


@pytest.fixture(scope="function")
def points_and_values():
    # Three points, each inside a distinct face of the 3x3 grid.
    xy = np.array(
        [
            [0.5, 0.5],
            [1.5, 0.5],
            [2.5, 2.5],
        ]
    )
    points = gpd.points_from_xy(*xy.T)
    values = np.array([0.0, 1.0, 3.0])
    return points, values


@pytest.fixture(scope="function")
def lines_and_values():
    # Three linestrings built from the xy coordinates; `indices` assigns
    # each vertex to its linestring.
    xy = np.array(
        [
            [0.5, 0.5],
            [2.5, 0.5],
            [1.2, 1.5],
            [1.8, 1.5],
            [0.2, 2.2],
            [0.8, 2.8],
            [1.2, 2.2],
            [1.8, 2.8],
        ]
    )
    indices = np.array([0, 0, 1, 1, 2, 2, 2, 2])
    values = np.array([0, 1, 2])
    lines = gpd.GeoSeries(shapely.linestrings(xy, indices=indices))
    return lines, values


@pytest.fixture(scope="function")
def polygons_and_values():
    # Two polygons that together tile the full 3x3 extent.
    values = [0, 1]
    polygons = gpd.GeoSeries(
        [
            shapely.Polygon(shell=[(0.0, 0.0), (2.0, 0.0), (2.0, 2.0), (0.0, 2.0)]),
            shapely.Polygon(
                shell=[
                    (0.0, 2.0),
                    (2.0, 2.0),
                    (2.0, 0.0),
                    (3.0, 0.0),
                    (3.0, 3.0),
                    (0.0, 3.0),
                ]
            ),
        ]
    )
    return polygons, values


def test_locate_polygon(grid):
    polygon = shapely.Polygon(shell=[(0.5, 0.5), (2.5, 0.5), (0.5, 2.5)])
    exterior = shapely.get_coordinates(polygon.exterior)
    interiors = [shapely.get_coordinates(interior) for interior in polygon.interiors]

    actual = burn._locate_polygon(grid, exterior, interiors, all_touched=False)
    assert np.array_equal(np.sort(actual), [0, 1, 2, 3, 4, 6])
    actual = burn._locate_polygon(grid, exterior, interiors, all_touched=True)
    assert np.array_equal(np.sort(actual), [0, 1, 2, 3, 4, 6])

    # Shifted triangle: centroid-based selection shrinks, all_touched grows.
    polygon = shapely.Polygon(shell=[(0.75, 0.5), (2.5, 0.5), (0.75, 2.5)])
    exterior = shapely.get_coordinates(polygon.exterior)
    interiors = [shapely.get_coordinates(interior) for interior in polygon.interiors]

    actual = burn._locate_polygon(grid, exterior, interiors, all_touched=False)
    assert np.array_equal(np.sort(actual), [1, 2, 4])
    actual = burn._locate_polygon(grid, exterior, interiors, all_touched=True)
    assert np.array_equal(np.sort(actual), [0, 1, 2, 3, 4, 5, 6, 7])


def test_locate_polygon_with_hole(grid):
    # The hole omits the centroid at (1.5, 1.5)
    polygon = shapely.Polygon(
        shell=[(0.7, 0.7), (2.3, 0.7), (1.5, 2.3)],
        holes=[[(1.4, 1.6), (1.5, 1.4), (1.6, 1.6)]],
    )
    exterior = shapely.get_coordinates(polygon.exterior)
    interiors = [shapely.get_coordinates(interior) for interior in polygon.interiors]

    actual = burn._locate_polygon(grid, exterior, interiors, all_touched=False)
    assert np.array_equal(actual, [])
    actual = burn._locate_polygon(grid, exterior, interiors, all_touched=True)
    assert np.array_equal(np.unique(actual), [0, 1, 2, 3, 4, 5, 7])


def test_burn_polygons(grid, polygons_and_values):
    polygons, values = polygons_and_values
    output = np.full(grid.n_face, np.nan)
    burn._burn_polygons(polygons, grid, values, all_touched=False, output=output)
    expected = np.array([0, 0, 1, 0, 0, 1, 1, 1, 1])
    assert np.allclose(output, expected)


def test_burn_points(grid, points_and_values):
    points, values = points_and_values
    # -1.0 marks faces not hit by any point.
    output = np.full(grid.n_face, -1.0)

    burn._burn_points(points, grid, values, output=output)
    expected = np.array([0, 1, -1, -1, -1, -1, -1, -1, 3])
    assert np.allclose(output, expected)


def test_burn_lines(grid, lines_and_values):
    lines, values = lines_and_values
    # -1.0 marks faces not intersected by any line.
    output = np.full(grid.n_face, -1.0)

    burn._burn_lines(lines, grid, values, output=output)
    expected = np.array([0, 0, 0, -1, 1, -1, 2, 2, -1])
    assert np.allclose(output, expected)


def test_burn_vector_geometry__errors(grid, points_and_values):
    with pytest.raises(TypeError, match="gdf must be GeoDataFrame"):
        xu.burn_vector_geometry(0, grid)

    points, values = points_and_values
    gdf = gpd.GeoDataFrame({"values": values}, geometry=points)
    with pytest.raises(TypeError, match="Like must be Ugrid2d, UgridDataArray"):
        xu.burn_vector_geometry(gdf, gdf)

    # This seems like the easiest way to generate a multi-polygon inside a
    # GeoDataFrame, since it won't initialize with a multi-polygon.
    p1 = Polygon([(0, 0), (1, 0), (1, 1)])
    p2 = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
    p3 = Polygon([(2, 0), (3, 0), (3, 1), (2, 1)])
    gdf = gpd.GeoDataFrame({"values": [0, 0, 0]}, geometry=[p1, p2, p3]).dissolve(
        by="values"
    )
    with pytest.raises(
        TypeError, match="GeoDataFrame contains unsupported geometry types"
    ):
        xu.burn_vector_geometry(gdf, grid)


def test_burn_vector_geometry(
    grid, points_and_values, lines_and_values, polygons_and_values
):
    polygons, poly_values = polygons_and_values
    gdf = gpd.GeoDataFrame({"values": poly_values}, geometry=polygons)
    # Without a column, presence burns as 1 everywhere the polygons cover.
    actual = xu.burn_vector_geometry(gdf, grid)
    assert isinstance(actual, xu.UgridDataArray)
    assert np.allclose(actual.to_numpy(), 1)
    actual = xu.burn_vector_geometry(gdf, grid, all_touched=True)
    assert np.allclose(actual.to_numpy(), 1)

    expected = np.array([0, 0, 1, 0, 0, 1, 1, 1, 1])
    actual = xu.burn_vector_geometry(gdf, grid, column="values")
    assert np.allclose(actual.to_numpy(), expected)

    points, point_values = points_and_values
    lines, line_values = lines_and_values
    # Offset the value ranges so each geometry type is distinguishable.
    line_values += 10
    point_values += 20
    values = np.concatenate([poly_values, line_values, point_values])
    geometry = np.concatenate(
        [polygons.to_numpy(), lines.to_numpy(), points.to_numpy()]
    )
    # Points should be burned last (on top of lines, on top of polygons).
    gdf = gpd.GeoDataFrame({"values": values}, geometry=geometry)
    actual = xu.burn_vector_geometry(gdf, grid, column="values")
    expected = np.array([20.0, 21.0, 10.0, 0.0, 11.0, 1.0, 12.0, 12.0, 23.0])
    assert np.allclose(actual.to_numpy(), expected)

    # All touched should give the same answer for this specific example.
    actual = xu.burn_vector_geometry(gdf, grid, column="values", all_touched=True)
    assert np.allclose(actual.to_numpy(), expected)


def test_earcut_triangulate_polygons():
    # Unit square with a triangular hole.
    xy = np.array(
        [
            [0.0, 0.0],
            [1.0, 0.0],
            [1.0, 1.0],
            [0.0, 1.0],
            [0.0, 0.0],
        ]
    )
    hole = np.array(
        [
            [
                [0.25, 0.25],
                [0.75, 0.25],
                [0.75, 0.75],
                [0.25, 0.25],
            ]
        ]
    )
    polygon = shapely.polygons(xy, holes=hole)
    gdf = gpd.GeoDataFrame(data={"a": [10.0], "b": [20.0]}, geometry=[polygon])
    # Without a column the result is the polygon index (0) on every triangle.
    uda = xu.earcut_triangulate_polygons(polygons=gdf)
    assert isinstance(uda, xu.UgridDataArray)
    assert np.allclose(uda.to_numpy(), 0)
    assert uda.name is None

    uda = xu.earcut_triangulate_polygons(polygons=gdf, column="a")
    assert np.allclose(uda.to_numpy(), 10.0)
    assert uda.name == "a"

    uda = xu.earcut_triangulate_polygons(polygons=gdf, column="b")
    assert np.allclose(uda.to_numpy(), 20.0)
    assert uda.name == "b"