├── tests ├── __init__.py ├── _typing │ ├── __init__.py │ ├── test_error_no_logger.py │ └── test_error_with_logger.py ├── gis │ └── test_vector_utils.py ├── data │ ├── world.gpkg │ ├── rioda_tiled │ │ ├── 0 │ │ │ ├── 0 │ │ │ │ ├── 0.tif │ │ │ │ ├── 1.tif │ │ │ │ ├── 2.tif │ │ │ │ └── 3.tif │ │ │ ├── 1 │ │ │ │ ├── 0.tif │ │ │ │ ├── 1.tif │ │ │ │ ├── 2.tif │ │ │ │ └── 3.tif │ │ │ ├── 2 │ │ │ │ ├── 0.tif │ │ │ │ ├── 1.tif │ │ │ │ ├── 2.tif │ │ │ │ └── 3.tif │ │ │ └── 3 │ │ │ │ ├── 0.tif │ │ │ │ ├── 1.tif │ │ │ │ ├── 2.tif │ │ │ │ └── 3.tif │ │ └── tiled.yml │ ├── vito_mapping.parquet │ ├── run_config.toml │ ├── v0x_workflow.yml │ ├── missing_data_workflow.yml │ ├── build_config.yml │ ├── update_config.yml │ ├── build_config_extended.yml │ ├── export_config.yml │ ├── legacy_esa_worldcover.yml │ ├── stac │ │ ├── gadm_level1 │ │ │ ├── catalog.json │ │ │ └── gadm_level1 │ │ │ │ └── gadm_level1.json │ │ ├── chirps_global │ │ │ ├── catalog.json │ │ │ └── chirps_global │ │ │ │ └── chirps_global.json │ │ ├── gtsmv3_eu_era5 │ │ │ ├── catalog.json │ │ │ └── gtsmv3_eu_era5 │ │ │ │ └── gtsmv3_eu_era5.json │ │ └── catalog.json │ ├── aws_esa_worldcover.yml │ ├── parameters_data.yml │ ├── merged_esa_worldcover.yml │ ├── test_sources2.yml │ ├── vito_mapping.csv │ └── test_sources1.yml ├── model │ ├── test_hydromt_step.py │ ├── test_root.py │ └── test_example_model.py ├── _utils │ ├── test_rgetattr.py │ ├── test_uris.py │ └── test_deep_merge.py ├── data_catalog │ ├── drivers │ │ ├── test_preprocessing.py │ │ └── test_base_driver.py │ ├── adapters │ │ ├── test_geodataset_adapter.py │ │ ├── test_dataframe_adapter.py │ │ └── test_geodataframe_adapter.py │ └── sources │ │ ├── test_dataframe_source.py │ │ └── test_factory.py ├── components │ ├── test_vector_component.py │ ├── test_spatial_component.py │ ├── test_datasets_component.py │ └── test_tables_component.py ├── _validators │ └── test_region_validator.py └── stats │ ├── test_stats_skill.py │ └── test_stats_design_events.py ├── docs 
├── _templates │ └── .gitignore ├── _static │ ├── iMOD.png │ ├── region.png │ ├── wflow.png │ ├── Delft3D.png │ ├── ribasim.png │ ├── SFINCS_logo.png │ ├── data_peru.png │ ├── hydromt_run.jpg │ ├── peru_coli.png │ ├── africa_hiwai.png │ ├── hydromt_arch.jpg │ ├── hydromt_using.jpg │ ├── BlueEarth-icon.png │ ├── core_and_plugins.png │ ├── getting_started.png │ ├── hydromt_before.jpg │ ├── plugin_structure.png │ ├── wflow_indonesia.png │ ├── hydromt_architecture.jpeg │ ├── model_building_process.png │ ├── mozambique_result_flood.png │ ├── mozambique_wflow_sfincs.png │ ├── data_catalog_architecture.jpeg │ ├── Architecture_model_data_input.png │ ├── theme-deltares.css │ ├── deltares-blue.svg │ ├── deltares-white.svg │ └── switcher.json ├── drawio │ ├── HydroMT-Architecture.png │ └── exported │ │ └── HydroMT-Architecture-OverArching.drawio.png ├── assets │ ├── data_types │ │ ├── netcdf_dataset.yml │ │ ├── csv_geodataframe.yml │ │ ├── csv_geodataset.yml │ │ ├── csv_dataframe.yml │ │ ├── gpkg_geodataframe.yml │ │ ├── single_variable_geotiff_raster.yml │ │ ├── tiled_raster_dataset.yml │ │ ├── vrt_raster_dataset.yml │ │ ├── netcdf_raster_dataset.yml │ │ └── netcdf_geodataset.yml │ ├── example_catalog_simple.yml │ └── example_catalog.yml ├── api │ ├── writers.rst │ ├── readers.rst │ ├── uri_resolvers.rst │ ├── cli.rst │ ├── data_source.rst │ ├── plugin.rst │ ├── stats.rst │ ├── data_adapter.rst │ ├── model.rst │ ├── data_catalog.rst │ ├── api.rst │ ├── model_processes.rst │ └── drivers.rst ├── overview │ ├── overview_usage_plugins.rst │ ├── overview_usage.rst │ ├── examples_models.rst │ ├── examples_data.rst │ ├── examples_gis.rst │ ├── overview_usage_common.rst │ ├── overview_usage_interface.rst │ └── intro.rst ├── clean.py ├── dev │ ├── core_dev │ │ ├── documentation.rst │ │ ├── test_ci.rst │ │ ├── index.rst │ │ └── release.rst │ ├── intro.rst │ └── plugin_dev │ │ ├── example_test.yml │ │ ├── index.rst │ │ ├── example_pixi.toml │ │ └── custom_implementation │ │ └── 
index.rst ├── make.bat ├── about │ ├── team.rst │ ├── publications.rst │ ├── intro.rst │ └── dependencies.rst └── user_guide │ ├── overview │ └── index.rst │ ├── models │ └── model_overview.rst │ └── migration_guide │ └── data_catalog.rst ├── examples ├── tmpdir │ └── .gitkeep ├── data │ ├── stations.csv │ ├── discharge.nc │ ├── mesh_model │ │ └── mesh1d2d.nc │ ├── tabular_data_catalog.yml │ ├── vito_reclass.yml │ ├── geodataset_catalog.yml │ └── vito_reclass.csv ├── legends │ └── vito-label-qgis.txt └── grid_model_build.yaml ├── .dockerignore ├── hydromt ├── model │ ├── processes │ │ └── __init__.py │ ├── __init__.py │ ├── steps.py │ └── components │ │ └── __init__.py ├── cli │ ├── __init__.py │ └── _utils.py ├── data_catalog │ ├── drivers │ │ ├── dataset │ │ │ ├── __init__.py │ │ │ └── dataset_driver.py │ │ ├── dataframe │ │ │ └── __init__.py │ │ ├── geodataset │ │ │ └── __init__.py │ │ ├── geodataframe │ │ │ └── __init__.py │ │ └── raster │ │ │ └── __init__.py │ ├── __init__.py │ ├── uri_resolvers │ │ └── __init__.py │ ├── adapters │ │ ├── __init__.py │ │ ├── adapter_utils.py │ │ └── data_adapter_base.py │ └── sources │ │ ├── __init__.py │ │ └── factory.py ├── _utils │ ├── rgetattr.py │ ├── unused_kwargs.py │ ├── deep_merge.py │ ├── elevation.py │ ├── uris.py │ ├── temp_env.py │ ├── steps_validator.py │ ├── dictionaries.py │ ├── __init__.py │ ├── nodata.py │ └── naming_convention.py ├── config.py ├── gis │ ├── _gdal_drivers.py │ └── __init__.py ├── typing │ ├── metadata.py │ ├── crs.py │ ├── __init__.py │ ├── model_mode.py │ └── deferred_file_close.py ├── stats │ └── __init__.py ├── _compat.py ├── __init__.py ├── _validators │ └── __init__.py └── error.py ├── .gitattributes ├── CONTRIBUTING.rst ├── data ├── catalogs │ ├── aws_data │ │ ├── registry.txt │ │ ├── v0.1.0 │ │ │ └── data_catalog.yml │ │ └── v1.0.0 │ │ │ └── data_catalog.yml │ ├── gcs_cmip6_data │ │ └── registry.txt │ ├── update_versions.py │ ├── artifact_data │ │ └── registry.txt │ └── deltares_data │ 
│ └── registry.txt └── predefined_catalogs.yml ├── .github ├── ISSUE_TEMPLATE │ ├── config.yml │ ├── feature_request.yaml │ ├── documentation_improvement.yaml │ └── bug_report.yaml ├── workflows │ ├── linting.yml │ ├── purge_all_caches.yml │ ├── pre-commit_auto_update.yml │ ├── check-data-catalogs.yml │ ├── pixi_auto_update.yml │ ├── sonar.yml │ ├── test-docker.yml │ └── tests.yml ├── dependabot.yml └── pull_request_template.md ├── sonar-project.properties ├── .vscode ├── launch.json ├── extensions.json └── settings.json ├── joss_paper └── citation.bib ├── .pre-commit-config.yaml ├── .binder └── Dockerfile ├── LICENSE ├── .zenodo.json └── .gitignore /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/_templates/.gitignore: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /examples/tmpdir/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/_typing/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | **/__pycache__ 2 | -------------------------------------------------------------------------------- /hydromt/model/processes/__init__.py: -------------------------------------------------------------------------------- 1 | """HydroMT model processes.""" 2 | -------------------------------------------------------------------------------- /tests/gis/test_vector_utils.py: -------------------------------------------------------------------------------- 1 | # #%% 2 | 3 | # 
# %% 4 | # # %% 5 | # # %% 6 | -------------------------------------------------------------------------------- /docs/_static/iMOD.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/iMOD.png -------------------------------------------------------------------------------- /docs/_static/region.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/region.png -------------------------------------------------------------------------------- /docs/_static/wflow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/wflow.png -------------------------------------------------------------------------------- /examples/data/stations.csv: -------------------------------------------------------------------------------- 1 | stations,x,y 2 | 1001,12.50244,45.25635 3 | 1002,12.75879,45.24902 4 | -------------------------------------------------------------------------------- /tests/data/world.gpkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/tests/data/world.gpkg -------------------------------------------------------------------------------- /docs/_static/Delft3D.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/Delft3D.png -------------------------------------------------------------------------------- /docs/_static/ribasim.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/ribasim.png -------------------------------------------------------------------------------- 
/hydromt/cli/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """Submodule for hydromt command line tool.""" 3 | -------------------------------------------------------------------------------- /docs/_static/SFINCS_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/SFINCS_logo.png -------------------------------------------------------------------------------- /docs/_static/data_peru.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/data_peru.png -------------------------------------------------------------------------------- /docs/_static/hydromt_run.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/hydromt_run.jpg -------------------------------------------------------------------------------- /docs/_static/peru_coli.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/peru_coli.png -------------------------------------------------------------------------------- /examples/data/discharge.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/examples/data/discharge.nc -------------------------------------------------------------------------------- /docs/_static/africa_hiwai.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/africa_hiwai.png -------------------------------------------------------------------------------- /docs/_static/hydromt_arch.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/hydromt_arch.jpg -------------------------------------------------------------------------------- /docs/_static/hydromt_using.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/hydromt_using.jpg -------------------------------------------------------------------------------- /docs/_static/BlueEarth-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/BlueEarth-icon.png -------------------------------------------------------------------------------- /docs/_static/core_and_plugins.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/core_and_plugins.png -------------------------------------------------------------------------------- /docs/_static/getting_started.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/getting_started.png -------------------------------------------------------------------------------- /docs/_static/hydromt_before.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/hydromt_before.jpg -------------------------------------------------------------------------------- /docs/_static/plugin_structure.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/plugin_structure.png -------------------------------------------------------------------------------- /docs/_static/wflow_indonesia.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/wflow_indonesia.png -------------------------------------------------------------------------------- /tests/data/rioda_tiled/0/0/0.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/tests/data/rioda_tiled/0/0/0.tif -------------------------------------------------------------------------------- /tests/data/rioda_tiled/0/0/1.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/tests/data/rioda_tiled/0/0/1.tif -------------------------------------------------------------------------------- /tests/data/rioda_tiled/0/0/2.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/tests/data/rioda_tiled/0/0/2.tif -------------------------------------------------------------------------------- /tests/data/rioda_tiled/0/0/3.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/tests/data/rioda_tiled/0/0/3.tif -------------------------------------------------------------------------------- /tests/data/rioda_tiled/0/1/0.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/tests/data/rioda_tiled/0/1/0.tif -------------------------------------------------------------------------------- /tests/data/rioda_tiled/0/1/1.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/tests/data/rioda_tiled/0/1/1.tif -------------------------------------------------------------------------------- 
/tests/data/rioda_tiled/0/1/2.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/tests/data/rioda_tiled/0/1/2.tif -------------------------------------------------------------------------------- /tests/data/rioda_tiled/0/1/3.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/tests/data/rioda_tiled/0/1/3.tif -------------------------------------------------------------------------------- /tests/data/rioda_tiled/0/2/0.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/tests/data/rioda_tiled/0/2/0.tif -------------------------------------------------------------------------------- /tests/data/rioda_tiled/0/2/1.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/tests/data/rioda_tiled/0/2/1.tif -------------------------------------------------------------------------------- /tests/data/rioda_tiled/0/2/2.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/tests/data/rioda_tiled/0/2/2.tif -------------------------------------------------------------------------------- /tests/data/rioda_tiled/0/2/3.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/tests/data/rioda_tiled/0/2/3.tif -------------------------------------------------------------------------------- /tests/data/rioda_tiled/0/3/0.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/tests/data/rioda_tiled/0/3/0.tif 
-------------------------------------------------------------------------------- /tests/data/rioda_tiled/0/3/1.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/tests/data/rioda_tiled/0/3/1.tif -------------------------------------------------------------------------------- /tests/data/rioda_tiled/0/3/2.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/tests/data/rioda_tiled/0/3/2.tif -------------------------------------------------------------------------------- /tests/data/rioda_tiled/0/3/3.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/tests/data/rioda_tiled/0/3/3.tif -------------------------------------------------------------------------------- /tests/data/vito_mapping.parquet: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/tests/data/vito_mapping.parquet -------------------------------------------------------------------------------- /docs/_static/hydromt_architecture.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/hydromt_architecture.jpeg -------------------------------------------------------------------------------- /docs/drawio/HydroMT-Architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/drawio/HydroMT-Architecture.png -------------------------------------------------------------------------------- /examples/data/mesh_model/mesh1d2d.nc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Deltares/hydromt/main/examples/data/mesh_model/mesh1d2d.nc -------------------------------------------------------------------------------- /docs/_static/model_building_process.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/model_building_process.png -------------------------------------------------------------------------------- /docs/_static/mozambique_result_flood.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/mozambique_result_flood.png -------------------------------------------------------------------------------- /docs/_static/mozambique_wflow_sfincs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/mozambique_wflow_sfincs.png -------------------------------------------------------------------------------- /docs/_static/data_catalog_architecture.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/data_catalog_architecture.jpeg -------------------------------------------------------------------------------- /tests/data/run_config.toml: -------------------------------------------------------------------------------- 1 | starttime = 2011-01-01 2 | endtime = 2011-12-31 3 | 4 | [model] 5 | name = "example" 6 | type = "model" 7 | -------------------------------------------------------------------------------- /docs/_static/Architecture_model_data_input.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/_static/Architecture_model_data_input.png 
-------------------------------------------------------------------------------- /docs/assets/data_types/netcdf_dataset.yml: -------------------------------------------------------------------------------- 1 | timeseries_dataset: 2 | uri: /path/to/timeseries.netcdf 3 | data_type: Dataset 4 | driver: dataset_xarray 5 | -------------------------------------------------------------------------------- /docs/drawio/exported/HydroMT-Architecture-OverArching.drawio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/hydromt/main/docs/drawio/exported/HydroMT-Architecture-OverArching.drawio.png -------------------------------------------------------------------------------- /docs/assets/data_types/csv_geodataframe.yml: -------------------------------------------------------------------------------- 1 | stations: 2 | uri: /path/to/stations.csv 3 | data_type: GeoDataFrame 4 | driver: geodataframe_table 5 | metadata: 6 | crs: 4326 7 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * text eol=lf 2 | *.gpkg binary 3 | *.yaml text eol=lf 4 | *.yml text eol=lf 5 | *.png binary 6 | *.nc binary 7 | 8 | # GitHub syntax highlighting 9 | pixi.lock linguist-language=YAML 10 | -------------------------------------------------------------------------------- /tests/data/v0x_workflow.yml: -------------------------------------------------------------------------------- 1 | 2 | --- 3 | setup_config: 4 | header.settings: value 5 | timers.end: '2010-02-15' 6 | timers.start: '2010-02-05' 7 | 8 | write: 9 | components: 10 | - config 11 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | Please refer to our online contributing guidelines here: 2 | 
`https://deltares.github.io/hydromt/latest/dev/contributing.html `_ 3 | -------------------------------------------------------------------------------- /data/catalogs/aws_data/registry.txt: -------------------------------------------------------------------------------- 1 | v0.1.0/data_catalog.yml e0e3c4e10bd6ce51014a04b8ca85de0c987b3cfe7bfa16961f772a953a307038 2 | v1.0.0/data_catalog.yml 2f1f08b7ccc41bf84d89b3996084779f18da035d1ea8812898fb432b450f9938 3 | -------------------------------------------------------------------------------- /data/catalogs/gcs_cmip6_data/registry.txt: -------------------------------------------------------------------------------- 1 | v0.1.0/data_catalog.yml c8aab2c2068809a52e5e40a38e68d1a27d25346cf51724010a24f1bfad66a1c1 2 | v1.0.0/data_catalog.yml a6f90aba6310abcad35dd68a96154fa6340761355955310e4d6bb153ffcdc237 3 | -------------------------------------------------------------------------------- /examples/data/tabular_data_catalog.yml: -------------------------------------------------------------------------------- 1 | --- 2 | example_csv_data: 3 | uri: example_csv_data.csv 4 | data_type: DataFrame 5 | driver: 6 | name: pandas 7 | options: 8 | parse_dates: true 9 | index_col: time 10 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | blank_issues_enabled: true 4 | contact_links: 5 | - name: Ask a question 6 | url: https://github.com/Deltares/hydromt/discussions 7 | about: Ask questions and discuss with other community members 8 | -------------------------------------------------------------------------------- /tests/data/rioda_tiled/tiled.yml: -------------------------------------------------------------------------------- 1 | tiled: 2 | data_type: RasterDataset 3 | driver: rasterio 4 | uri: tiled_zl{overview_level}.vrt 5 | metadata: 6 | zls_dict: 7 | 0: 
0.004166666666666666 8 | crs: 4326 9 | nodata: -9999.0 10 | -------------------------------------------------------------------------------- /examples/data/vito_reclass.yml: -------------------------------------------------------------------------------- 1 | --- 2 | vito_reclass: 3 | data_type: DataFrame 4 | driver: 5 | name: pandas 6 | options: 7 | index_col: 0 8 | metadata: 9 | notes: reclass table for manning values 10 | uri: vito_reclass.csv 11 | -------------------------------------------------------------------------------- /tests/model/test_hydromt_step.py: -------------------------------------------------------------------------------- 1 | from hydromt.model.steps import hydromt_step 2 | 3 | 4 | def test_hydromt_step_adds_ishydromtstep_attribute(): 5 | @hydromt_step 6 | def foo(): 7 | pass 8 | 9 | assert foo.__ishydromtstep__ 10 | -------------------------------------------------------------------------------- /docs/assets/data_types/csv_geodataset.yml: -------------------------------------------------------------------------------- 1 | waterlevels_txt: 2 | uri: /path/to/stations.csv 3 | data_type: GeoDataset 4 | driver: 5 | name: geodataset_vector 6 | options: 7 | data_path: /path/to/stations_data.csv 8 | metadata: 9 | crs: 4326 10 | -------------------------------------------------------------------------------- /docs/api/writers.rst: -------------------------------------------------------------------------------- 1 | .. currentmodule:: hydromt.writers 2 | 3 | .. _writers: 4 | 5 | 6 | 7 | Writers 8 | ======= 9 | 10 | .. 
autosummary:: 11 | :toctree: ../_generated 12 | 13 | write_nc 14 | write_region 15 | write_toml 16 | write_xy 17 | write_yaml 18 | -------------------------------------------------------------------------------- /hydromt/data_catalog/drivers/dataset/__init__.py: -------------------------------------------------------------------------------- 1 | """Dataset Drivers.""" 2 | 3 | from hydromt.data_catalog.drivers.dataset.dataset_driver import DatasetDriver 4 | from hydromt.data_catalog.drivers.dataset.xarray_driver import DatasetXarrayDriver 5 | 6 | __all__ = ["DatasetDriver", "DatasetXarrayDriver"] 7 | -------------------------------------------------------------------------------- /hydromt/data_catalog/drivers/dataframe/__init__.py: -------------------------------------------------------------------------------- 1 | """Drivers for tabular data.""" 2 | 3 | from hydromt.data_catalog.drivers.dataframe.dataframe_driver import DataFrameDriver 4 | from hydromt.data_catalog.drivers.dataframe.pandas_driver import PandasDriver 5 | 6 | __all__ = ["DataFrameDriver", "PandasDriver"] 7 | -------------------------------------------------------------------------------- /tests/data/missing_data_workflow.yml: -------------------------------------------------------------------------------- 1 | 2 | global: 3 | components: 4 | config: 5 | type: ConfigComponent 6 | filename: run_config.toml 7 | steps: 8 | - config.update: 9 | starttime: 2020-01-01 10 | endtime: 2020-12-31 11 | - write: 12 | components: 13 | - config 14 | -------------------------------------------------------------------------------- /tests/data/build_config.yml: -------------------------------------------------------------------------------- 1 | global: 2 | components: 3 | config: 4 | type: ConfigComponent 5 | filename: run_config.toml 6 | steps: 7 | - config.update: 8 | data: 9 | starttime: 2010-01-01 10 | model.type: model 11 | - write: 12 | components: 13 | - config 14 | 
-------------------------------------------------------------------------------- /sonar-project.properties: -------------------------------------------------------------------------------- 1 | sonar.projectKey = Deltares_hydromt 2 | sonar.organization = deltares 3 | sonar.tests = tests 4 | sonar.python.version = 3.9, 3.10, 3.11, 3.12, 3.13 5 | sonar.python.coverage.reportPaths = coverage.xml 6 | sonar.sources = hydromt,docs,data/src,examples 7 | sonar.coverage.exclusions = docs/**,examples/** 8 | -------------------------------------------------------------------------------- /tests/data/update_config.yml: -------------------------------------------------------------------------------- 1 | global: 2 | components: 3 | config: 4 | type: ConfigComponent 5 | filename: run_config.toml 6 | steps: 7 | - config.update: 8 | data: 9 | starttime: 2020-01-01 10 | endtime: 2020-12-31 11 | - write: 12 | components: 13 | - config 14 | -------------------------------------------------------------------------------- /.github/workflows/linting.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Linting 3 | 4 | on: 5 | pull_request: 6 | push: 7 | branches: [main] 8 | 9 | jobs: 10 | pre-commit: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v6 14 | - uses: actions/setup-python@v6 15 | - uses: pre-commit/action@v3.0.1 16 | -------------------------------------------------------------------------------- /examples/data/geodataset_catalog.yml: -------------------------------------------------------------------------------- 1 | --- 2 | waterlevels_txt: 3 | uri: stations.csv 4 | data_type: GeoDataset 5 | driver: 6 | name: geodataset_vector 7 | options: 8 | data_path: stations_data.csv 9 | metadata: 10 | crs: 4326 11 | data_adapter: 12 | rename: 13 | stations_data: waterlevel 14 | -------------------------------------------------------------------------------- /hydromt/_utils/rgetattr.py: 
-------------------------------------------------------------------------------- 1 | from functools import reduce 2 | 3 | __all__ = ["_rgetattr"] 4 | 5 | 6 | def _rgetattr(obj, attr, *args): 7 | """Recursive get attribute from object.""" 8 | 9 | def _getattr(obj, attr): 10 | return getattr(obj, attr, *args) 11 | 12 | return reduce(_getattr, [obj] + attr.split(".")) 13 | -------------------------------------------------------------------------------- /hydromt/data_catalog/__init__.py: -------------------------------------------------------------------------------- 1 | """Module for managing data catalogs and components, such as data sources, drivers, adapters and uri_resolvers.""" 2 | 3 | from hydromt.data_catalog.data_catalog import DataCatalog 4 | from hydromt.data_catalog.predefined_catalog import PredefinedCatalog 5 | 6 | __all__ = ["DataCatalog", "PredefinedCatalog"] 7 | -------------------------------------------------------------------------------- /docs/assets/data_types/csv_dataframe.yml: -------------------------------------------------------------------------------- 1 | observations: 2 | uri: data/lulc/globcover_mapping.csv 3 | data_type: DataFrame 4 | driver: 5 | name: pandas 6 | options: 7 | header: null # null translates to None in Python -> no header 8 | index_col: 0 9 | parse_dates: false 10 | metadata: 11 | category: parameter_mapping 12 | -------------------------------------------------------------------------------- /docs/assets/data_types/gpkg_geodataframe.yml: -------------------------------------------------------------------------------- 1 | GDP_world: 2 | uri: base/emissions/GDP-countries/World_countries_GDPpcPPP.gpkg 3 | data_type: GeoDataFrame 4 | driver: 5 | name: pyogrio 6 | options: 7 | layer: GDP 8 | data_adapter: 9 | rename: 10 | GDP: gdp 11 | unit_mult: 12 | gdp: 0.001 13 | metadata: 14 | category: socio-economic 15 | source_version: 1.0 16 | -------------------------------------------------------------------------------- 
"""Script to update the registry.txt file for each predefined catalog."""

from pathlib import Path

from hydromt.data_catalog.predefined_catalog import create_registry_file

if __name__ == "__main__":
    # Every sub-directory next to this script is a predefined-catalog root
    # that needs its registry.txt regenerated.
    script_dir = Path(__file__).parent
    for entry in script_dir.iterdir():
        if entry.is_dir():
            create_registry_file(entry)
GeoDatasetVectorDriver 5 | from hydromt.data_catalog.drivers.geodataset.xarray_driver import GeoDatasetXarrayDriver 6 | 7 | __all__ = ["GeoDatasetDriver", "GeoDatasetVectorDriver", "GeoDatasetXarrayDriver"] 8 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | "name": "Python Debugger: Current File", 6 | "type": "debugpy", 7 | "request": "launch", 8 | "program": "${file}", 9 | "console": "integratedTerminal", 10 | "purpose": [ 11 | "debug-test" 12 | ], 13 | "justMyCode": false 14 | } 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /tests/data/legacy_esa_worldcover.yml: -------------------------------------------------------------------------------- 1 | esa_worldcover: 2 | name: esa_worldcover 3 | uri: landuse/esa_worldcover/esa-worldcover.vrt 4 | driver: 5 | options: 6 | chunks: 7 | x: 36000 8 | y: 36000 9 | name: rasterio 10 | version: 2020 11 | metadata: 12 | crs: 4326 13 | category: landuse 14 | url: https://doi.org/10.5281/zenodo.5571936 15 | version: v100 16 | license: CC BY 4.0 17 | data_type: RasterDataset 18 | -------------------------------------------------------------------------------- /docs/api/readers.rst: -------------------------------------------------------------------------------- 1 | 2 | .. currentmodule:: hydromt.readers 3 | 4 | .. _readers: 5 | 6 | Readers 7 | ======= 8 | 9 | .. 
autosummary:: 10 | :toctree: ../_generated 11 | 12 | open_geodataset 13 | open_mfcsv 14 | open_mfraster 15 | open_nc 16 | open_ncs 17 | open_raster 18 | open_raster_from_tindex 19 | open_timeseries_from_table 20 | open_vector 21 | open_vector_from_table 22 | read_toml 23 | read_workflow_yaml 24 | read_yaml 25 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": [ 3 | "ms-python.python", 4 | "ms-python.debugpy", 5 | "ms-python.vscode-pylance", 6 | "ms-python.mypy-type-checker", 7 | "charliermarsh.ruff", 8 | "njpwerner.autodocstring", 9 | "streetsidesoftware.code-spell-checker", 10 | "tamasfe.even-better-toml", 11 | "davidanson.vscode-markdownlint", 12 | "sonarsource.sonarlint-vscode" 13 | ] 14 | } 15 | -------------------------------------------------------------------------------- /docs/overview/overview_usage_plugins.rst: -------------------------------------------------------------------------------- 1 | 2 | HydroMT and plugins 3 | ------------------- 4 | More often than not, we use HydroMT together with a :ref:`plugin ` to work with 5 | specific model software. HydroMT core then provides the command line and Python interface, 6 | data handling via the data catalog (and adapters) and generic GIS methods. 7 | 8 | The plugin provides model-specific GIS methods and allows to read and write model data 9 | in the model's native format. 
10 | -------------------------------------------------------------------------------- /hydromt/model/__init__.py: -------------------------------------------------------------------------------- 1 | """HydroMT Model API.""" 2 | 3 | from hydromt.model.model import Model 4 | from hydromt.model.root import ModelRoot 5 | from hydromt.model.steps import hydromt_step 6 | from hydromt.model.example.example_model import ExampleModel 7 | 8 | __all__ = ["Model", "ExampleModel", "ModelRoot", "hydromt_step"] 9 | 10 | 11 | # define hydromt model entry points 12 | # see also hydromt.model group in pyproject.toml 13 | __hydromt_eps__ = ["Model", "ExampleModel"] 14 | -------------------------------------------------------------------------------- /data/catalogs/artifact_data/registry.txt: -------------------------------------------------------------------------------- 1 | v0.0.6/data_catalog.yml 5d9e47158185f1afbf793db68c887f6e6b119d7ffd3edfbc198e5ae3a9d760f3 2 | v0.0.7/data_catalog.yml 8daccd1b551b3bafdb95b5ddc4b8dddafd6f25070ad9c543e3c0027bb55bc16e 3 | v0.0.8/data_catalog.yml 3092305249af479061938ca484fe0245731174f072325740e9a85d1e986f8efc 4 | v0.0.9/data_catalog.yml 6592ad6028b01012ed6bf1a7511238c3e02844f696ff2065c404ddbbab55bdc8 5 | v1.0.0/data_catalog.yml 921ef50c906f3280d50eeb8e3e9e369c15554b0b09c32612bf58247c1401a5ad 6 | -------------------------------------------------------------------------------- /data/catalogs/deltares_data/registry.txt: -------------------------------------------------------------------------------- 1 | v0.5.0/data_catalog.yml 418f93cebb57c8d165556c874a3cd4077afca1d38e76640273c86b740d18c0ef 2 | v0.6.0/data_catalog.yml b002767a1cdd24ec8708caa0b658bdeed1cfb93d985efd78f6e9343e00da0f21 3 | v0.7.0/data_catalog.yml 24c77c0c5429353eeedb75d4c0f2dbfa305d81454b2fbf32239ab23166b3b1a6 4 | v1.0.0/data_catalog.yml 72eaf0a2233206513626e90a4a01304191ccda41e2c686328624f33d36cb10ba 5 | v1.1.0/data_catalog.yml 
"""Remove generated Sphinx build artifacts (docs helper script)."""

import os
import shutil


def remove_dir_content(path: str) -> None:
    """Delete directory *path* and all of its contents; no-op if absent.

    ``shutil.rmtree`` already removes every nested file and sub-directory,
    so the previous manual ``os.walk`` pre-pass was redundant and has been
    dropped.
    """
    if os.path.isdir(path):
        shutil.rmtree(path)


remove_dir_content("_build")
remove_dir_content("_generated")
remove_dir_content("_examples")
12 | """ 13 | funcobj.__ishydromtstep__ = True # type: ignore[attr-defined] 14 | return funcobj 15 | -------------------------------------------------------------------------------- /docs/assets/data_types/single_variable_geotiff_raster.yml: -------------------------------------------------------------------------------- 1 | globcover: 2 | uri: base/landcover/globcover/GLOBCOVER_200901_200912_300x300m.tif 3 | data_type: RasterDataset 4 | driver: 5 | name: rasterio 6 | options: 7 | chunks: 8 | x: 3600 9 | y: 3600 10 | metadata: 11 | category: landuse 12 | source_url: http://due.esrin.esa.int/page_globcover.php 13 | source_license: CC-BY-3.0 14 | paper_ref: Arino et al (2012) 15 | paper_doi: 10.1594/PANGAEA.787668 16 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # there is no conda support for dependabot so this is the closest analog 4 | # since the conda deps are also built from pyproject.toml it should work well enough 5 | - package-ecosystem: "pip" 6 | directory: "/" # Location of package manifests 7 | schedule: 8 | interval: "weekly" 9 | - package-ecosystem: "github-actions" 10 | directory: "/" 11 | schedule: 12 | # Check for updates to GitHub Actions every week 13 | interval: "weekly" 14 | -------------------------------------------------------------------------------- /tests/_utils/test_rgetattr.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | 5 | from hydromt._utils.rgetattr import _rgetattr 6 | 7 | 8 | def test_rgetattr_os_sep(): 9 | assert _rgetattr(os, "path.sep") == os.path.sep 10 | 11 | 12 | def test_rgetattr_os_fake_default(): 13 | assert _rgetattr(os, "path.sepx", None) is None 14 | assert _rgetattr(os, "path.sepx", "--") == "--" 15 | 16 | 17 | def test_rgetattr_os_fake_no_default_fails(): 18 | with 
"""All drivers for reading GeoDataFrame datasets."""

from hydromt.data_catalog.drivers.geodataframe.geodataframe_driver import (
    GeoDataFrameDriver,
)
from hydromt.data_catalog.drivers.geodataframe.pyogrio_driver import PyogrioDriver
from hydromt.data_catalog.drivers.geodataframe.table_driver import (
    GeoDataFrameTableDriver,
)

__all__ = [
    "GeoDataFrameDriver",
    "PyogrioDriver",
    "GeoDataFrameTableDriver",
]
17 | ) 18 | -------------------------------------------------------------------------------- /tests/data/stac/gadm_level1/catalog.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "Catalog", 3 | "id": "gadm_level1", 4 | "stac_version": "1.0.0", 5 | "description": "gadm_level1", 6 | "links": [ 7 | { 8 | "rel": "root", 9 | "href": "../catalog.json", 10 | "type": "application/json" 11 | }, 12 | { 13 | "rel": "item", 14 | "href": "./gadm_level1/gadm_level1.json", 15 | "type": "application/json" 16 | }, 17 | { 18 | "rel": "parent", 19 | "href": "../catalog.json", 20 | "type": "application/json" 21 | } 22 | ] 23 | } 24 | -------------------------------------------------------------------------------- /docs/dev/core_dev/documentation.rst: -------------------------------------------------------------------------------- 1 | .. _contribute_documentation: 2 | 3 | Adding Documentation 4 | ==================== 5 | 6 | There are a few guidelines when adding new documentation, or when refactoring the 7 | current documentation. 8 | 9 | - We use the `numpy docstring format `. 10 | - Code examples or example ``yaml`` files should be tested using the sphinx extension 11 | ``doctest``. 12 | - New APIs should be added to the ``docs/api`` folder. The builtin ``autosummary`` 13 | and ``toctree`` are used to keep track. 14 | -------------------------------------------------------------------------------- /.github/workflows/purge_all_caches.yml: -------------------------------------------------------------------------------- 1 | name: cleanup all caches 2 | on: 3 | workflow_dispatch: 4 | workflow_call: 5 | 6 | 7 | jobs: 8 | cleanup: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Check out code 12 | uses: actions/checkout@v6 13 | 14 | - name: Cleanup caches 15 | run: | 16 | if gh cache list | grep -vq "No cache"; then 17 | gh cache delete -a 18 | else 19 | echo "No caches found. skipping..." 
20 | fi 21 | env: 22 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 23 | -------------------------------------------------------------------------------- /data/catalogs/aws_data/v0.1.0/data_catalog.yml: -------------------------------------------------------------------------------- 1 | --- 2 | meta: 3 | version: v0.1.0 4 | name: aws_data 5 | 6 | esa_worldcover_2020_v100: 7 | crs: 4326 8 | data_type: RasterDataset 9 | driver: raster 10 | filesystem: s3 11 | storage_options: 12 | anon: true 13 | meta: 14 | category: landuse 15 | source_license: CC BY 4.0 16 | source_url: https://doi.org/10.5281/zenodo.5571936 17 | source_version: v100 18 | path: s3://esa-worldcover/v100/2020/ESA_WorldCover_10m_2020_v100_Map_AWS.vrt 19 | rename: 20 | ESA_WorldCover_10m_2020_v100_Map_AWS: landuse 21 | -------------------------------------------------------------------------------- /tests/data/aws_esa_worldcover.yml: -------------------------------------------------------------------------------- 1 | esa_worldcover: 2 | name: esa_worldcover 3 | uri: s3://esa-worldcover/v100/2020/ESA_WorldCover_10m_2020_v100_Map_AWS.vrt 4 | data_adapter: 5 | rename: 6 | ESA_WorldCover_10m_2020_v100_Map_AWS: landuse 7 | driver: 8 | filesystem: 9 | protocol: s3 10 | anon: True 11 | name: rasterio 12 | version: 2021 13 | provider: aws 14 | metadata: 15 | crs: 4326 16 | category: landuse 17 | url: https://doi.org/10.5281/zenodo.5571936 18 | version: v100 19 | license: CC BY 4.0 20 | data_type: RasterDataset 21 | -------------------------------------------------------------------------------- /tests/data/stac/chirps_global/catalog.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "Catalog", 3 | "id": "chirps_global", 4 | "stac_version": "1.0.0", 5 | "description": "chirps_global", 6 | "links": [ 7 | { 8 | "rel": "root", 9 | "href": "../catalog.json", 10 | "type": "application/json" 11 | }, 12 | { 13 | "rel": "item", 14 | "href": 
"./chirps_global/chirps_global.json", 15 | "type": "application/json" 16 | }, 17 | { 18 | "rel": "parent", 19 | "href": "../catalog.json", 20 | "type": "application/json" 21 | } 22 | ] 23 | } 24 | -------------------------------------------------------------------------------- /tests/data/stac/gtsmv3_eu_era5/catalog.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "Catalog", 3 | "id": "gtsmv3_eu_era5", 4 | "stac_version": "1.0.0", 5 | "description": "gtsmv3_eu_era5", 6 | "links": [ 7 | { 8 | "rel": "root", 9 | "href": "../catalog.json", 10 | "type": "application/json" 11 | }, 12 | { 13 | "rel": "item", 14 | "href": "./gtsmv3_eu_era5/gtsmv3_eu_era5.json", 15 | "type": "application/json" 16 | }, 17 | { 18 | "rel": "parent", 19 | "href": "../catalog.json", 20 | "type": "application/json" 21 | } 22 | ] 23 | } 24 | -------------------------------------------------------------------------------- /docs/overview/overview_usage.rst: -------------------------------------------------------------------------------- 1 | .. _overview_usage: 2 | 3 | How to use HydroMT ? 4 | ==================== 5 | 6 | .. include:: overview_usage_plugins.rst 7 | 8 | Here is what happens under the hood when building a model: 9 | 10 | .. image:: ../_static/hydromt_using.jpg 11 | 12 | .. include:: overview_usage_common.rst 13 | 14 | .. image:: ../_static/getting_started.png 15 | 16 | .. include:: overview_usage_interface.rst 17 | 18 | After this overview, please refer to the :ref:`user guide ` for more detailed 19 | instructions on how to use HydroMT and the documentation of the plugin you are using. 20 | -------------------------------------------------------------------------------- /tests/_typing/test_error_no_logger.py: -------------------------------------------------------------------------------- 1 | from hydromt.error import NoDataStrategy, exec_nodata_strat 2 | 3 | # This test is dependent on the caller and call stack. 
4 | # Don't let pytest optimize the test. 5 | # pytest: disable_assert_rewriting 6 | 7 | 8 | def test_logger_from_frame_in_nodata_strat(caplog): 9 | exec_nodata_strat("foo", NoDataStrategy.WARN) 10 | assert caplog.records[-1].levelname == "WARNING" 11 | assert caplog.records[-1].message == "foo" 12 | # Test that the name of the logger is this current frame's module, not the error.py logger. 13 | assert caplog.records[-1].name == __name__ 14 | -------------------------------------------------------------------------------- /tests/_typing/test_error_with_logger.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from hydromt.error import NoDataStrategy, exec_nodata_strat 4 | 5 | _LOGGER_NAME = "test_logger" 6 | logger = logging.getLogger(_LOGGER_NAME) 7 | 8 | # This test is dependent on the caller and call stack. 9 | # Don't let pytest optimize the test. 10 | # pytest: disable_assert_rewriting 11 | 12 | 13 | def test_logger_from_frame_in_nodata_strat(caplog): 14 | exec_nodata_strat("foo", NoDataStrategy.WARN) 15 | assert caplog.records[0].levelname == "WARNING" 16 | assert caplog.records[0].message == "foo" 17 | assert caplog.records[0].name == _LOGGER_NAME 18 | -------------------------------------------------------------------------------- /data/catalogs/aws_data/v1.0.0/data_catalog.yml: -------------------------------------------------------------------------------- 1 | meta: 2 | version: v1.0.0 3 | name: aws_data 4 | hydromt_version: '>1.0a,<2' 5 | esa_worldcover_2020_v100: 6 | data_type: RasterDataset 7 | uri: s3://esa-worldcover/v100/2020/ESA_WorldCover_10m_2020_v100_Map_AWS.vrt 8 | driver: 9 | name: rasterio 10 | filesystem: 11 | protocol: s3 12 | anon: true 13 | metadata: 14 | category: landuse 15 | source_version: v100 16 | url: https://doi.org/10.5281/zenodo.5571936 17 | license: CC BY 4.0 18 | crs: 4326 19 | data_adapter: 20 | rename: 21 | ESA_WorldCover_10m_2020_v100_Map_AWS: landuse 22 | 
import numpy as np
import xarray as xr

from hydromt.data_catalog.drivers.preprocessing import (
    round_latlon,
)
from hydromt.gis.raster_utils import full_from_transform


def test_round_latlon():
    # Grid whose x-origin (0.0005) is slightly off so rounding to three
    # decimals has a visible effect on the coordinate values.
    raster: xr.DataArray = full_from_transform(
        transform=[1.0, 0.0, 0.0005, 0.0, -1.0, 0.0],
        shape=(4, 6),
        nodata=-1,
        name="test",
        crs=4326,
    )
    rounded: xr.Dataset = round_latlon(raster.to_dataset(name="test"), decimals=3)
    assert np.equal(np.round(rounded.x.data[0], 3), 0.5)
toctree:: 15 | :titlesonly: 16 | 17 | ../_examples/working_with_models_basics 18 | ../_examples/working_with_models 19 | ../_examples/delineate_basin 20 | -------------------------------------------------------------------------------- /docs/assets/data_types/vrt_raster_dataset.yml: -------------------------------------------------------------------------------- 1 | merit_hydro: 2 | uri: base/merit_hydro/{variable}.vrt 3 | data_type: RasterDataset 4 | driver: 5 | name: rasterio 6 | options: 7 | chunks: 8 | x: 6000 9 | y: 6000 10 | data_adapter: 11 | rename: 12 | dir: flwdir 13 | bas: basins 14 | upa: uparea 15 | elv: elevtn 16 | sto: strord 17 | metadata: 18 | crs: 4326 19 | category: topography 20 | source_version: 1.0 21 | paper_doi: 10.1029/2019WR024873 22 | paper_ref: Dai Yamazaki 23 | source_url: http://hydro.iis.u-tokyo.ac.jp/~yamadai/MERIT_Hydro 24 | source_license: CC-BY-NC 4.0 or ODbL 1.0 25 | -------------------------------------------------------------------------------- /docs/api/uri_resolvers.rst: -------------------------------------------------------------------------------- 1 | .. currentmodule:: hydromt.data_catalog.uri_resolvers 2 | 3 | .. _uri_resolver_api: 4 | 5 | ============== 6 | URI Resolvers 7 | ============== 8 | 9 | The Hydromt URI resolvers module provides mechanisms for resolving and interpreting 10 | dataset URIs, including convention-based and index-based resolution schemes. 11 | 12 | Base Classes 13 | ------------ 14 | 15 | .. autosummary:: 16 | :toctree: ../_generated 17 | :nosignatures: 18 | 19 | URIResolver 20 | 21 | Resolver Implementations 22 | ------------------------ 23 | 24 | .. 
"""Testing for uris utils."""

from hydromt._utils.uris import _is_valid_url, _strip_scheme


def test_is_valid_url():
    for url in ("https://example.com", "s3://example-bucket/file.html"):
        assert _is_valid_url(url)
    for non_url in ("/mnt/data", r"C:\\MyComputer\Downloads"):
        assert not _is_valid_url(non_url)


def test_strip_scheme():
    assert _strip_scheme("https://example.com") == ("https://", "example.com")
    assert _strip_scheme("s3://example-bucket/file.html") == (
        "s3://",
        "example-bucket/file.html",
    )
    # Plain paths carry no scheme: the scheme component is None.
    assert _strip_scheme("/mnt/data") == (None, "/mnt/data")
"""Runtime Settings for HydroMT."""

from pathlib import Path
from typing import Annotated, Any

from pydantic import Field, ValidationInfo, ValidatorFunctionWrapHandler, WrapValidator
from pydantic_settings import BaseSettings


def _validate_path(
    path: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo
):
    """Coerce plain strings to ``Path`` before delegating to pydantic's validator."""
    coerced = Path(path) if isinstance(path, str) else path
    return handler(coerced, info)


# A ``Path`` field type that also accepts plain strings.
Pathdantic = Annotated[Path, WrapValidator(_validate_path)]


class Settings(BaseSettings):
    """Runtime Settings for HydroMT."""

    # Root directory used for HydroMT's on-disk cache.
    cache_root: Pathdantic = Field(default=Path.home() / ".hydromt")


SETTINGS = Settings()
RasterDatasetAdapter 9 | 10 | __all__ = [ 11 | "DataAdapterBase", 12 | "GeoDataFrameAdapter", 13 | "DatasetAdapter", 14 | "DataFrameAdapter", 15 | "GeoDatasetAdapter", 16 | "RasterDatasetAdapter", 17 | ] 18 | -------------------------------------------------------------------------------- /docs/assets/data_types/netcdf_raster_dataset.yml: -------------------------------------------------------------------------------- 1 | 2 | era5_hourly: 3 | uri: forcing/ERA5/org/era5_{variable}_{year}_hourly.nc 4 | data_type: RasterDataset 5 | driver: 6 | name: raster_xarray 7 | options: 8 | chunks: {latitude: 125, longitude: 120, time: 50} 9 | combine: by_coords 10 | decode_times: true 11 | parallel: true 12 | metadata: 13 | crs: 4326 14 | category: meteo 15 | paper_doi: 10.1002/qj.3803 16 | paper_ref: Hersbach et al. (2019) 17 | source_license: https://cds.climate.copernicus.eu/cdsapp/#!/terms/licence-to-use-copernicus-products 18 | source_url: https://doi.org/10.24381/cds.bd0915c6 19 | data_adapter: 20 | rename: 21 | t2m: temp 22 | tp: precip 23 | unit_add: 24 | temp: -273.15 25 | unit_mult: 26 | precip: 1000 27 | -------------------------------------------------------------------------------- /docs/assets/data_types/netcdf_geodataset.yml: -------------------------------------------------------------------------------- 1 | gtsmv3_eu_era5: 2 | uri: reanalysis-waterlevel-{year}-m{month:02d}.nc 3 | data_type: GeoDataset 4 | driver: 5 | name: geodataset_xarray 6 | options: 7 | chunks: {stations: 100, time: 1500} 8 | combine: by_coords 9 | decode_times: true 10 | parallel: true 11 | data_adapter: 12 | rename: 13 | station_x_coordinate: lon 14 | station_y_coordinate: lat 15 | stations: index 16 | metadata: 17 | crs: 4326 18 | category: ocean 19 | paper_doi: 10.24381/cds.8c59054f 20 | paper_ref: Copernicus Climate Change Service 2019 21 | source_license: https://cds.climate.copernicus.eu/cdsapp/#!/terms/licence-to-use-copernicus-products 22 | source_url: 
https://cds.climate.copernicus.eu/cdsapp#!/dataset/10.24381/cds.8c59054f?tab=overview 23 | -------------------------------------------------------------------------------- /hydromt/_utils/deep_merge.py: -------------------------------------------------------------------------------- 1 | """Helper function to merge deeply nested dictionaries.""" 2 | 3 | from typing import Any, Dict 4 | 5 | 6 | def _deep_merge(left: Dict[str, Any], right: Dict[str, Any]) -> Dict[str, Any]: 7 | """Merge deeply nested dictionaries. 8 | 9 | Given overlap in the leaves the right dictionary will take precedence. 10 | """ 11 | merged = {} 12 | 13 | for k_left, v_left in left.items(): 14 | merged[k_left] = v_left 15 | 16 | for k_right, v_right in right.items(): 17 | if k_right in merged: 18 | v_left = merged[k_right] 19 | if isinstance(v_left, dict) and isinstance(v_right, dict): 20 | merged[k_right] = _deep_merge(v_left, v_right) 21 | else: 22 | merged[k_right] = v_right 23 | else: 24 | merged[k_right] = v_right 25 | 26 | return merged 27 | -------------------------------------------------------------------------------- /tests/data/stac/catalog.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "Catalog", 3 | "id": "hydromt-stac-catalog", 4 | "stac_version": "1.0.0", 5 | "description": "The stac catalog of hydromt", 6 | "links": [ 7 | { 8 | "rel": "root", 9 | "href": "./catalog.json", 10 | "type": "application/json" 11 | }, 12 | { 13 | "rel": "child", 14 | "href": "./chirps_global/catalog.json", 15 | "type": "application/json" 16 | }, 17 | { 18 | "rel": "child", 19 | "href": "./gadm_level1/catalog.json", 20 | "type": "application/json" 21 | }, 22 | { 23 | "rel": "child", 24 | "href": "./gtsmv3_eu_era5/catalog.json", 25 | "type": "application/json" 26 | }, 27 | { 28 | "rel": "self", 29 | "href": "/home/sam/Documents/work/hydromt/tests/data/stac/catalog.json", 30 | "type": "application/json" 31 | } 32 | ] 33 | } 34 | 
-------------------------------------------------------------------------------- /.binder/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ghcr.io/prefix-dev/pixi:0.61.0-focal 2 | RUN groupadd deltares && useradd deltares --uid 1000 -m -g deltares 3 | 4 | USER deltares 5 | WORKDIR /home/deltares 6 | 7 | ENV PATH=/home/deltares/.pixi/bin:$PATH 8 | ENV HOME=/home/deltares/ 9 | 10 | COPY pixi.lock pyproject.toml README.rst ./ 11 | COPY ./hydromt ./hydromt 12 | COPY ./docs ./docs 13 | COPY examples ./examples 14 | 15 | RUN pixi install --locked -e slim-latest \ 16 | && rm -rf .cache \ 17 | && find .pixi -type f -name "*.pyc" -delete 18 | 19 | ENV HOME=/home/deltares \ 20 | NUMBA_CACHE_DIR=${HOME}/.cache/numba \ 21 | USE_PYGEOS=0 \ 22 | PYTHONDONTWRITEBYTECODE=1 \ 23 | PYDEVD_DISABLE_FILE_VALIDATION=1 24 | WORKDIR ${HOME} 25 | 26 | ENTRYPOINT ["pixi", "run", "-e", "slim-latest"] 27 | CMD ["pixi", "run", "-e", "slim-latest" , "jupyter", "notebook", "--no-browser", "--port=8888", "--ip=0.0.0.0"] 28 | -------------------------------------------------------------------------------- /tests/data/merged_esa_worldcover.yml: -------------------------------------------------------------------------------- 1 | esa_worldcover: 2 | metadata: 3 | license: CC BY 4.0 4 | category: landuse 5 | crs: 4326 6 | url: https://doi.org/10.5281/zenodo.5571936 7 | driver: 8 | name: rasterio 9 | options: 10 | chunks: 11 | x: 36000 12 | y: 36000 13 | data_type: RasterDataset 14 | name: esa_worldcover 15 | variants: 16 | - uri: landuse/esa_worldcover/esa-worldcover.vrt 17 | version: 2020 18 | provider: local 19 | - uri: landuse/esa_worldcover_2021/esa-worldcover.vrt 20 | version: 2021 21 | provider: local 22 | - data_adapter: 23 | rename: 24 | ESA_WorldCover_10m_2020_v100_Map_AWS: landuse 25 | provider: aws 26 | uri: s3://esa-worldcover/v100/2020/ESA_WorldCover_10m_2020_v100_Map_AWS.vrt 27 | version: 2020 28 | driver: 29 | filesystem: 30 | 
protocol: s3 31 | anon: true 32 | -------------------------------------------------------------------------------- /docs/overview/examples_data.rst: -------------------------------------------------------------------------------- 1 | .. _examples_data: 2 | 3 | Examples: Data Catalog 4 | ====================== 5 | 6 | HydroMT provides a data catalog system to easily access and manage various types of 7 | hydrological and geospatial data. The following examples demonstrate how to prepare a 8 | data catalog, export data for a certain region, and read different types of data 9 | (raster, vector, point, tabular) using HydroMT. 10 | 11 | Please see the :ref:`installation guide ` to see how to install HydroMT 12 | and to :ref:`download the examples `. 13 | 14 | **Available examples** 15 | 16 | .. toctree:: 17 | :titlesonly: 18 | 19 | ../_examples/prep_data_catalog 20 | ../_examples/export_data 21 | ../_examples/reading_raster_data 22 | ../_examples/reading_vector_data 23 | ../_examples/reading_point_data 24 | ../_examples/reading_tabular_data 25 | -------------------------------------------------------------------------------- /hydromt/data_catalog/sources/__init__.py: -------------------------------------------------------------------------------- 1 | """DataSources responsible for validating the DataCatalog.""" 2 | 3 | # importing all data sources for discovery, factory needs to be imported last. 
4 | from hydromt.data_catalog.sources.data_source import DataSource # noqa: I001 5 | from hydromt.data_catalog.sources.dataframe import DataFrameSource 6 | from hydromt.data_catalog.sources.dataset import DatasetSource 7 | from hydromt.data_catalog.sources.geodataframe import GeoDataFrameSource 8 | from hydromt.data_catalog.sources.geodataset import GeoDatasetSource 9 | from hydromt.data_catalog.sources.rasterdataset import RasterDatasetSource 10 | 11 | from hydromt.data_catalog.sources.factory import create_source # noqa: I001 12 | 13 | __all__ = [ 14 | "DataSource", 15 | "DataFrameSource", 16 | "DatasetSource", 17 | "GeoDataFrameSource", 18 | "GeoDatasetSource", 19 | "RasterDatasetSource", 20 | "create_source", 21 | ] 22 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | set SPHINXPROJ=hydromt 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 20 | echo.installed, then set the SPHINXBUILD environment variable to point 21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 22 | echo.may add the Sphinx directory to PATH. 23 | echo. 
24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /tests/data/test_sources2.yml: -------------------------------------------------------------------------------- 1 | meta: 2 | hydromt_version: '>=1.0a, <2.0' 3 | version: v1.0.0 4 | gtsmv3_eu_era5: 5 | data_type: GeoDataset 6 | uri: water_level\reanalysis-waterlevel-{year}-m{month:02d}.nc 7 | driver: 8 | name: geodataset_xarray 9 | options: 10 | chunks: 11 | stations: 100 12 | time: 1500 13 | concat_dim: time 14 | decode_times: true 15 | combine: by_coords 16 | parallel: true 17 | metadata: 18 | category: ocean 19 | paper_doi: 10.24381/cds.8c59054f 20 | paper_ref: Copernicus Climate Change Service 2019 21 | url: https://cds.climate.copernicus.eu/cdsapp#!/dataset/10.24381/cds.8c59054f?tab=overview 22 | version: GTSM v3.0 23 | license: https://cds.climate.copernicus.eu/cdsapp/#!/terms/licence-to-use-copernicus-products 24 | crs: 4326 25 | data_adapter: 26 | rename: 27 | station_x_coordinate: lon 28 | station_y_coordinate: lat 29 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | ## Issue addressed 2 | 3 | Fixes # 4 | 5 | ## Explanation 6 | 7 | Explain how you addressed the bug/feature request, what choices you made and why. 
8 | 9 | ## General Checklist 10 | 11 | - [ ] Updated tests or added new tests 12 | - [ ] Branch is up to date with `main` 13 | - [ ] Tests & pre-commit hooks pass 14 | - [ ] Updated documentation 15 | - [ ] Updated changelog.rst 16 | 17 | ## Data/Catalog checklist 18 | 19 | - [ ] `data/catalogs/predefined_catalogs.yml` has not been modified. 20 | - [ ] None of the old `data_catalog.yml` files have been changed 21 | - [ ] `data/changelog.rst` has been updated 22 | - [ ] new file uses `LF` line endings (done automatically if you used `update_versions.py`) 23 | - [ ] New file has been tested locally 24 | - [ ] Tests have been added using the new file in the test suite 25 | 26 | ## Additional Notes (optional) 27 | 28 | Add any additional notes or information that may be helpful. 29 | -------------------------------------------------------------------------------- /.github/workflows/pre-commit_auto_update.yml: -------------------------------------------------------------------------------- 1 | name: Pre-commit auto update 2 | on: 3 | schedule: 4 | # At 03:00 on day 3 of the month 5 | - cron: "0 3 3 * *" 6 | # on demand 7 | workflow_dispatch: 8 | jobs: 9 | auto-update: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v6 13 | with: 14 | ssh-key: ${{ secrets.SSH_PRIVATE_KEY }} 15 | - uses: actions/setup-python@v6 16 | with: 17 | python-version: "3.12" 18 | - uses: browniebroke/pre-commit-autoupdate-action@main 19 | - uses: peter-evans/create-pull-request@v8 20 | if: always() 21 | with: 22 | token: ${{ secrets.GITHUB_TOKEN }} 23 | branch: update/pre-commit-hooks 24 | title: Update pre-commit hooks 25 | commit-message: "Update pre-commit hooks" 26 | body: Update versions of pre-commit hooks to latest version. 27 | author: "GitHub " 28 | -------------------------------------------------------------------------------- /docs/about/team.rst: -------------------------------------------------------------------------------- 1 | .. 
_contributors: 2 | 3 | Team 4 | ===== 5 | 6 | Contributors 7 | ------------ 8 | 9 | HydroMT (core) is mostly developed by a team of researchers and software engineers from `Deltares `_. 10 | About `25+ contributors `_ have helped to develop HydroMT (core) so far. 11 | 12 | Active developers 13 | ----------------- 14 | 15 | * Luuk Blom `@LuukBlom `_ 16 | * Hélène Boisgontier `@hboisgon `_ 17 | * Tjalling de Jong `@Tjalling-dejong `_ 18 | * Marnix Kraus `@deltamarnix `_ 19 | * Sam Vente `@savente93 `_ 20 | 21 | Founder 22 | ------- 23 | 24 | * Dirk Eilander `@DirkEilander `_ 25 | 26 | Product owner 27 | ------------- 28 | 29 | * Ali Meshgi `@alimeshgi `_ 30 | -------------------------------------------------------------------------------- /hydromt/_utils/elevation.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from numpy.typing import NDArray 3 | 4 | __all__ = ["_elevation2rgba", "_rgba2elevation"] 5 | 6 | 7 | def _elevation2rgba(val, nodata=np.nan) -> NDArray[np.uint8]: 8 | """Convert elevation to rgb tuple.""" 9 | val += 32768 10 | r = np.floor(val / 256).astype(np.uint8) 11 | g = np.floor(val % 256).astype(np.uint8) 12 | b = np.floor((val - np.floor(val)) * 256).astype(np.uint8) 13 | mask = np.isnan(val) if np.isnan(nodata) else val == nodata 14 | a = np.where(mask, 0, 255).astype(np.uint8) 15 | return np.stack((r, g, b, a), axis=2) 16 | 17 | 18 | def _rgba2elevation( 19 | rgba: np.ndarray, nodata=np.nan, dtype=np.float32 20 | ) -> NDArray[np.float32]: 21 | """Convert rgb tuple to elevation.""" 22 | rgba = rgba.astype(dtype) 23 | r, g, b, a = np.split(rgba, 4, axis=2) 24 | val = (r * 256 + g + b / 256) - 32768 25 | return np.where(a == 0, nodata, val).squeeze().astype(dtype) 26 | -------------------------------------------------------------------------------- /hydromt/cli/_utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 
"""Utils for parsing cli options and arguments.""" 3 | 4 | import json 5 | from os.path import isfile 6 | from typing import Any, Dict 7 | 8 | import click 9 | 10 | __all__ = ["parse_json"] 11 | 12 | 13 | ### CLI callback methods ### 14 | def parse_json(_ctx: click.Context, _param, value: str) -> Dict[str, Any]: 15 | """Parse json from object or file. 16 | 17 | If the object passed is a path pointing to a file, load it's contents and parse it. 18 | Otherwise attempt to parse the object as JSON itself. 19 | """ 20 | if isfile(value): 21 | with open(value, "r") as f: 22 | kwargs = json.load(f) 23 | else: 24 | if value.strip("{").startswith("'"): 25 | value = value.replace("'", '"') 26 | try: 27 | kwargs = json.loads(value) 28 | except json.JSONDecodeError: 29 | raise ValueError(f'Could not decode JSON "{value}"') 30 | return kwargs 31 | -------------------------------------------------------------------------------- /hydromt/gis/_gdal_drivers.py: -------------------------------------------------------------------------------- 1 | GDAL_DRIVER_CODE_MAP = { 2 | "asc": "AAIGrid", 3 | "blx": "BLX", 4 | "bmp": "BMP", 5 | "bt": "BT", 6 | "dat": "ZMap", 7 | "dem": "USGSDEM", 8 | "gen": "ADRG", 9 | "gif": "GIF", 10 | "gpkg": "GPKG", 11 | "grd": "NWT_GRD", 12 | "gsb": "NTv2", 13 | "gtx": "GTX", 14 | "hdr": "MFF", 15 | "hf2": "HF2", 16 | "hgt": "SRTMHGT", 17 | "img": "HFA", 18 | "jpg": "JPEG", 19 | "kro": "KRO", 20 | "lcp": "LCP", 21 | "mbtiles": "MBTiles", 22 | "mpr/mpl": "ILWIS", 23 | "ntf": "NITF", 24 | "pix": "PCIDSK", 25 | "png": "PNG", 26 | "pnm": "PNM", 27 | "rda": "R", 28 | "rgb": "SGI", 29 | "rst": "RST", 30 | "rsw": "RMF", 31 | "sdat": "SAGA", 32 | "sqlite": "Rasterlite", 33 | "ter": "Terragen", 34 | "tif": "GTiff", 35 | "vrt": "VRT", 36 | "xpm": "XPM", 37 | "xyz": "XYZ", 38 | } 39 | GDAL_EXT_CODE_MAP = {v: k for k, v in GDAL_DRIVER_CODE_MAP.items()} 40 | -------------------------------------------------------------------------------- /examples/data/vito_reclass.csv: 
-------------------------------------------------------------------------------- 1 | vito,description,landuse,manning 2 | 0,Unknown,0,-999. 3 | 20,Shrubs,20,0.5 4 | 30,Herbaceous vegetation,30,0.15 5 | 40,Cultivated and managed vegetation/agriculture (cropland),40,0.2 6 | 50,Urban / built up,50,0.011 7 | 60,Bare / sparse vegetation,60,0.02 8 | 70,Snow and Ice,70,0.01 9 | 80,Permanent water bodies,80,0.01 10 | 90,Herbaceous wetland,90,0.15 11 | 100,Moss and lichen,100,0.085 12 | 111,Closed forest evergreen needle leaf,111,0.4 13 | 112,Closed forest evergreen broad leaf,112,0.6 14 | 113,Closed forest deciduous needle leaf,113,0.4 15 | 114,Closed forest deciduous broad leaf,114,0.6 16 | 115,Closed forest mixed,115,0.5 17 | 116,Closed forest unknown,116,0.5 18 | 121,Open forest evergreen needle leaf,121,0.45 19 | 122,Open forest evergreen broad leaf,122,0.55 20 | 123,Open forest deciduous needle leaf,123,0.45 21 | 124,Open forest deciduous broad leaf,124,0.55 22 | 125,Open forest mixed,125,0.5 23 | 126,Open forest unknown,126,0.5 24 | 200,Open sea,200,0.01 25 | 255,No data,255,-999. 
26 | -------------------------------------------------------------------------------- /tests/data/vito_mapping.csv: -------------------------------------------------------------------------------- 1 | vito,description,landuse,roughness_manning 2 | 20,Shrubs,20,0.5 3 | 30,Herbaceous vegetation,30,0.15 4 | 40,Cultivated and managed vegetation/agriculture (cropland),40,0.2 5 | 50,Urban / built up,50,0.011 6 | 60,Bare / sparse vegetation,60,0.02 7 | 70,Snow and Ice,70,0.01 8 | 80,Permanent water bodies,80,0.01 9 | 90,Herbaceous wetland,90,0.15 10 | 100,Moss and lichen,100,0.085 11 | 111,Closed forest evergreen needle leaf,111,0.4 12 | 112,Closed forest evergreen broad leaf,112,0.6 13 | 113,Closed forest deciduous needle leaf,113,0.4 14 | 114,Closed forest deciduous broad leaf,114,0.6 15 | 115,Closed forest mixed,115,0.5 16 | 116,Closed forest unknown,116,0.5 17 | 121,Open forest evergreen needle leaf,121,0.45 18 | 122,Open forest evergreen broad leaf,122,0.55 19 | 123,Open forest deciduous needle leaf,123,0.45 20 | 124,Open forest deciduous broad leaf,124,0.55 21 | 125,Open forest mixed,125,0.5 22 | 126,Open forest unknown,126,0.5 23 | 200,Open sea,200,0.01 24 | 0,Unknown,0,-999.0 25 | 255,No data,255,-999.0 26 | -------------------------------------------------------------------------------- /docs/user_guide/overview/index.rst: -------------------------------------------------------------------------------- 1 | .. _overview_index: 2 | 3 | Overview 4 | ======== 5 | 6 | .. grid:: 3 7 | :gutter: 1 8 | 9 | .. grid-item-card:: 10 | :text-align: center 11 | :link: detailed_intro 12 | :link-type: ref 13 | 14 | :octicon:`book;5em;sd-text-icon blue-icon` 15 | +++ 16 | Introduction and common usage 17 | 18 | .. grid-item-card:: 19 | :text-align: center 20 | :link: hydromt_cli 21 | :link-type: ref 22 | 23 | :octicon:`terminal;5em;sd-text-icon blue-icon` 24 | +++ 25 | Command Line Interface 26 | 27 | .. 
grid-item-card:: 28 | :text-align: center 29 | :link: hydromt_python 30 | :link-type: ref 31 | 32 | :octicon:`file;5em;sd-text-icon blue-icon` 33 | +++ 34 | Python Interface 35 | 36 | 37 | .. toctree:: 38 | :hidden: 39 | 40 | Introduction to HydroMT 41 | HydroMT CLI interface 42 | HydroMT Python API 43 | -------------------------------------------------------------------------------- /.github/workflows/check-data-catalogs.yml: -------------------------------------------------------------------------------- 1 | 2 | name: Validate Data Catalog 3 | 4 | on: 5 | workflow_dispatch: 6 | push: 7 | branches: [main] 8 | paths: 9 | - data/catalogs/*.yml 10 | pull_request: 11 | branches: 12 | - main 13 | paths: 14 | - data/catalogs/*.yml 15 | 16 | 17 | jobs: 18 | build: 19 | defaults: 20 | run: 21 | shell: bash -l {0} 22 | runs-on: ubuntu-latest 23 | timeout-minutes: 30 24 | concurrency: 25 | group: ${{ github.workflow }}-data-catalog-validation-${{ github.ref }} 26 | cancel-in-progress: true 27 | 28 | steps: 29 | - uses: actions/checkout@v6 30 | - uses: prefix-dev/setup-pixi@v0.9.3 31 | with: 32 | pixi-version: "v0.59.0" 33 | - name: Validate the catalogs 34 | run: | 35 | export NUMBA_DISABLE_JIT=1 36 | for d in $(ls data/catalogs/*.yml); do 37 | echo "Checking $d" 38 | PYTHONPYCACHEPREFIX=~/pycache pixi run --locked hydromt check -d $d 39 | done 40 | -------------------------------------------------------------------------------- /hydromt/gis/__init__.py: -------------------------------------------------------------------------------- 1 | """Accessors to extend the functionality of xarray structures.""" 2 | 3 | # required for accessor style documentation 4 | from xarray import DataArray, Dataset # noqa: F401 5 | 6 | from hydromt.gis import flw 7 | from hydromt.gis.gis_utils import parse_crs, utm_crs, zoom_to_overview_level 8 | from hydromt.gis.raster import RasterDataArray, RasterDataset 9 | from hydromt.gis.raster_utils import ( 10 | cellres, 11 | full, 12 | 
def test_deep_merge_simple():
    """Right side wins on overlapping top-level keys; disjoint keys union."""
    left = {"a": 1, "b": 2, "c": 4}
    right = {"d": 1, "b": 24, "e": 4}
    expected = {"a": 1, "b": 24, "c": 4, "d": 1, "e": 4}
    assert _deep_merge(left, right) == expected


def test_deep_merge_nested_overwrite():
    """Deeply nested leaves from the right replace matching leaves on the left."""
    left = {"a": 1, "b": 2, "c": {"d": {"b": {"e": 4}}}}
    right = {"c": {"d": {"b": {"e": 8}}}}
    expected = {"a": 1, "b": 2, "c": {"d": {"b": {"e": 8}}}}
    assert _deep_merge(left, right) == expected


def test_deep_merge_disjoint():
    """Completely disjoint nested trees are both kept intact."""
    left = {"a": {"b": 2, "c": {"d": {"b": {"e": 4}}}}}
    right = {"q": {"d": {"b": {"e": 8}}}}
    expected = {
        "q": {"d": {"b": {"e": 8}}},
        "a": {"b": 2, "c": {"d": {"b": {"e": 4}}}},
    }
    assert _deep_merge(left, right) == expected


def test_deep_merge_override_dict_with_value():
    """A scalar on the right replaces an entire nested dict on the left."""
    left = {"a": {"b": 2, "c": {"d": {"b": {"e": 4}}}}}
    right = {"a": 3}
    assert _deep_merge(left, right) == {"a": 3}
import logging
from typing import Optional

import numpy as np
import xarray as xr

from hydromt._utils import (
    _has_no_data,
)
from hydromt.error import NoDataStrategy, exec_nodata_strat
from hydromt.typing import (
    TimeRange,
)

logger = logging.getLogger(__name__)


def _slice_temporal_dimension(
    ds: xr.Dataset,
    time_range: TimeRange,
    handle_nodata: NoDataStrategy = NoDataStrategy.RAISE,
) -> Optional[xr.Dataset]:
    """Restrict ``ds`` to ``time_range`` along its ``time`` dimension.

    Datasets without a multi-element datetime ``time`` dimension are returned
    unchanged. When slicing leaves no data, the given nodata strategy is
    executed and ``None`` is returned.
    """
    sliceable = (
        "time" in ds.dims
        and ds["time"].size > 1
        and np.issubdtype(ds["time"].dtype, np.datetime64)
    )
    if not sliceable:
        # Nothing to slice (no time dim, single step, or non-datetime axis).
        return ds

    logger.debug(f"Slicing time dim {time_range}")
    sliced = ds.sel(time=slice(time_range.start, time_range.end))
    if _has_no_data(sliced):
        exec_nodata_strat("No data left after temporal slicing.", handle_nodata)
        return None
    return sliced
autosummary:: 35 | :toctree: ../_generated 36 | :nosignatures: 37 | 38 | GeoDataFrameSource 39 | GeoDatasetSource 40 | 41 | Tabular Data Sources 42 | -------------------- 43 | 44 | .. autosummary:: 45 | :toctree: ../_generated 46 | :nosignatures: 47 | 48 | DataFrameSource 49 | -------------------------------------------------------------------------------- /docs/api/plugin.rst: -------------------------------------------------------------------------------- 1 | .. currentmodule:: hydromt.plugins 2 | 3 | .. _hydromt_plugins_api: 4 | 5 | ======= 6 | Plugins 7 | ======= 8 | 9 | .. autosummary:: 10 | :toctree: ../_generated 11 | 12 | Plugins 13 | Plugins.plugin_summary 14 | 15 | Model plugins 16 | ------------- 17 | 18 | .. autosummary:: 19 | :toctree: ../_generated 20 | 21 | Plugins.model_plugins 22 | Plugins.model_metadata 23 | Plugins.model_summary 24 | 25 | Model component plugins 26 | ----------------------- 27 | 28 | .. autosummary:: 29 | :toctree: ../_generated 30 | 31 | Plugins.component_plugins 32 | Plugins.component_metadata 33 | Plugins.component_summary 34 | 35 | Data driver plugins 36 | ------------------- 37 | 38 | .. autosummary:: 39 | :toctree: ../_generated 40 | 41 | Plugins.driver_plugins 42 | Plugins.driver_metadata 43 | Plugins.driver_summary 44 | 45 | Predefined Catalog plugins 46 | -------------------------- 47 | 48 | .. autosummary:: 49 | :toctree: ../_generated 50 | 51 | Plugins.catalog_plugins 52 | Plugins.catalog_metadata 53 | Plugins.catalog_summary 54 | -------------------------------------------------------------------------------- /docs/dev/intro.rst: -------------------------------------------------------------------------------- 1 | .. _intro_developer_guide: 2 | 3 | Developer Guide 4 | =============== 5 | 6 | .. grid:: 3 7 | :gutter: 2 8 | 9 | .. 
grid-item-card:: 10 | :text-align: center 11 | :link: architecture/index 12 | :link-type: doc 13 | 14 | :octicon:`database;5em;sd-text-icon blue-icon` 15 | +++ 16 | HydroMT Architecture 17 | 18 | .. grid-item-card:: 19 | :text-align: center 20 | :link: plugin_dev/index 21 | :link-type: doc 22 | 23 | :octicon:`book;5em;sd-text-icon blue-icon` 24 | +++ 25 | Plugin Developer Guide 26 | 27 | .. grid-item-card:: 28 | :text-align: center 29 | :link: core_dev/index 30 | :link-type: doc 31 | 32 | :octicon:`book;5em;sd-text-icon blue-icon` 33 | +++ 34 | Core Developer Guide 35 | 36 | .. toctree:: 37 | :maxdepth: 2 38 | :hidden: 39 | :caption: Developer Guide 40 | 41 | HydroMT Architecture 42 | Plugin Developer Guide 43 | Core Developer Guide 44 | -------------------------------------------------------------------------------- /examples/legends/vito-label-qgis.txt: -------------------------------------------------------------------------------- 1 | 20,255,187,34,255,20 Shrubs 2 | 30,255,255,76,255,30 Herbaceous vegetation 3 | 40,240,150,255,255,40 Cropland 4 | 50,250,0,0,255,50 Urban 5 | 60,180,180,180,255,60 Bare/sparse vegetation 6 | 70,240,240,240,255,70 Snow and ice 7 | 80,0,50,200,255,80 Permanent water bodies 8 | 90,0,150,160,255,90 Herbaceous wetland 9 | 100,250,230,160,255,100 Moss and lichen 10 | 111,88,72,31,255,111 Closed deciduous needleleaf forest 11 | 112,0,153,0,255,112 Closed evergreen broadleaf forest 12 | 113,112,102,62,255,113 Closed deciduous needleleaf forest 13 | 114,0,204,0,255,114 Closed deciduous broadleaf forest 14 | 115,78,117,31,255,115 Closed mixed forest 15 | 116,0,120,0,255,116 Closed forest unknown 16 | 121,102,96,0,255,121 Open evergreen needleleaf forest 17 | 122,141,180,0,255,122 Open evergreen needleleaf forest 18 | 123,141,116,0,255,123 Open deciduous needleleaf forest 19 | 124,160,220,0,255,124 Open deciduous broadleaf forest 20 | 125,146,153,0,255,125 Open mixed forest 21 | 126,100,140,0,255,126 Open forest unknown 22 | 
200,0,0,128,255,200 Open sea 23 | 255,255,255,255,0,255 No data 24 | -------------------------------------------------------------------------------- /docs/overview/examples_gis.rst: -------------------------------------------------------------------------------- 1 | .. _examples_gis: 2 | 3 | Examples: GIS functionalities (advanced) 4 | ======================================== 5 | 6 | At the core of HydroMT lies extensive GIS functionalities in order to process raw data to 7 | meaningful information for a model. We include functions to process and work with raster 8 | (regular grid) data, geodatasets (3D vector data such as point timeseries), elevation and 9 | flow direction operations, or extreme value analysis. The following examples demonstrate 10 | how to use these advanced GIS functionalities in HydroMT which can be useful for plugin 11 | development or advanced data processing tasks in python tools or scripts. 12 | 13 | Please see the :ref:`installation guide ` to see how to install HydroMT 14 | and to :ref:`download the examples `. 15 | 16 | **Available examples** 17 | 18 | .. 
toctree:: 19 | :titlesonly: 20 | 21 | ../_examples/working_with_raster 22 | ../_examples/working_with_geodatasets 23 | ../_examples/working_with_flow_directions 24 | ../_examples/doing_extreme_value_analysis 25 | -------------------------------------------------------------------------------- /hydromt/_utils/uris.py: -------------------------------------------------------------------------------- 1 | import re 2 | from pathlib import Path 3 | from typing import Optional, Tuple 4 | from urllib.parse import urlparse 5 | 6 | __all__ = ["_strip_scheme", "_is_valid_url"] 7 | 8 | 9 | def _strip_scheme(uri: str) -> Tuple[Optional[str], str]: 10 | """Strip scheme from uri.""" 11 | try: 12 | scheme: str = next(re.finditer(r"^\w+://", uri)).group() 13 | except StopIteration: 14 | # no scheme found 15 | return (None, uri) 16 | return (scheme, uri.lstrip(scheme)) 17 | 18 | 19 | def _strip_vsi(uri: str) -> Tuple[Optional[str], str]: 20 | """Strip gdal virtual filesystem prefix.""" 21 | try: 22 | prefix: str = next(re.finditer(r"^/vsi\w+/", uri)).group() 23 | except StopIteration: 24 | # No prefix found 25 | return None, uri 26 | return (prefix, uri.lstrip(prefix)) 27 | 28 | 29 | def _is_valid_url(uri: str | Path) -> bool: 30 | """Check if uri is valid.""" 31 | try: 32 | result = urlparse(str(uri)) 33 | return all([result.scheme, result.netloc]) 34 | except (ValueError, AttributeError): 35 | return False 36 | -------------------------------------------------------------------------------- /docs/api/stats.rst: -------------------------------------------------------------------------------- 1 | .. currentmodule:: hydromt.stats 2 | 3 | 4 | .. _statistics: 5 | 6 | ===================================== 7 | Statistics and Extreme Value Analysis 8 | ===================================== 9 | 10 | .. _statistics_skills: 11 | 12 | Statistics and performance metrics 13 | ================================== 14 | 15 | .. 
autosummary:: 16 | :toctree: ../_generated 17 | 18 | skills.bias 19 | skills.percentual_bias 20 | skills.volumetric_error 21 | skills.nashsutcliffe 22 | skills.lognashsutcliffe 23 | skills.pearson_correlation 24 | skills.spearman_rank_correlation 25 | skills.kge 26 | skills.kge_2012 27 | skills.kge_non_parametric 28 | skills.kge_non_parametric_flood 29 | skills.rsquared 30 | skills.mse 31 | skills.rmse 32 | skills.rsr 33 | 34 | .. _statistics_extremes: 35 | 36 | Extreme Value Analysis and Design Events 37 | ======================================== 38 | .. autosummary:: 39 | :toctree: ../_generated 40 | 41 | extremes.get_peaks 42 | extremes.fit_extremes 43 | extremes.get_return_value 44 | extremes.eva 45 | design_events.get_peak_hydrographs 46 | -------------------------------------------------------------------------------- /docs/user_guide/models/model_overview.rst: -------------------------------------------------------------------------------- 1 | .. _model_main: 2 | 3 | Overview models 4 | =============== 5 | 6 | High level functionality 7 | ------------------------ 8 | 9 | HydroMT has the following high-level functionality for setting up models from raw data or adjusting models: 10 | 11 | * :ref:`building a model `: building a model from scratch. 12 | * :ref:`updating a model `: adding or changing model components of an existing model. 13 | 14 | The exact process of building or updating a model can be configured in a single configuration :ref:`.yaml file `. 15 | This file describes the full pipeline of model methods and their arguments. The methods vary for the 16 | different model classes and :ref:`plugins`, as documented in this documentation or for 17 | each plugin documentation website. 18 | 19 | .. 
toctree:: 20 | :hidden: 21 | 22 | Building a model 23 | Updating a model 24 | Model workflow file 25 | Defining a region 26 | Model components (advanced) 27 | Model processes (advanced) 28 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2021 Deltares 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 
22 | -------------------------------------------------------------------------------- /docs/dev/plugin_dev/example_test.yml: -------------------------------------------------------------------------------- 1 | name: Test Plugin 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | branches: 9 | - main 10 | 11 | jobs: 12 | test: 13 | strategy: 14 | fail-fast: false 15 | matrix: 16 | os: [ubuntu-latest, windows-latest] 17 | python-version: ['310', '311', '312', '313'] 18 | hydromt-version: ['latest', 'dev'] 19 | 20 | name: pytest ${{ matrix.hydromt-version }}-${{ matrix.python-version }} (${{ matrix.os }}) 21 | runs-on: ${{ matrix.os }} 22 | 23 | concurrency: 24 | group: ${{ github.workflow }}-${{ matrix.os }}-${{ matrix.hydromt-version }}-${{ matrix.python-version }}-${{ github.ref }} 25 | cancel-in-progress: true 26 | 27 | steps: 28 | - uses: actions/checkout@v5 29 | - uses: prefix-dev/setup-pixi@v0.9.3 30 | with: 31 | pixi-version: "v0.59.0" 32 | environments: ${{ matrix.hydromt-version }}_${{ matrix.python-version }} 33 | 34 | - name: Run tests 35 | run: | 36 | pixi run --locked -e ${{ matrix.hydromt-version }}_${{ matrix.python-version }} test 37 | -------------------------------------------------------------------------------- /tests/data_catalog/adapters/test_geodataset_adapter.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | import xarray as xr 4 | 5 | from hydromt.data_catalog.adapters.geodataset import GeoDatasetAdapter 6 | from hydromt.gis.gis_utils import _parse_geom_bbox_buffer 7 | from hydromt.typing import SourceMetadata 8 | 9 | 10 | class TestGeoDatasetAdapter: 11 | @pytest.fixture 12 | def example_geo_ds(self, geoda: xr.DataArray) -> xr.Dataset: 13 | geoda.vector.set_crs(4326) 14 | return geoda.to_dataset() 15 | 16 | def test_transform_data_bbox(self, example_geo_ds: xr.Dataset): 17 | adapter = GeoDatasetAdapter() 18 | mask = 
_parse_geom_bbox_buffer(bbox=example_geo_ds.vector.bounds) 19 | ds = adapter.transform( 20 | example_geo_ds, 21 | metadata=SourceMetadata(), 22 | mask=mask, 23 | ) 24 | assert np.all(ds == example_geo_ds) 25 | 26 | def test_transform_data_mask(self, example_geo_ds: xr.Dataset): 27 | adapter = GeoDatasetAdapter() 28 | ds = adapter.transform(example_geo_ds, metadata=SourceMetadata()) 29 | assert np.all(ds == example_geo_ds) 30 | -------------------------------------------------------------------------------- /hydromt/typing/metadata.py: -------------------------------------------------------------------------------- 1 | """Metadata on DataSource.""" 2 | 3 | from typing import Any, Dict, Optional, Union 4 | 5 | from pydantic import BaseModel, BeforeValidator, ConfigDict, Field 6 | from typing_extensions import Annotated 7 | 8 | from hydromt.typing.crs import CRS 9 | 10 | # always stringify version 11 | Version = Annotated[str, BeforeValidator(str)] 12 | 13 | 14 | class SourceMetadata(BaseModel): 15 | """ 16 | Metadata for data source. 17 | 18 | This refers to data that is used to enrich the data format the source is in. 19 | SourceMetaData is not used to reproject or fill nodata values, it is used to 20 | check the data and enrich the metadata for HydroMT. 21 | 22 | 23 | Only the fields listed here are used in HydroMT, the rest are free for used 24 | to fill in. 
25 | """ 26 | 27 | model_config = ConfigDict(extra="allow") 28 | 29 | crs: Optional[CRS] = None 30 | unit: Optional[str] = None 31 | extent: Dict[str, Any] = Field(default_factory=dict) 32 | nodata: Union[dict, float, int, None] = None 33 | attrs: Dict[str, Any] = Field(default_factory=dict) 34 | category: Optional[str] = None 35 | -------------------------------------------------------------------------------- /docs/_static/theme-deltares.css: -------------------------------------------------------------------------------- 1 | /* Override the default color set in the original theme */ 2 | 3 | html[data-theme="light"] { 4 | /* NOTE: added after pydata v0.9 */ 5 | --pst-color-primary: #080c80 !important; 6 | 7 | /* hyperlinks */ 8 | --pst-color-link: rgb(13, 56, 224); 9 | 10 | /* panels */ 11 | --pst-color-preformatted-border: #080c80 !important; 12 | --pst-color-panel-background: #f0f0f075; 13 | 14 | /* navbar */ 15 | /* NOTE: does not work since v0.9 */ 16 | --pst-color-navbar-link: rgb(237, 237, 255); 17 | --pst-color-navbar-link-hover: #fff; 18 | --pst-color-navbar-link-active: #fff; 19 | 20 | 21 | /* sphinx design */ 22 | /* NOTE: does not work since v0.9 */ 23 | --sd-color-card-border-hover: #080c80; 24 | --sd-color-tabs-label-active: #080c80; 25 | --sd-color-tabs-label-hover: #080c80; 26 | --sd-color-tabs-underline-active: #080c80; 27 | } 28 | 29 | /* enlarge deltares & github icon size; only works with local/url svg files; not with fa icons */ 30 | img.icon-link-image { 31 | height: 2em !important; 32 | } 33 | 34 | 35 | .sd-text-icon.blue-icon { 36 | color: #080c80; 37 | } 38 | -------------------------------------------------------------------------------- /hydromt/stats/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """HydroMT statistics.""" 3 | 4 | from hydromt.stats.design_events import get_peak_hydrographs 5 | from hydromt.stats.extremes import ( 6 | eva, 7 | fit_extremes, 8 
| get_peaks, 9 | get_return_value, 10 | ) 11 | from hydromt.stats.skills import ( 12 | bias, 13 | kge, 14 | kge_2012, 15 | kge_non_parametric, 16 | kge_non_parametric_flood, 17 | lognashsutcliffe, 18 | mse, 19 | nashsutcliffe, 20 | pearson_correlation, 21 | percentual_bias, 22 | rmse, 23 | rsquared, 24 | rsr, 25 | spearman_rank_correlation, 26 | volumetric_error, 27 | ) 28 | 29 | __all__ = [ 30 | "bias", 31 | "kge", 32 | "kge_2012", 33 | "kge_non_parametric", 34 | "kge_non_parametric_flood", 35 | "lognashsutcliffe", 36 | "mse", 37 | "nashsutcliffe", 38 | "pearson_correlation", 39 | "percentual_bias", 40 | "rmse", 41 | "rsquared", 42 | "rsr", 43 | "spearman_rank_correlation", 44 | "volumetric_error", 45 | "eva", 46 | "fit_extremes", 47 | "get_peaks", 48 | "get_return_value", 49 | "get_peak_hydrographs", 50 | ] 51 | -------------------------------------------------------------------------------- /tests/components/test_vector_component.py: -------------------------------------------------------------------------------- 1 | from logging import DEBUG 2 | from pathlib import Path 3 | 4 | import pytest 5 | import xarray as xr 6 | from pytest_mock import MockerFixture 7 | 8 | from hydromt.model.components.vector import VectorComponent 9 | from hydromt.model.model import Model 10 | from hydromt.model.root import ModelRoot 11 | 12 | 13 | def test_empty_data(tmp_path: Path, mocker: MockerFixture): 14 | model = mocker.Mock(set=Model) 15 | model.root = mocker.Mock(set=ModelRoot) 16 | model.root.path = tmp_path 17 | vector = VectorComponent(model) 18 | xr.testing.assert_identical(vector.data, xr.Dataset()) 19 | 20 | 21 | def test_write_empty_data( 22 | tmp_path: Path, mocker: MockerFixture, caplog: pytest.LogCaptureFixture 23 | ): 24 | model = mocker.Mock(set=Model) 25 | model.root = mocker.Mock(set=ModelRoot) 26 | model.root.path = tmp_path 27 | model.name = "foo" 28 | vector = VectorComponent(model) 29 | model.components = {} 30 | model.components["vector"] = vector 31 | with 
caplog.at_level(DEBUG): 32 | vector.write() 33 | assert "foo.vector: No vector data found, skip writing." in caplog.text 34 | -------------------------------------------------------------------------------- /hydromt/_compat.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from typing import List 3 | 4 | from packaging.version import Version 5 | 6 | __all__: List[str] = [] 7 | 8 | HAS_GCSFS = False 9 | HAS_GDAL = False 10 | HAS_OPENPYXL = False 11 | HAS_PYET = False 12 | HAS_S3FS = False 13 | 14 | try: 15 | import gcsfs 16 | 17 | HAS_GCSFS = True 18 | except ImportError: 19 | pass 20 | 21 | try: 22 | from osgeo import gdal 23 | 24 | HAS_GDAL = True 25 | except ImportError: 26 | pass 27 | 28 | try: 29 | import openpyxl 30 | 31 | HAS_OPENPYXL = True 32 | except ImportError: 33 | pass 34 | 35 | try: 36 | import pyet 37 | 38 | HAS_PYET = True 39 | except ModuleNotFoundError: 40 | pass 41 | 42 | try: 43 | import s3fs 44 | 45 | HAS_S3FS = True 46 | except ImportError: 47 | pass 48 | 49 | # entrypoints in standard library only compatible from 3.10 onwards 50 | py_version = sys.version_info 51 | if py_version[0] >= 3 and py_version[1] >= 10: 52 | from importlib.metadata import entry_points, Distribution, EntryPoint, EntryPoints # noqa: I001 53 | else: 54 | from importlib_metadata import entry_points, Distribution, EntryPoint, EntryPoints # noqa: I001 55 | -------------------------------------------------------------------------------- /tests/data/stac/gadm_level1/gadm_level1/gadm_level1.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "Feature", 3 | "stac_version": "1.0.0", 4 | "id": "gadm_level1", 5 | "properties": { 6 | "category": "geography", 7 | "notes": "last downloaded 2020-10-19; license required for commercial use", 8 | "source_author": "gadm", 9 | "source_license": "https://gadm.org/license.html", 10 | "source_url": "https://gadm.org/download_world.html", 
11 | "source_version": 1.0, 12 | "crs": 4326, 13 | "datetime": "0001-01-01T00:00:00Z" 14 | }, 15 | "geometry": null, 16 | "links": [ 17 | { 18 | "rel": "root", 19 | "href": "../../catalog.json", 20 | "type": "application/json" 21 | }, 22 | { 23 | "rel": "parent", 24 | "href": "../catalog.json", 25 | "type": "application/json" 26 | } 27 | ], 28 | "assets": { 29 | "gadm_level1.gpkg": { 30 | "href": "/home/sam/.hydromt_data/artifact_data/v0.0.8/gadm_level1.gpkg", 31 | "type": "application/geopackage+sqlite3" 32 | } 33 | }, 34 | "bbox": [ 35 | 6.63087893, 36 | 35.49291611, 37 | 18.52069473, 38 | 49.01704407 39 | ], 40 | "stac_extensions": [] 41 | } 42 | -------------------------------------------------------------------------------- /hydromt/typing/crs.py: -------------------------------------------------------------------------------- 1 | """Coordinate Reference System type based on pyproj.CRS with pydantic validation and serialization.""" 2 | 3 | from typing import Any, List, Optional, Union 4 | 5 | from pydantic import PlainSerializer, PlainValidator 6 | from pyproj import CRS as ProjCRS 7 | from pyproj.exceptions import ProjError 8 | from typing_extensions import Annotated 9 | 10 | 11 | def _serialize_crs(crs: ProjCRS) -> Union[int, List[str], str]: 12 | # Try epsg authority first 13 | epsg: Optional[int] = crs.to_epsg() 14 | if not epsg: 15 | # Then try any authority 16 | auth: Optional[List[str, str]] = list( 17 | crs.to_authority() 18 | ) # cast to list to be serializable to yaml 19 | if not auth: 20 | # reserve wkt for last 21 | return crs.to_wkt() 22 | return auth 23 | return epsg 24 | 25 | 26 | def _validate_crs(crs: Any) -> ProjCRS: 27 | try: 28 | return ProjCRS.from_user_input(crs) 29 | except ProjError: 30 | return ProjCRS.from_authority("ESRI", crs) # fallback on ESRI 31 | 32 | 33 | CRS = Annotated[ProjCRS, PlainValidator(_validate_crs), PlainSerializer(_serialize_crs)] 34 | -------------------------------------------------------------------------------- 
/.github/ISSUE_TEMPLATE/feature_request.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature Request 3 | description: Suggest an idea/enhancement for HydroMT 4 | labels: [Enhancement, Needs refinement] 5 | 6 | body: 7 | - type: dropdown 8 | id: checks 9 | attributes: 10 | description: What kind of feature request is this? 11 | label: Kind of request 12 | options: 13 | - Adding new functionality 14 | - Changing existing functionality 15 | - Removing existing functionality 16 | - type: textarea 17 | id: description 18 | attributes: 19 | description: > 20 | Please provide a clear and concise description of the feature you're requesting 21 | label: Enhancement Description 22 | validations: 23 | required: true 24 | - type: textarea 25 | id: use-case 26 | attributes: 27 | description: > 28 | Please describe a situation in which this feature would be useful to you, with code or cli examples if possible 29 | label: Use case 30 | - type: textarea 31 | id: context 32 | attributes: 33 | description: > 34 | Please add any other context about the enhancement here 35 | label: Additional Context 36 | -------------------------------------------------------------------------------- /tests/data/stac/chirps_global/chirps_global/chirps_global.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "Feature", 3 | "stac_version": "1.0.0", 4 | "id": "chirps_global", 5 | "properties": { 6 | "category": "meteo", 7 | "paper_doi": "10.3133/ds832", 8 | "paper_ref": "Funk et al (2014)", 9 | "source_license": "CC", 10 | "source_url": "https://www.chc.ucsb.edu/data/chirps", 11 | "source_version": "v2.0", 12 | "crs": 4326, 13 | "start_datetime": "2010-02-02T00:00:00Z", 14 | "end_datetime": "2010-02-15T00:00:00Z", 15 | "datetime": null 16 | }, 17 | "geometry": null, 18 | "links": [ 19 | { 20 | "rel": "root", 21 | "href": "../../catalog.json", 22 | "type": "application/json" 23 | }, 24 | { 
25 | "rel": "parent", 26 | "href": "../catalog.json", 27 | "type": "application/json" 28 | } 29 | ], 30 | "assets": { 31 | "chirps_global.nc": { 32 | "href": "/home/sam/.hydromt_data/artifact_data/v0.0.8/chirps_global.nc", 33 | "type": "application/x-hdf5" 34 | } 35 | }, 36 | "bbox": [ 37 | 11.599998474121094, 38 | 45.20000076293945, 39 | 13.000083923339844, 40 | 46.79985427856445 41 | ], 42 | "stac_extensions": [] 43 | } 44 | -------------------------------------------------------------------------------- /docs/api/data_adapter.rst: -------------------------------------------------------------------------------- 1 | .. currentmodule:: hydromt.data_catalog.adapters 2 | 3 | .. _data_adapter_api: 4 | 5 | ============= 6 | Data Adapters 7 | ============= 8 | 9 | The Hydromt data adapter module provides an interface layer to harmonize 10 | different data formats and ensure consistency in transformations, reformatting, and metadata handling. 11 | 12 | Base Classes 13 | ------------ 14 | 15 | .. autosummary:: 16 | :toctree: ../_generated 17 | :nosignatures: 18 | 19 | DataAdapterBase 20 | 21 | Raster Data Adapters 22 | -------------------- 23 | 24 | .. autosummary:: 25 | :toctree: ../_generated 26 | :nosignatures: 27 | 28 | RasterDatasetAdapter 29 | 30 | Vector & Geospatial Adapters 31 | ---------------------------- 32 | 33 | .. autosummary:: 34 | :toctree: ../_generated 35 | :nosignatures: 36 | 37 | GeoDatasetAdapter 38 | GeoDataFrameAdapter 39 | 40 | Tabular Data Adapters 41 | --------------------- 42 | 43 | .. autosummary:: 44 | :toctree: ../_generated 45 | :nosignatures: 46 | 47 | DataFrameAdapter 48 | 49 | General Dataset Adapters 50 | ------------------------ 51 | 52 | .. 
autosummary:: 53 | :toctree: ../_generated 54 | :nosignatures: 55 | 56 | DatasetAdapter 57 | -------------------------------------------------------------------------------- /docs/overview/overview_usage_common.rst: -------------------------------------------------------------------------------- 1 | Common usage 2 | ------------ 3 | 4 | The most common usage of HydroMT is to build a model from scratch and to update and visualize an existing model. 5 | Here, a high-level example of how to build a model using HydroMT is provided. Building a model from scratch with 6 | HydroMT involves the following generic steps: 7 | 8 | 1) Define the input data in a :ref:`yaml data catalog file ` or selects available datasets from a 9 | :ref:`pre-defined data catalog `. 10 | 2) Define the model :ref:`region ` which describes the area of interest. The model region can be based on a 11 | simple bounding box or geometry, but also a (sub)(inter)basin that is delineated on-the-fly based on available 12 | hydrography data. 13 | 3) Configure the model setup in an :ref:`yaml configuration file `. A HydroMT yaml configuration file 14 | represents a reproducible recipe to build a model by listing (in order of execution) the model methods and 15 | their arguments. These methods and their arguments are described in the documentation. 16 | 4) Run the HydroMT :ref:`build method ` from either command line (as shown in the figure) or Python. 17 | -------------------------------------------------------------------------------- /hydromt/_utils/temp_env.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | import os 3 | 4 | 5 | @contextlib.contextmanager 6 | def temp_env(*remove, **update): 7 | """ 8 | Temporarily updates the ``os.environ`` dictionary in-place. 
9 | 10 | from: https://github.com/laurent-laporte-pro/stackoverflow-q2059482 11 | The ``os.environ`` dictionary is updated in-place so that the modification 12 | is sure to work in all situations. 13 | 14 | :param remove: Environment variables to remove. 15 | :param update: Dictionary of environment variables and values to add/update. 16 | """ 17 | env = os.environ 18 | update = update or {} 19 | remove = remove or [] 20 | 21 | # List of environment variables being updated or removed. 22 | stomped = (set(update.keys()) | set(remove)) & set(env.keys()) 23 | # Environment variables and values to restore on exit. 24 | update_after = {k: env[k] for k in stomped} 25 | # Environment variables and values to remove on exit. 26 | remove_after = frozenset(k for k in update if k not in env) 27 | 28 | try: 29 | env.update(update) 30 | [env.pop(k, None) for k in remove] 31 | yield 32 | finally: 33 | env.update(update_after) 34 | [env.pop(k) for k in remove_after] 35 | -------------------------------------------------------------------------------- /hydromt/__init__.py: -------------------------------------------------------------------------------- 1 | """HydroMT: Automated and reproducible model building and analysis.""" 2 | 3 | # version number without 'v' at start 4 | __version__ = "1.3.0.dev0" 5 | 6 | # This is only here to suppress the bug described in 7 | # https://github.com/pydata/xarray/issues/7259 8 | # We have to make sure that netcdf4 is imported before 9 | # numpy is imported for the first time, e.g. 
also via 10 | # importing xarray 11 | 12 | import warnings # noqa: F401 13 | 14 | import netCDF4 # noqa: F401 15 | 16 | # submodules 17 | from hydromt import data_catalog, gis, model, stats 18 | 19 | # high-level methods 20 | from hydromt._utils.log import initialize_logging 21 | from hydromt.data_catalog import DataCatalog 22 | from hydromt.gis import raster, vector 23 | from hydromt.model import Model 24 | from hydromt.model.steps import hydromt_step 25 | from hydromt.plugins import PLUGINS 26 | 27 | initialize_logging() 28 | 29 | __all__ = [ 30 | # high-level classes 31 | "DataCatalog", 32 | "Model", 33 | # submodules 34 | "data_catalog", 35 | "gis", 36 | "model", 37 | "stats", 38 | # raster and vector accessor 39 | "raster", 40 | "vector", 41 | # high-level functions 42 | "hydromt_step", 43 | # plugins 44 | "PLUGINS", 45 | ] 46 | -------------------------------------------------------------------------------- /docs/dev/plugin_dev/index.rst: -------------------------------------------------------------------------------- 1 | .. _intro_plugin_guide: 2 | 3 | Plugin Developer Guide 4 | ====================== 5 | 6 | This guide provides step-by-step instructions on building, registering, and testing your own HydroMT plugin — from models and components to data catalogs and resolvers. 7 | 8 | .. grid:: 3 9 | :gutter: 1 10 | 11 | .. grid-item-card:: 12 | :text-align: center 13 | :link: plugin_quickstart 14 | :link-type: ref 15 | 16 | :octicon:`rocket;5em;sd-text-icon blue-icon` 17 | +++ 18 | Starting your own HydroMT Plugin 19 | 20 | .. grid-item-card:: 21 | :text-align: center 22 | :link: register_plugins 23 | :link-type: ref 24 | 25 | :octicon:`plug;5em;sd-text-icon blue-icon` 26 | +++ 27 | Linking your own custom objects to HydroMT core API 28 | 29 | .. grid-item-card:: 30 | :text-align: center 31 | :link: test_your_plugin 32 | :link-type: ref 33 | 34 | :octicon:`check-circle;5em;sd-text-icon blue-icon` 35 | +++ 36 | Testing your plugin 37 | 38 | .. 
toctree:: 39 | :hidden: 40 | 41 | quickstart 42 | Implement your own HydroMT objects 43 | register 44 | testing 45 | migrating_to_v1 46 | -------------------------------------------------------------------------------- /tests/model/test_root.py: -------------------------------------------------------------------------------- 1 | from os.path import abspath 2 | 3 | import pytest 4 | 5 | from hydromt.typing import ModelMode 6 | 7 | # we need to compensate for where the repo is located when 8 | # we run the tests 9 | CURRENT_PATH = abspath(".") 10 | 11 | 12 | def case_name(case): 13 | return case["name"] 14 | 15 | 16 | @pytest.mark.parametrize("mode", ["r", "r+", ModelMode.READ, ModelMode.APPEND]) 17 | def test_assert_reading_modes(mode): 18 | assert ModelMode.from_str_or_mode(mode).is_reading_mode() 19 | 20 | 21 | @pytest.mark.parametrize( 22 | "mode", ["w", "w+", "r+", ModelMode.APPEND, ModelMode.WRITE, ModelMode.FORCED_WRITE] 23 | ) 24 | def test_assert_writing_modes(mode): 25 | assert ModelMode.from_str_or_mode(mode).is_writing_mode() 26 | 27 | 28 | @pytest.mark.parametrize( 29 | "mode", 30 | [ 31 | "a", 32 | "wr", 33 | "rw", 34 | "r++", 35 | "w2", 36 | "\\w", 37 | "ww", 38 | "", 39 | "+w", 40 | "lorum ipsum", 41 | 1, 42 | None, 43 | -8, 44 | 3.14, 45 | "⽀", 46 | "😊", 47 | ], 48 | ) 49 | def test_errors_on_unknown_modes(mode): 50 | with pytest.raises(ValueError, match="Unknown mode"): 51 | _ = ModelMode.from_str_or_mode(mode) 52 | -------------------------------------------------------------------------------- /.github/workflows/pixi_auto_update.yml: -------------------------------------------------------------------------------- 1 | name: Update Pixi lockfile and SBOM 2 | permissions: 3 | contents: write 4 | pull-requests: write 5 | on: 6 | schedule: 7 | # At 03:00 on day 3 of the month 8 | - cron: "0 3 3 * *" 9 | # on demand 10 | workflow_dispatch: 11 | jobs: 12 | pixi-update: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v6 16 | - name: 
Set up pixi 17 | uses: prefix-dev/setup-pixi@v0.9.3 18 | with: 19 | pixi-version: "v0.59.0" 20 | run-install: false 21 | - name: Update lockfiles 22 | run: | 23 | set -o pipefail 24 | pixi update --json | pixi exec pixi-diff-to-markdown >> diff.md 25 | - name: Generate SBOM 26 | run: | 27 | set -o pipefail 28 | pixi run generate-sbom 29 | 30 | - name: Create pull request 31 | uses: peter-evans/create-pull-request@v8 32 | with: 33 | token: ${{ secrets.GITHUB_TOKEN }} 34 | commit-message: Update pixi lockfile 35 | title: Update pixi lockfile 36 | body-path: diff.md 37 | branch: update/pixi-lock 38 | base: main 39 | delete-branch: true 40 | add-paths: | 41 | pixi.lock 42 | hydromt-core-sbom.json 43 | -------------------------------------------------------------------------------- /hydromt/_utils/steps_validator.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | from typing import TYPE_CHECKING, Any 3 | 4 | if TYPE_CHECKING: 5 | from hydromt.model.model import Model 6 | 7 | from hydromt._utils.rgetattr import _rgetattr 8 | 9 | __all__ = ["_validate_steps"] 10 | 11 | 12 | def _validate_steps(model: "Model", steps: list[dict[str, dict[str, Any]]]) -> None: 13 | for i, step_dict in enumerate(steps): 14 | step, options = next(iter(step_dict.items())) 15 | attr = _rgetattr(model, step, None) 16 | if attr is None: 17 | raise AttributeError(f"Method {step} not found in model.") 18 | if not hasattr(attr, "__ishydromtstep__"): 19 | raise AttributeError( 20 | f"Method {step} is not allowed to be called on model, since it is not a HydroMT step definition." 21 | " Add @hydromt_step if that is your intention." 22 | ) 23 | 24 | # attribute found, validate keyword arguments 25 | # Throws if bind fails. 
26 | sig = inspect.signature(attr) 27 | options = options or {} 28 | try: 29 | _ = sig.bind(**options) 30 | except TypeError as e: 31 | raise TypeError( 32 | f"Validation of step {i + 1} ({step}) failed because of the following error: {e}" 33 | ) 34 | -------------------------------------------------------------------------------- /docs/api/model.rst: -------------------------------------------------------------------------------- 1 | .. currentmodule:: hydromt.model 2 | 3 | .. _model_api: 4 | 5 | ===== 6 | Model 7 | ===== 8 | 9 | 10 | Model class 11 | =========== 12 | 13 | 14 | 15 | High-level methods 16 | ------------------ 17 | 18 | .. autosummary:: 19 | :toctree: ../_generated 20 | 21 | Model 22 | Model.read 23 | Model.write 24 | Model.write_data_catalog 25 | 26 | General methods 27 | --------------- 28 | 29 | .. autosummary:: 30 | :toctree: ../_generated 31 | 32 | Model.build 33 | Model.update 34 | Model.get_component 35 | Model.add_component 36 | Model.test_equal 37 | Model.__enter__ 38 | Model.__exit__ 39 | 40 | Model attributes 41 | ---------------- 42 | 43 | .. autosummary:: 44 | :toctree: ../_generated 45 | 46 | Model.data_catalog 47 | Model.crs 48 | Model.root 49 | Model.region 50 | Model.components 51 | 52 | ModelRoot 53 | ========= 54 | 55 | .. autosummary:: 56 | :toctree: ../_generated 57 | 58 | ModelRoot 59 | 60 | Attributes 61 | ---------- 62 | 63 | .. autosummary:: 64 | :toctree: ../_generated 65 | 66 | ModelRoot.mode 67 | ModelRoot.is_writing_mode 68 | ModelRoot.is_reading_mode 69 | ModelRoot.is_override_mode 70 | 71 | General Methods 72 | --------------- 73 | 74 | .. 
autosummary:: 75 | :toctree: ../_generated 76 | 77 | ModelRoot.set 78 | -------------------------------------------------------------------------------- /hydromt/typing/__init__.py: -------------------------------------------------------------------------------- 1 | """A module for all of the type definitions used in HydroMT.""" 2 | 3 | from hydromt.typing.crs import CRS 4 | from hydromt.typing.fsspec_types import FSSpecFileSystem 5 | from hydromt.typing.metadata import SourceMetadata 6 | from hydromt.typing.model_mode import ModelMode 7 | from hydromt.typing.type_def import ( 8 | Bbox, 9 | Crs, 10 | Data, 11 | DataType, 12 | ExportConfigDict, 13 | GeoDataframeSource, 14 | GeoDatasetSource, 15 | Geom, 16 | GeomBuffer, 17 | GpdShapeGeom, 18 | ModeLike, 19 | Number, 20 | Predicate, 21 | RasterDatasetSource, 22 | SourceSpecDict, 23 | StrPath, 24 | TimeRange, 25 | TotalBounds, 26 | Variables, 27 | XArrayDict, 28 | Zoom, 29 | ) 30 | 31 | __all__ = [ 32 | "Bbox", 33 | "Crs", 34 | "CRS", 35 | "StrPath", 36 | "ExportConfigDict", 37 | "FSSpecFileSystem", 38 | "GeoDataframeSource", 39 | "GeoDatasetSource", 40 | "ModeLike", 41 | "Number", 42 | "RasterDatasetSource", 43 | "SourceSpecDict", 44 | "TimeRange", 45 | "TotalBounds", 46 | "XArrayDict", 47 | "ModelMode", 48 | "Variables", 49 | "Geom", 50 | "GpdShapeGeom", 51 | "Data", 52 | "DataType", 53 | "GeomBuffer", 54 | "Predicate", 55 | "Zoom", 56 | "SourceMetadata", 57 | ] 58 | -------------------------------------------------------------------------------- /tests/data/stac/gtsmv3_eu_era5/gtsmv3_eu_era5/gtsmv3_eu_era5.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "Feature", 3 | "stac_version": "1.0.0", 4 | "id": "gtsmv3_eu_era5", 5 | "properties": { 6 | "category": "ocean", 7 | "paper_doi": "10.24381/cds.8c59054f", 8 | "paper_ref": "Copernicus Climate Change Service 2019", 9 | "source_license": 
"https://cds.climate.copernicus.eu/cdsapp/#!/terms/licence-to-use-copernicus-products", 10 | "source_url": "https://cds.climate.copernicus.eu/cdsapp#!/dataset/10.24381/cds.8c59054f?tab=overview", 11 | "source_version": "GTSM v3.0", 12 | "crs": 4326, 13 | "start_datetime": "2010-02-01T00:00:00Z", 14 | "end_datetime": "2010-02-14T23:50:00Z", 15 | "datetime": null 16 | }, 17 | "geometry": null, 18 | "links": [ 19 | { 20 | "rel": "root", 21 | "href": "../../catalog.json", 22 | "type": "application/json" 23 | }, 24 | { 25 | "rel": "parent", 26 | "href": "../catalog.json", 27 | "type": "application/json" 28 | } 29 | ], 30 | "assets": { 31 | "gtsmv3_eu_era5.nc": { 32 | "href": "/home/sam/.hydromt_data/artifact_data/v0.0.8/gtsmv3_eu_era5.nc", 33 | "type": "application/x-hdf5" 34 | } 35 | }, 36 | "bbox": [ 37 | 12.22412, 38 | 45.22705, 39 | 12.99316, 40 | 45.62256 41 | ], 42 | "stac_extensions": [] 43 | } 44 | -------------------------------------------------------------------------------- /tests/data_catalog/adapters/test_dataframe_adapter.py: -------------------------------------------------------------------------------- 1 | from copy import copy 2 | 3 | import pandas as pd 4 | 5 | from hydromt.data_catalog.adapters.dataframe import DataFrameAdapter 6 | from hydromt.typing import SourceMetadata 7 | 8 | 9 | class TestDataFrameAdapter: 10 | def test_transform_no_filters_no_meta(self, df: pd.DataFrame): 11 | adapter = DataFrameAdapter() 12 | metadata = SourceMetadata() 13 | res = adapter.transform(df, metadata) 14 | pd.testing.assert_frame_equal(res, df) 15 | 16 | def test_transform_variables(self, df: pd.DataFrame): 17 | adapter = DataFrameAdapter(unit_add={"latitude": 1, "longitude": -1}) 18 | metadata = SourceMetadata() 19 | df_copy = copy(df) 20 | res = adapter.transform(df, metadata) 21 | pd.testing.assert_series_equal(res["longitude"], df_copy["longitude"] - 1) 22 | pd.testing.assert_series_equal(res["latitude"], df_copy["latitude"] + 1) 23 | 24 | def 
test_transform_meta(self, df: pd.DataFrame): 25 | adapter = DataFrameAdapter() 26 | metadata = SourceMetadata( 27 | attrs={"longitude": {"attr1": 1}}, url="www.example.com" 28 | ) 29 | res = adapter.transform(df, metadata) 30 | assert res.attrs["longitude"]["attr1"] == 1 31 | assert res.attrs["url"] == "www.example.com" 32 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.testing.pytestArgs": [ 3 | "tests" 4 | ], 5 | "python.testing.unittestEnabled": false, 6 | "python.testing.pytestEnabled": true, 7 | "[python]": { 8 | "editor.defaultFormatter": "charliermarsh.ruff", 9 | "editor.codeActionsOnSave": { 10 | "source.fixAll": "explicit" 11 | } 12 | }, 13 | "editor.formatOnSave": true, 14 | "files.exclude": { 15 | ".mypy_cache": true, 16 | ".pixi": true, 17 | ".pytest_cache": true, 18 | ".ruff_cache": true, 19 | "**/__pycache__": true 20 | }, 21 | "mypy-type-checker.importStrategy": "fromEnvironment", 22 | "files.insertFinalNewline": true, 23 | "cSpell.words": [ 24 | "dtype", 25 | "GDAL", 26 | "geodataframe", 27 | "geoms", 28 | "geopandas", 29 | "hydromt", 30 | "interbasin", 31 | "ndarray", 32 | "pixi", 33 | "pydantic", 34 | "pygeos", 35 | "pyproj", 36 | "SFINCS", 37 | "subbasin", 38 | "xarray", 39 | "xugrid", 40 | "zarr" 41 | ], 42 | "autoDocstring.docstringFormat": "numpy", 43 | "sonarlint.connectedMode.project": { 44 | "connectionId": "deltares", 45 | "projectKey": "Deltares_hydromt" 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /.github/workflows/sonar.yml: -------------------------------------------------------------------------------- 1 | name: SonarQube Scan and coverage 2 | 3 | on: 4 | workflow_dispatch: 5 | push: 6 | branches: 7 | - main 8 | - v1 9 | paths: 10 | - tests/**/* 11 | - hydromt/**/* 12 | - data/**/* 13 | - pyproject.toml 14 | - pixi.lock 15 | 
- .github/workflows/sonar.yml 16 | - sonar-project.properties 17 | pull_request: 18 | branches: 19 | - main 20 | - v1 21 | paths: 22 | - tests/**/* 23 | - hydromt/**/* 24 | - data/**/* 25 | - pyproject.toml 26 | - pixi.lock 27 | - .github/workflows/sonar.yml 28 | - sonar-project.properties 29 | workflow_run: 30 | workflows: [Pixi auto update] 31 | types: 32 | - completed 33 | 34 | 35 | jobs: 36 | scan: 37 | defaults: 38 | run: 39 | shell: bash -e -l {0} 40 | 41 | name: run test coverage and SonarQube scan 42 | runs-on: ubuntu-latest 43 | steps: 44 | - uses: actions/checkout@v6 45 | with: 46 | fetch-depth: 0 47 | - uses: prefix-dev/setup-pixi@v0.9.3 48 | with: 49 | pixi-version: "v0.59.0" 50 | - name: Test 51 | run: pixi run --locked test-cov 52 | - name: SonarQube Scan 53 | uses: SonarSource/sonarqube-scan-action@v7 54 | env: 55 | SONAR_TOKEN: ${{ secrets.SONARCLOUD_TOKEN_2025 }} 56 | -------------------------------------------------------------------------------- /tests/_validators/test_region_validator.py: -------------------------------------------------------------------------------- 1 | """Testing for the validation of region specifications.""" 2 | 3 | from pathlib import Path 4 | 5 | import pytest 6 | from pydantic_core import ValidationError 7 | 8 | from hydromt._validators.region import ( 9 | BoundingBoxRegion, 10 | PathRegion, 11 | validate_region, 12 | ) 13 | 14 | 15 | def test_bbox_point_validator(): 16 | b = {"bbox": [-1.0, -1.0, 1.0, 1.0]} 17 | 18 | region = validate_region(b) 19 | assert region == BoundingBoxRegion(xmin=-1.0, ymin=-1.0, xmax=1.0, ymax=1.0) 20 | 21 | 22 | def test_invalid_bbox_point_validator(): 23 | b = {"bbox": [1.0, 1.0, -1.0, -1.0]} 24 | 25 | with pytest.raises(ValidationError): 26 | _ = validate_region(b) 27 | 28 | 29 | def test_unknown_region_type_validator(): 30 | b = {"asdfasdf": [1.0, 1.0, -1.0, -1.0]} 31 | 32 | with pytest.raises(NotImplementedError, match="Unknown region kind"): 33 | _ = validate_region(b) 34 | 35 | 36 | def 
test_geom_validator(): 37 | b = {"geom": "tests/data/world.gpkg"} 38 | 39 | region = validate_region(b) 40 | assert region == PathRegion(path=Path("tests/data/world.gpkg")) 41 | 42 | 43 | def test_geom_non_existant_path_validator(): 44 | b = {"geom": "tests/data/masdfasdfasdf.geojson"} 45 | 46 | with pytest.raises(ValueError, match="1 validation error"): 47 | _ = validate_region(b) 48 | -------------------------------------------------------------------------------- /hydromt/_utils/dictionaries.py: -------------------------------------------------------------------------------- 1 | def _partition_dictionaries(left, right): 2 | """Calculate a partitioning of the two dictionaries. 3 | 4 | given dictionaries A and B this function will the following partition: 5 | (A ∩ B, A - B, B - A) 6 | """ 7 | common = {} 8 | left_less_right = {} 9 | right_less_left = {} 10 | key_union = set(left.keys()) | set(right.keys()) 11 | 12 | for key in key_union: 13 | value_left = left.get(key, None) 14 | value_right = right.get(key, None) 15 | if isinstance(value_left, dict) and isinstance(value_right, dict): 16 | ( 17 | common_children, 18 | unique_left_children, 19 | unique_right_children, 20 | ) = _partition_dictionaries(value_left, value_right) 21 | common[key] = common_children 22 | if unique_left_children != unique_right_children: 23 | left_less_right[key] = unique_left_children 24 | right_less_left[key] = unique_right_children 25 | elif value_left == value_right: 26 | common[key] = value_left 27 | else: 28 | if value_left is not None: 29 | left_less_right[key] = value_left 30 | if value_right is not None: 31 | right_less_left[key] = value_right 32 | 33 | return common, left_less_right, right_less_left 34 | -------------------------------------------------------------------------------- /docs/api/data_catalog.rst: -------------------------------------------------------------------------------- 1 | .. currentmodule:: hydromt.data_catalog 2 | 3 | .. 
_data_catalog_api: 4 | 5 | Data catalog 6 | ============ 7 | 8 | General 9 | ------- 10 | 11 | .. autosummary:: 12 | :toctree: ../_generated 13 | 14 | DataCatalog 15 | DataCatalog.get_source 16 | DataCatalog.sources 17 | DataCatalog.predefined_catalogs 18 | DataCatalog.to_dict 19 | DataCatalog.to_yml 20 | DataCatalog.to_stac_catalog 21 | DataCatalog.export_data 22 | DataCatalog.get_source_bbox 23 | DataCatalog.get_source_time_range 24 | 25 | Add data sources 26 | ---------------- 27 | 28 | .. autosummary:: 29 | :toctree: ../_generated 30 | 31 | DataCatalog.add_source 32 | DataCatalog.update 33 | DataCatalog.from_predefined_catalogs 34 | DataCatalog.from_yml 35 | DataCatalog.from_dict 36 | DataCatalog.from_stac_catalog 37 | 38 | 39 | Get data 40 | -------- 41 | 42 | .. autosummary:: 43 | :toctree: ../_generated 44 | 45 | DataCatalog.get_rasterdataset 46 | DataCatalog.get_geodataset 47 | DataCatalog.get_geodataframe 48 | DataCatalog.get_dataframe 49 | DataCatalog.get_dataset 50 | 51 | 52 | Predefined data catalog 53 | ======================= 54 | 55 | .. 
def test_skills(obsda):
    """Verify scalar skill metrics for a simulation with a constant +5 bias."""
    simda = obsda + 5.0

    # Pairs of (computed metric DataArray, expected scalar value).
    expectations = [
        (skills.bias(simda, obsda), 5.0),
        (skills.percentual_bias(simda, obsda), 10.3299),
        (skills.volumetric_error(simda, obsda), 0.8967),
        (skills.nashsutcliffe(simda, obsda), 0.97015),
        (skills.lognashsutcliffe(simda, obsda), 0.8517),
        (skills.pearson_correlation(simda, obsda), 1.0),
        (skills.spearman_rank_correlation(simda, obsda), 1.0),
        (skills.kge(simda, obsda)["kge"], 0.8967),
        (skills.kge_2012(simda, obsda)["kge_2012"], 0.86058),
        (skills.kge_non_parametric(simda, obsda)["kge_np"], 0.89390),
        (skills.kge_non_parametric_flood(simda, obsda)["kge_np_flood"], 0.8939),
        (skills.rsquared(simda, obsda), 1.0),
        (skills.mse(simda, obsda), 25.0),
        (skills.rmse(simda, obsda), 5.0),
        (skills.rsr(simda, obsda), 0.1727659),
    ]

    for metric, expected in expectations:
        assert np.isclose(metric.values, expected)
3 | from hydromt.model.components.base import ModelComponent 4 | from hydromt.model.components.config import ConfigComponent 5 | from hydromt.model.components.datasets import DatasetsComponent 6 | from hydromt.model.components.geoms import GeomsComponent 7 | from hydromt.model.components.grid import GridComponent 8 | from hydromt.model.components.mesh import MeshComponent 9 | from hydromt.model.components.spatial import SpatialModelComponent 10 | from hydromt.model.components.spatialdatasets import SpatialDatasetsComponent 11 | from hydromt.model.components.tables import TablesComponent 12 | from hydromt.model.components.vector import VectorComponent 13 | 14 | __all__ = [ 15 | "ConfigComponent", 16 | "DatasetsComponent", 17 | "GeomsComponent", 18 | "GridComponent", 19 | "MeshComponent", 20 | "ModelComponent", 21 | "SpatialDatasetsComponent", 22 | "SpatialModelComponent", 23 | "TablesComponent", 24 | "VectorComponent", 25 | ] 26 | 27 | # define hydromt component entry points; abstract classes are not included 28 | # see also hydromt.component group in pyproject.toml 29 | __hydromt_eps__ = [ 30 | "ConfigComponent", 31 | "DatasetsComponent", 32 | "GeomsComponent", 33 | "GridComponent", 34 | "MeshComponent", 35 | "SpatialDatasetsComponent", 36 | "TablesComponent", 37 | "VectorComponent", 38 | ] 39 | -------------------------------------------------------------------------------- /examples/grid_model_build.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | setup_config: 3 | header.settings: value 4 | timers.end: '2010-02-15' 5 | timers.start: '2010-02-05' 6 | 7 | setup_grid: 8 | res: 0.05 9 | crs: 4326 10 | basin_index_path: merit_hydro_index 11 | hydrography_path: merit_hydro 12 | 13 | setup_grid_from_constant: 14 | constant: 0.01 15 | name: c1 16 | dtype: float32 17 | nodata: -99.0 18 | 19 | setup_grid_from_rasterdataset: 20 | raster_data: merit_hydro_1k 21 | variables: 22 | - elevtn 23 | - basins 24 | reproject_method: 
class ModelMode(Enum):
    """Enumeration of the I/O modes a HydroMT Model can be opened in."""

    READ = "r"
    WRITE = "w"
    FORCED_WRITE = "w+"
    APPEND = "r+"

    @staticmethod
    def from_str_or_mode(s: Union["ModelMode", str]) -> "ModelMode":
        """Construct a model mode from either a string or return provided if it's already a mode."""
        if isinstance(s, ModelMode):
            return s

        # Each mode's enum value is exactly its string spelling, so a
        # value -> member lookup table replaces the if/elif chain.
        lookup = {mode.value: mode for mode in ModelMode}
        try:
            return lookup[s]
        except KeyError:
            raise ValueError(f"Unknown mode: {s}, options are: r, r+, w, w+") from None

    def is_writing_mode(self):
        """Assert whether mode is writing or not."""
        return self in (ModelMode.WRITE, ModelMode.FORCED_WRITE, ModelMode.APPEND)

    def is_reading_mode(self):
        """Assert whether mode is reading or not."""
        return self in (ModelMode.READ, ModelMode.APPEND)

    def is_override_mode(self):
        """Assert whether mode is able to overwrite or not."""
        return self in (ModelMode.FORCED_WRITE, ModelMode.APPEND)
def test_example_model(tmp_path_factory):
    """End-to-end smoke test: config update, grid creation and raster ingestion."""
    model = ExampleModel(
        root=str(tmp_path_factory.mktemp("example_model")),
        data_libs=["artifact_data"],
    )

    assert "config" in model.components
    assert "grid" in model.components

    # Update config values and check they are stored.
    model.config.update(data={"parameter1": 10, "parameter2": "value2"})
    assert model.config.data["parameter1"] == 10
    assert model.config.data["parameter2"] == "value2"

    # Create grid from a bounding-box region.
    bbox = [12.05, 45.30, 12.85, 45.65]
    model.grid.create_from_region(
        region={"bbox": bbox},
        res=0.05,
        add_mask=False,
        align=True,
    )
    assert model.grid.data.raster.res[0] == 0.05
    assert np.all(np.round(model.grid.data.raster.bounds, 2) == bbox)
    assert model.grid.data.sizes["y"] == 7

    # Ingest elevation and basin variables from a raster dataset.
    model.grid.add_data_from_rasterdataset(
        raster_data="merit_hydro",
        variables=["elevtn", "basins"],
        reproject_method=["average", "mode"],
        mask_name="mask",
    )
    assert "basins" in model.grid.data
    assert np.isclose(
        model.grid.data["elevtn"].raster.mask_nodata().mean().values, 3.9021976
    )
code-block:: python 19 | 20 | from hydromt_wflow import WflowSbmModel 21 | from hydromt.readers import read_workflow_yaml 22 | 23 | # Instantiate model 24 | model = WflowSbmModel( 25 | root="./path/to/wflow_model", 26 | data_catalog=["./path/to/data_catalog.yml"], 27 | ) 28 | # Read build options from yaml 29 | _, _, build_options = read_workflow_yaml( 30 | "./path/to/build_options.yaml" 31 | ) 32 | # Build model 33 | model.build(steps=build_options) 34 | 35 | 36 | If you use the :ref:`command line interface `, only a few high-level commands 37 | are available to build and update models or export data from the data catalog. If you use 38 | the :ref:`Python API `, you can also access the underlying methods of HydroMT 39 | to read data from a catalog, perform GIS operations or write your own plugin. 40 | -------------------------------------------------------------------------------- /.github/workflows/test-docker.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Test Docker images and Binder integration 3 | 4 | on: 5 | push: 6 | branches: [main] 7 | paths: 8 | - .github/workflows/test-docker.yml 9 | - .binder/** 10 | - pyproject.toml 11 | pull_request: 12 | branches: [main] 13 | paths: 14 | - .github/workflows/test-docker.yml 15 | - .binder/** 16 | - pyproject.toml 17 | 18 | jobs: 19 | test-binder: 20 | runs-on: ubuntu-latest 21 | concurrency: 22 | group: test-binder-${{ github.ref }} 23 | cancel-in-progress: true 24 | timeout-minutes: 30 25 | steps: 26 | - name: Checkout 27 | uses: actions/checkout@v6 28 | - name: Build the image 29 | uses: docker/build-push-action@v6 30 | with: 31 | push: false 32 | tags: hydromt-binder 33 | file: .binder/Dockerfile 34 | - name: 35 | id: test-image 36 | run: | 37 | # actually fail when something goes wrong 38 | set -e 39 | 40 | # run the docker image detached, it will start the jupyter server for us 41 | docker run -d -p 8888:8888 hydromt-binder 42 | 43 | # give it a second or 
two to start up 44 | sleep 10 45 | 46 | # check if we can reach the server 47 | curl -f "http://localhost:8888/api/" 48 | 49 | # only show logs if previous step failed 50 | - name: show logs 51 | if: failure() 52 | run: | 53 | echo "# Containers:" 54 | docker container ls 55 | 56 | echo "# Logs:" 57 | docker logs "$(docker container ls -q)" 58 | -------------------------------------------------------------------------------- /tests/stats/test_stats_design_events.py: -------------------------------------------------------------------------------- 1 | """Tests for the stats/design_events submodule.""" 2 | 3 | import numpy as np 4 | 5 | from hydromt.stats import design_events, extremes 6 | 7 | 8 | def test_get_peak_hydrographs(ts_extremes): 9 | ts_extremes = ts_extremes.isel(time=slice(365 * 10)) # smaller sample 10 | da_peaks = extremes.get_peaks( 11 | ts_extremes, 12 | ev_type="BM", 13 | period="182.625D", # this returns 20 peaks 14 | ) # default: ev_type='BM', period='year' 15 | da = design_events.get_peak_hydrographs( 16 | ts_extremes, da_peaks, wdw_size=7, n_peaks=20, normalize=False 17 | ).load() 18 | assert da.time.shape[0] == 7 19 | assert (da.argmax("time") == 3).all() # peak at time=3 20 | 21 | # Testing if maximum values are the same as the top 20 22 | peaks_1 = da_peaks.sel(stations=1).dropna(dim="time") 23 | max_station1 = da.sel(stations=1, peak=0, time=0) 24 | assert max_station1 == np.max(peaks_1) 25 | del da 26 | 27 | # Testing if normalize values are set to 1 28 | da = design_events.get_peak_hydrographs( 29 | ts_extremes, da_peaks, wdw_size=7, normalize=True 30 | ).load() 31 | damax_1 = da.sel(stations=1, time=0) 32 | assert (damax_1 == 1).all() 33 | 34 | # test when number of peaks varies between stations 35 | # set last 4 peaks of station 2 (index 1) to nan 36 | da_peaks[dict(stations=1, time=slice(365 * 8, -1))] = np.nan 37 | da = design_events.get_peak_hydrographs(ts_extremes, da_peaks, wdw_size=7).load() 38 | assert da.isel(time=3, 
class Format(Enum):
    """Version of the HydroMT data-catalog / workflow format (v0 or v1)."""

    v0 = 0
    v1 = 1

    # just a convenience constructor with some nicer error messages
    @classmethod
    def from_str(cls, s: str) -> "Format":
        """Parse a format name such as ``"v0"``; case- and whitespace-insensitive.

        Raises
        ------
        ValueError
            If ``s`` does not name a known format.
        """
        try:
            return cls[s.strip().lower()]
        except KeyError as e:
            # Chain explicitly so the original KeyError is preserved as cause.
            raise ValueError(
                f"{e} is not a known valid Format, options are {list(cls.__members__.keys())}"
            ) from e
_publications: 2 | 3 | Publications 4 | ============ 5 | 6 | How to cite? 7 | ------------ 8 | For publications, please cite our JOSS paper |joss_paper| 9 | 10 | Eilander et al., (2023). HydroMT: Automated and reproducible model building and analysis. Journal of Open Source Software, 8(83), 4897, https://doi.org/10.21105/joss.04897 11 | 12 | To cite a specific software version please use the DOI provided in the Zenodo badge |doi| that points to the latest release. 13 | 14 | 15 | Relevant publications using HydroMT 16 | ----------------------------------- 17 | 18 | Eilander et al., (2023). HydroMT: Automated and reproducible model building and analysis. Journal of Open Source Software, 8(83), 4897, https://doi.org/10.21105/joss.04897 19 | 20 | Eilander, D., Couasnon, A., Leijnse, T., Ikeuchi, H., Yamazaki, D., Muis, S., Dullaart, J., Winsemius, H. C., & Ward, P. J. (2023). A globally applicable framework for compound flood hazard modeling. Natural Hazards and Earth System Sciences, 23(2), 823–846. doi:10.5194/nhess-23-823-2023 21 | 22 | Sperna Weiland, F. C., Visser, R. D., Greve, P., Bisselink, B., Brunner, L., & Weerts, A. H. (2021). Estimating Regionalized Hydrological Impacts of Climate Change Over Europe by Performance-Based Weighting of CORDEX Projections. Frontiers in Water, 3. https://www.frontiersin.org/article/10.3389/frwa.2021.713537 23 | 24 | 25 | 26 | .. |doi| image:: https://zenodo.org/badge/348020332.svg 27 | :alt: Zenodo 28 | :target: https://zenodo.org/badge/latestdoi/348020332 29 | 30 | .. 
def create_source(data: Union[Dict[str, Any], DataSource]) -> DataSource:
    """Create a DataSource.

    Create a datasource from a dictionary, or another DataSource.

    Parameters
    ----------
    data : dict or DataSource
        Either an already-constructed DataSource (returned as-is) or a dict
        with at least a ``"data_type"`` key selecting the source class from
        ``available_sources``.

    Raises
    ------
    ValueError
        If ``data`` is neither a DataSource nor a dict, if the dict lacks
        ``"data_type"``, or if the ``"data_type"`` value is unknown.
    """
    if isinstance(data, DataSource):
        # Bug fix: the original checked `isabstract(DataSource)` — the base
        # class, a constant — instead of the class of the provided instance.
        if isabstract(type(data)):
            raise ValueError(f"{type(data).__name__} is an Abstract Class")
        # Already is a concrete subclass of DataSource
        return data

    elif isinstance(data, dict):
        if data_type := data.get("data_type", None):
            if target_source := available_sources.get(data_type):
                return target_source.model_validate(data)

            raise ValueError(f"Unknown 'data_type': '{data_type}'")
        else:
            raise ValueError("DataSource needs 'data_type'.")
    else:
        raise ValueError(f"Invalid argument for creating DataSource: {data}")
class TestDataFrameSource:
    """Tests for DataFrameSource with mocked driver, resolver and adapter."""

    @pytest.fixture
    def MockDataFrameSource(
        self,
        MockDataFrameDriver: Type[DataFrameDriver],
        mock_resolver: URIResolver,
        mock_df_adapter: DataFrameAdapter,
        managed_tmp_path: Path,
    ) -> DataFrameSource:
        # Bug fix: `managed_tmp_path.touch("test.xls")` passed the filename as
        # Path.touch()'s `mode` argument and never created the file the
        # source's uri points at; create it explicitly instead.
        (managed_tmp_path / "test.xls").touch()
        source = DataFrameSource(
            root=".",
            name="example_source",
            driver=MockDataFrameDriver(),
            uri_resolver=mock_resolver,
            data_adapter=mock_df_adapter,
            uri=str(managed_tmp_path / "test.xls"),
        )
        return source

    def test_read_data(
        self,
        MockDataFrameSource: DataFrameSource,
        df: pd.DataFrame,
    ):
        # The mocked driver's frame should round-trip unchanged.
        pd.testing.assert_frame_equal(df, MockDataFrameSource.read_data())

    def test_to_file_nodata(
        self, MockDataFrameSource: DataFrameSource, managed_tmp_path: Path, mocker
    ):
        # to_file() should return None when read_data() yields no data.
        output_path = managed_tmp_path / "output.csv"
        mocker.patch(
            "hydromt.data_catalog.sources.dataframe.DataFrameSource.read_data",
            return_value=None,
        )
        p = MockDataFrameSource.to_file(output_path)
        assert p is None
toctree:: 62 | :maxdepth: 2 63 | :caption: About HydroMT 64 | :hidden: 65 | 66 | Why HydroMT 67 | Plugins 68 | User stories 69 | Publications 70 | Contributors 71 | Dependencies 72 | -------------------------------------------------------------------------------- /docs/_static/deltares-blue.svg: -------------------------------------------------------------------------------- 1 | Artboard 1 2 | -------------------------------------------------------------------------------- /docs/assets/example_catalog_simple.yml: -------------------------------------------------------------------------------- 1 | meta: 2 | roots: 3 | - /linux/path/to/data_root/ 4 | - C:\Windows\path\to\data_root 5 | - . 6 | version: version 7 | name: data_catalog_name 8 | 9 | era5: 10 | data_type: RasterDataset 11 | uri: meteo/era5_daily/nc_merged/era5_{year}_daily.nc 12 | driver: 13 | name: raster_xarray 14 | options: 15 | chunks: 16 | latitude: 250 17 | longitude: 240 18 | time: 30 19 | combine: by_coords 20 | decode_times: true 21 | parallel: true 22 | metadata: 23 | category: meteo 24 | notes: Extracted from Copernicus Climate Data Store; resampled by Deltares to 25 | daily frequency 26 | paper_doi: 10.1002/qj.3803 27 | paper_ref: Hersbach et al. 
(2019) 28 | url: https://doi.org/10.24381/cds.bd0915c6 29 | version: ERA5 daily data on pressure levels 30 | license: https://cds.climate.copernicus.eu/cdsapp/#!/terms/licence-to-use-copernicus-products 31 | crs: 4326 32 | temporal_extent: 33 | start: '1950-01-02' 34 | end: '2023-11-30' 35 | spatial_extent: 36 | West: -0.125 37 | South: -90.125 38 | East: 359.875 39 | North: 90.125 40 | data_adapter: 41 | unit_add: 42 | temp: -273.15 43 | temp_dew: -273.15 44 | temp_max: -273.15 45 | temp_min: -273.15 46 | unit_mult: 47 | kin: 0.000277778 48 | kout: 0.000277778 49 | ssr: 0.000277778 50 | press_msl: 0.01 51 | rename: 52 | d2m: temp_dew 53 | msl: press_msl 54 | ssrd: kin 55 | t2m: temp 56 | tisr: kout 57 | tmax: temp_max 58 | tmin: temp_min 59 | tp: precip 60 | u10: wind10_u 61 | v10: wind10_v 62 | -------------------------------------------------------------------------------- /hydromt/_utils/__init__.py: -------------------------------------------------------------------------------- 1 | """Utility functions for hydromt that have no other home.""" 2 | 3 | from hydromt._utils.caching import cache_vrt_tiles, copy_to_local 4 | from hydromt._utils.dataset import ( 5 | _rename_vars, 6 | _set_metadata, 7 | _shift_dataset_time, 8 | _single_var_as_array, 9 | ) 10 | from hydromt._utils.deep_merge import _deep_merge 11 | from hydromt._utils.dictionaries import _partition_dictionaries 12 | from hydromt._utils.elevation import _elevation2rgba, _rgba2elevation 13 | from hydromt._utils.log import initialize_logging, to_file 14 | from hydromt._utils.nodata import _has_no_data, _set_raster_nodata, _set_vector_nodata 15 | from hydromt._utils.path import _make_config_paths_absolute, _make_config_paths_relative 16 | from hydromt._utils.rgetattr import _rgetattr 17 | from hydromt._utils.steps_validator import _validate_steps 18 | from hydromt._utils.temp_env import temp_env 19 | from hydromt._utils.unused_kwargs import _warn_on_unused_kwargs 20 | from hydromt._utils.uris import 
_is_valid_url, _strip_scheme 21 | 22 | __all__ = [ 23 | "cache_vrt_tiles", 24 | "copy_to_local", 25 | "_deep_merge", 26 | "_rename_vars", 27 | "_set_metadata", 28 | "initialize_logging", 29 | "to_file", 30 | "_shift_dataset_time", 31 | "_single_var_as_array", 32 | "_partition_dictionaries", 33 | "_elevation2rgba", 34 | "_rgba2elevation", 35 | "_has_no_data", 36 | "_set_raster_nodata", 37 | "_set_vector_nodata", 38 | "_rgetattr", 39 | "_validate_steps", 40 | "_warn_on_unused_kwargs", 41 | "_is_valid_url", 42 | "_strip_scheme", 43 | "_make_config_paths_absolute", 44 | "_make_config_paths_relative", 45 | "temp_env", 46 | ] 47 | 48 | 49 | class _classproperty(property): 50 | def __get__(self, owner_self, owner_cls): 51 | return self.fget(owner_cls) 52 | -------------------------------------------------------------------------------- /docs/_static/deltares-white.svg: -------------------------------------------------------------------------------- 1 | Artboard 1 2 | -------------------------------------------------------------------------------- /docs/dev/core_dev/test_ci.rst: -------------------------------------------------------------------------------- 1 | .. _test_ci: 2 | 3 | Test and CI 4 | ----------- 5 | 6 | We use `pytest `__ for testing and `github actions `_ for CI. 7 | - Unit tests are mandatory for new methods and workflows and integration tests are highly recommended for various 8 | - All tests should be contained in the tests directory in functions named `test_*`. 9 | - We use `SonarQube `_ to monitor the coverage of the tests and aim for high (>90%) coverage. This is work in progress. 10 | - Checkout this `comprehensive guide to pytest `_ for more info and tips. 11 | 12 | Running the tests 13 | ^^^^^^^^^^^^^^^^^ 14 | 15 | HydroMT's tests live in the tests folder and generally match the main package layout. 16 | Test should be run from the tests folder. 17 | 18 | To run the entire suite and the code coverage report: 19 | 20 | .. 
code-block:: console 21 | 22 | $ cd tests 23 | $ python -m pytest --verbose --cov=hydromt --cov-report term-missing 24 | 25 | A single test file: 26 | 27 | .. code-block:: console 28 | 29 | $ python -m pytest --verbose test_rio.py 30 | 31 | A single test: 32 | 33 | .. code-block:: console 34 | 35 | $ python -m pytest --verbose test_rio.py::test_object 36 | 37 | 38 | Alternatively, you can also run tests using the pixi install described in the :ref:`developer installation guide `. 39 | 40 | 41 | Run all tests: 42 | 43 | .. code-block:: console 44 | 45 | $ pixi run pytest 46 | 47 | Run tests in a specific file: 48 | 49 | .. code-block:: console 50 | 51 | $ pixi run pytest tests/test_rio.py 52 | 53 | Run a specific test function: 54 | 55 | .. code-block:: console 56 | 57 | $ pixi run pytest tests/test_rio.py::test_object 58 | -------------------------------------------------------------------------------- /docs/dev/plugin_dev/example_pixi.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | authors = ["example-author "] 3 | channels = ["conda-forge"] 4 | name = "my_plugin" 5 | platforms = ["win-64"] 6 | version = "0.1.0" 7 | 8 | [tasks] 9 | test = { cmd = "pytest tests" } 10 | 11 | # Install your code in editable mode, dependencies here are installed in all environments 12 | # Takes the dependencies from pyproject.toml, which will be overridden in the features below. 
13 | [pypi-dependencies] 14 | my_plugin = { path = ".", editable = true } 15 | 16 | # Define features for different Python versions and Hydromt versions 17 | [feature.py310.dependencies] 18 | python = "3.10.*" 19 | 20 | [feature.py311.dependencies] 21 | python = "3.11.*" 22 | 23 | [feature.py312.dependencies] 24 | python = "3.12.*" 25 | 26 | [feature.py313.dependencies] 27 | python = "3.13.*" 28 | 29 | [feature.hydromt_dev.pypi-dependencies] 30 | # Get the latest version from main 31 | hydromt = { git = "https://github.com/Deltares/hydromt.git", branch = "main" } 32 | 33 | [feature.hydromt_latest.pypi-dependencies] 34 | # Replace with the actual stable version range you want to test against 35 | hydromt = ">=1.0,<2.0" 36 | 37 | # Define environments to combine features 38 | [environments] 39 | latest_310 = { features = ["py310", "hydromt_latest"], solve-group = "py310" } 40 | latest_311 = { features = ["py311", "hydromt_latest"], solve-group = "py311" } 41 | latest_312 = { features = ["py312", "hydromt_latest"], solve-group = "py312" } 42 | latest_313 = { features = ["py313", "hydromt_latest"], solve-group = "py313" } 43 | 44 | dev_310 = { features = ["py310", "hydromt_dev"], solve-group = "py310" } 45 | dev_311 = { features = ["py311", "hydromt_dev"], solve-group = "py311" } 46 | dev_312 = { features = ["py312", "hydromt_dev"], solve-group = "py312" } 47 | dev_313 = { features = ["py313", "hydromt_dev"], solve-group = "py313" } 48 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug Report 3 | description: Report incorrect behavior in the HydroMT library 4 | labels: [Bug, Needs refinement] 5 | 6 | body: 7 | - type: checkboxes 8 | id: checks 9 | attributes: 10 | label: HydroMT version checks 11 | options: 12 | - label: I have checked that this issue has not already been reported. 
13 | required: true 14 | - label: I have checked that this bug exists on the latest version of HydroMT. 15 | required: true 16 | - type: textarea 17 | id: example 18 | attributes: 19 | description: > 20 | Please provide a minimal, copy-pastable example or a link to a public repository that reproduces the behavior. If providing a copy pastable example, 21 | you may assume your in a clean up to date version of hydromt with a python environment active. In the case of a repository, ensure the repository 22 | has a README.md which includes instructions to reproduce the behaviour. 23 | label: Reproducible Example 24 | validations: 25 | required: true 26 | - type: textarea 27 | id: current-behaviour 28 | attributes: 29 | description: > 30 | Please provide a description of the incorrect behaviour shown in the reproducible example 31 | label: Current behaviour 32 | validations: 33 | required: true 34 | - type: textarea 35 | id: expected-behaviour 36 | attributes: 37 | description: > 38 | Please provide a description of what you think the behaviour should be 39 | label: Desired behaviour 40 | validations: 41 | required: true 42 | - type: textarea 43 | id: additional 44 | attributes: 45 | description: > 46 | Please add any other context about the bug here 47 | label: Additional context 48 | validations: 49 | required: false 50 | -------------------------------------------------------------------------------- /hydromt/typing/deferred_file_close.py: -------------------------------------------------------------------------------- 1 | """Deferred file close handling to ensure data integrity during file operations.""" 2 | 3 | import logging 4 | import shutil 5 | from pathlib import Path 6 | 7 | logger = logging.getLogger(__name__) 8 | 9 | _MAX_CLOSE_ATTEMPTS = 2 10 | 11 | 12 | class DeferredFileClose: 13 | """Class to handle deferred file closing operations.""" 14 | 15 | def __init__(self, *, original_path: Path, temp_path: Path): 16 | self._original_path = original_path 17 | 
self._temp_path = temp_path 18 | self._close_attempts = 0 19 | 20 | def close(self) -> None: 21 | """Attempt to move the temporary file to the original path up to a maximum number of attempts.""" 22 | while self._close_attempts < _MAX_CLOSE_ATTEMPTS: 23 | try: 24 | logger.debug( 25 | f"Moving temporary file '{self._temp_path}' to destination '{self._original_path}'." 26 | ) 27 | shutil.move(self._temp_path, self._original_path) 28 | return 29 | except PermissionError as e: 30 | self._close_attempts += 1 31 | logger.error( 32 | f"Could not write to destination file {self._original_path} because the following error was raised: {e}" 33 | ) 34 | except FileNotFoundError: 35 | logger.warning( 36 | f"Could not find temporary file {self._temp_path}. It was likely already deleted by another component that updates the same dataset." 37 | ) 38 | return 39 | 40 | # already tried to close this too many times 41 | logger.error( 42 | f"Max write attempts to file {self._original_path} exceeded. Skipping... " 43 | f"Instead, data was written to a temporary file: {self._temp_path}." 44 | ) 45 | -------------------------------------------------------------------------------- /tests/data/test_sources1.yml: -------------------------------------------------------------------------------- 1 | meta: 2 | hydromt_version: '>=1.0a, <2.0' 3 | version: v1.0.0 4 | era5: 5 | data_type: RasterDataset 6 | uri: ERA5/daily/era5_{year}_daily.nc 7 | driver: 8 | name: raster_xarray 9 | options: 10 | chunks: 11 | time: 100 12 | longitude: 120 13 | latitude: 125 14 | concat_dim: time 15 | decode_times: true 16 | combine: by_coords 17 | parallel: true 18 | metadata: 19 | category: meteo 20 | history: Extracted from Copernicus Climate Data Store; resampled by Deltares to 21 | daily frequency 22 | paper_ref: Hersbach et al. 
(2019) 23 | paper_doi: 10.1002/qj.3803 24 | url: https://doi.org/10.24381/cds.bd0915c6 25 | version: ERA5 daily data on pressure levels 26 | license: https://cds.climate.copernicus.eu/cdsapp/#!/terms/licence-to-use-copernicus-products 27 | crs: 4326 28 | data_adapter: 29 | unit_mult: 30 | precip: 1000 31 | press_msl: 0.01 32 | kin: 0.000277778 33 | kout: 0.000277778 34 | unit_add: 35 | time: 86400 36 | temp: -273.15 37 | temp_min: -273.15 38 | temp_max: -273.15 39 | rename: 40 | tp: precip 41 | t2m: temp 42 | tmin: temp_min 43 | tmax: temp_max 44 | msl: press_msl 45 | ssrd: kin 46 | tisr: kout 47 | hydro_lakes: 48 | data_type: GeoDataFrame 49 | uri: waterbodies/lake-db.gpkg 50 | driver: 51 | name: pyogrio 52 | metadata: 53 | category: surface water 54 | author: Arjen Haag 55 | version: 1.0 56 | info: HydroLAKES.v10_extract 57 | crs: 4326 58 | data_adapter: 59 | unit_mult: 60 | Area_avg: 1000000.0 61 | rename: 62 | Hylak_id: waterbody_id 63 | Lake_area: Area_avg 64 | Vol_total: Vol_avg 65 | Depth_avg: Depth_avg 66 | Dis_avg: Dis_avg 67 | Pour_long: xout 68 | Pour_lat: yout 69 | -------------------------------------------------------------------------------- /docs/api/api.rst: -------------------------------------------------------------------------------- 1 | .. currentmodule:: hydromt 2 | 3 | .. _api_reference: 4 | 5 | ############# 6 | API reference 7 | ############# 8 | 9 | This page provides an auto-generated summary of HydroMT's API. For more details and examples, 10 | refer to the relevant chapters in the main part of the documentation. 11 | 12 | .. dropdown:: **General API** 13 | 14 | - :ref:`Command Line Interface (CLI) ` 15 | - :ref:`Entry points and plugins ` 16 | 17 | .. dropdown:: **Model API** 18 | 19 | - :ref:`Model ` 20 | - :ref:`Model components ` 21 | - :ref:`Model processes ` 22 | 23 | .. 
dropdown:: **Data API** 24 | 25 | - :ref:`DataCatalog ` 26 | - :ref:`DataSource ` 27 | - :ref:`URIResolvers ` 28 | - :ref:`Drivers ` 29 | - :ref:`Data Adapters ` 30 | 31 | .. dropdown:: **Supporting functionality** 32 | 33 | - :ref:`GIS functionalities ` 34 | - :ref:`Statistical methods ` 35 | 36 | .. dropdown:: **I/O** 37 | 38 | - :ref:`Readers ` 39 | - :ref:`Writers ` 40 | 41 | .. toctree:: 42 | :maxdepth: 2 43 | :hidden: 44 | :caption: General API 45 | 46 | cli 47 | plugin 48 | 49 | .. toctree:: 50 | :maxdepth: 2 51 | :hidden: 52 | :caption: Model API 53 | 54 | model 55 | model_components 56 | model_processes 57 | 58 | .. toctree:: 59 | :maxdepth: 2 60 | :hidden: 61 | :caption: Data API 62 | 63 | data_catalog 64 | data_source 65 | uri_resolvers 66 | drivers 67 | data_adapter 68 | 69 | .. toctree:: 70 | :maxdepth: 2 71 | :hidden: 72 | :caption: Supporting functionality 73 | 74 | gis 75 | stats 76 | 77 | .. toctree:: 78 | :maxdepth: 2 79 | :hidden: 80 | :caption: I/O 81 | 82 | readers 83 | writers 84 | -------------------------------------------------------------------------------- /hydromt/_utils/nodata.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Union 2 | 3 | import geopandas as gpd 4 | import pandas as pd 5 | import xarray as xr 6 | 7 | from hydromt.typing import SourceMetadata 8 | 9 | __all__ = ["_has_no_data", "_set_vector_nodata", "_set_raster_nodata"] 10 | 11 | 12 | def _has_no_data( 13 | data: Optional[Union[pd.DataFrame, gpd.GeoDataFrame, xr.Dataset, xr.DataArray]], 14 | ) -> bool: 15 | """Check whether various data containers are empty.""" 16 | if data is None: 17 | return True 18 | elif isinstance(data, xr.Dataset): 19 | return all([v.size == 0 for v in data.data_vars.values()]) 20 | else: 21 | return len(data) == 0 22 | 23 | 24 | def _set_vector_nodata( 25 | ds: Optional[xr.Dataset], metadata: "SourceMetadata" 26 | ) -> Optional[xr.Dataset]: 27 | if ds is None: 28 | return None 
29 | if metadata.nodata is not None: 30 | if not isinstance(metadata.nodata, dict): 31 | nodata = {k: metadata.nodata for k in ds.data_vars.keys()} 32 | else: 33 | nodata = metadata.nodata 34 | for k in ds.data_vars: 35 | mv = nodata.get(k, None) 36 | if mv is not None and ds[k].vector.nodata is None: 37 | ds[k].vector.set_nodata(mv) 38 | return ds 39 | 40 | 41 | def _set_raster_nodata( 42 | ds: Optional[xr.Dataset], metadata: "SourceMetadata" 43 | ) -> Optional[xr.Dataset]: 44 | if ds is None: 45 | return None 46 | if metadata.nodata is not None: 47 | if not isinstance(metadata.nodata, dict): 48 | nodata = {k: metadata.nodata for k in ds.data_vars.keys()} 49 | else: 50 | nodata = metadata.nodata 51 | for k in ds.data_vars: 52 | mv = nodata.get(k, None) 53 | if mv is not None and ds[k].raster.nodata is None: 54 | ds[k].raster.set_nodata(mv) 55 | return ds 56 | -------------------------------------------------------------------------------- /hydromt/error.py: -------------------------------------------------------------------------------- 1 | """All of the types for handeling errors within HydroMT.""" 2 | 3 | import inspect 4 | import logging 5 | from enum import Enum 6 | 7 | 8 | class DeprecatedError(Exception): 9 | """Simple custom class to raise an error for something that is now deprecated.""" 10 | 11 | def __init__(self, msg: str): 12 | """Initialise the object.""" 13 | self.base = "DeprecationError" 14 | self.message = msg 15 | 16 | def __str__(self): 17 | return f"{self.base}: {self.message}" 18 | 19 | 20 | class NoDataStrategy(Enum): 21 | """Strategy to handle nodata values.""" 22 | 23 | RAISE = "raise" 24 | WARN = "warn" 25 | IGNORE = "ignore" 26 | 27 | 28 | class NoDataException(Exception): 29 | """Exception raised for errors in the input. 
30 | 31 | Attributes 32 | ---------- 33 | message -- explanation of the error 34 | """ 35 | 36 | def __init__(self, message="No data available"): 37 | self.message = message 38 | super().__init__(self.message) 39 | 40 | 41 | def exec_nodata_strat(msg: str, strategy: NoDataStrategy) -> None: 42 | """Execute nodata strategy. 43 | 44 | Uses the logger from the calling module if it has a logger. 45 | Otherwise creates a new logger with the calling module's name. 46 | Otherwise uses a backup logger from this current module. 47 | """ 48 | frame = inspect.currentframe() 49 | caller_frame = frame.f_back if frame else None 50 | module = inspect.getmodule(caller_frame) if caller_frame else None 51 | logger_name = getattr(module, "__name__", __name__) 52 | logger = getattr(module, "logger", logging.getLogger(logger_name)) 53 | 54 | if strategy == NoDataStrategy.RAISE: 55 | raise NoDataException(msg) 56 | elif strategy == NoDataStrategy.WARN: 57 | logger.warning(msg) 58 | elif strategy == NoDataStrategy.IGNORE: 59 | # do nothing 60 | pass 61 | -------------------------------------------------------------------------------- /docs/overview/intro.rst: -------------------------------------------------------------------------------- 1 | .. _getting_started: 2 | 3 | Getting started 4 | =============== 5 | 6 | .. grid:: 3 7 | :gutter: 1 8 | 9 | .. grid-item-card:: 10 | :text-align: center 11 | :link: installation_guide 12 | :link-type: ref 13 | 14 | :octicon:`gear;5em;sd-text-icon blue-icon` 15 | +++ 16 | Installation guide 17 | 18 | .. grid-item-card:: 19 | :text-align: center 20 | :link: overview_usage 21 | :link-type: ref 22 | 23 | :octicon:`device-desktop;5em;sd-text-icon blue-icon` 24 | +++ 25 | How to use HydroMT ? 26 | 27 | .. grid-item-card:: 28 | :text-align: center 29 | :link: intro_plugin_guide 30 | :link-type: ref 31 | 32 | :octicon:`terminal;5em;sd-text-icon blue-icon` 33 | +++ 34 | Creating your own plugin 35 | 36 | .. 
grid-item-card:: 37 | :text-align: center 38 | :link: examples_models 39 | :link-type: ref 40 | 41 | :octicon:`graph;5em;sd-text-icon blue-icon` 42 | +++ 43 | Examples: Working with models 44 | 45 | .. grid-item-card:: 46 | :text-align: center 47 | :link: examples_data 48 | :link-type: ref 49 | 50 | :octicon:`stack;5em;sd-text-icon blue-icon` 51 | +++ 52 | Examples: Data Catalog 53 | 54 | .. grid-item-card:: 55 | :text-align: center 56 | :link: examples_gis 57 | :link-type: ref 58 | 59 | :octicon:`tools;5em;sd-text-icon blue-icon` 60 | +++ 61 | Examples: GIS functionalities (advanced) 62 | 63 | 64 | .. toctree:: 65 | :caption: Getting started 66 | :maxdepth: 2 67 | :hidden: 68 | 69 | Installation 70 | Overview: How to use HydroMT ? 71 | Examples: Working with models 72 | Examples: Data Catalog 73 | Examples: GIS functionalities (advanced) 74 | -------------------------------------------------------------------------------- /tests/data_catalog/adapters/test_geodataframe_adapter.py: -------------------------------------------------------------------------------- 1 | import geopandas as gpd 2 | import pytest 3 | 4 | from hydromt.data_catalog.adapters.geodataframe import GeoDataFrameAdapter 5 | from hydromt.error import NoDataException, NoDataStrategy 6 | from hydromt.typing import SourceMetadata 7 | 8 | 9 | class TestGeodataFrameAdapter: 10 | def test_transform_empty_gdf(self, geodf: gpd.GeoDataFrame, mocker): 11 | adapter = GeoDataFrameAdapter() 12 | 13 | empty_gdf = geodf.iloc[0:0] 14 | mocker.patch.object( 15 | adapter, 16 | "_set_nodata", 17 | return_value=empty_gdf, 18 | ) 19 | with pytest.raises( 20 | NoDataException, match="GeoDataFrame has no data after masking" 21 | ): 22 | adapter.transform( 23 | empty_gdf, 24 | mask=empty_gdf, 25 | metadata=SourceMetadata(), 26 | handle_nodata=NoDataStrategy.RAISE, 27 | ) 28 | 29 | def test_set_crs( 30 | self, 31 | geodf: gpd.GeoDataFrame, 32 | caplog: pytest.LogCaptureFixture, 33 | ): 34 | adapter = GeoDataFrameAdapter() 
35 | 36 | gdf_no_crs = geodf.copy() 37 | gdf_no_crs.set_crs(None, allow_override=True, inplace=True) 38 | gdf_with_crs = adapter._set_crs(gdf_no_crs, crs=4326) 39 | 40 | assert gdf_with_crs.crs.to_epsg() == 4326 41 | 42 | gdf_no_crs.set_crs(None, allow_override=True, inplace=True) 43 | with pytest.raises( 44 | ValueError, match="GeoDataFrame: CRS not defined in data catalog or data." 45 | ): 46 | adapter._set_crs(gdf_no_crs, crs=None) 47 | caplog.set_level("WARNING") 48 | adapter._set_crs(geodf, crs=3857) 49 | assert ( 50 | "GeoDataFrame : CRS from data catalog does not match CRS of" 51 | " data. The original CRS will be used. Please check your data catalog." 52 | ) in caplog.text 53 | -------------------------------------------------------------------------------- /tests/data_catalog/sources/test_factory.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from hydromt.data_catalog.adapters import GeoDataFrameAdapter 4 | from hydromt.data_catalog.drivers import GeoDataFrameDriver 5 | from hydromt.data_catalog.sources import DataSource, GeoDataFrameSource, create_source 6 | 7 | 8 | class TestCreateSource: 9 | def test_creates_correct_submodel( 10 | self, 11 | MockGeoDataFrameDriver: type[GeoDataFrameDriver], 12 | mock_gdf_adapter: GeoDataFrameAdapter, 13 | ): 14 | submodel: DataSource = create_source( 15 | { 16 | "name": "geojsonfile", 17 | "data_type": "GeoDataFrame", 18 | "driver": MockGeoDataFrameDriver(), 19 | "data_adapter": mock_gdf_adapter, 20 | "uri": "test_uri", 21 | } 22 | ) 23 | assert isinstance(submodel, GeoDataFrameSource) 24 | 25 | def test_unknown_data_type( 26 | self, 27 | MockGeoDataFrameDriver: type[GeoDataFrameDriver], 28 | mock_gdf_adapter: GeoDataFrameAdapter, 29 | ): 30 | with pytest.raises(ValueError, match="Unknown 'data_type'"): 31 | create_source( 32 | { 33 | "name": "geojsonfile", 34 | "data_type": "Bogus", 35 | "driver": MockGeoDataFrameDriver(), 36 | "data_adapter": 
mock_gdf_adapter, 37 | "uri": "test_uri", 38 | } 39 | ) 40 | 41 | def test_no_data_type( 42 | self, 43 | MockGeoDataFrameDriver: type[GeoDataFrameDriver], 44 | mock_gdf_adapter: GeoDataFrameAdapter, 45 | ): 46 | with pytest.raises(ValueError, match="needs 'data_type'"): 47 | create_source( 48 | { 49 | "name": "geojsonfile", 50 | "driver": MockGeoDataFrameDriver(), 51 | "data_adapter": mock_gdf_adapter, 52 | "uri": "test_uri", 53 | } 54 | ) 55 | -------------------------------------------------------------------------------- /docs/assets/example_catalog.yml: -------------------------------------------------------------------------------- 1 | meta: 2 | roots: 3 | - /linux/path/to/data_root/ 4 | - C:\Windows\path\to\data_root 5 | - . 6 | version: version 7 | name: data_catalog_name 8 | 9 | era5: 10 | data_type: RasterDataset 11 | variants: 12 | - provider: netcdf 13 | uri: meteo/era5_daily/nc_merged/era5_{year}_daily.nc 14 | driver: 15 | name: raster_xarray 16 | options: 17 | chunks: 18 | latitude: 250 19 | longitude: 240 20 | time: 30 21 | combine: by_coords 22 | decode_times: true 23 | parallel: true 24 | - provider: zarr 25 | uri: meteo/era5_daily.zarr 26 | driver: 27 | name: raster_xarray 28 | options: 29 | chunks: auto 30 | metadata: 31 | category: meteo 32 | notes: Extracted from Copernicus Climate Data Store; resampled by Deltares to 33 | daily frequency 34 | paper_doi: 10.1002/qj.3803 35 | paper_ref: Hersbach et al. 
(2019) 36 | url: https://doi.org/10.24381/cds.bd0915c6 37 | version: ERA5 daily data on pressure levels 38 | license: https://cds.climate.copernicus.eu/cdsapp/#!/terms/licence-to-use-copernicus-products 39 | crs: 4326 40 | temporal_extent: 41 | start: '1950-01-02' 42 | end: '2023-11-30' 43 | spatial_extent: 44 | West: -0.125 45 | South: -90.125 46 | East: 359.875 47 | North: 90.125 48 | data_adapter: 49 | unit_add: 50 | temp: -273.15 51 | temp_dew: -273.15 52 | temp_max: -273.15 53 | temp_min: -273.15 54 | unit_mult: 55 | kin: 0.000277778 56 | kout: 0.000277778 57 | ssr: 0.000277778 58 | press_msl: 0.01 59 | rename: 60 | d2m: temp_dew 61 | msl: press_msl 62 | ssrd: kin 63 | t2m: temp 64 | tisr: kout 65 | tmax: temp_max 66 | tmin: temp_min 67 | tp: precip 68 | u10: wind10_u 69 | v10: wind10_v 70 | -------------------------------------------------------------------------------- /.zenodo.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "HydroMT: Automated and reproducible model building and analysis", 3 | "description": "HydroMT (Hydro Model Tools) is an open-source Python package that facilitates the process of building and analyzing spatial geoscientific models with a focus on water system models. It does so by automating the workflow to go from raw data to a complete model instance which is ready to run and to analyse model results once the simulation has finished.", 4 | "upload_type": "software", 5 | "creators": [ 6 | { 7 | "affiliation": "Deltares", 8 | "name": "Eilander, Dirk", 9 | "orcid": "0000-0002-0951-8418" 10 | }, 11 | { 12 | "affiliation": "Deltares", 13 | "name": "Boisgontier, Hélène" 14 | }, 15 | { 16 | "affiliation": "Deltares", 17 | "name": "Bouaziz, Laurène J. 
E.", 18 | "orcid": "0000-0003-0597-8051" 19 | }, 20 | { 21 | "affiliation": "Deltares", 22 | "name": "Buitink, Joost" 23 | }, 24 | { 25 | "affiliation": "Deltares", 26 | "name": "Couasnon, Anaïs", 27 | "orcid": "0000-0001-9372-841X" 28 | }, 29 | { 30 | "affiliation": "Deltares", 31 | "name": "Dalmijn, Brendan" 32 | }, 33 | { 34 | "affiliation": "Deltares", 35 | "name": "Hegnauer, Mark" 36 | }, 37 | { 38 | "affiliation": "Deltares", 39 | "name": "de Jong, Tjalling" 40 | }, 41 | { 42 | "affiliation": "Deltares", 43 | "name": "Loos, Sibren" 44 | }, 45 | { 46 | "affiliation": "Deltares", 47 | "name": "Marth, Indra" 48 | }, 49 | { 50 | "affiliation": "Deltares", 51 | "name": "van Verseveld, Willem", 52 | "orcid": "0000-0003-3311-738X" 53 | } 54 | ], 55 | "access_right": "open", 56 | "license": "MIT" 57 | } 58 | -------------------------------------------------------------------------------- /docs/dev/core_dev/index.rst: -------------------------------------------------------------------------------- 1 | .. _core_developer_guide: 2 | 3 | Core Developer's guide 4 | ======================= 5 | 6 | Welcome to the HydroMT project. All contributions, bug reports, bug fixes, 7 | documentation improvements, enhancements, and ideas are welcome. Here's how we work. 8 | 9 | 10 | Rights 11 | ------ 12 | 13 | The MIT `license `_ applies to all contributions. 14 | 15 | 16 | .. grid:: 3 17 | :gutter: 1 18 | 19 | .. grid-item-card:: 20 | :text-align: center 21 | :link: dev_install 22 | :link-type: doc 23 | 24 | :octicon:`tools;5em;sd-text-icon blue-icon` 25 | +++ 26 | Development Installation 27 | 28 | 29 | .. grid-item-card:: 30 | :text-align: center 31 | :link: git_conventions 32 | :link-type: doc 33 | 34 | :octicon:`git-branch;5em;sd-text-icon blue-icon` 35 | +++ 36 | Git Conventions 37 | 38 | .. grid-item-card:: 39 | :text-align: center 40 | :link: code_conventions 41 | :link-type: doc 42 | 43 | :octicon:`code;5em;sd-text-icon blue-icon` 44 | +++ 45 | Code Conventions 46 | 47 | .. 
grid-item-card:: 48 | :text-align: center 49 | :link: documentation 50 | :link-type: doc 51 | 52 | :octicon:`book;5em;sd-text-icon blue-icon` 53 | +++ 54 | Documentation 55 | 56 | .. grid-item-card:: 57 | :text-align: center 58 | :link: test_ci 59 | :link-type: doc 60 | 61 | :octicon:`beaker;5em;sd-text-icon blue-icon` 62 | +++ 63 | Testing & CI 64 | 65 | .. grid-item-card:: 66 | :text-align: center 67 | :link: release 68 | :link-type: doc 69 | 70 | :octicon:`package;5em;sd-text-icon blue-icon` 71 | +++ 72 | Release Process 73 | 74 | .. toctree:: 75 | :hidden: 76 | 77 | dev_install 78 | git_conventions 79 | code_conventions 80 | documentation 81 | test_ci 82 | release 83 | -------------------------------------------------------------------------------- /hydromt/_utils/naming_convention.py: -------------------------------------------------------------------------------- 1 | from re import compile as compile_regex 2 | from re import error as regex_error 3 | from string import Formatter 4 | from typing import List, Optional, Pattern, Tuple 5 | 6 | _placeholders = frozenset({"year", "month", "variable", "name", "overview_level"}) 7 | 8 | 9 | def _expand_uri_placeholders( 10 | uri: str, 11 | *, 12 | placeholders: Optional[List[str]] = None, 13 | time_range: Optional[Tuple[str, str]] = None, 14 | variables: Optional[List[str]] = None, 15 | ) -> Tuple[str, List[str], Pattern[str]]: 16 | """Expand known placeholders in the URI.""" 17 | if placeholders is None: 18 | placeholders = [] 19 | keys: list[str] = [] 20 | pattern: str = "" 21 | 22 | if "{" in uri: 23 | uri_expanded = "" 24 | for literal_text, key, fmt, _ in Formatter().parse(uri): 25 | uri_expanded += literal_text 26 | pattern += literal_text 27 | if key is None: 28 | continue 29 | pattern += "(.*)" 30 | key_str = "{" + f"{key}:{fmt}" + "}" if fmt else "{" + key + "}" 31 | # remove unused fields 32 | if key in ["year", "month"] and time_range is None: 33 | uri_expanded += "*" 34 | elif key == "variable" and 
variables is None: 35 | uri_expanded += "*" 36 | elif key == "name": 37 | uri_expanded += "*" 38 | # escape unknown fields 39 | elif key is not None and key not in placeholders: 40 | uri_expanded = uri_expanded + "{" + key_str + "}" 41 | else: 42 | uri_expanded = uri_expanded + key_str 43 | keys.append(key) 44 | uri = uri_expanded 45 | 46 | # windows paths creating invalid escape sequences 47 | try: 48 | regex = compile_regex(pattern) 49 | except regex_error: 50 | # try it as raw path if regular string fails 51 | regex = compile_regex(pattern.encode("unicode_escape").decode()) 52 | 53 | return (uri, keys, regex) 54 | -------------------------------------------------------------------------------- /docs/user_guide/migration_guide/data_catalog.rst: -------------------------------------------------------------------------------- 1 | .. _data_catalog_migration: 2 | 3 | Migrating the Data Catalog 4 | ========================== 5 | 6 | Overview 7 | -------- 8 | The data catalog structure has been refactored to introduce a more modular design and 9 | clearer separation of responsibilities across several new classes (``DataSource``, ``Driver``, ``URIResolver``, and ``DataAdapter``): 10 | 11 | - ``URIResolver`` is in charge of parsing the path or URI of the file (e.g if you are using some keywords like 12 | ``{year}`` or ``{month}`` in your paths or if you want to read tiled raster) 13 | - ``Driver`` is in charge of reading the data from the source (e.g reading a netcdf file from a local disk or from cloud) 14 | - ``DataAdapter`` is in charge of harmonizing the data to standard HydroMT data structures (e.g. renaming variables, setting attributes, units conversion, etc.) 15 | - ``DataSource`` is the main class that ties everything together and is used by the ``DataCatalog`` to load data. 16 | 17 | Key format changes: 18 | 19 | - ``path`` is renamed to ``uri`` 20 | - **driver**: ``filesystem`` or ``driver_kwargs`` moved under ``driver``. 
``driver`` can 21 | be a single string or a dictionnary with name and options (passed to underlying function 22 | that will read the data, e.g. xarray.open_mfdataset, etc.). 23 | - **data_adapter**:``unit_add``, ``unit_mult``, ``rename``, etc. moved under ``data_adapter`` 24 | - **uri_resolver**: can be specified mostly in the case of tiled rasters to pass required options. 25 | - **metadata**: ``crs`` and ``nodata`` are moved under ``metadata`` (renamed from ``meta``) 26 | - A single catalog entry can now reference multiple data variants or versions 27 | 28 | See more information about the current format in the :ref:`data catalog documentation `. 29 | 30 | How to upgrade 31 | -------------- 32 | All existing pre-defined catalogs have been updated to the new format. For your own catalogs, you can upgrade 33 | easily with the HydroMT ``check`` command: 34 | 35 | .. code-block:: bash 36 | 37 | hydromt check -d /path/to/data_catalog.yml --format v0 --upgrade -v 38 | -------------------------------------------------------------------------------- /docs/dev/core_dev/release.rst: -------------------------------------------------------------------------------- 1 | .. _create_release: 2 | 3 | 4 | Creating a release 5 | ------------------ 6 | 7 | 1. Go to the `actions` tab on Github, select `Create a release` from the actions listen to the left, then use the `run workflow` button to start the release process. You will be asked whether it will be a `major`, `minor` or `patch` release. Choose the appropriate action. 8 | 2. The action you just run will open a new PR for you with a new branch named `release/v`. (the `NEW_VERSION` will be calculated for you based on which kind of release you selected.) In the new PR, the changelog, hydromt version and sphinx `switcher.json` will be updated for you. Any changes you made to the `pyproject.toml` since the last release will be posted as a comment in the PR. You will need these during the Conda-forge release if there are any. 9 | 3. 
Every commit to this new branch will trigger the creation (and testing) of release artifacts. In our case those are: Documentation and the PyPi package (the conda release happens separately). After the artifacts are created, they will be uploaded to the repository's internal artifact cache. A bot will post links to these created artifacts in the PR which you can use to download and locally inspect them. 10 | 4. When you are happy with the release in the PR, you can simply merge it. We suggest naming the commit something like "Release v" 11 | 5. After the PR is merged, you will need to run the `Finalise a new release` action that will publish the latest artifacts created to their respective platform, it will also create a tag and a github release for you automatically. After this, a bot will open a new PR to the `main` branch, setting the hydromt version back to a dev version, and adding new headers to the `docs/changelog.rst` for unreleased features. The release is now done as far as this repo is concerned. 12 | 6. The newly published PyPi package will trigger a new PR to the `HydroMT feedstock repos of conda-forge `_. 13 | Here you can use the comment posted to the release PR to see if the `meta.yml` needs to be updated. Merge the PR to release the new version on conda-forge. 14 | 7. celebrate the new release! 
15 | -------------------------------------------------------------------------------- /hydromt/data_catalog/adapters/data_adapter_base.py: -------------------------------------------------------------------------------- 1 | """BaseModel for DataAdapter.""" 2 | 3 | from datetime import timedelta 4 | from typing import Any, Dict, List, Optional 5 | 6 | from pydantic import BaseModel, ConfigDict, Field 7 | 8 | from hydromt.typing import TimeRange 9 | 10 | 11 | class DataAdapterBase(BaseModel): 12 | """BaseModel for DataAdapter.""" 13 | 14 | model_config = ConfigDict(extra="forbid") 15 | 16 | unit_add: Dict[str, Any] = Field(default_factory=dict) 17 | unit_mult: Dict[str, Any] = Field(default_factory=dict) 18 | rename: Dict[str, str] = Field(default_factory=dict) 19 | 20 | def _to_source_timerange( 21 | self, 22 | time_range: Optional[TimeRange], 23 | ) -> Optional[TimeRange]: 24 | """Transform a DataSource timerange to the source-native timerange. 25 | 26 | Parameters 27 | ---------- 28 | time_range : Optional[TimeRange] 29 | start and end datetime 30 | 31 | Returns 32 | ------- 33 | Optional[TimeRange] 34 | time_range in source format 35 | """ 36 | if time_range is None: 37 | return None 38 | elif dt := self.unit_add.get("time"): 39 | # subtract from source unit add 40 | return TimeRange( 41 | start=time_range.start - timedelta(seconds=dt), 42 | end=time_range.end - timedelta(seconds=dt), 43 | ) 44 | else: 45 | return time_range 46 | 47 | def _to_source_variables( 48 | self, variables: Optional[List[str]] 49 | ) -> Optional[List[str]]: 50 | """Transform DataSource variables to the source-native names. 51 | 52 | Parameters 53 | ---------- 54 | variables : Optional[List[str]] 55 | name(s) of the variables in the data. 
56 | 57 | Returns 58 | ------- 59 | Optional[List[str]] 60 | the variable names translated back to their source-native names (via the inverse of ``rename``), or None if no variables were given. 61 | """ 62 | if variables: 63 | inverse_rename_mapping: dict[str, str] = { 64 | v: k for k, v in self.rename.items() 65 | } 66 | return [inverse_rename_mapping.get(var, var) for var in variables] 67 | -------------------------------------------------------------------------------- /docs/api/model_processes.rst: -------------------------------------------------------------------------------- 1 | .. currentmodule:: hydromt.model 2 | 3 | .. _workflows_api: 4 | 5 | Model Processes 6 | =============== 7 | 8 | .. _workflows_grid_api: 9 | 10 | Grid 11 | ---- 12 | 13 | .. autosummary:: 14 | :toctree: ../_generated 15 | 16 | processes.grid.create_grid_from_region 17 | processes.grid.create_rotated_grid_from_geom 18 | processes.grid.grid_from_constant 19 | processes.grid.grid_from_rasterdataset 20 | processes.grid.grid_from_raster_reclass 21 | processes.grid.grid_from_geodataframe 22 | processes.grid.rotated_grid 23 | 24 | .. _workflows_mesh_api: 25 | 26 | Mesh 27 | ---- 28 | 29 | .. autosummary:: 30 | :toctree: ../_generated 31 | 32 | processes.mesh.create_mesh2d_from_region 33 | processes.mesh.create_mesh2d_from_mesh 34 | processes.mesh.create_mesh2d_from_geom 35 | processes.mesh.mesh2d_from_rasterdataset 36 | processes.mesh.mesh2d_from_raster_reclass 37 | 38 | .. _workflows_region_api: 39 | 40 | Region 41 | ------ 42 | 43 | .. autosummary:: 44 | :toctree: ../_generated 45 | 46 | processes.region.parse_region_basin 47 | processes.region.parse_region_bbox 48 | processes.region.parse_region_geom 49 | processes.region.parse_region_grid 50 | processes.region.parse_region_other_model 51 | processes.region.parse_region_mesh 52 | 53 | .. _workflows_basin_api: 54 | 55 | Basin mask 56 | ---------- 57 | 58 | .. autosummary:: 59 | :toctree: ../_generated 60 | 61 | processes.basin_mask.get_basin_geometry 62 | 63 | .. _workflows_rivers_api: 64 | 65 | River bathymetry 66 | ---------------- 67 | 68 | .. 
autosummary:: 69 | :toctree: ../_generated 70 | 71 | processes.rivers.river_width 72 | processes.rivers.river_depth 73 | 74 | .. _workflows_forcing_api: 75 | 76 | Meteo 77 | ----- 78 | 79 | .. autosummary:: 80 | :toctree: ../_generated 81 | 82 | processes.meteo.precip 83 | processes.meteo.temp 84 | processes.meteo.press 85 | processes.meteo.pet 86 | processes.meteo.wind 87 | processes.meteo.press_correction 88 | processes.meteo.temp_correction 89 | processes.meteo.resample_time 90 | processes.meteo.delta_freq 91 | processes.meteo.pet_debruin 92 | processes.meteo.pet_makkink 93 | processes.meteo.pm_fao56 94 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # file based on github/gitignore 2 | 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | *.py[cod] 6 | *$py.class 7 | 8 | # C extensions 9 | *.so 10 | 11 | # Temporary 12 | tmp/ 13 | sandbox/ 14 | 15 | # Distribution / packaging 16 | .Python 17 | build/ 18 | develop-eggs/ 19 | dist/ 20 | downloads/ 21 | eggs/ 22 | .eggs/ 23 | lib/ 24 | lib64/ 25 | parts/ 26 | sdist/ 27 | var/ 28 | wheels/ 29 | *.egg-info/ 30 | .installed.cfg 31 | *.egg 32 | MANIFEST 33 | 34 | # PyInstaller 35 | # Usually these files are written by a python script from a template 36 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
37 | *.manifest 38 | #*.spec 39 | 40 | # Installer logs 41 | pip-log.txt 42 | pip-delete-this-directory.txt 43 | 44 | # Unit test / coverage reports 45 | htmlcov/ 46 | .tox/ 47 | .coverage 48 | .coverage.* 49 | .cache 50 | .pytest_cache 51 | nosetests.xml 52 | coverage.xml 53 | *.cover 54 | .hypothesis/ 55 | *.log 56 | 57 | # Translations 58 | *.mo 59 | *.pot 60 | 61 | # Django stuff: 62 | *.log 63 | .static_storage/ 64 | .media/ 65 | local_settings.py 66 | 67 | # Flask stuff: 68 | instance/ 69 | .webassets-cache 70 | 71 | # Scrapy stuff: 72 | .scrapy 73 | 74 | # Sphinx documentation 75 | docs/_build/ 76 | docs/_generated/ 77 | docs/_examples 78 | 79 | # PyBuilder 80 | target/ 81 | 82 | # Jupyter Notebook 83 | .ipynb_checkpoints 84 | examples/.ipynb_checkpoints 85 | examples/tmp* 86 | 87 | # pyenv 88 | .python-version 89 | 90 | # celery beat schedule file 91 | celerybeat-schedule 92 | 93 | # SageMath parsed files 94 | *.sage.py 95 | 96 | # Environments 97 | .env 98 | .venv 99 | env/ 100 | venv/ 101 | ENV/ 102 | env.bak/ 103 | venv.bak/ 104 | *environment.yml 105 | 106 | # Spyder project settings 107 | .spyderproject 108 | .spyproject 109 | 110 | # PyCharm 111 | .idea 112 | 113 | # Rope project settings 114 | .ropeproject 115 | 116 | # mkdocs documentation 117 | /site 118 | 119 | # mypy 120 | .mypy_cache/ 121 | 122 | # dask 123 | dask-worker-space/ 124 | 125 | #ruff linting 126 | .ruff_cache 127 | .envrc 128 | pyrightconfig.json 129 | 130 | # Pixi 131 | .pixi 132 | 133 | # SonarQube 134 | .scannerwork 135 | -------------------------------------------------------------------------------- /tests/components/test_datasets_component.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import cast 3 | 4 | import pytest 5 | 6 | from hydromt.model import Model 7 | from hydromt.model.components.datasets import DatasetsComponent 8 | 9 | 10 | def test_model_dataset_key_error(tmp_path: Path): 11 | m = 
Model(root=tmp_path, mode="r+") 12 | m.add_component("test_dataset", DatasetsComponent(m)) 13 | component = cast(DatasetsComponent, m.get_component("test_dataset")) 14 | 15 | with pytest.raises(KeyError): 16 | component.data["1"] 17 | 18 | 19 | def test_model_dataset_sets_correctly(obsda, tmp_path: Path): 20 | m = Model(root=tmp_path, mode="r+") 21 | component = DatasetsComponent(m) 22 | m.add_component("test_dataset", component) 23 | 24 | # make a couple copies of the da for testing 25 | das = {str(i): obsda.copy() for i in range(5)} 26 | 27 | for i, d in das.items(): 28 | component.set(data=d, name=i) 29 | assert obsda.equals(component.data[i]) 30 | 31 | assert list(component.data.keys()) == list(map(str, range(5))) 32 | 33 | 34 | def test_model_dataset_reads_and_writes_correctly(obsda, tmp_path: Path): 35 | model = Model(root=tmp_path, mode="w+") 36 | component = DatasetsComponent(model) 37 | model.add_component("test_dataset", component) 38 | 39 | component.set(data=obsda, name="data") 40 | 41 | model.write() 42 | clean_model = Model(root=tmp_path, mode="r") 43 | clean_component = DatasetsComponent(clean_model) 44 | clean_model.add_component("test_dataset", clean_component) 45 | clean_model.read() 46 | 47 | # we'll know that these types will always be the same, which mypy doesn't know 48 | assert component.data["data"].equals(clean_component.data["data"]) # type: ignore 49 | 50 | 51 | def test_model_read_dataset(obsda, tmp_path: Path): 52 | write_path = tmp_path / "datasets" / "forcing.nc" 53 | write_path.parent.mkdir(parents=True, exist_ok=True) 54 | obsda.to_netcdf(write_path, engine="netcdf4") 55 | 56 | model = Model(root=tmp_path, mode="r") 57 | dataset_component = DatasetsComponent(model) 58 | model.add_component("forcing", dataset_component) 59 | 60 | component_data = dataset_component.data["forcing"] 61 | assert obsda.equals(component_data) 62 | -------------------------------------------------------------------------------- 
/tests/data_catalog/drivers/test_base_driver.py: -------------------------------------------------------------------------------- 1 | from typing import ClassVar 2 | 3 | from pydantic import Field 4 | 5 | from hydromt.data_catalog.drivers.base_driver import BaseDriver, DriverOptions 6 | 7 | 8 | class TestBaseDriver: 9 | def test_init_dict_all_explicit(self): 10 | driver: BaseDriver = BaseDriver.model_validate( 11 | { 12 | "name": "pyogrio", 13 | "filesystem": "memory", 14 | } 15 | ) 16 | 17 | assert driver.__class__.__qualname__ == "PyogrioDriver" 18 | assert driver.filesystem.get_fs().__class__.__qualname__ == "MemoryFileSystem" 19 | 20 | def test_init_dict_minimal_args(self): 21 | driver: BaseDriver = BaseDriver.model_validate({"name": "pyogrio"}) 22 | 23 | assert driver.__class__.__qualname__ == "PyogrioDriver" 24 | assert driver.filesystem.get_fs().__class__.__qualname__ == "LocalFileSystem" 25 | 26 | def test_serializes_name(self): 27 | driver = BaseDriver.model_validate({"name": "pyogrio"}) 28 | assert driver.model_dump().get("name") == "pyogrio" 29 | 30 | 31 | class TestDriverOptions: 32 | class CustomOptions(DriverOptions): 33 | """Custom options for testing.""" 34 | 35 | KWARGS_FOR_OPEN: ClassVar[set[str]] = {"custom_kwarg1"} 36 | 37 | custom_kwarg1: str = Field( 38 | default="value1", 39 | description="since this is in KWARGS_FOR_OPEN, it should be returned by get_kwargs().", 40 | ) 41 | custom_kwarg2: int = Field( 42 | default=123, 43 | description="since this is not in KWARGS_FOR_OPEN, it should not be returned by get_kwargs().", 44 | ) 45 | 46 | def test_get_kwargs(self): 47 | dct = { 48 | "chunks": {"x": 100, "y": 100}, # not a declared field, so always included 49 | "decode_times": True, # not a declared field, so always included 50 | "custom_kwarg1": "value1", # in KWARGS_FOR_OPEN, so included 51 | "custom_kwarg2": 123, # not in KWARGS_FOR_OPEN, so not included 52 | } 53 | options = self.CustomOptions(**dct) 54 | kwargs = options.get_kwargs() 55 | 
dct.pop("custom_kwarg2") # should not be in kwargs so remove 56 | assert kwargs == dct 57 | -------------------------------------------------------------------------------- /tests/components/test_tables_component.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import cast 3 | 4 | import pandas as pd 5 | import pytest 6 | 7 | from hydromt.model import Model 8 | from hydromt.model.components.tables import TablesComponent 9 | 10 | 11 | def test_model_tables_key_error(df, tmp_path: Path): 12 | m = Model(root=tmp_path, mode="r+") 13 | m.add_component("test_table", TablesComponent(m)) 14 | component = cast(TablesComponent, m.test_table) 15 | 16 | with pytest.raises(KeyError): 17 | component.data["1"] 18 | 19 | 20 | def test_model_tables_merges_correctly(df, tmp_path: Path): 21 | m = Model(root=tmp_path, mode="r+") 22 | m.add_component("test_table", TablesComponent(m)) 23 | component = cast(TablesComponent, m.test_table) 24 | 25 | # make a couple copies of the dfs for testing 26 | dfs = {str(i): df.copy() * i for i in range(5)} 27 | 28 | component.set(tables=dfs) 29 | 30 | computed = component.get_tables_merged() 31 | expected = pd.concat([df.assign(table_origin=name) for name, df in dfs.items()]) 32 | assert computed.equals(expected) 33 | 34 | 35 | def test_model_tables_sets_correctly(df, tmp_path: Path): 36 | m = Model(root=tmp_path, mode="r+") 37 | m.add_component("test_table", TablesComponent(m)) 38 | component = cast(TablesComponent, m.test_table) 39 | 40 | # make a couple copies of the dfs for testing 41 | dfs = {str(i): df.copy() for i in range(5)} 42 | 43 | for i, d in dfs.items(): 44 | component.set(tables=d, name=i) 45 | assert df.equals(component.data[i]) 46 | 47 | assert list(component.data.keys()) == list(map(str, range(5))) 48 | 49 | 50 | def test_model_tables_reads_and_writes_correctly(df, tmp_path: Path): 51 | model = Model(root=tmp_path, mode="r+") 52 | 
model.add_component("test_table", TablesComponent(model)) 53 | component = cast(TablesComponent, model.test_table) 54 | 55 | component.set(tables=df, name="table") 56 | 57 | model.write() 58 | clean_model = Model(root=tmp_path, mode="r") 59 | clean_model.add_component("test_table", TablesComponent(model)) 60 | clean_model.read() 61 | 62 | clean_component = cast(TablesComponent, clean_model.test_table) 63 | 64 | assert component.data["table"].equals(clean_component.data["table"]) 65 | -------------------------------------------------------------------------------- /docs/dev/plugin_dev/custom_implementation/index.rst: -------------------------------------------------------------------------------- 1 | .. _plugin_examples: 2 | 3 | ============================== 4 | Examples: Extending HydroMT 5 | ============================== 6 | 7 | This section provides detailed examples of how to extend HydroMT for your own plugin. 8 | Each page describes one aspect of customization, from implementing a new model class to defining data catalogs and resolvers. 9 | 10 | .. grid:: 3 11 | :gutter: 1 12 | 13 | .. grid-item-card:: 14 | :text-align: center 15 | :link: custom_model 16 | :link-type: ref 17 | 18 | :octicon:`book;5em;sd-text-icon blue-icon` 19 | +++ 20 | Implementing your own HydroMT Model class 21 | 22 | .. grid-item-card:: 23 | :text-align: center 24 | :link: custom_component 25 | :link-type: ref 26 | 27 | :octicon:`pencil;5em;sd-text-icon blue-icon` 28 | +++ 29 | Creating your own Model Components 30 | 31 | .. grid-item-card:: 32 | :text-align: center 33 | :link: custom_data_catalog 34 | :link-type: ref 35 | 36 | :octicon:`database;5em;sd-text-icon blue-icon` 37 | +++ 38 | Pre-defined Data Catalog for your plugin 39 | 40 | .. grid-item-card:: 41 | :text-align: center 42 | :link: custom_data_source 43 | :link-type: ref 44 | 45 | :octicon:`server;5em;sd-text-icon blue-icon` 46 | +++ 47 | Custom Data Source 48 | 49 | .. 
grid-item-card:: 50 | :text-align: center 51 | :link: custom_driver 52 | :link-type: ref 53 | 54 | :octicon:`server;5em;sd-text-icon blue-icon` 55 | +++ 56 | Custom Data Driver 57 | 58 | .. grid-item-card:: 59 | :text-align: center 60 | :link: custom_resolver 61 | :link-type: ref 62 | 63 | :octicon:`server;5em;sd-text-icon blue-icon` 64 | +++ 65 | Custom Resolver 66 | 67 | .. grid-item-card:: 68 | :text-align: center 69 | :link: custom_data_adapter 70 | :link-type: ref 71 | 72 | :octicon:`server;5em;sd-text-icon blue-icon` 73 | +++ 74 | Custom Data Adapter 75 | 76 | .. toctree:: 77 | :hidden: 78 | 79 | model 80 | component 81 | catalog 82 | data_source 83 | driver 84 | resolver 85 | data_adapter 86 | -------------------------------------------------------------------------------- /docs/about/dependencies.rst: -------------------------------------------------------------------------------- 1 | .. _dependencies: 2 | 3 | Dependencies 4 | ============ 5 | 6 | HydroMT builds on the latest packages in the scientific and geospatial python eco-system including: 7 | 8 | - **Core libraries:** xarray_, rioxarray_, pandas_, geopandas_, numpy_, pyflwdir_, 9 | dask_, numba_ 10 | - **Geospatial libraries:** affine, pyproj, shapely, xugrid 11 | - **Statistics:** bottleneck, scipy 12 | - **File I/O:** fsspec, mercantile, netCDF4, pooch, pyogrio, pyarrow, pystac, rasterio, 13 | requests, universal-pathlib, xmltodict, zarr 14 | - **Configuration and CLI tools:** click, importlib-metadata, pydantic, pydantic-settings, pyyaml 15 | - **System and support libraries:** packaging, toml, tomli-w 16 | - **Other (indirect) dependencies:** aiohappyeyeballs, aiohttp, aiosignal, annotated-types, asciitree, 17 | async-timeout, attrs, branca, certifi, cftime, charset-normalizer, click-plugins, cligj, cloudpickle, 18 | contourpy, cycler, fasteners, folium, fonttools, frozenlist, geoalchemy2, geographiclib, 19 | geopy, greenlet, idna, importlib-resources, jinja2, joblib, kiwisolver, llvmlite, 20 | 
locket, mapclassify, markupsafe, matplotlib, multidict, networkx, numba-celltree, numcodecs, partd, pillow, 21 | platformdirs, propcache, psycopg-binary, pydantic-core, pyparsing, python-dateutil, python-dotenv, pytz, 22 | scikit-learn, six, sqlalchemy, threadpoolctl, toolz, typing-extensions, typing-inspection, 23 | tzdata, urllib3, xyzservices, yarl, zipp 24 | 25 | HydroMT also comes with additional optional dependencies that can be installed 26 | to enable extra functionality: 27 | 28 | - **io**: gcsfs, fastparquet, openpyxl, pillow, s3fs 29 | - **gdal**: gdal 30 | - **extra**: matplotlib, pyet 31 | - **examples**: cartopy, jupyterlab, notebook 32 | - **slim**: gcsfs, fastparquet, openpyxl, pillow, s3fs, matplotlib, pyet, cartopy, jupyterlab, notebook 33 | 34 | You can use pip to install these extra dependencies, e.g., 35 | 36 | .. code-block:: bash 37 | 38 | pip install "hydromt[slim]" 39 | 40 | 41 | .. _xarray: https://xarray.pydata.org 42 | .. _geopandas: https://geopandas.org 43 | .. _pandas: https://pandas.pydata.org 44 | .. _rioxarray: https://corteva.github.io/rioxarray/stable/ 45 | .. _numpy: https://numpy.org 46 | .. _pyflwdir: https://deltares.github.io/pyflwdir 47 | .. _dask: https://dask.org 48 | .. 
_numba: https://numba.pydata.org 49 | -------------------------------------------------------------------------------- /docs/_static/switcher.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "v0.5.0", 4 | "version": "0.5.0", 5 | "url": "https://deltares.github.io/hydromt/v0.5.0/" 6 | }, 7 | { 8 | "name": "v0.6.0", 9 | "version": "0.6.0", 10 | "url": "https://deltares.github.io/hydromt/v0.6.0/" 11 | }, 12 | { 13 | "name": "v0.7.0", 14 | "version": "0.7.0", 15 | "url": "https://deltares.github.io/hydromt/v0.7.0/" 16 | }, 17 | { 18 | "name": "v0.7.1", 19 | "version": "0.7.1", 20 | "url": "https://deltares.github.io/hydromt/v0.7.1/" 21 | }, 22 | { 23 | "name": "v0.8.0", 24 | "version": "0.8.0", 25 | "url": "https://deltares.github.io/hydromt/v0.8.0/" 26 | }, 27 | { 28 | "name": "v0.9.0", 29 | "version": "0.9.0", 30 | "url": "https://deltares.github.io/hydromt/v0.9.0/" 31 | }, 32 | { 33 | "name": "v0.9.1", 34 | "version": "0.9.1", 35 | "url": "https://deltares.github.io/hydromt/v0.9.1/" 36 | }, 37 | { 38 | "name": "v0.9.2", 39 | "version": "0.9.2", 40 | "url": "https://deltares.github.io/hydromt/v0.9.2/" 41 | }, 42 | { 43 | "name": "v0.9.3", 44 | "version": "0.9.3", 45 | "url": "https://deltares.github.io/hydromt/v0.9.3/" 46 | }, 47 | { 48 | "name": "v0.9.4", 49 | "version": "0.9.4", 50 | "url": "https://deltares.github.io/hydromt/v0.9.4/" 51 | }, 52 | { 53 | "name": "v0.10.0", 54 | "version": "0.10.0", 55 | "url": "https://deltares.github.io/hydromt/v0.10.0/" 56 | }, 57 | { 58 | "name": "v0.10.1", 59 | "version": "0.10.1", 60 | "url": "https://deltares.github.io/hydromt/v0.10.1/" 61 | }, 62 | { 63 | "name": "v1.0.0", 64 | "version": "1.0.0", 65 | "url": "https://deltares.github.io/hydromt/v1.0.0/" 66 | }, 67 | { 68 | "name": "v1.0.1", 69 | "version": "1.0.1", 70 | "url": "https://deltares.github.io/hydromt/v1.0.1/" 71 | }, 72 | { 73 | "name": "v1.1.0", 74 | "version": "1.1.0", 75 | "url": 
"https://deltares.github.io/hydromt/v1.1.0/" 76 | }, 77 | { 78 | "name": "v1.2.0", 79 | "version": "1.2.0", 80 | "url": "https://deltares.github.io/hydromt/v1.2.0/" 81 | }, 82 | { 83 | "name": "v1.3.0", 84 | "version": "1.3.0", 85 | "url": "https://deltares.github.io/hydromt/v1.3.0/" 86 | }, 87 | { 88 | "name": "latest", 89 | "version": "latest", 90 | "url": "https://deltares.github.io/hydromt/latest/" 91 | } 92 | ] 93 | -------------------------------------------------------------------------------- /hydromt/data_catalog/drivers/dataset/dataset_driver.py: -------------------------------------------------------------------------------- 1 | """Abstract driver to read datasets.""" 2 | 3 | import logging 4 | from abc import ABC, abstractmethod 5 | from pathlib import Path 6 | from typing import Any 7 | 8 | import xarray as xr 9 | 10 | from hydromt.data_catalog.drivers.base_driver import ( 11 | BaseDriver, 12 | ) 13 | from hydromt.error import NoDataStrategy 14 | 15 | logger = logging.getLogger(__name__) 16 | 17 | 18 | class DatasetDriver(BaseDriver, ABC): 19 | """Abstract Driver to read Datasets.""" 20 | 21 | @abstractmethod 22 | def read( 23 | self, uris: list[str], *, handle_nodata: NoDataStrategy = NoDataStrategy.RAISE 24 | ) -> xr.Dataset: 25 | """ 26 | Read data from one or more URIs into an xarray Dataset. 27 | 28 | This abstract method defines the interface for all dataset drivers. Subclasses 29 | should implement data loading logic appropriate for the format being read. 30 | 31 | Parameters 32 | ---------- 33 | uris : list[str] 34 | List of URIs to read data from. 35 | handle_nodata : NoDataStrategy, optional 36 | Strategy to handle missing or empty data. Default is NoDataStrategy.RAISE. 37 | 38 | Returns 39 | ------- 40 | xr.Dataset 41 | The loaded dataset. 42 | """ 43 | ... 
44 | 45 | @abstractmethod 46 | def write( 47 | self, 48 | path: Path | str, 49 | data: xr.Dataset, 50 | *, 51 | write_kwargs: dict[str, Any] | None = None, 52 | ) -> Path: 53 | """ 54 | Write an xarray Dataset to disk. 55 | 56 | This abstract method defines the interface for all Dataset-based drivers. 57 | Subclasses should implement logic for writing datasets in specific formats 58 | (e.g., NetCDF, Zarr). 59 | 60 | Parameters 61 | ---------- 62 | path : Path | str 63 | Destination path or URI where the Dataset should be written. 64 | data : xr.Dataset 65 | The Dataset to write. 66 | write_kwargs : dict[str, Any], optional 67 | Additional keyword arguments to pass to the underlying xarray write function 68 | (e.g., `to_zarr`, `to_netcdf`). Default is None. 69 | 70 | Returns 71 | ------- 72 | Path 73 | The path where the data was written. 74 | 75 | """ 76 | ... 77 | -------------------------------------------------------------------------------- /data/predefined_catalogs.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # NOTE this file has been deprecated and will be removed in future versions 3 | # 4 | # list of predefined data catalogs 5 | # 6 | # : 7 | # urlpath: 8 | # versions: 9 | # : 10 | # notes: 11 | # 12 | # Notes: 13 | # if new versions of the data_catalogs are created, the table below should be updated by: 14 | # - adding the new version with a git reference to main, e.g., v2023.2: main 15 | # - updating the git hash or git tag of the previous version, e.g., v2023.1: 0bf8e2a1e716095dc6df54a5e9114ce88da0701b 16 | # the git hash of the previous version can be found by browsing to the catalog file in the github main branch and then clicking on the history button 17 | 18 | deltares_data: 19 | urlpath: https://raw.githubusercontent.com/Deltares/hydromt/{version}/data/catalogs/deltares_data.yml 20 | versions: 21 | v2024.1.30: 50863d777404a4a91b5f66bf285b3edaa56713b1 22 | v2023.12.21: 
392206b21b26e62e00ae76db7ffc61a3b95e2175 23 | v2023.12.19: bf25e79dcbda67612a75588cd782d57abe3881de 24 | v2023.2: 0bf8e2a1e716095dc6df54a5e9114ce88da0701b 25 | v2022.7: e082da339f22cb1fc3571eec5a901a21d1c8a7bd 26 | v2022.5: d88cc47bd4ecc83de38c00aa554a7d48ad23ec23 27 | notes: This data is only accessible when connected to the Deltares network. 28 | gcs_cmip6_data: 29 | urlpath: https://raw.githubusercontent.com/Deltares/hydromt/{version}/data/catalogs/gcs_cmip6_data.yml 30 | versions: 31 | v2024.1.30: 503f9a400e8df89ecca240091d8eba9de1fedf75 32 | v2023.2: 0144d5dadfb76a9f2bdb22226b484e83c9751c34 33 | notes: This data uses CMIP6 data stored in Google Cloud Service. 34 | aws_data: 35 | urlpath: https://raw.githubusercontent.com/Deltares/hydromt/{version}/data/catalogs/aws_data.yml 36 | versions: 37 | v2024.1.30: 503f9a400e8df89ecca240091d8eba9de1fedf75 38 | v2023.2: 897e5c5272875f1c066f393798b7ae59721c9e9d 39 | notes: This data are stored in public Amazon Web Services. 40 | artifact_data: 41 | urlpath: https://raw.githubusercontent.com/Deltares/hydromt/{version}/data/catalogs/artifact_data.yml 42 | versions: 43 | v0.0.9: 503f9a400e8df89ecca240091d8eba9de1fedf75 44 | v0.0.8: 202874eb4fe3415d0608ea81cd61620af6f5816a 45 | notes: This data archive contains a sample dataset for the Piave basin (North Italy) to be used for tests and docs/demo purposes. 46 | -------------------------------------------------------------------------------- /docs/api/drivers.rst: -------------------------------------------------------------------------------- 1 | .. currentmodule:: hydromt.data_catalog.drivers 2 | 3 | .. _driver_api: 4 | 5 | ======= 6 | Drivers 7 | ======= 8 | 9 | The Hydromt drivers module provides drivers for various datasets and formats. 10 | Each driver implements `read` and optionally `write` methods, along with configuration options and a file system handler. 
11 | 12 | 13 | FileSystem 14 | -------------- 15 | 16 | All drivers rely on shared type definitions from :mod:`hydromt.typing`. 17 | 18 | .. currentmodule:: hydromt.typing 19 | 20 | .. autosummary:: 21 | :toctree: ../_generated 22 | :nosignatures: 23 | 24 | FSSpecFileSystem 25 | 26 | .. currentmodule:: hydromt.data_catalog.drivers 27 | 28 | 29 | Driver Base Classes 30 | -------------------- 31 | 32 | .. autosummary:: 33 | :toctree: ../_generated 34 | :nosignatures: 35 | 36 | BaseDriver 37 | DriverOptions 38 | 39 | Raster Data Drivers 40 | -------------------- 41 | 42 | .. autosummary:: 43 | :toctree: ../_generated 44 | :nosignatures: 45 | 46 | RasterDatasetDriver 47 | RasterDatasetXarrayDriver 48 | RasterXarrayOptions 49 | RasterioDriver 50 | RasterioOptions 51 | 52 | Vector & Geospatial Drivers 53 | ---------------------------- 54 | 55 | .. autosummary:: 56 | :toctree: ../_generated 57 | :nosignatures: 58 | 59 | GeoDataFrameDriver 60 | PyogrioDriver 61 | GeoDataFrameTableDriver 62 | GeoDataFrameTableOptions 63 | 64 | Tabular Data Drivers 65 | --------------------- 66 | 67 | .. autosummary:: 68 | :toctree: ../_generated 69 | :nosignatures: 70 | 71 | DataFrameDriver 72 | PandasDriver 73 | 74 | Geospatial Dataset Drivers 75 | --------------------------- 76 | 77 | .. autosummary:: 78 | :toctree: ../_generated 79 | :nosignatures: 80 | 81 | GeoDatasetDriver 82 | GeoDatasetOptions 83 | GeoDatasetXarrayDriver 84 | GeoDatasetVectorDriver 85 | 86 | General Dataset Drivers 87 | ------------------------ 88 | 89 | .. autosummary:: 90 | :toctree: ../_generated 91 | :nosignatures: 92 | 93 | DatasetDriver 94 | DatasetXarrayDriver 95 | DatasetXarrayOptions 96 | 97 | Preprocessing 98 | ------------- 99 | 100 | .. 
autosummary:: 101 | :toctree: ../_generated 102 | 103 | preprocessing.harmonise_dims 104 | preprocessing.remove_duplicates 105 | preprocessing.round_latlon 106 | preprocessing.to_datetimeindex 107 | --------------------------------------------------------------------------------