├── tests ├── fixtures │ ├── __init__.py │ ├── fixture_inputfile.py │ ├── fixture_fixed_format_parser.py │ ├── fixture_run_coupler_function.py │ ├── common.py │ ├── fixture_ribasim.py │ ├── fixture_ribametamod.py │ └── fixture_paths.py ├── common_scripts │ ├── __init__.py │ └── mf6_water_balance │ │ ├── __init__.py │ │ ├── MF6_wbal_listing.py │ │ └── combine.py ├── data │ ├── modstrip │ │ └── input │ │ │ ├── msw │ │ │ ├── scap_svat.inp │ │ │ ├── init_svat.inp │ │ │ ├── sel_svat_bda.inp │ │ │ ├── sel_svat_csv.inp │ │ │ ├── mete_stat.inp │ │ │ ├── mod2svat.inp │ │ │ ├── svat2etrefgrid.inp │ │ │ ├── svat2precgrid.inp │ │ │ ├── infi_svat.inp │ │ │ ├── uscl_svat.inp │ │ │ ├── idf_svat.inp │ │ │ ├── svat2swnr_roff.inp │ │ │ ├── area_svat.inp │ │ │ ├── sel_key_svat_per.inp │ │ │ └── para_sim_template.inp │ │ │ ├── GWF_1 │ │ │ ├── MODELOUTPUT │ │ │ │ ├── HEAD │ │ │ │ │ └── .gitkeep │ │ │ │ └── BUDGET │ │ │ │ │ └── .gitkeep │ │ │ ├── MODELINPUT │ │ │ │ ├── DRN6 │ │ │ │ │ └── DRN_T1.ARR │ │ │ │ ├── MS_MF6.RCH6 │ │ │ │ ├── RIV6 │ │ │ │ │ └── RIV_T1.ARR │ │ │ │ ├── MS_MF6.IC6 │ │ │ │ ├── MS_MF6.DRN6 │ │ │ │ ├── MS_MF6.RIV6 │ │ │ │ ├── DIS6 │ │ │ │ │ ├── IBOUND_L1.ARR │ │ │ │ │ ├── IBOUND_L2.ARR │ │ │ │ │ └── IBOUND_L3.ARR │ │ │ │ ├── STO │ │ │ │ │ ├── SY_L1.ARR │ │ │ │ │ ├── SY_L2.ARR │ │ │ │ │ └── SY_L3.ARR │ │ │ │ ├── NPF │ │ │ │ │ ├── K33_L1.ARR │ │ │ │ │ ├── K33_L2.ARR │ │ │ │ │ └── K33_L3.ARR │ │ │ │ ├── MS_MF6.NPF6 │ │ │ │ └── MS_MF6.DIS6 │ │ │ └── MS_MF6.NAM │ │ │ ├── NODENR2SVAT.DXC │ │ │ ├── RCHINDEX2SVAT.DXC │ │ │ ├── MFSIM.NAM │ │ │ └── MFSIM.IMS6 │ └── bucket_model │ │ ├── metaswap │ │ ├── init_svat.inp │ │ ├── percolmax.txt │ │ ├── scap_svat.inp │ │ ├── SEL_SVAT_BDA.INP │ │ ├── SEL_SVAT_CSV.INP │ │ ├── mod2svat.inp │ │ ├── idf_svat.inp │ │ ├── svat2swnr_roff.inp │ │ ├── infi_svat.inp │ │ ├── para_sim_template.inp │ │ ├── area_svat.inp │ │ ├── sel_key_svat_per.inp │ │ ├── MOD-SIM.TXT │ │ └── PreMetaSWAP.key │ │ ├── modflow6 │ │ ├── GWF_1 │ │ │ ├── riv-1 │ │ │ │ └── riv.dat │ │ │ ├── ic.ic │ │ │ ├── oc.oc │ │ │ ├── riv-1.riv │ │ │ ├── dis │ │ │ │ └── idomain.bin │ │ │ ├── sto.sto │ │ │ ├── GWF_1.nam │ │ │ ├── npf.npf │ │ │ ├── dis.dis │ │ │ └── rch.rch │ │ ├── mfsim.nam │ │ ├── solver.ims │ │ └── time_discretization.tdis │ │ ├── exchanges │ │ ├── riv-1.tsv │ │ ├── sprinkling_index.dxc │ │ ├── nodenr2svat.dxc │ │ ├── ponding_index.dxc │ │ └── rchindex2svat.dxc │ │ └── ribasim │ │ ├── database.gpkg │ │ └── ribasim.toml ├── conftest.py ├── test_imod_coupler │ ├── test_parser.py │ ├── test_logger.py │ ├── test_config_cases.py │ ├── test_msw_wrapper.py │ ├── test_waterbalance_script.py │ ├── test_mapping.py │ ├── test_utilities.py │ ├── test_config.py │ ├── test_mf6_wrapper.py │ └── test_ribamod_cases.py └── test_primod │ ├── test_mixin.py │ ├── test_node_svat_mapping.py │ └── test_wel_svat_mapping.py ├── imod_coupler ├── drivers │ ├── __init__.py │ ├── metamod │ │ ├── __init__.py │ │ └── config.py │ ├── ribamod │ │ ├── __init__.py │ │ └── config.py │ ├── ribametamod │ │ ├── __init__.py │ │ └── config.py │ ├── kernel_config.py │ └── driver.py ├── __init__.py ├── config.py ├── parser.py ├── kernelwrappers │ └── ribasim_wrapper.py ├── __main__.py └── logging │ └── exchange_collector.py ├── open-vscode.bat ├── .teamcity ├── .idea │ ├── .gitignore │ ├── kotlinc.xml │ ├── misc.xml │ ├── compiler.xml │ └── jarRepositories.xml ├── README ├── Primod │ ├── Project.kt │ └── buildTypes │ │ ├── Primod_TestPrimodPython312Win64.kt │ │ └── Primod_TestPrimodWin64.kt ├── IMODCollector │ ├── Project.kt │ └── 
buildTypes │ │ └── IMODCollector_X64development.kt ├── _Self │ ├── Project.kt │ ├── vcsRoots │ │ ├── MetaSwapLookupTable.kt │ │ └── ImodCoupler.kt │ └── buildTypes │ │ └── TestbenchCouplerWin64.kt ├── settings.kts └── pom.xml ├── .gitattributes ├── .sonarcloud.properties ├── pre-processing ├── primod │ ├── typing.py │ ├── driver_coupling │ │ ├── __init__.py │ │ ├── driver_coupling_base.py │ │ ├── metamod.py │ │ └── util.py │ ├── mapping │ │ ├── __init__.py │ │ ├── svat_user_demand_mapping.py │ │ ├── svat_basin_mapping.py │ │ ├── node_svat_mapping.py │ │ ├── rch_svat_mapping.py │ │ ├── wel_svat_mapping.py │ │ └── mappingbase.py │ ├── __init__.py │ ├── model_mixin.py │ └── coupled_model.py ├── README.md ├── pyproject.toml └── LICENSE ├── mypy.ini ├── .github ├── dependabot.yml └── workflows │ ├── pixi_auto_update.yml │ └── lint.yml ├── .vscode ├── extensions.json ├── launch.json └── settings.json ├── configuration.md ├── scripts ├── templates │ └── .env.jinja ├── generate_env_file.py └── download_imod_collector.py ├── pyproject.toml ├── .gitignore ├── pixi.toml └── README.md /tests/fixtures/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /imod_coupler/drivers/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/common_scripts/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /imod_coupler/drivers/metamod/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /imod_coupler/drivers/ribamod/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/msw/scap_svat.inp: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /imod_coupler/drivers/ribametamod/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /imod_coupler/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "2024.3.0" 2 | -------------------------------------------------------------------------------- /tests/common_scripts/mf6_water_balance/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MODELOUTPUT/HEAD/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /open-vscode.bat: -------------------------------------------------------------------------------- 1 | pixi run --environment dev code . 
| exit 2 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MODELOUTPUT/BUDGET/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/data/bucket_model/metaswap/init_svat.inp: -------------------------------------------------------------------------------- 1 | rootzone_pF 2 | 0.5 -------------------------------------------------------------------------------- /tests/data/bucket_model/metaswap/percolmax.txt: -------------------------------------------------------------------------------- 1 | 1 0.090 2 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/msw/init_svat.inp: -------------------------------------------------------------------------------- 1 | Rootzone_pF 2 | 2.2 3 | -------------------------------------------------------------------------------- /tests/data/bucket_model/modflow6/GWF_1/riv-1/riv.dat: -------------------------------------------------------------------------------- 1 | 1 2 4 0.5 500 0.0 -------------------------------------------------------------------------------- /tests/data/modstrip/input/msw/sel_svat_bda.inp: -------------------------------------------------------------------------------- 1 | 1 2 | 2 3 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/msw/sel_svat_csv.inp: -------------------------------------------------------------------------------- 1 | 1 2 | 2 3 | -------------------------------------------------------------------------------- /tests/data/bucket_model/exchanges/riv-1.tsv: -------------------------------------------------------------------------------- 1 | basin_index bound_index 2 | 0 0 3 | -------------------------------------------------------------------------------- /.teamcity/.idea/.gitignore: -------------------------------------------------------------------------------- 1 | # Default ignored files 2 | /shelf/ 3 | /workspace.xml 4 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # GitHub syntax highlighting 2 | pixi.lock linguist-language=YAML 3 | 4 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/msw/mete_stat.inp: -------------------------------------------------------------------------------- 1 | 260 52.00 5.2 10.0 2 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/msw/mod2svat.inp: -------------------------------------------------------------------------------- 1 | 1 1 1 2 | 2 2 1 3 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/NODENR2SVAT.DXC: -------------------------------------------------------------------------------- 1 | 1 1 1 2 | 2 2 1 -------------------------------------------------------------------------------- /tests/data/modstrip/input/RCHINDEX2SVAT.DXC: -------------------------------------------------------------------------------- 1 | 1 1 1 2 | 2 2 1 -------------------------------------------------------------------------------- /tests/data/modstrip/input/msw/svat2etrefgrid.inp: -------------------------------------------------------------------------------- 1 | 1 1 1 2 | 2 1 1 3 | 
-------------------------------------------------------------------------------- /tests/data/modstrip/input/msw/svat2precgrid.inp: -------------------------------------------------------------------------------- 1 | 1 1 1 2 | 2 1 1 3 | -------------------------------------------------------------------------------- /.sonarcloud.properties: -------------------------------------------------------------------------------- 1 | sonar.projectKey=Deltares_imod_coupler 2 | sonar.organization=deltares 3 | sonar.tests=tests 4 | -------------------------------------------------------------------------------- /pre-processing/primod/typing.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | Int = np.int_ 4 | Float = np.float64 5 | Bool = np.bool_ 6 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | plugins = pydantic.mypy, numpy.typing.mypy_plugin, pandera.mypy 3 | strict = True 4 | ignore_missing_imports = True -------------------------------------------------------------------------------- /tests/data/bucket_model/ribasim/database.gpkg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Deltares/imod_coupler/main/tests/data/bucket_model/ribasim/database.gpkg -------------------------------------------------------------------------------- /tests/data/bucket_model/modflow6/GWF_1/ic.ic: -------------------------------------------------------------------------------- 1 | begin options 2 | end options 3 | 4 | begin griddata 5 | strt 6 | constant -2.0 7 | end griddata 8 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/msw/infi_svat.inp: -------------------------------------------------------------------------------- 1 | 1 1.000 -9999. -9999. -9999. -9999. 2 | 2 1.000 -9999. -9999. -9999. -9999. 3 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/msw/uscl_svat.inp: -------------------------------------------------------------------------------- 1 | 1 1.0000 1.0000 1.0000 1.0000 2 | 2 1.0000 1.0000 1.0000 1.0000 3 | -------------------------------------------------------------------------------- /tests/data/bucket_model/exchanges/sprinkling_index.dxc: -------------------------------------------------------------------------------- 1 | 1 1 1 2 | 1 3 1 3 | 1 6 1 -------------------------------------------------------------------------------- /tests/data/bucket_model/ribasim/ribasim.toml: -------------------------------------------------------------------------------- 1 | starttime = 2020-01-01 00:00:00 2 | endtime = 2021-01-01 00:00:00 3 | input_dir = "." 
4 | results_dir = "results" 5 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/msw/idf_svat.inp: -------------------------------------------------------------------------------- 1 | 1 1 1 50.00 250.00 2 | 2 2 1 50.00 150.00 3 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MODELINPUT/DRN6/DRN_T1.ARR: -------------------------------------------------------------------------------- 1 | 1 2 1 -5.000000 100.0000 1 2 | 1 3 1 -5.000000 100.0000 1 3 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/msw/svat2swnr_roff.inp: -------------------------------------------------------------------------------- 1 | 1 0 1.000 1.0 1.0 1.00 1.00 2 | 2 0 1.000 1.0 1.0 1.00 1.00 3 | -------------------------------------------------------------------------------- /.teamcity/.idea/kotlinc.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MODELINPUT/MS_MF6.RCH6: -------------------------------------------------------------------------------- 1 | BEGIN OPTIONS 2 | END OPTIONS 3 | 4 | BEGIN DIMENSIONS 5 | MAXBOUND 2 6 | END DIMENSIONS 7 | 8 | BEGIN PERIOD 1 9 | 1 2 1 0.0 10 | 1 3 1 0.0 11 | END PERIOD -------------------------------------------------------------------------------- /tests/data/bucket_model/modflow6/GWF_1/oc.oc: -------------------------------------------------------------------------------- 1 | begin options 2 | budget fileout GWF_1/GWF_1.cbc 3 | head fileout GWF_1/GWF_1.hds 4 | end options 5 | 6 | begin period 1 7 | save head last 8 | save budget last 9 | end period 10 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MODELINPUT/RIV6/RIV_T1.ARR: -------------------------------------------------------------------------------- 1 | 1 2 1 10.00000 100.0000 10.00000 0.000000 1 2 | 1 3 1 10.00000 100.0000 10.00000 0.000000 1 3 | -------------------------------------------------------------------------------- /pre-processing/README.md: -------------------------------------------------------------------------------- 1 | # Primod 2 | 3 | 4 | Primod is a pre-processing tool for [iMOD Coupler](https://github.com/Deltares/imod_coupler). 5 | Its usage is documented in the [iMOD Suite documentation](https://github.com/Deltares/imod_coupler). -------------------------------------------------------------------------------- /tests/data/bucket_model/modflow6/GWF_1/riv-1.riv: -------------------------------------------------------------------------------- 1 | begin options 2 | save_flows 3 | end options 4 | 5 | begin dimensions 6 | maxbound 1 7 | end dimensions 8 | 9 | begin period 1 10 | open/close GWF_1/riv-1/riv.dat (binary) 11 | end period 12 | -------------------------------------------------------------------------------- /tests/data/bucket_model/metaswap/scap_svat.inp: -------------------------------------------------------------------------------- 1 | 1 25.00 0 2 | 3 25.00 0 3 | 6 25.00 0 4 | -------------------------------------------------------------------------------- /.teamcity/README: -------------------------------------------------------------------------------- 1 | The archive contains settings for a TeamCity project. 
2 | 3 | To edit the settings in IntelliJ Idea, open the pom.xml and 4 | select the 'Open as a project' option. 5 | 6 | If you want to move this dsl to version control, save it in the 7 | .teamcity directory. -------------------------------------------------------------------------------- /tests/data/bucket_model/metaswap/SEL_SVAT_BDA.INP: -------------------------------------------------------------------------------- 1 | 1 2 | 2 3 | 3 4 | 4 5 | 5 6 | 6 7 | 7 8 | 8 9 | 9 10 | 10 11 | 11 12 | 12 13 | 13 14 | 14 15 | 15 -------------------------------------------------------------------------------- /tests/data/bucket_model/metaswap/SEL_SVAT_CSV.INP: -------------------------------------------------------------------------------- 1 | 1 2 | 2 3 | 3 4 | 4 5 | 5 6 | 6 7 | 7 8 | 8 9 | 9 10 | 10 11 | 11 12 | 12 13 | 13 14 | 14 15 | 15 -------------------------------------------------------------------------------- /tests/data/bucket_model/modflow6/GWF_1/dis/idomain.bin: -------------------------------------------------------------------------------- 1 | - -------------------------------------------------------------------------------- /tests/data/bucket_model/modflow6/GWF_1/sto.sto: -------------------------------------------------------------------------------- 1 | begin options 2 | end options 3 | 4 | begin griddata 5 | iconvert 6 | constant 0 7 | ss 8 | constant 0.001 9 | sy 10 | constant 0.1 11 | end griddata 12 | 13 | begin period 1 14 | transient 15 | end period 16 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 2 | version: 2 3 | updates: 4 | - package-ecosystem: "github-actions" 5 | directory: "/" # Location of package manifests 6 | schedule: 7 | interval: "daily" 8 | -------------------------------------------------------------------------------- /tests/data/bucket_model/modflow6/GWF_1/GWF_1.nam: -------------------------------------------------------------------------------- 1 | begin options 2 | end options 3 | 4 | begin packages 5 | dis6 GWF_1/dis.dis dis 6 | npf6 GWF_1/npf.npf npf 7 | ic6 GWF_1/ic.ic ic 8 | sto6 GWF_1/sto.sto sto 9 | oc6 GWF_1/oc.oc oc 10 | riv6 GWF_1/riv-1.riv riv-1 11 | rch6 GWF_1/rch.rch rch_msw 12 | end packages 13 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": [ 3 | "ms-python.python", 4 | "ms-python.mypy-type-checker", 5 | "charliermarsh.ruff", 6 | "njpwerner.autodocstring", 7 | "tamasfe.even-better-toml", 8 | "davidanson.vscode-markdownlint", 9 | "samuelcolvin.jinjahtml" 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/msw/area_svat.inp: -------------------------------------------------------------------------------- 1 | 1 10000.0 0.000 1 1 0.300 260 1.000 1.000 50.00 250.00 2 2 1 2 | 2 10000.0 0.000 1 1 1.000 260 1.000 1.000 50.00 150.00 3 3 1 3 | -------------------------------------------------------------------------------- /tests/data/bucket_model/modflow6/mfsim.nam: -------------------------------------------------------------------------------- 1 | begin options 2 | end options 3 | 4 | begin timing 5 | tdis6 time_discretization.tdis 6 | end timing 7 | 8 | begin 
models 9 | gwf6 GWF_1/GWF_1.nam GWF_1 10 | end models 11 | 12 | begin exchanges 13 | end exchanges 14 | 15 | begin solutiongroup 1 16 | ims6 solver.ims GWF_1 17 | end solutiongroup 18 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MODELINPUT/MS_MF6.IC6: -------------------------------------------------------------------------------- 1 | # IC6 File Generated by iMOD [V5_0 X64 Optimized] 2 | 3 | #General Options 4 | 5 | BEGIN OPTIONS 6 | END OPTIONS 7 | 8 | #Initial Head Data 9 | 10 | BEGIN GRIDDATA 11 | STRT LAYERED 12 | CONSTANT -0.1000000E+01 13 | CONSTANT 0.0000000E+00 14 | CONSTANT 0.0000000E+00 15 | END GRIDDATA 16 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | pytest_plugins = [ 2 | "fixtures.fixture_fixed_format_parser", 3 | "fixtures.fixture_ribasim", 4 | "fixtures.fixture_metaswap", 5 | "fixtures.fixture_modflow", 6 | "fixtures.fixture_paths", 7 | "fixtures.fixture_inputfile", 8 | "fixtures.fixture_ribametamod", 9 | "fixtures.fixture_run_coupler_function", 10 | ] 11 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MODELINPUT/MS_MF6.DRN6: -------------------------------------------------------------------------------- 1 | # DRN6 File Generated by iMOD [V5_0 X64 Optimized] 2 | 3 | #General Options 4 | 5 | BEGIN OPTIONS 6 | END OPTIONS 7 | 8 | #General Dimensions 9 | 10 | BEGIN DIMENSIONS 11 | MAXBOUND 2 12 | END DIMENSIONS 13 | 14 | BEGIN PERIOD 1 15 | OPEN/CLOSE .\GWF_1\MODELINPUT\DRN6\DRN_T1.ARR 1.0 (FREE) -1 16 | END PERIOD 17 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MODELINPUT/MS_MF6.RIV6: -------------------------------------------------------------------------------- 1 | # RIV6 File Generated by iMOD [V5_0 X64 Optimized] 2 | 3 | #General Options 4 | 5 | BEGIN OPTIONS 6 | END OPTIONS 7 | 8 | #General Dimensions 9 | 10 | BEGIN DIMENSIONS 11 | MAXBOUND 2 12 | END DIMENSIONS 13 | 14 | BEGIN PERIOD 1 15 | OPEN/CLOSE .\GWF_1\MODELINPUT\RIV6\RIV_T1.ARR 1.0 (FREE) -1 16 | END PERIOD 17 | -------------------------------------------------------------------------------- /tests/data/bucket_model/modflow6/solver.ims: -------------------------------------------------------------------------------- 1 | begin options 2 | print_option summary 3 | end options 4 | 5 | begin nonlinear 6 | outer_dvclose 0.0001 7 | outer_maximum 500 8 | end nonlinear 9 | 10 | begin linear 11 | inner_maximum 100 12 | inner_dvclose 0.0001 13 | inner_rclose 0.001 14 | linear_acceleration cg 15 | relaxation_factor 0.97 16 | end linear 17 | -------------------------------------------------------------------------------- /.teamcity/Primod/Project.kt: -------------------------------------------------------------------------------- 1 | package Primod 2 | 3 | import Primod.buildTypes.* 4 | import jetbrains.buildServer.configs.kotlin.* 5 | import jetbrains.buildServer.configs.kotlin.Project 6 | 7 | object Project : Project({ 8 | id("Primod") 9 | name = "Primod" 10 | 11 | buildType(Primod_TestPrimodPython312Win64) 12 | 13 | template(Primod_TestPrimodWin64) 14 | }) 15 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MODELINPUT/DIS6/IBOUND_L1.ARR: 
-------------------------------------------------------------------------------- 1 | 0 2 | 2 3 | 1 4 | 0 5 | DIMENSIONS 6 | 1 7 | 4 8 | 0.000000000000000E+000 9 | 0.000000000000000E+000 10 | 100.000000000000 11 | 400.000000000000 12 | 3.402823466385289E+038 13 | 0 14 | 100.000000000000 15 | 100.000000000000 16 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MODELINPUT/DIS6/IBOUND_L2.ARR: -------------------------------------------------------------------------------- 1 | 0 2 | 2 3 | 1 4 | 0 5 | DIMENSIONS 6 | 1 7 | 4 8 | 0.000000000000000E+000 9 | 0.000000000000000E+000 10 | 100.000000000000 11 | 400.000000000000 12 | 3.402823466385289E+038 13 | 0 14 | 100.000000000000 15 | 100.000000000000 16 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MODELINPUT/DIS6/IBOUND_L3.ARR: -------------------------------------------------------------------------------- 1 | 0 2 | 2 3 | 1 4 | 0 5 | DIMENSIONS 6 | 1 7 | 4 8 | 0.000000000000000E+000 9 | 0.000000000000000E+000 10 | 100.000000000000 11 | 400.000000000000 12 | 3.402823466385289E+038 13 | 0 14 | 100.000000000000 15 | 100.000000000000 16 | -------------------------------------------------------------------------------- /tests/data/bucket_model/modflow6/GWF_1/npf.npf: -------------------------------------------------------------------------------- 1 | begin options 2 | save_flows 3 | variablecv 4 | end options 5 | 6 | begin griddata 7 | icelltype layered 8 | constant 0 9 | constant 0 10 | constant 0 11 | k layered 12 | constant 1.0 13 | constant 0.01 14 | constant 1.0 15 | k33 layered 16 | constant 0.1 17 | constant 0.001 18 | constant 0.1 19 | end griddata 20 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MODELINPUT/STO/SY_L1.ARR: -------------------------------------------------------------------------------- 1 | 2.781481527845242E-307 2 | 4.450470264052955E-307 3 | 4.227874603747198E-307 4 | 2.225202877897679E-307 5 | DIMENSIONS 6 | 1 7 | 4 8 | 0.000000000000000E+000 9 | 0.000000000000000E+000 10 | 100.000000000000 11 | 400.000000000000 12 | -9999.00000000000 13 | 0 14 | 100.000000000000 15 | 100.000000000000 16 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MODELINPUT/STO/SY_L2.ARR: -------------------------------------------------------------------------------- 1 | 2.781481527845242E-307 2 | 4.450470264052955E-307 3 | 4.227874603747198E-307 4 | 2.225202877897679E-307 5 | DIMENSIONS 6 | 1 7 | 4 8 | 0.000000000000000E+000 9 | 0.000000000000000E+000 10 | 100.000000000000 11 | 400.000000000000 12 | -9999.00000000000 13 | 0 14 | 100.000000000000 15 | 100.000000000000 16 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MODELINPUT/STO/SY_L3.ARR: -------------------------------------------------------------------------------- 1 | 2.781481527845242E-307 2 | 4.450470264052955E-307 3 | 4.227874603747198E-307 4 | 2.225202877897679E-307 5 | DIMENSIONS 6 | 1 7 | 4 8 | 0.000000000000000E+000 9 | 0.000000000000000E+000 10 | 100.000000000000 11 | 400.000000000000 12 | -9999.00000000000 13 | 0 14 | 100.000000000000 15 | 100.000000000000 16 | -------------------------------------------------------------------------------- /.teamcity/IMODCollector/Project.kt: 
-------------------------------------------------------------------------------- 1 | package IMODCollector 2 | 3 | import IMODCollector.buildTypes.* 4 | import jetbrains.buildServer.configs.kotlin.* 5 | import jetbrains.buildServer.configs.kotlin.Project 6 | 7 | object Project : Project({ 8 | id("IMODCollector") 9 | name = "iMOD_Collector" 10 | description = "Collect iMOD6 coupled components + coupler into a single package" 11 | buildType(IMODCollector_X64development) 12 | }) 13 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MODELINPUT/NPF/K33_L1.ARR: -------------------------------------------------------------------------------- 1 | 0.000000000000000E+000 2 | 5.000000000000000E-003 3 | 5.000000000000000E-003 4 | 0.000000000000000E+000 5 | DIMENSIONS 6 | 1 7 | 4 8 | 0.000000000000000E+000 9 | 0.000000000000000E+000 10 | 100.000000000000 11 | 400.000000000000 12 | 3.402823466385289E+038 13 | 0 14 | 100.000000000000 15 | 100.000000000000 16 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MODELINPUT/NPF/K33_L2.ARR: -------------------------------------------------------------------------------- 1 | 0.000000000000000E+000 2 | 1.000000000000000E-004 3 | 1.000000000000000E-004 4 | 0.000000000000000E+000 5 | DIMENSIONS 6 | 1 7 | 4 8 | 0.000000000000000E+000 9 | 0.000000000000000E+000 10 | 100.000000000000 11 | 400.000000000000 12 | 3.402823466385289E+038 13 | 0 14 | 100.000000000000 15 | 100.000000000000 16 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MODELINPUT/NPF/K33_L3.ARR: -------------------------------------------------------------------------------- 1 | 0.000000000000000E+000 2 | 1.000000000000000E-004 3 | 1.000000000000000E-004 4 | 0.000000000000000E+000 5 | DIMENSIONS 6 | 1 7 | 4 8 | 0.000000000000000E+000 9 | 0.000000000000000E+000 10 | 100.000000000000 11 | 400.000000000000 12 | 3.402823466385289E+038 13 | 0 14 | 100.000000000000 15 | 100.000000000000 16 | -------------------------------------------------------------------------------- /.teamcity/Primod/buildTypes/Primod_TestPrimodPython312Win64.kt: -------------------------------------------------------------------------------- 1 | package Primod.buildTypes 2 | 3 | import jetbrains.buildServer.configs.kotlin.* 4 | 5 | object Primod_TestPrimodPython312Win64 : BuildType({ 6 | templates(Primod_TestPrimodWin64) 7 | name = "Test Primod Python 3.12 Win64" 8 | description = "Win64 Regression testbench for MODFLOW6/MetaSWAP coupler" 9 | 10 | params { 11 | param("pixi-environment", "py312") 12 | } 13 | }) 14 | -------------------------------------------------------------------------------- /pre-processing/primod/driver_coupling/__init__.py: -------------------------------------------------------------------------------- 1 | from primod.driver_coupling.metamod import MetaModDriverCoupling 2 | from primod.driver_coupling.ribameta import RibaMetaDriverCoupling 3 | from primod.driver_coupling.ribamod import ( 4 | RibaModActiveDriverCoupling, 5 | RibaModPassiveDriverCoupling, 6 | ) 7 | 8 | __all__ = ( 9 | "MetaModDriverCoupling", 10 | "RibaMetaDriverCoupling", 11 | "RibaModActiveDriverCoupling", 12 | "RibaModPassiveDriverCoupling", 13 | ) 14 | -------------------------------------------------------------------------------- /tests/data/bucket_model/modflow6/GWF_1/dis.dis: 
-------------------------------------------------------------------------------- 1 | begin options 2 | xorigin 50.0 3 | yorigin 50.0 4 | end options 5 | 6 | begin dimensions 7 | nlay 3 8 | nrow 3 9 | ncol 5 10 | end dimensions 11 | 12 | begin griddata 13 | delr 14 | constant 100.0 15 | delc 16 | constant 100.0 17 | top 18 | constant 0.0 19 | botm layered 20 | constant -0.2 21 | constant -10.2 22 | constant -110.2 23 | idomain 24 | open/close GWF_1/dis/idomain.bin (binary) 25 | end griddata 26 | -------------------------------------------------------------------------------- /.teamcity/_Self/Project.kt: -------------------------------------------------------------------------------- 1 | package _Self 2 | 3 | import _Self.buildTypes.* 4 | import _Self.vcsRoots.* 5 | import jetbrains.buildServer.configs.kotlin.* 6 | import jetbrains.buildServer.configs.kotlin.Project 7 | 8 | object Project : Project({ 9 | description = "Python scripts coupling components" 10 | 11 | vcsRoot(MetaSwapLookupTable) 12 | vcsRoot(ImodCoupler) 13 | 14 | buildType(TestbenchCouplerWin64) 15 | 16 | subProject(Primod.Project) 17 | subProject(IMODCollector.Project) 18 | }) 19 | -------------------------------------------------------------------------------- /tests/test_imod_coupler/test_parser.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import imod_coupler.parser 4 | from imod_coupler import __version__ 5 | 6 | 7 | def test_get_version(capsys: pytest.CaptureFixture[str]) -> None: 8 | with pytest.raises(SystemExit) as cm: 9 | imod_coupler.parser.parse_args(["--version"]) 10 | assert cm.value.code == 0 11 | captured = capsys.readouterr() 12 | output_version = captured.out.strip() 13 | assert output_version is not None 14 | assert output_version == __version__ 15 | -------------------------------------------------------------------------------- /tests/data/bucket_model/metaswap/mod2svat.inp: -------------------------------------------------------------------------------- 1 | 1 1 1 2 | 2 2 1 3 | 3 3 1 4 | 4 4 1 5 | 5 5 1 6 | 6 6 1 7 | 7 7 1 8 | 8 8 1 9 | 9 9 1 10 | 10 10 1 11 | 11 11 1 12 | 12 12 1 13 | 13 13 1 14 | 14 14 1 15 | 15 15 1 16 | -------------------------------------------------------------------------------- /tests/data/bucket_model/modflow6/GWF_1/rch.rch: -------------------------------------------------------------------------------- 1 | begin options 2 | save_flows 3 | end options 4 | 5 | begin dimensions 6 | maxbound 15 7 | end dimensions 8 | 9 | begin period 1 10 | 1 1 1 0.0 11 | 1 2 1 0.0 12 | 1 3 1 0.0 13 | 1 1 2 0.0 14 | 1 2 2 0.0 15 | 1 3 2 0.0 16 | 1 1 3 0.0 17 | 1 2 3 0.0 18 | 1 3 3 0.0 19 | 1 1 4 0.0 20 | 1 2 4 0.0 21 | 1 3 4 0.0 22 | 1 1 5 0.0 23 | 1 2 5 0.0 24 | 1 3 5 0.0 25 | end period 26 | -------------------------------------------------------------------------------- /.teamcity/.idea/misc.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 10 | 11 | 12 | -------------------------------------------------------------------------------- /.teamcity/_Self/vcsRoots/MetaSwapLookupTable.kt: -------------------------------------------------------------------------------- 1 | package _Self.vcsRoots 2 | 3 | import jetbrains.buildServer.configs.kotlin.* 4 | import jetbrains.buildServer.configs.kotlin.vcs.SvnVcsRoot 5 | 6 | object MetaSwapLookupTable : SvnVcsRoot({ 7 | name = "MetaSwap_LookupTable" 8 | url = 
"https://repos.deltares.nl/repos/DSCTestbench/trunk/cases/e150_metaswap/f00_common/c00_common/LHM2016_v01vrz" 9 | userName = "%svn_buildserver_username%" 10 | password = "credentialsJSON:4fe21828-8cba-44b7-b969-203d6a5d0a5f" 11 | }) 12 | -------------------------------------------------------------------------------- /tests/data/bucket_model/metaswap/idf_svat.inp: -------------------------------------------------------------------------------- 1 | 1 1 1 2 | 2 1 2 3 | 3 1 3 4 | 4 1 4 5 | 5 1 5 6 | 6 2 1 7 | 7 2 2 8 | 8 2 3 9 | 9 2 4 10 | 10 2 5 11 | 11 3 1 12 | 12 3 2 13 | 13 3 3 14 | 14 3 4 15 | 15 3 5 16 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/MFSIM.NAM: -------------------------------------------------------------------------------- 1 | # MFSIM.NAM File Generated by iMOD [V5_0 X64 Optimized] 2 | 3 | #General Options 4 | 5 | BEGIN OPTIONS 6 | END OPTIONS 7 | 8 | #Timing Options 9 | 10 | BEGIN TIMING 11 | TDIS6 .\MFSIM.TDIS6 12 | END TIMING 13 | 14 | #List of Models 15 | 16 | BEGIN MODELS 17 | GWF6 .\GWF_1\MS_MF6.NAM GWF_1 18 | END MODELS 19 | 20 | #List of Exchanges 21 | 22 | BEGIN EXCHANGES 23 | END EXCHANGES 24 | 25 | #Definition of Numerical Solution 26 | 27 | BEGIN SOLUTIONGROUP 1 28 | MXITER 1 29 | IMS6 .\MFSIM.IMS6 GWF_1 30 | END SOLUTIONGROUP 31 | -------------------------------------------------------------------------------- /tests/data/bucket_model/exchanges/nodenr2svat.dxc: -------------------------------------------------------------------------------- 1 | 1 1 1 2 | 2 2 1 3 | 3 3 1 4 | 4 4 1 5 | 5 5 1 6 | 6 6 1 7 | 7 7 1 8 | 8 8 1 9 | 9 9 1 10 | 10 10 1 11 | 11 11 1 12 | 12 12 1 13 | 13 13 1 14 | 14 14 1 15 | 15 15 1 16 | -------------------------------------------------------------------------------- /tests/data/bucket_model/exchanges/ponding_index.dxc: -------------------------------------------------------------------------------- 1 | 1 1 1 2 | 1 2 1 3 | 1 3 1 4 | 1 4 1 5 | 1 5 1 6 | 1 6 1 7 | 1 7 1 8 | 1 8 1 9 | 1 9 1 10 | 1 10 1 11 | 1 11 1 12 | 1 12 1 13 | 1 13 1 14 | 1 14 1 15 | 1 15 1 16 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/MFSIM.IMS6: -------------------------------------------------------------------------------- 1 | # IMS6 File Generated by iMOD [V5_0 X64 Optimized] 2 | 3 | #General options 4 | 5 | BEGIN OPTIONS 6 | PRINT_OPTION SUMMARY 7 | COMPLEXITY MODERATE 8 | CSV_OUTER_OUTPUT FILEOUT MFSIM.CSV 9 | END OPTIONS 10 | 11 | #Nonlinear options 12 | 13 | BEGIN NONLINEAR 14 | OUTER_HCLOSE 0.1000000E-02 15 | OUTER_MAXIMUM 150 16 | END NONLINEAR 17 | 18 | #Linear options 19 | 20 | BEGIN LINEAR 21 | INNER_MAXIMUM 50 22 | INNER_HCLOSE 0.1000000E-02 23 | INNER_RCLOSE 10.00000 24 | RELAXATION_FACTOR 0.9800000 25 | END LINEAR 26 | -------------------------------------------------------------------------------- /pre-processing/primod/mapping/__init__.py: -------------------------------------------------------------------------------- 1 | from primod.mapping.node_basin_mapping import ActiveNodeBasinMapping, NodeBasinMapping 2 | from primod.mapping.node_svat_mapping import NodeSvatMapping 3 | from primod.mapping.rch_svat_mapping import RechargeSvatMapping 4 | from primod.mapping.svat_basin_mapping import SvatBasinMapping 5 | from primod.mapping.wel_svat_mapping import WellSvatMapping 6 | 7 | __all__ = ( 8 | "ActiveNodeBasinMapping", 9 | "NodeBasinMapping", 10 | "NodeSvatMapping", 11 | "RechargeSvatMapping", 12 | "SvatBasinMapping", 13 | 
"WellSvatMapping", 14 | ) 15 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MS_MF6.NAM: -------------------------------------------------------------------------------- 1 | # MS_MF6.NAM File Generated by iMOD [V5_0 X64 Optimized] 2 | 3 | #General Options 4 | 5 | BEGIN OPTIONS 6 | LIST .\GWF_1\MS_MF6.LST 7 | END OPTIONS 8 | 9 | #List of Packages 10 | 11 | BEGIN PACKAGES 12 | DIS6 .\GWF_1\MODELINPUT\MS_MF6.DIS6 13 | IC6 .\GWF_1\MODELINPUT\MS_MF6.IC6 14 | NPF6 .\GWF_1\MODELINPUT\MS_MF6.NPF6 15 | OC6 .\GWF_1\MODELINPUT\MS_MF6.OC6 16 | STO6 .\GWF_1\MODELINPUT\MS_MF6.STO6 17 | DRN6 .\GWF_1\MODELINPUT\MS_MF6.DRN6 18 | RIV6 .\GWF_1\MODELINPUT\MS_MF6.RIV6 19 | RCH6 .\GWF_1\MODELINPUT\MS_MF6.RCH6 RCH_MSW 20 | END PACKAGES 21 | -------------------------------------------------------------------------------- /tests/test_imod_coupler/test_logger.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from loguru import logger 4 | 5 | from imod_coupler.config import LogLevel 6 | from imod_coupler.utils import setup_logger 7 | 8 | 9 | def test_log_file( 10 | tmp_path: Path, 11 | ) -> None: 12 | """Assures that logs are written to file after `setup_logger` is called""" 13 | 14 | test_string = "Test" 15 | log_file = tmp_path / "imod_coupler.log" 16 | setup_logger(LogLevel.INFO, log_file) 17 | logger.warning(test_string) 18 | 19 | with open(log_file) as f: 20 | assert test_string in f.read() 21 | -------------------------------------------------------------------------------- /.teamcity/.idea/compiler.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /pre-processing/primod/__init__.py: -------------------------------------------------------------------------------- 1 | from primod.driver_coupling import ( 2 | MetaModDriverCoupling, 3 | RibaMetaDriverCoupling, 4 | RibaModActiveDriverCoupling, 5 | RibaModPassiveDriverCoupling, 6 | ) 7 | from primod.metamod import MetaMod 8 | from primod.ribametamod import RibaMetaMod 9 | from primod.ribamod import RibaMod 10 | 11 | __all__ = ( 12 | "MetaMod", 13 | "RibaMod", 14 | "RibaMetaMod", 15 | "MetaModDriverCoupling", 16 | "RibaMetaDriverCoupling", 17 | "RibaModActiveDriverCoupling", 18 | "RibaModPassiveDriverCoupling", 19 | ) 20 | 21 | __version__ = "2024.3.0" 22 | -------------------------------------------------------------------------------- /tests/test_imod_coupler/test_config_cases.py: -------------------------------------------------------------------------------- 1 | from imod.mf6 import Modflow6Simulation 2 | from imod.msw import MetaSwapModel 3 | from primod import MetaMod, MetaModDriverCoupling 4 | 5 | 6 | def case_sprinkling( 7 | coupled_mf6_model: Modflow6Simulation, 8 | prepared_msw_model: MetaSwapModel, 9 | ) -> MetaMod: 10 | driver_coupling = MetaModDriverCoupling( 11 | mf6_model="GWF_1", mf6_recharge_package="rch_msw", mf6_wel_package="wells_msw" 12 | ) 13 | return MetaMod( 14 | prepared_msw_model, 15 | coupled_mf6_model, 16 | coupling_list=[driver_coupling], 17 | ) 18 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 
3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "name": "Python Debugger: Current File", 9 | "type": "python", 10 | "request": "launch", 11 | "program": "${file}", 12 | "console": "integratedTerminal", 13 | "justMyCode": false, 14 | "purpose":["debug-test"], 15 | } 16 | ] 17 | } -------------------------------------------------------------------------------- /tests/fixtures/fixture_inputfile.py: -------------------------------------------------------------------------------- 1 | import pytest_cases 2 | 3 | 4 | @pytest_cases.fixture(scope="function") 5 | def output_config_toml() -> str: 6 | """ 7 | returns the content of an input file for the logger. 8 | """ 9 | input_file_content = """ 10 | [general] 11 | output_dir = "." 12 | 13 | [exchanges.example_flux_output] 14 | type = "netcdf" 15 | 16 | [exchanges.example_stage_output] 17 | type = "netcdf" 18 | """ 19 | return input_file_content 20 | -------------------------------------------------------------------------------- /imod_coupler/config.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | from pydantic import BaseModel 4 | 5 | 6 | class LogLevel(str, Enum): 7 | DEBUG = "DEBUG" 8 | INFO = "INFO" 9 | WARNING = "WARNING" 10 | ERROR = "ERROR" 11 | CRITICAL = "CRITICAL" 12 | 13 | 14 | class DriverType(str, Enum): 15 | METAMOD = "metamod" 16 | RIBAMOD = "ribamod" 17 | RIBAMETAMOD = "ribametamod" 18 | 19 | 20 | class BaseConfig(BaseModel): 21 | """Model for the base config validated by pydantic""" 22 | 23 | log_level: LogLevel = LogLevel.INFO 24 | timing: bool = False 25 | driver_type: DriverType 26 | driver: BaseModel 27 | -------------------------------------------------------------------------------- /pre-processing/primod/driver_coupling/driver_coupling_base.py: -------------------------------------------------------------------------------- 1 | import abc 2 | from pathlib import Path 3 | from typing import Any 4 | 5 | from pydantic import BaseModel 6 | 7 | 8 | class DriverCoupling(BaseModel, abc.ABC): 9 | """ 10 | Abstract base class for driver couplings. 11 | """ 12 | 13 | # Config required for e.g. 
geodataframes 14 | model_config = {"arbitrary_types_allowed": True} 15 | 16 | @abc.abstractmethod 17 | def derive_mapping(self, *args: Any, **kwargs: Any) -> Any: 18 | pass 19 | 20 | @abc.abstractmethod 21 | def write_exchanges(self, directory: Path, coupled_model: Any) -> dict[str, Any]: 22 | pass 23 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MODELINPUT/MS_MF6.NPF6: -------------------------------------------------------------------------------- 1 | # NPF6 File Generated by iMOD [V5_0 X64 Optimized] 2 | 3 | #General Options 4 | 5 | BEGIN OPTIONS 6 | ALTERNATIVE_CELL_AVERAGING AMT-HMK 7 | THICKSTRT 8 | END OPTIONS 9 | 10 | #Geology Options 11 | 12 | BEGIN GRIDDATA 13 | ICELLTYPE LAYERED 14 | CONSTANT 0 15 | CONSTANT 0 16 | CONSTANT 0 17 | K LAYERED 18 | CONSTANT 0.5000000E-02 19 | CONSTANT 0.1000000E-03 20 | CONSTANT 0.1000000E-03 21 | K33 LAYERED 22 | OPEN/CLOSE .\GWF_1\MODELINPUT\NPF\K33_L1.ARR FACTOR 1.0D0 IPRN -1 23 | OPEN/CLOSE .\GWF_1\MODELINPUT\NPF\K33_L2.ARR FACTOR 1.0D0 IPRN -1 24 | OPEN/CLOSE .\GWF_1\MODELINPUT\NPF\K33_L3.ARR FACTOR 1.0D0 IPRN -1 25 | END GRIDDATA 26 | -------------------------------------------------------------------------------- /tests/data/bucket_model/exchanges/rchindex2svat.dxc: -------------------------------------------------------------------------------- 1 | 1 1 1 2 | 2 2 1 3 | 3 3 1 4 | 4 4 1 5 | 5 5 1 6 | 6 6 1 7 | 7 7 1 8 | 8 8 1 9 | 9 9 1 10 | 10 10 1 11 | 11 11 1 12 | 12 12 1 13 | 13 13 1 14 | 14 14 1 15 | 15 15 1 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | -------------------------------------------------------------------------------- /.teamcity/_Self/vcsRoots/ImodCoupler.kt: -------------------------------------------------------------------------------- 1 | package _Self.vcsRoots 2 | 3 | import jetbrains.buildServer.configs.kotlin.* 4 | import jetbrains.buildServer.configs.kotlin.vcs.GitVcsRoot 5 | 6 | object ImodCoupler : GitVcsRoot({ 7 | name = "imod_coupler" 8 | url = "https://github.com/Deltares/imod_coupler" 9 | branch = "main" 10 | branchSpec = """ 11 | +:refs/heads/* 12 | +:refs/tags/* 13 | -:refs/heads/gh-pages 14 | """.trimIndent() 15 | useTagsAsBranches = true 16 | checkoutPolicy = GitVcsRoot.AgentCheckoutPolicy.USE_MIRRORS 17 | authMethod = password { 18 | userName = "teamcity-deltares" 19 | password = "credentialsJSON:abf605ce-e382-4b10-b5de-8a7640dc58d9" 20 | } 21 | }) 22 | -------------------------------------------------------------------------------- /imod_coupler/parser.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | from collections.abc import Sequence 3 | from typing import Any 4 | 5 | from imod_coupler import __version__ 6 | 7 | 8 | def parse_args(args: Sequence[str] | None = None) -> Any: 9 | parser = argparse.ArgumentParser() 10 | 11 | parser.add_argument( 12 | "config_path", 13 | action="store", 14 | help="specify the path to the configuration file", 15 | ) 16 | 17 | parser.add_argument( 18 | "--enable-debug-native", 19 | action="store_true", 20 | help="stop the script to wait for the native debugger", 21 | ) 22 | 23 | parser.add_argument("--version", action="version", version=__version__) 24 | 25 | return parser.parse_args(args) 26 | -------------------------------------------------------------------------------- /.vscode/settings.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "python.testing.pytestArgs": [ 3 | "tests" 4 | ], 5 | "python.testing.unittestEnabled": false, 6 | "python.testing.pytestEnabled": true, 7 | "[python]": { 8 | "editor.defaultFormatter": "charliermarsh.ruff", 9 | "editor.formatOnSave": true, 10 | "editor.codeActionsOnSave": { 11 | "source.fixAll": "explicit" 12 | } 13 | }, 14 | "mypy-type-checker.importStrategy": "fromEnvironment", 15 | "files.associations": { 16 | ".env.jinja": "jinja-properties" 17 | }, 18 | "cSpell.diagnosticLevel": "Hint", 19 | "sonarlint.connectedMode.project": { 20 | "connectionId": "deltares", 21 | "projectKey": "Deltares_imod_coupler" 22 | } 23 | } -------------------------------------------------------------------------------- /tests/data/bucket_model/metaswap/svat2swnr_roff.inp: -------------------------------------------------------------------------------- 1 | 1 0 0.002 1.0 1.0 2 | 2 0 0.002 1.0 1.0 3 | 3 0 0.002 1.0 1.0 4 | 4 0 0.002 1.0 1.0 5 | 5 0 0.002 1.0 1.0 6 | 6 0 0.002 1.0 1.0 7 | 7 0 0.002 1.0 1.0 8 | 8 0 0.002 1.0 1.0 9 | 9 0 0.002 1.0 1.0 10 | 10 0 0.002 1.0 1.0 11 | 11 0 0.002 1.0 1.0 12 | 12 0 0.002 1.0 1.0 13 | 13 0 0.002 1.0 1.0 14 | 14 0 0.002 1.0 1.0 15 | 15 0 0.002 1.0 1.0 16 | -------------------------------------------------------------------------------- /tests/data/bucket_model/metaswap/infi_svat.inp: -------------------------------------------------------------------------------- 1 | 1 1.000 -9999.0 -9999.0 -9999.0 -9999.0 2 | 2 1.000 -9999.0 -9999.0 -9999.0 -9999.0 3 | 3 1.000 -9999.0 -9999.0 -9999.0 -9999.0 4 | 4 1.000 -9999.0 -9999.0 -9999.0 -9999.0 5 | 5 1.000 -9999.0 -9999.0 -9999.0 -9999.0 6 | 6 1.000 -9999.0 -9999.0 -9999.0 -9999.0 7 | 7 1.000 -9999.0 -9999.0 -9999.0 -9999.0 8 | 8 1.000 -9999.0 -9999.0 -9999.0 -9999.0 9 | 9 1.000 -9999.0 -9999.0 -9999.0 -9999.0 10 | 10 1.000 -9999.0 -9999.0 -9999.0 -9999.0 11 | 11 1.000 -9999.0 -9999.0 -9999.0 -9999.0 12 | 12 1.000 -9999.0 -9999.0 -9999.0 -9999.0 13 | 13 1.000 -9999.0 -9999.0 -9999.0 -9999.0 14 | 14 1.000 -9999.0 -9999.0 -9999.0 -9999.0 15 | 15 1.000 -9999.0 -9999.0 -9999.0 -9999.0 16 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/GWF_1/MODELINPUT/MS_MF6.DIS6: -------------------------------------------------------------------------------- 1 | # DIS6 File Generated by iMOD [V5_0 X64 Optimized] 2 | 3 | General Options 4 | 5 | BEGIN OPTIONS 6 | LENGTH_UNITS METERS 7 | XORIGIN 0.000 8 | YORIGIN 0.000 9 | ANGROT 0.0 10 | END OPTIONS 11 | 12 | #Model Dimensions 13 | 14 | BEGIN DIMENSIONS 15 | NLAY 3 16 | NROW 4 17 | NCOL 1 18 | END DIMENSIONS 19 | 20 | #Cell Sizes 21 | 22 | BEGIN GRIDDATA 23 | DELR 24 | CONSTANT 0.1000000E+03 25 | DELC 26 | CONSTANT 0.1000000E+03 27 | 28 | #Vertical Configuration 29 | 30 | TOP 31 | CONSTANT 0.0000000E+00 32 | BOTM LAYERED 33 | CONSTANT -0.1000000E+03 34 | CONSTANT -0.2000000E+03 35 | CONSTANT -0.3000000E+03 36 | 37 | #Boundary Settings 38 | 39 | IDOMAIN LAYERED 40 | OPEN/CLOSE .\GWF_1\MODELINPUT\DIS6\IBOUND_L1.ARR FACTOR 1 IPRN -1 41 | OPEN/CLOSE .\GWF_1\MODELINPUT\DIS6\IBOUND_L2.ARR FACTOR 1 IPRN -1 42 | OPEN/CLOSE .\GWF_1\MODELINPUT\DIS6\IBOUND_L3.ARR FACTOR 1 IPRN -1 43 | END GRIDDATA 44 | -------------------------------------------------------------------------------- /tests/data/bucket_model/metaswap/para_sim_template.inp: -------------------------------------------------------------------------------- 1 | vegetation_mdl = 1 2 | 
evapotranspiration_mdl = 1 3 | saltstress_mdl = 0 4 | surfacewater_mdl = 0 5 | infilimsat_opt = 0 6 | netcdf_per = 0 7 | postmsw_opt = 0 8 | dtgw = 1.0 9 | dtsw = 1.0 10 | ipstep = 2 11 | nxlvage_dim = 366 12 | co2 = 404.32 13 | fact_beta2 = 1.0 14 | rcsoil = 0.15 15 | iterur1 = 3 16 | iterur2 = 5 17 | tdbgsm = 91.0 18 | tdedsm = 270.0 19 | clocktime = 0 20 | unsa_svat_path = "{{unsat_path}}" 21 | iybg = 1971 22 | tdbg = 0.0 23 | simgro_opt = -1 ! simgro output file 24 | idf_per = 1 ! Writing IDF files 25 | idf_xmin = 50.0 26 | idf_ymin = 50.0 27 | idf_dx = 100.00 28 | idf_dy = 100.00 29 | idf_ncol = 5 30 | idf_nrow = 3 31 | idf_sx = 0 32 | idf_sy = 0 33 | idf_nodata = -9999.00 34 | -------------------------------------------------------------------------------- /.github/workflows/pixi_auto_update.yml: -------------------------------------------------------------------------------- 1 | name: Pixi auto update 2 | 3 | on: 4 | schedule: 5 | # At 03:00 on day 3 of the month 6 | - cron: "0 3 3 * *" 7 | # on demand 8 | workflow_dispatch: 9 | 10 | jobs: 11 | auto-update: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v5 15 | with: 16 | ssh-key: ${{ secrets.SSH_PRIVATE_KEY }} 17 | - uses: prefix-dev/setup-pixi@v0.9.3 18 | with: 19 | pixi-version: "latest" 20 | cache: false 21 | - name: Update pixi lock file 22 | run: | 23 | rm pixi.lock 24 | pixi install 25 | - uses: peter-evans/create-pull-request@v7 26 | with: 27 | token: ${{ secrets.GITHUB_TOKEN }} 28 | branch: update/pixi-lock 29 | title: Update pixi lock file 30 | commit-message: "Update `pixi.lock`" 31 | body: Update pixi dependencies to the latest version. 32 | author: "GitHub " 33 | -------------------------------------------------------------------------------- /tests/test_imod_coupler/test_msw_wrapper.py: -------------------------------------------------------------------------------- 1 | import shutil 2 | from pathlib import Path 3 | 4 | import pytest 5 | from test_modstrip import fill_para_sim_template 6 | 7 | from imod_coupler.kernelwrappers.msw_wrapper import MswWrapper 8 | 9 | 10 | @pytest.mark.skip("metaswap can't be initialized and finalized more than once") 11 | def test_msw_wrapper_can_be_initialized_and_finalized_multiple_times( 12 | metaswap_dll_devel: Path, 13 | metaswap_dll_dep_dir_devel: Path, 14 | tmp_path_dev: Path, 15 | tmodel_short_input_folder: Path, 16 | metaswap_lookup_table: Path, 17 | ) -> None: 18 | shutil.copytree(tmodel_short_input_folder, tmp_path_dev) 19 | msw = MswWrapper( 20 | metaswap_dll_devel, 21 | metaswap_dll_dep_dir_devel, 22 | tmp_path_dev / "MetaSWAP", 23 | False, 24 | ) 25 | 26 | fill_para_sim_template(tmp_path_dev / "MetaSWAP", metaswap_lookup_table) 27 | msw.initialize() 28 | msw.finalize() 29 | msw.initialize() 30 | msw.finalize() 31 | -------------------------------------------------------------------------------- /tests/test_imod_coupler/test_waterbalance_script.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from common_scripts.mf6_water_balance.combine import create_modflow_waterbalance_file 4 | from test_utilities import numeric_csvfiles_equal 5 | 6 | eps = 1e-4 7 | tolerance_balance: dict[str, tuple[float, float]] = { 8 | "default": (2 * eps, 2 * eps), 9 | } 10 | 11 | 12 | def test_waterbalance_script_case_1( 13 | tmp_path: Path, reference_result_folder: Path, test_data_folder: Path 14 | ): 15 | script_test_data_folder = test_data_folder / "waterbalance_script" 16 | csv_result_file = tmp_path / 
"waterbalance_output.csv" 17 | 18 | create_modflow_waterbalance_file( 19 | script_test_data_folder / "T-MODEL-D.LST", 20 | output_file_csv=csv_result_file, 21 | ) 22 | 23 | csv_reference_file = ( 24 | reference_result_folder 25 | / "test_waterbalance_script" 26 | / "waterbalance_output_1.csv" 27 | ) 28 | 29 | assert numeric_csvfiles_equal( 30 | csv_result_file, csv_reference_file, ";", tolerance_balance 31 | ) 32 | -------------------------------------------------------------------------------- /pre-processing/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "primod" 7 | description = "Pre-process iMOD Coupler" 8 | readme = "README.md" 9 | authors = [ 10 | { name = "Hendrik Kok", email = "Hendrik.Kok@deltares.nl" }, 11 | { name = "Robert Leander", email = "Robert.Leander@deltares.nl" }, 12 | { name = "Joeri van Engelen", email = "Joeri.vanEngelen@deltares.nl" }, 13 | { name = "Huite Bootsma", email = "Huite.Bootsma@deltares.nl" }, 14 | ] 15 | license = { text = "MIT" } 16 | classifiers = [ 17 | "Intended Audience :: Science/Research", 18 | "Topic :: Scientific/Engineering :: Hydrology", 19 | ] 20 | requires-python = ">=3.10" 21 | dependencies = [ 22 | "geopandas", 23 | "imod>=1", 24 | "numpy", 25 | "pandas", 26 | "pydantic", 27 | "ribasim", 28 | "tomli-w", 29 | "xarray", 30 | ] 31 | dynamic = ["version"] 32 | 33 | [tool.hatch.version] 34 | path = "primod/__init__.py" 35 | 36 | [project.urls] 37 | Source = "https://github.com/Deltares/imod_coupler/pre-processing" 38 | -------------------------------------------------------------------------------- /configuration.md: -------------------------------------------------------------------------------- 1 | ## Configuration file 2 | 3 | The configuration file is necessary to describe the model and its dependencies. 4 | It is in the [toml](https://toml.io/en/) format and should have a `.toml` extension. 5 | 6 | Note that `toml` uses quote marks differently than `python`. Single quotes in toml (`''`) are interpreted similarly to how python would interpret a rawstring (`r''` or `r""`), whereas double quotes (`""`) are interpreted in a similar manner to regular strings in python (`""` or `''`). This matters for paths on Windows, for which we advice to use single quotes. 7 | 8 | ```toml 9 | # This is a configuration file for the imod_coupler 10 | # Relative paths are interpreted as relative to the configuration file path 11 | 12 | [kernels] 13 | [kernels.modflow6] 14 | dll = '/path/to/libmf6.dll' 15 | model = '.' 
16 | 17 | [kernels.metaswap] 18 | dll = '/path/to/MetaSWAP.dll' 19 | model = './GWF_1/MSWAPINPUT' 20 | dll_dependency = '/path/to/MPICHDLL' 21 | 22 | 23 | [[exchanges]] 24 | # Two kernels per exchange 25 | kernels = ['modflow6', 'metaswap'] 26 | 27 | ``` 28 | -------------------------------------------------------------------------------- /pre-processing/LICENSE: -------------------------------------------------------------------------------- 1 | Permission is hereby granted, free of charge, to any 2 | person obtaining a copy of this software and associated 3 | documentation files (the "Software"), to deal in the 4 | Software without restriction, including without 5 | limitation the rights to use, copy, modify, merge, 6 | publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software 8 | is furnished to do so, subject to the following 9 | conditions: 10 | 11 | The above copyright notice and this permission notice 12 | shall be included in all copies or substantial portions 13 | of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 16 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 17 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 18 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 19 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 20 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 22 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 23 | DEALINGS IN THE SOFTWARE. 24 | -------------------------------------------------------------------------------- /tests/fixtures/fixture_fixed_format_parser.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | # Separate function from the one in fixed_format module. This one fails if not 5 | # able to read value for tests. 6 | @pytest.fixture(scope="session") 7 | def fixed_format_parser(): 8 | def function(file, metadata_dict): 9 | results = {} 10 | for key in metadata_dict: 11 | results[key] = [] 12 | 13 | with open(file) as f: 14 | lines = f.readlines() 15 | for line in lines: 16 | if line == "\n": 17 | continue 18 | for varname, metadata in metadata_dict.items(): 19 | # Take first part of line 20 | value = line[: metadata.column_width] 21 | # Convert to correct type 22 | converted_value = metadata.dtype(value) 23 | # Add to results 24 | results[varname].append(converted_value) 25 | # Truncate line 26 | line = line[metadata.column_width :] 27 | return results 28 | 29 | return function 30 | -------------------------------------------------------------------------------- /tests/fixtures/fixture_run_coupler_function.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | import sys 3 | from collections.abc import Callable 4 | from pathlib import Path 5 | 6 | import pytest 7 | 8 | from imod_coupler.__main__ import run_coupler 9 | 10 | 11 | @pytest.fixture(scope="session") 12 | def run_coupler_function(imod_coupler_exec_devel: Path) -> Callable[[Path], None]: 13 | """ 14 | Replacement for subprocess.run. 15 | If pydevd is loaded, don't use subprocess.run but call run_coupler directly. 16 | Otherwise it would not be possible to attach the debugger. 17 | pydevd is loaded when starting the debugger via Visual Studio Code (PyCharm is untested). 
18 | """ 19 | if "pydevd" in sys.modules: 20 | 21 | def run_coupler_debug(file: Path) -> None: 22 | try: 23 | run_coupler(file) 24 | except Exception as ex: 25 | raise subprocess.CalledProcessError(1, "run_coupler", None, str(ex)) 26 | 27 | return run_coupler_debug 28 | else: 29 | return lambda file: subprocess.run([imod_coupler_exec_devel, file], check=True) # type: ignore # not interested in the return type of subprocess.run 30 | -------------------------------------------------------------------------------- /pre-processing/primod/model_mixin.py: -------------------------------------------------------------------------------- 1 | """Module containing mixins for specific kernels, for example MODFLOW.""" 2 | 3 | from imod.mf6 import Modflow6Simulation, StructuredDiscretization 4 | from imod.mf6.mf6_wel_adapter import Mf6Wel 5 | 6 | 7 | class MetaModMixin: 8 | """MetaSWAP-Modflow coupling specific methods.""" 9 | 10 | @staticmethod 11 | def get_mf6_pkgs_for_metaswap( 12 | coupling_dict: dict[str, str], mf6_simulation: Modflow6Simulation 13 | ) -> tuple[StructuredDiscretization, Mf6Wel | None]: 14 | """ 15 | Get names of DIS and possibly WEL packages from coupling_dict then fetch 16 | these MODFLOW 6 packages from simulation. 17 | """ 18 | mf6_model_key = coupling_dict["mf6_model"] 19 | gwf_model = mf6_simulation[mf6_model_key] 20 | mf6_dis_key = gwf_model.get_diskey() 21 | mf6_dis_pkg = gwf_model[mf6_dis_key] 22 | 23 | mf6_wel_pkg = None 24 | if "mf6_msw_well_pkg" in coupling_dict.keys(): 25 | mf6_well_key = coupling_dict["mf6_msw_well_pkg"] 26 | mf6_wel_pkg = gwf_model.prepare_wel_for_mf6(mf6_well_key, True, True) 27 | return mf6_dis_pkg, mf6_wel_pkg 28 | -------------------------------------------------------------------------------- /scripts/templates/.env.jinja: -------------------------------------------------------------------------------- 1 | IMOD_COLLECTOR_DEVEL="{{ imod_collector_dev_path|replace("\\", "/") }}" 2 | IMOD_COLLECTOR_REGRESSION="{{ imod_collector_regression_path|replace("\\", "/") }}" 3 | METASWAP_LOOKUP_TABLE="{{ metaswap_lookup_table_path|replace("\\", "/") }}" 4 | 5 | # Specify an absolute path here to use a packaged version of iMOD Coupler 6 | IMOD_COUPLER_EXEC_DEVEL="imodc" 7 | IMOD_COUPLER_EXEC_REGRESSION="${IMOD_COLLECTOR_REGRESSION}/imod_coupler/imodc.exe" 8 | METASWAP_DLL_DEP_DIR_DEVEL="${IMOD_COLLECTOR_DEVEL}/metaswap" 9 | METASWAP_DLL_DEP_DIR_REGRESSION="${IMOD_COLLECTOR_REGRESSION}/metaswap" 10 | METASWAP_DLL_DEVEL="${IMOD_COLLECTOR_DEVEL}/metaswap/MetaSWAP.dll" 11 | METASWAP_DLL_REGRESSION="${IMOD_COLLECTOR_REGRESSION}/metaswap/MetaSWAP.dll" 12 | MODFLOW_DLL_DEVEL="${IMOD_COLLECTOR_DEVEL}/modflow6/libmf6.dll" 13 | MODFLOW_DLL_REGRESSION="${IMOD_COLLECTOR_REGRESSION}/modflow6/libmf6.dll" 14 | RIBASIM_DLL_DEP_DIR_DEVEL="${IMOD_COLLECTOR_DEVEL}/ribasim/bin" 15 | RIBASIM_DLL_DEP_DIR_REGRESSION="${IMOD_COLLECTOR_REGRESSION}/ribasim/bin" 16 | RIBASIM_DLL_DEVEL="${IMOD_COLLECTOR_DEVEL}/ribasim/bin/libribasim.dll" 17 | RIBASIM_DLL_REGRESSION="${IMOD_COLLECTOR_REGRESSION}/ribasim/bin/libribasim.dll" 18 | -------------------------------------------------------------------------------- /imod_coupler/kernelwrappers/ribasim_wrapper.py: -------------------------------------------------------------------------------- 1 | from ctypes import byref, c_int, create_string_buffer 2 | 3 | from xmipy import XmiWrapper 4 | 5 | 6 | class RibasimWrapper(XmiWrapper): 7 | def get_constant_int(self, name: str) -> int: 8 | match name: 9 | case "BMI_LENVARTYPE": 10 | return 51 11 | 
case "BMI_LENGRIDTYPE": 12 | return 17 13 | case "BMI_LENVARADDRESS": 14 | return 68 15 | case "BMI_LENCOMPONENTNAME": 16 | return 256 17 | case "BMI_LENVERSION": 18 | return 256 19 | case "BMI_LENERRMESSAGE": 20 | return 1025 21 | raise ValueError(f"{name} does not map to an integer exposed by Ribasim") 22 | 23 | def init_julia(self) -> None: 24 | argument = create_string_buffer(0) 25 | self.lib.init_julia(c_int(0), byref(argument)) 26 | 27 | def shutdown_julia(self) -> None: 28 | self.lib.shutdown_julia(c_int(0)) 29 | 30 | def update_subgrid_level(self) -> None: 31 | self.lib.update_subgrid_level() 32 | 33 | def execute(self, config_file: str) -> None: 34 | self._execute_function(self.lib.execute, config_file.encode()) 35 | -------------------------------------------------------------------------------- /tests/test_primod/test_mixin.py: -------------------------------------------------------------------------------- 1 | from imod.mf6 import Modflow6Simulation, StructuredDiscretization 2 | from imod.mf6.mf6_wel_adapter import Mf6Wel 3 | from primod.model_mixin import MetaModMixin 4 | from pytest import fixture 5 | 6 | 7 | @fixture(scope="function") 8 | def coupling_dict() -> dict[str, str]: 9 | return { 10 | "mf6_model": "GWF_1", 11 | "mf6_recharge_package": "rch_msw", 12 | "mf6_msw_well_pkg": "wells_msw", 13 | } 14 | 15 | 16 | def test_get_mf6_pkgs_for_metaswap__sprinkling( 17 | coupling_dict: dict[str, str], coupled_mf6_model: Modflow6Simulation 18 | ): 19 | mf6_dis, mf6_wel = MetaModMixin.get_mf6_pkgs_for_metaswap( 20 | coupling_dict, coupled_mf6_model 21 | ) 22 | 23 | assert isinstance(mf6_dis, StructuredDiscretization) 24 | assert isinstance(mf6_wel, Mf6Wel) 25 | 26 | 27 | def test_get_mf6_pkgs_for_metaswap__no_sprinkling( 28 | coupling_dict: dict[str, str], coupled_mf6_model: Modflow6Simulation 29 | ): 30 | coupling_dict.pop("mf6_msw_well_pkg") 31 | mf6_dis, mf6_wel = MetaModMixin.get_mf6_pkgs_for_metaswap( 32 | coupling_dict, coupled_mf6_model 33 | ) 34 | 35 | assert isinstance(mf6_dis, StructuredDiscretization) 36 | assert mf6_wel is None 37 | -------------------------------------------------------------------------------- /.teamcity/settings.kts: -------------------------------------------------------------------------------- 1 | import jetbrains.buildServer.configs.kotlin.* 2 | 3 | /* 4 | The settings script is an entry point for defining a single 5 | TeamCity project. TeamCity looks for the 'settings.kts' file in a 6 | project directory and runs it if it's found, so the script name 7 | shouldn't be changed and its package should be the same as the 8 | project's id. 9 | 10 | The script should contain a single call to the project() function 11 | with a Project instance or an init function as an argument. 12 | 13 | VcsRoots, BuildTypes, and Templates of this project must be 14 | registered inside project using the vcsRoot(), buildType(), and 15 | template() methods respectively. 16 | 17 | Subprojects can be defined either in their own settings.kts or by 18 | calling the subProjects() method in this project. 19 | 20 | To debug settings scripts in command-line, run the 21 | 22 | mvnDebug org.jetbrains.teamcity:teamcity-configs-maven-plugin:generate 23 | 24 | command and attach your debugger to the port 8000. 
25 | 26 | To debug in IntelliJ Idea, open the 'Maven Projects' tool window (View -> 27 | Tool Windows -> Maven Projects), find the generate task 28 | node (Plugins -> teamcity-configs -> teamcity-configs:generate), 29 | the 'Debug' option is available in the context menu for the task. 30 | */ 31 | 32 | version = "2024.03" 33 | project(_Self.Project) -------------------------------------------------------------------------------- /pre-processing/primod/mapping/svat_user_demand_mapping.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import xarray as xr 4 | from numpy.typing import NDArray 5 | 6 | from primod.mapping.mappingbase import GenericMapping 7 | from primod.typing import Int 8 | 9 | 10 | class SvatUserDemandMapping(GenericMapping): 11 | def __init__( 12 | self, 13 | name: str, 14 | gridded_user_demand: xr.DataArray, 15 | user_demand_ids: pd.Series, 16 | svat: xr.DataArray, 17 | index: NDArray[Int], 18 | ): 19 | condition = svat > 0 20 | user_id = xr.where(condition, gridded_user_demand, np.nan) # type: ignore 21 | include = user_id.notnull().to_numpy() 22 | user_id_values = user_id.to_numpy()[include].astype(int) 23 | user_index = np.searchsorted(user_demand_ids, user_id_values) 24 | 25 | coupled_svats = svat.where(gridded_user_demand.notnull(), other=-1) 26 | # Set all "deeper" subunits (higher than 0) to -1 so they are filtered away. 27 | coupled_svats.loc[{"subunit": slice(1, None)}] = -1 28 | svat_index_values = coupled_svats.to_numpy().ravel()[index] 29 | svat_index_values = svat_index_values[svat_index_values > 0].astype(int) 30 | 31 | self.name = name 32 | self.dataframe = pd.DataFrame( 33 | data={"user_demand_index": user_index, "svat_index": svat_index_values} 34 | ) 35 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint 2 | on: 3 | push: 4 | branches: [main] 5 | pull_request: 6 | concurrency: 7 | group: ${{ github.workflow }}-${{ github.ref }} 8 | cancel-in-progress: true 9 | jobs: 10 | ruff: 11 | name: Ruff 12 | runs-on: ubuntu-latest 13 | continue-on-error: true 14 | steps: 15 | - uses: actions/checkout@v5 16 | - uses: prefix-dev/setup-pixi@v0.9.3 17 | with: 18 | pixi-version: "latest" 19 | - name: Run ruff format 20 | run: | 21 | pixi run format-check 22 | - name: Run ruff check 23 | run: | 24 | pixi run ruff 25 | 26 | mypy: 27 | name: Mypy 28 | runs-on: ubuntu-latest 29 | continue-on-error: true 30 | steps: 31 | - uses: actions/checkout@v5 32 | - uses: prefix-dev/setup-pixi@v0.9.3 33 | with: 34 | pixi-version: "latest" 35 | - name: Run mypy on imodc 36 | run: | 37 | pixi run mypy-imodc 38 | - name: Run mypy on primod 39 | run: | 40 | pixi run mypy-primod 41 | 42 | check_packages: 43 | name: Check packages 44 | runs-on: ubuntu-latest 45 | continue-on-error: true 46 | steps: 47 | - uses: actions/checkout@v5 48 | - uses: prefix-dev/setup-pixi@v0.9.3 49 | with: 50 | pixi-version: "latest" 51 | - name: Check packages 52 | run: | 53 | pixi run check-package-primod 54 | -------------------------------------------------------------------------------- /pre-processing/primod/mapping/svat_basin_mapping.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import xarray as xr 4 | from numpy.typing import NDArray 5 | 6 | from primod.mapping.mappingbase import 
GenericMapping 7 | from primod.typing import Int 8 | 9 | 10 | class SvatBasinMapping(GenericMapping): 11 | def __init__( 12 | self, 13 | name: str, 14 | gridded_basin: xr.DataArray, 15 | basin_ids: pd.Series, 16 | svat: xr.DataArray, 17 | index: NDArray[Int], 18 | ): 19 | condition = svat > 0 20 | basin_id = xr.where(condition, gridded_basin, np.nan) # type: ignore 21 | include = basin_id.notnull().to_numpy() 22 | basin_id_values = basin_id.to_numpy()[include].astype(int) 23 | basin_index = np.searchsorted(basin_ids, basin_id_values) 24 | 25 | # TODO (Huite): I'm not entirely sure this is the correct logic! 26 | # I don't quite understand the whole index business. 27 | # This should probably be simplified for all MetaModMappings too. 28 | coupled_svats = svat.where(gridded_basin.notnull(), other=-1) 29 | svat_index_values = coupled_svats.to_numpy().ravel()[index] 30 | svat_index_values = svat_index_values[svat_index_values > 0].astype(int) 31 | 32 | self.name = name 33 | self.dataframe = pd.DataFrame( 34 | data={"basin_index": basin_index, "svat_index": svat_index_values} 35 | ) 36 | -------------------------------------------------------------------------------- /.teamcity/.idea/jarRepositories.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 9 | 10 | 14 | 15 | 19 | 20 | 24 | 25 | 29 | 30 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "imod_coupler" 7 | description = "iMOD Coupler can be used to couple hydrological kernels" 8 | readme = "README.md" 9 | authors = [ 10 | { name = "Martijn Russcher", email = "Martijn.Russcher@deltares.nl" }, 11 | { name = "Robert Leander", email = "Robert.Leander@deltares.nl" }, 12 | { name = "Joeri van Engelen", email = "Joeri.vanEngelen@deltares.nl" }, 13 | { name = "Hendrik Kok", email = "Hendrik.Kok@deltares.nl"}, 14 | { name = "Huite Bootsma", email = "Huite.Bootsma@deltares.nl" }, 15 | ] 16 | license = { text = "MIT" } 17 | classifiers = [ 18 | "Intended Audience :: Science/Research", 19 | "Topic :: Scientific/Engineering :: Hydrology", 20 | ] 21 | requires-python = ">=3.10" 22 | dependencies = [ 23 | "h5netcdf", 24 | "loguru", 25 | "numpy", 26 | "pydantic", 27 | "ribasim_api", 28 | "scipy", 29 | "tomli-w", 30 | "tomli", 31 | "xmipy", 32 | ] 33 | dynamic = ["version"] 34 | 35 | [tool.hatch.version] 36 | path = "imod_coupler/__init__.py" 37 | 38 | [project.scripts] 39 | imodc = "imod_coupler.__main__:main" 40 | 41 | [project.urls] 42 | Source = "https://github.com/Deltares/imod_coupler" 43 | 44 | [tool.ruff] 45 | lint.select = ["C4", "E", "F", "I", "NPY", "PD", "UP"] 46 | lint.ignore = ["E501", "PD004"] 47 | lint.fixable = ["I"] 48 | 49 | [tool.mypy] 50 | plugins = ["pydantic.mypy", "numpy.typing.mypy_plugin"] 51 | -------------------------------------------------------------------------------- /tests/data/bucket_model/metaswap/area_svat.inp: -------------------------------------------------------------------------------- 1 | 1 10000.0 0.000 1 1 0.100 260 1.000 1.000 2 | 2 10000.0 0.000 1 1 0.100 260 1.000 1.000 3 | 3 10000.0 0.000 1 1 0.100 260 1.000 1.000 4 | 4 10000.0 0.000 1 1 0.100 260 1.000 1.000 5 | 5 10000.0 0.000 1 1 0.100 260 1.000 1.000 6 | 6 10000.0 0.000 1 1 0.100 260 1.000 1.000 7 | 7 10000.0 0.000 1 1 0.100 260 1.000 1.000 8 | 8 10000.0 
0.000 1 1 0.100 260 1.000 1.000 9 | 9 10000.0 0.000 1 1 0.100 260 1.000 1.000 10 | 10 10000.0 0.000 1 1 0.100 260 1.000 1.000 11 | 11 10000.0 0.800 1 1 0.100 260 1.000 1.000 12 | 12 10000.0 0.800 1 1 0.100 260 1.000 1.000 13 | 13 10000.0 0.800 1 1 0.100 260 1.000 1.000 14 | 14 10000.0 0.800 1 1 0.100 260 1.000 1.000 15 | 15 10000.0 0.800 1 1 0.100 260 1.000 1.000 16 | -------------------------------------------------------------------------------- /imod_coupler/drivers/ribamod/config.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pathlib import Path 3 | from typing import Any 4 | 5 | from pydantic import BaseModel, FilePath, field_validator 6 | 7 | from imod_coupler.drivers.kernel_config import Modflow6, Ribasim 8 | 9 | 10 | class Kernels(BaseModel): 11 | modflow6: Modflow6 12 | ribasim: Ribasim 13 | 14 | 15 | class Coupling(BaseModel): 16 | mf6_model: str # the MODFLOW 6 model that will be coupled 17 | mf6_active_river_packages: dict[str, str] 18 | mf6_active_drainage_packages: dict[str, str] 19 | mf6_passive_river_packages: dict[str, str] 20 | mf6_passive_drainage_packages: dict[str, str] 21 | output_config_file: FilePath | None = None 22 | 23 | @field_validator("output_config_file") 24 | @classmethod 25 | def resolve_file_path(cls, file_path: FilePath) -> FilePath: 26 | return file_path.resolve() 27 | 28 | 29 | class RibaModConfig(BaseModel): 30 | kernels: Kernels 31 | coupling: list[Coupling] 32 | 33 | def __init__(self, config_dir: Path, **data: Any) -> None: 34 | """Model for the Ribamod config validated by pydantic 35 | 36 | The validation expects current working directory at config file level 37 | so it is changed during initialization 38 | 39 | Args: 40 | config_dir (Path): Directory where the config file resides 41 | """ 42 | os.chdir(config_dir) 43 | super().__init__(**data) 44 | 45 | @field_validator("coupling") 46 | @classmethod 47 | def restrict_coupling_count(cls, coupling: list[Coupling]) -> list[Coupling]: 48 | if len(coupling) == 0: 49 | raise ValueError("At least one coupling has to be defined.") 50 | if len(coupling) > 1: 51 | raise ValueError("Multi-model coupling is not yet supported.") 52 | return coupling 53 | -------------------------------------------------------------------------------- /scripts/generate_env_file.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import jinja2 4 | 5 | 6 | def generate_env_file() -> None: 7 | template_generator = jinja2.Environment( 8 | loader=jinja2.FileSystemLoader("scripts/templates"), 9 | autoescape=True, 10 | ) 11 | template = template_generator.get_template(".env.jinja") 12 | with open(".env", "w") as f: 13 | f.write( 14 | template.render( 15 | imod_collector_dev_path=_get_imod_collector_path("develop").resolve(), 16 | imod_collector_regression_path=_get_imod_collector_path( 17 | "regression" 18 | ).resolve(), 19 | metaswap_lookup_table_path=_get_metaswap_path().resolve(), 20 | ) 21 | ) 22 | 23 | 24 | def _get_imod_collector_path(tag: str) -> Path: 25 | """ 26 | Find an existing path of imod_collector. 
27 | Extract the numeric suffix from each path and find the path with the highest number 28 | """ 29 | search_path = Path(".imod_collector") 30 | paths = search_path.glob(f"{tag}_*") 31 | if not paths: 32 | raise ValueError(f"No paths found for tag '{tag}'") 33 | paths_with_numbers = [(path, int(path.name.split("_")[1])) for path in paths] 34 | if not paths_with_numbers: 35 | raise ValueError(f"No numeric suffixes found in paths for tag {tag}") 36 | 37 | path_with_highest_number = max(paths_with_numbers, key=lambda x: x[1])[0] 38 | return Path(path_with_highest_number) 39 | 40 | 41 | def _get_metaswap_path() -> Path: 42 | metaswap_path = Path(".imod_collector/e150_metaswap") 43 | if not metaswap_path.exists(): 44 | raise ValueError(f"Metaswap lookup table not found at {metaswap_path}") 45 | return metaswap_path 46 | 47 | 48 | if __name__ == "__main__": 49 | generate_env_file() 50 | -------------------------------------------------------------------------------- /imod_coupler/drivers/kernel_config.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel, DirectoryPath, FilePath, field_validator 2 | 3 | 4 | class Modflow6(BaseModel): 5 | dll: FilePath 6 | dll_dep_dir: DirectoryPath | None = None 7 | work_dir: DirectoryPath 8 | 9 | @field_validator("dll") 10 | @classmethod 11 | def resolve_dll(cls, dll: FilePath) -> FilePath: 12 | return dll.resolve() 13 | 14 | @field_validator("dll_dep_dir") 15 | @classmethod 16 | def resolve_dll_dep_dir( 17 | cls, dll_dep_dir: DirectoryPath | None 18 | ) -> DirectoryPath | None: 19 | if dll_dep_dir is not None: 20 | dll_dep_dir = dll_dep_dir.resolve() 21 | return dll_dep_dir 22 | 23 | 24 | class Metaswap(BaseModel): 25 | dll: FilePath 26 | dll_dep_dir: DirectoryPath | None = None 27 | work_dir: DirectoryPath 28 | 29 | @field_validator("dll") 30 | @classmethod 31 | def resolve_dll(cls, dll: FilePath) -> FilePath: 32 | return dll.resolve() 33 | 34 | @field_validator("dll_dep_dir") 35 | @classmethod 36 | def resolve_dll_dep_dir( 37 | cls, dll_dep_dir: DirectoryPath | None 38 | ) -> DirectoryPath | None: 39 | if dll_dep_dir is not None: 40 | dll_dep_dir = dll_dep_dir.resolve() 41 | return dll_dep_dir 42 | 43 | 44 | class Ribasim(BaseModel): 45 | dll: FilePath 46 | dll_dep_dir: DirectoryPath | None 47 | config_file: FilePath 48 | 49 | @field_validator("dll") 50 | @classmethod 51 | def resolve_dll(cls, dll: FilePath) -> FilePath: 52 | return dll.resolve() 53 | 54 | @field_validator("dll_dep_dir") 55 | @classmethod 56 | def resolve_dll_dep_dir( 57 | cls, dll_dep_dir: DirectoryPath | None 58 | ) -> DirectoryPath | None: 59 | if dll_dep_dir is not None: 60 | dll_dep_dir = dll_dep_dir.resolve() 61 | return dll_dep_dir 62 | -------------------------------------------------------------------------------- /imod_coupler/__main__.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import time 4 | from pathlib import Path 5 | 6 | import tomli as tomllib 7 | from loguru import logger 8 | 9 | # Trick to force the inclusion of the fft package, 10 | # because it is 'needed' by array_api_compat\numpy 11 | # To be fixed/removed as soon as possible !! 
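# (PyInstaller only bundles modules it can discover through static imports, so
# importing the submodule here is what pulls it into the frozen executable. A
# possible alternative, untested here, would be declaring it as a hidden import
# in the PyInstaller build, e.g. `--hidden-import scipy._lib.array_api_compat.numpy.fft`.)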
12 | from scipy._lib.array_api_compat.numpy import fft # noqa: F401 13 | 14 | from imod_coupler import __version__ 15 | from imod_coupler.config import BaseConfig 16 | from imod_coupler.drivers.driver import get_driver 17 | from imod_coupler.parser import parse_args 18 | from imod_coupler.utils import setup_logger 19 | 20 | 21 | def main() -> None: 22 | args = parse_args() 23 | 24 | if args.enable_debug_native: 25 | # wait for native debugging 26 | input(f"PID: {os.getpid()}, press any key to continue ....") 27 | 28 | config_path = Path(args.config_path).resolve() 29 | 30 | try: 31 | run_coupler(config_path) 32 | except: # noqa: E722 33 | logger.exception("iMOD Coupler run failed with: ") 34 | sys.exit(1) 35 | 36 | 37 | def run_coupler(config_path: Path) -> None: 38 | with open(config_path, "rb") as f: 39 | config_dict = tomllib.load(f) 40 | 41 | config_dir = config_path.parent 42 | base_config = BaseConfig(**config_dict) 43 | setup_logger(base_config.log_level, config_dir / "imod_coupler.log") 44 | logger.info(f"iMOD Coupler {__version__}") 45 | 46 | if base_config.timing: 47 | start = time.perf_counter() 48 | 49 | driver = get_driver(config_dict, config_dir, base_config) 50 | driver.execute() 51 | 52 | # Report timing 53 | if base_config.timing: 54 | driver.report_timing_totals() 55 | end = time.perf_counter() 56 | logger.info(f"Total elapsed time: {end - start:0.4f} seconds") 57 | 58 | 59 | if __name__ == "__main__": 60 | # execute only if run as a script 61 | main() 62 | -------------------------------------------------------------------------------- /pre-processing/primod/coupled_model.py: -------------------------------------------------------------------------------- 1 | import abc 2 | from collections.abc import Sequence 3 | from pathlib import Path 4 | from typing import Any 5 | 6 | from primod.driver_coupling.driver_coupling_base import DriverCoupling 7 | 8 | 9 | class CoupledModel(abc.ABC): 10 | coupling_list: Sequence[DriverCoupling] 11 | 12 | @abc.abstractmethod 13 | def write(self, directory: str | Path, *args: Any, **kwargs: Any) -> None: 14 | pass 15 | 16 | @abc.abstractmethod 17 | def write_toml(self, directory: str | Path, *args: Any, **kwargs: Any) -> None: 18 | pass 19 | 20 | @staticmethod 21 | def _merge_coupling_dicts(dicts: list[dict[str, Any]]) -> dict[str, Any]: 22 | coupling_dict: dict[str, dict[str, Any] | Any] = {} 23 | for top_dict in dicts: 24 | for top_key, top_value in top_dict.items(): 25 | if isinstance(top_value, dict): 26 | if top_key not in coupling_dict: 27 | coupling_dict[top_key] = {} 28 | for key, filename in top_value.items(): 29 | coupling_dict[top_key][key] = filename 30 | else: 31 | coupling_dict[top_key] = top_value 32 | return coupling_dict 33 | 34 | def write_exchanges(self, directory: str | Path) -> dict[str, Any]: 35 | """ 36 | Write exchanges and return their filenames for the coupler 37 | configuration file. 38 | """ 39 | directory = Path(directory) 40 | exchange_dir = Path(directory) / "exchanges" 41 | exchange_dir.mkdir(exist_ok=True, parents=True) 42 | 43 | coupling_dicts = [] 44 | for coupling in self.coupling_list: 45 | coupling_dict = coupling.write_exchanges( 46 | directory=exchange_dir, coupled_model=self 47 | ) 48 | coupling_dicts.append(coupling_dict) 49 | 50 | # FUTURE: if we support multiple MF6 models, group them by name before 51 | # merging, and return a list of coupling_dicts. 
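        # Illustration of the merge with hypothetical values:
        #   [{"mf6_model": "GWF_1", "mf6_active_river_packages": {"riv-1": "riv-1.tsv"}},
        #    {"mf6_active_river_packages": {"riv-2": "riv-2.tsv"}}]
        # becomes
        #   {"mf6_model": "GWF_1",
        #    "mf6_active_river_packages": {"riv-1": "riv-1.tsv", "riv-2": "riv-2.tsv"}},
        # i.e. nested dicts are merged key by key and scalar entries are overwritten.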
52 | merged_coupling_dict = self._merge_coupling_dicts(coupling_dicts) 53 | return merged_coupling_dict 54 | -------------------------------------------------------------------------------- /tests/fixtures/common.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import xarray as xr 4 | from imod.mf6.wel import LayeredWell 5 | from numpy import float64, int_ 6 | from numpy.typing import NDArray 7 | 8 | 9 | def grid_sizes() -> tuple[ 10 | list[float], 11 | list[float], 12 | NDArray[int_], 13 | float, 14 | float, 15 | NDArray[float64], 16 | ]: 17 | x = [100.0, 200.0, 300.0, 400.0, 500.0] 18 | y = [300.0, 200.0, 100.0] 19 | dz = np.array([0.2, 10.0, 100.0]) 20 | 21 | layer = np.arange(len(dz)) + 1 22 | dx = x[1] - x[0] 23 | dy = y[1] - y[0] 24 | 25 | return x, y, layer, dx, dy, dz 26 | 27 | 28 | def get_times() -> pd.DatetimeIndex: 29 | freq = "D" 30 | return pd.date_range(start="1/1/1971", end="8/1/1971", freq=freq) 31 | 32 | 33 | def create_wells(idomain: xr.DataArray, wel_layer: int | None = None) -> LayeredWell: 34 | """ 35 | Create wells, deactivate inactive cells. This function wouldn't be necessary 36 | if iMOD Python had a package to specify wells based on grids. 37 | """ 38 | 39 | if wel_layer is None: 40 | wel_layer = 3 41 | 42 | x = idomain.coords["x"].to_numpy() 43 | y = idomain.coords["y"].to_numpy() 44 | 45 | x_grid, y_grid = np.meshgrid(x, y) 46 | 47 | is_inactive = ~idomain.sel(layer=wel_layer).astype(bool) 48 | id_inactive = np.argwhere(is_inactive.values) + 1 49 | 50 | ix = np.ravel(x_grid) 51 | iy = np.ravel(y_grid) 52 | 53 | to_deactivate = np.full_like(ix, False, dtype=bool) 54 | for i in id_inactive: 55 | is_cell = (iy == i[0]) & (ix == i[1]) 56 | to_deactivate = to_deactivate | is_cell 57 | 58 | ix_active = ix[~to_deactivate] 59 | iy_active = iy[~to_deactivate] 60 | 61 | rate = np.zeros(ix_active.shape) 62 | layer = np.full_like(ix_active, wel_layer, dtype=int) 63 | 64 | return LayeredWell(ix_active, iy_active, layer, rate) 65 | 66 | 67 | def create_wells_max_layer(idomain: xr.DataArray) -> LayeredWell: 68 | """ 69 | Create wells in deepest layer of MODFLOW 6 model 70 | """ 71 | 72 | wel_layer = idomain.layer.max().item() 73 | return create_wells(idomain, wel_layer) 74 | -------------------------------------------------------------------------------- /tests/test_imod_coupler/test_mapping.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | from imod import mf6, msw 4 | from numpy.testing import assert_almost_equal, assert_array_equal 5 | from primod.mapping.rch_svat_mapping import RechargeSvatMapping 6 | from pytest_cases import parametrize_with_cases 7 | 8 | from imod_coupler.utils import create_mapping 9 | 10 | 11 | @parametrize_with_cases( 12 | "src_idx,tgt_idx,nsrc,ntgt,operator,expected_map_dense,expected_mask", 13 | prefix="util_", 14 | ) 15 | def test_create_mapping( 16 | src_idx, tgt_idx, nsrc, ntgt, operator, expected_map_dense, expected_mask 17 | ): 18 | """ 19 | Test create_mapping function. Argument names are equivalent to those in the 20 | create_mapping function. 
21 | """ 22 | 23 | map_out, mask = create_mapping(src_idx, tgt_idx, nsrc, ntgt, operator) 24 | 25 | assert issubclass(map_out.dtype.type, np.floating) 26 | assert issubclass(mask.dtype.type, np.integer) 27 | 28 | assert map_out.shape == (ntgt, nsrc) 29 | assert map_out.nnz == len(src_idx) 30 | assert mask.shape == (ntgt,) 31 | 32 | assert_almost_equal(map_out.toarray(), expected_map_dense) 33 | assert_array_equal(mask, expected_mask) 34 | 35 | 36 | @parametrize_with_cases("recharge", prefix="rch", has_tag="succeed") 37 | def test_recharge_mapping( 38 | recharge: mf6.Recharge, prepared_msw_model: msw.MetaSwapModel 39 | ): 40 | """ 41 | Test Recharge package validation 42 | """ 43 | index, svat = prepared_msw_model["grid"].generate_index_array() 44 | 45 | rch_svat_mapping = RechargeSvatMapping(svat, recharge, index=index) 46 | 47 | assert np.all(rch_svat_mapping.dataset["layer"] == 1) 48 | assert np.all(rch_svat_mapping.dataset["svat"] == svat) 49 | assert rch_svat_mapping.dataset["rch_id"].max() == 12 50 | assert rch_svat_mapping.dataset["rch_active"].sum() == 12 51 | 52 | 53 | @parametrize_with_cases("recharge", prefix="rch", has_tag="fail") 54 | def test_recharge_mapping_fail( 55 | recharge: mf6.Recharge, prepared_msw_model: msw.MetaSwapModel 56 | ): 57 | index, svat = prepared_msw_model["grid"].generate_index_array() 58 | 59 | with pytest.raises(ValueError): 60 | RechargeSvatMapping(svat, recharge, index=index) 61 | -------------------------------------------------------------------------------- /tests/common_scripts/mf6_water_balance/MF6_wbal_listing.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import re 4 | from enum import Enum 5 | from pathlib import Path 6 | 7 | import pandas as pd 8 | 9 | 10 | class status(Enum): 11 | NO_OPERATION = 0 12 | VOLUME_IN = 1 13 | VOLUME_OUT = 2 14 | 15 | 16 | def listfile_to_dataframe(file_in: Path) -> pd.DataFrame: 17 | ignore = ["IN - OUT", "DISCREPANCY"] 18 | df_data_out = pd.DataFrame() 19 | budgetblock_counter = -1 20 | with open(file_in) as fnin_mflist: 21 | stat = status.NO_OPERATION 22 | for line in fnin_mflist: 23 | if re.match(r"^.*TIME SUMMARY", line): 24 | stat = status.NO_OPERATION 25 | elif re.match(r"\s*OUT:\s+OUT:", line): 26 | stat = status.VOLUME_OUT 27 | postfix = "_OUT" 28 | elif re.match(r"\s*IN:\s+IN:", line): 29 | stat = status.VOLUME_IN 30 | postfix = "_IN" 31 | elif m := re.match(r"^\s*VOLUME.* BUDGET.*STRESS PERIOD\s+(\d+)", line): 32 | loose_words_in_string = m.string.strip().split() 33 | time_step = int(loose_words_in_string[-4][:-1]) 34 | stress_period = int(loose_words_in_string[-1]) 35 | budgetblock_counter = budgetblock_counter + 1 36 | df_data_out.loc[budgetblock_counter, "timestep"] = int(time_step) 37 | df_data_out.loc[budgetblock_counter, "stress_period"] = int( 38 | stress_period 39 | ) 40 | stat = status.NO_OPERATION 41 | elif any(pattern in line for pattern in ignore): 42 | continue 43 | elif stat in [status.VOLUME_IN, status.VOLUME_OUT]: 44 | matches = re.match(r"^\s*([\s\w\-]+\s*=)\s*([^\s]+)", line) 45 | if matches: 46 | if "TOTAL IN" in line or "TOTAL OUT" in line: 47 | continue 48 | splitter = matches.group(1) 49 | _, part2 = re.split(splitter, line)[-2:] 50 | thisval = float(part2.split()[0]) 51 | pkgtype = re.sub(r"\s+", "_", re.sub(r"\s*=\s*", "", splitter)) 52 | pkgname = f"{pkgtype}:{part2.split()[1]}" # modflow6 format 53 | df_data_out.loc[budgetblock_counter, pkgname + postfix] = thisval 54 | return df_data_out 55 | 
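A minimal usage sketch of the listing parser above (assuming the `tests` directory is on the import path, as in the test suite; the listing filename and the example column name are illustrative only):

```python
from pathlib import Path

from common_scripts.mf6_water_balance.MF6_wbal_listing import listfile_to_dataframe

# One row per "VOLUME BUDGET" block found in the MODFLOW 6 listing file.
df = listfile_to_dataframe(Path("T-MODEL-D.LST"))  # illustrative path

# Besides "timestep" and "stress_period", each row holds one column per budget
# term, suffixed with _IN or _OUT; the exact names depend on the packages in
# the listing (e.g. something like "RCH:RCH_MSW_IN").
print(df[["timestep", "stress_period"]].head())
```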
-------------------------------------------------------------------------------- /tests/test_imod_coupler/test_utilities.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import numpy as np 4 | import pandas as pd 5 | 6 | 7 | def diff_per_column_dataframe( 8 | df1: pd.DataFrame, 9 | df2: pd.DataFrame, 10 | tolerance: dict[str, tuple[float, float]], 11 | ) -> tuple[dict[str, list[int]], dict[str, list[int]], dict[str, tuple[bool, bool]]]: 12 | failed = {} 13 | absfailedndx = {} 14 | relfailedndx = {} 15 | for varname in list(df1)[1:]: 16 | if varname not in df2: 17 | failed[varname] = (True, True) 18 | s1 = df1[varname] 19 | s2 = df2[varname] 20 | if varname in tolerance: 21 | (abstol, reltol) = tolerance[varname] 22 | else: 23 | (abstol, reltol) = tolerance["default"] 24 | # only where both are nan 25 | nan_match = np.logical_and(s1.isna(), s2.isna()) 26 | # where abolute matches, but the matching nans are excused 27 | abs_match = np.logical_or((abs(s2 - s1) <= abstol), nan_match) 28 | # where relative matches, but the matching nans are excused 29 | rel_match = np.logical_or((abs(s2 - s1) <= abs(s1 * reltol)), nan_match) 30 | absfailedndx[varname] = list(s2[~abs_match].index) 31 | relfailedndx[varname] = list(s2[~rel_match].index) 32 | failed[varname] = ( 33 | len(absfailedndx[varname]) > 0, 34 | len(relfailedndx[varname]) > 0, 35 | ) 36 | return absfailedndx, relfailedndx, failed 37 | 38 | 39 | def numeric_csvfiles_equal( 40 | file1: Path, 41 | file2: Path, 42 | sep: str, 43 | tolerance: dict[str, tuple[float, float]], 44 | ) -> bool: 45 | df1 = pd.read_csv( 46 | file1, 47 | sep=sep, 48 | ) 49 | df2 = pd.read_csv( 50 | file2, 51 | sep=sep, 52 | ) 53 | if df1.shape[0] != df2.shape[0]: 54 | print(f"the dataframes in {file1} and {file2} differ in length") 55 | return False 56 | 57 | # rownumbers with significant difference per variable 58 | _, _, failed = diff_per_column_dataframe(df1, df2, tolerance) 59 | # is there any significant difference whatsoever? 60 | columns_with_differences = [v[0] for v in failed.items() if v[1] != (False, False)] 61 | is_different = any(columns_with_differences) 62 | 63 | # print column name with differences 64 | if is_different: 65 | print("columns with differences:") 66 | print(columns_with_differences) 67 | 68 | return not is_different 69 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
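# (In this repository the `build-imod-coupler` task in pixi.toml runs
# PyInstaller, which generates such files, e.g. an imodc.spec.)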
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | 131 | # Tests 132 | tests/temp 133 | report.xml 134 | 135 | .pixi 136 | .imod_collector -------------------------------------------------------------------------------- /tests/common_scripts/mf6_water_balance/combine.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from pathlib import Path 4 | 5 | import netCDF4 as nc 6 | import numpy as np 7 | import pandas as pd 8 | 9 | from common_scripts.mf6_water_balance.MF6_wbal_listing import listfile_to_dataframe 10 | 11 | 12 | def create_modflow_waterbalance_file( 13 | mf_listfile: Path, 14 | output_file_xlsx: Path | None = None, 15 | output_file_netcdf: Path | None = None, 16 | output_file_csv: Path | None = None, 17 | ) -> None: 18 | """ 19 | this function creates a csv, excel or netcdf file with the water-balance information found in 20 | the .lst file of the modflow groundwater flow model 21 | """ 22 | modflow_results_dataframe = listfile_to_dataframe(mf_listfile) 23 | 24 | if output_file_netcdf is not None: 25 | print("Writing NetCDF") 26 | writeNC(output_file_netcdf, modflow_results_dataframe, singlevar=False) 27 | if output_file_csv is not None: 28 | print("Writing CSV") 29 | writeCSV(output_file_csv, modflow_results_dataframe) 30 | if output_file_xlsx is not None: 31 | print("Writing XLSX") 32 | writeXLS(output_file_xlsx, modflow_results_dataframe) 33 | 34 | 35 | def writeNC(ncname: Path, df: pd.DataFrame, singlevar: bool): 36 | nvar = len(df.columns) 37 | with nc.Dataset(ncname, "w") as ds: 38 | ds.createDimension("time", len(df.index)) 39 | if singlevar: 40 | namelen = 22 41 | ds.createDimension("id", len(df.columns)) 42 | ds.createDimension("nchar", namelen) 43 | xchgvar = 
ds.createVariable( 44 | "exchange", 45 | "f8", 46 | ( 47 | "time", 48 | "id", 49 | ), 50 | ) 51 | namevar = ds.createVariable( 52 | "varname", 53 | "S1", 54 | ( 55 | "id", 56 | "nchar", 57 | ), 58 | ) 59 | xchgvar[:] = df.to_numpy() 60 | 61 | for ivar in range(nvar): 62 | varname = df.columns[ivar] 63 | if singlevar: 64 | namevar[ivar] = nc.stringtochar(np.array([varname], f"S{namelen:d}")) 65 | else: 66 | xchgvar = ds.createVariable(varname, "f8", ("time",)) 67 | xchgvar[:] = df[varname].to_numpy() 68 | 69 | 70 | def writeXLS(xlsname: Path, df: pd.DataFrame) -> None: 71 | writer = pd.ExcelWriter(xlsname) 72 | df.to_excel(writer, sheet_name="combined") 73 | writer.save() 74 | 75 | 76 | def writeCSV(csvname: Path, df: pd.DataFrame) -> None: 77 | colsep = ";" 78 | 79 | df.to_csv(csvname, sep=colsep, na_rep="nan") 80 | -------------------------------------------------------------------------------- /scripts/download_imod_collector.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import xml.etree.ElementTree as ET 4 | import zipfile 5 | from pathlib import Path 6 | 7 | import httpx 8 | from tqdm import tqdm 9 | 10 | 11 | def download_imod_collector(tag: str | None) -> None: 12 | build_id, build_number = _get_build_info(tag) 13 | folder_name = (tag or "develop") + f"_{build_number}" 14 | target_folder = Path(".imod_collector") / folder_name 15 | 16 | if target_folder.exists(): 17 | print( 18 | f"iMOD collector already downloaded at '{target_folder}', remove the folder if you want to enforce re-downloading." 19 | ) 20 | return 21 | 22 | token = os.environ["TEAMCITY_TOKEN"] 23 | # check for the old naming convention 24 | if tag == "regression": 25 | _stream(build_id, token, "imod_coupler_windows", target_folder) 26 | else: 27 | _stream(build_id, token, "imod_collector", target_folder) 28 | 29 | 30 | def _stream(build_id: int, token: int, name: str, target_folder: str): 31 | with httpx.stream( 32 | "GET", 33 | f"https://dpcbuild.deltares.nl/app/rest/builds/{build_id}/artifacts/content/{name}.zip", 34 | headers={"Authorization": f"Bearer {token}"}, 35 | ) as response: 36 | response.raise_for_status() 37 | zip_path = Path(".pixi/imod_coupler_windows.zip") 38 | _download_to_file(response, zip_path) 39 | _unzip_to_target(target_folder, zip_path) 40 | 41 | os.remove(zip_path) 42 | 43 | 44 | def _get_build_info(tag: str | None) -> tuple[str, str]: 45 | token = os.environ["TEAMCITY_TOKEN"] 46 | tag_string = f",tag:{tag}" if tag else "" 47 | info_url = f"https://dpcbuild.deltares.nl/app/rest/builds/buildType:iMOD6_IMOD6collectorDaily_ReleaseX64,count:1,branch:main,status:SUCCESS{tag_string}" 48 | 49 | info_response = httpx.get( 50 | info_url, 51 | headers={"Authorization": f"Bearer {token}"}, 52 | ) 53 | info_response.raise_for_status() 54 | info_xml = ET.fromstring(info_response.content) 55 | return info_xml.attrib["id"], info_xml.attrib["number"] 56 | 57 | 58 | def _download_to_file(response: httpx.Response, target_path: Path) -> None: 59 | with open(target_path, "wb") as f: 60 | progress_bar = tqdm( 61 | total=int(response.headers["Content-Length"]), 62 | unit_scale=True, 63 | unit="B", 64 | unit_divisor=1024, 65 | desc="Downloading iMOD collector", 66 | ) 67 | for chunk in response.iter_bytes(chunk_size=1024): 68 | if chunk: 69 | f.write(chunk) 70 | progress_bar.update(len(chunk)) 71 | 72 | 73 | def _unzip_to_target(target_folder: Path, source_path: Path) -> None: 74 | with zipfile.ZipFile(source_path) as z: 75 | for file in 
tqdm(z.namelist(), desc="Extracting iMOD collector", unit="files"): 76 | z.extract(file, target_folder) 77 | 78 | 79 | if __name__ == "__main__": 80 | download_imod_collector(sys.argv[1] if len(sys.argv) > 1 else None) 81 | -------------------------------------------------------------------------------- /pre-processing/primod/mapping/node_svat_mapping.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import xarray as xr 3 | from imod import mf6 4 | from imod.msw.fixed_format import VariableMetaData 5 | from numpy.typing import NDArray 6 | 7 | from primod.mapping.mappingbase import MetaModMapping 8 | from primod.typing import Int 9 | 10 | 11 | class NodeSvatMapping(MetaModMapping): 12 | """ 13 | This contains the data to connect MODFLOW 6 cells (user nodes) to MetaSWAP 14 | svats. 15 | 16 | This class is responsible for the file `nodenr2svat.dxc`. 17 | 18 | Parameters 19 | ---------- 20 | svat: array of floats (xr.DataArray) 21 | SVAT units. This array must have a subunit coordinate to describe 22 | different land uses. 23 | modflow_dis: mf6.StructuredDiscretization 24 | Modflow 6 structured discretization 25 | """ 26 | 27 | _file_name = "nodenr2svat.dxc" 28 | _metadata_dict = { 29 | "mod_id": VariableMetaData(10, 1, 9999999, int), 30 | "free": VariableMetaData(2, None, None, str), 31 | "svat": VariableMetaData(10, 1, 9999999, int), 32 | "layer": VariableMetaData(5, 0, 9999, int), 33 | } 34 | 35 | _with_subunit = ("mod_id", "svat", "layer") 36 | _to_fill = ("free",) 37 | 38 | def __init__( 39 | self, 40 | svat: xr.DataArray, 41 | modflow_dis: mf6.StructuredDiscretization, 42 | index: NDArray[Int], 43 | ): 44 | super().__init__() 45 | self.index = index 46 | self.dataset["svat"] = svat 47 | self.dataset["layer"] = xr.full_like(svat, 1) 48 | idomain_top_layer = modflow_dis["idomain"].sel(layer=1, drop=True) 49 | # Test if equal to or larger than 1, to ignore idomain == -1 as well. 50 | # Don't assign to self.dataset, as grid extent might differ from svat 51 | self.idomain_active = idomain_top_layer >= 1 52 | self._pkgcheck() 53 | self._create_mod_id() 54 | 55 | def _create_mod_id(self) -> None: 56 | """ 57 | Create modflow indices for the recharge layer, which is where 58 | infiltration will take place. 
59 | """ 60 | self.dataset["mod_id"] = xr.full_like( 61 | self.dataset["svat"], fill_value=0, dtype=np.int64 62 | ) 63 | 64 | n_subunit = self.dataset["subunit"].size 65 | n_mod = self.idomain_active.sum() 66 | 67 | idomain_active = self.idomain_active.to_numpy() 68 | 69 | # idomain does not have a subunit dimension, so tile for n_subunits 70 | mod_id_1d = np.tile(np.arange(1, n_mod + 1), (n_subunit, 1)) 71 | 72 | self.dataset["mod_id"].to_numpy()[:, idomain_active] = mod_id_1d 73 | 74 | def _pkgcheck(self) -> None: 75 | # Check if active msw cell inactive in idomain 76 | active = self.dataset["svat"] != 0 77 | inactive_in_idomain = active > self.idomain_active 78 | 79 | if inactive_in_idomain.any(): 80 | raise ValueError( 81 | "Active MetaSWAP cell detected in inactive cell in Modflow6 idomain" 82 | ) 83 | -------------------------------------------------------------------------------- /pixi.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | name = "imod_coupler" 3 | version = "0.1.0" 4 | channels = ["conda-forge"] 5 | platforms = ["win-64", "linux-64"] 6 | 7 | [system-requirements] 8 | linux = "4.4.0" 9 | 10 | [tasks] 11 | # Build 12 | build-imod-coupler = "rm -rf dist && pyinstaller imod_coupler/__main__.py --name imodc" 13 | 14 | [dependencies] 15 | geopandas = ">=1.0.0" 16 | netCDF4 = "*" 17 | loguru = "*" 18 | numpy = ">=2.0" 19 | pydantic = ">=2.11.0" 20 | pyinstaller = "*" 21 | python = ">=3.10" 22 | xarray = ">=2025.8.0" 23 | scipy = "*" 24 | tomli = "*" 25 | tomli-w = "*" 26 | xmipy = "*" 27 | imod = "1.0.0.post1" 28 | 29 | [pypi-dependencies] 30 | ribasim = { git = "https://github.com/Deltares/Ribasim.git", tag = "v2025.6.0", subdirectory = "python/ribasim" } 31 | ribasim_testmodels = { git = "https://github.com/Deltares/Ribasim.git", tag = "v2025.6.0", subdirectory = "python/ribasim_testmodels" } 32 | imod_coupler = { path = ".", editable = true } 33 | primod = { path = "pre-processing", editable = true } 34 | 35 | [feature.common.tasks] 36 | # Install 37 | install-metaswap-testmodels = "svn checkout https://repos.deltares.nl/repos/DSCTestbench/trunk/cases/e150_metaswap/f00_common/c00_common/LHM2016_v01vrz .imod_collector/e150_metaswap" 38 | install-imod-collector = "python scripts/download_imod_collector.py" 39 | install-imod-collector-regression = "python scripts/download_imod_collector.py regression" 40 | generate-env-file = "python scripts/generate_env_file.py" 41 | 42 | # Tests 43 | test-primod = "pytest --junitxml=report.xml tests/test_primod" 44 | 45 | [feature.common.dependencies] 46 | python-build = "*" 47 | httpx = "*" 48 | ipython = "*" 49 | jupyterlab = "*" 50 | mypy = "*" 51 | pytest = "*" 52 | pytest-cases = "*" 53 | pytest-dotenv = "*" 54 | pytest-xdist = "*" 55 | ruff = "*" 56 | tqdm = "*" 57 | twine = "*" 58 | 59 | [feature.dev.tasks] 60 | # Install 61 | install-test-dependencies = { depends-on = [ 62 | "install-metaswap-testmodels", 63 | "install-imod-collector", 64 | "install-imod-collector-regression", 65 | "generate-env-file", 66 | ] } 67 | 68 | test-imod-coupler = "pytest -vvv -s --numprocesses=auto --dist=loadgroup --basetemp=tests/temp --junitxml=report.xml tests/test_imod_coupler" 69 | tests = { depends-on = ["test-primod", "test-imod-coupler"] } 70 | # Lint 71 | mypy-imodc = "mypy --ignore-missing-imports --strict imod_coupler" 72 | mypy-primod = "mypy --ignore-missing-imports pre-processing/primod" 73 | format = "ruff format ." 74 | format-check = "ruff format --check ." 
75 | ruff = "ruff check ." 76 | check-package-primod = { cmd = "rm --recursive --force dist && python -m build && twine check --strict dist/*", cwd = "pre-processing" } 77 | lint = { depends-on = ["format", "ruff", "mypy-imodc", "mypy-primod"] } 78 | # Publish primod 79 | publish-primod = { cmd = "rm --recursive --force dist && python -m build && twine check dist/* && twine upload dist/*", cwd = "pre-processing" } 80 | 81 | [feature.py312.dependencies] 82 | python = "3.12.*" 83 | 84 | [environments] 85 | default = { features = ["py312"], solve-group = "py312" } 86 | dev = { features = ["py312", "dev", "common"], solve-group = "py312" } 87 | py312 = { features = ["py312", "common"], solve-group = "py312" } 88 | -------------------------------------------------------------------------------- /pre-processing/primod/mapping/rch_svat_mapping.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import xarray as xr 3 | from imod import mf6 4 | from imod.msw.fixed_format import VariableMetaData 5 | from numpy.typing import NDArray 6 | 7 | from primod.mapping.mappingbase import MetaModMapping 8 | from primod.typing import Int 9 | 10 | 11 | class RechargeSvatMapping(MetaModMapping): 12 | """ 13 | This contains the data to connect MODFLOW 6 recharge cells to MetaSWAP 14 | svats. 15 | 16 | This class is responsible for the file `rchindex2svat.dxc`. 17 | 18 | Parameters 19 | ---------- 20 | svat: array of floats (xr.DataArray) 21 | SVAT units. This array must have a subunit coordinate to describe 22 | different land uses. 23 | recharge: mf6.Recharge 24 | Modflow 6 Recharge package to connect to. Note that the recharge rate 25 | should be provided as a 2D grid with a (y, x) dimension. MetaModMapping 26 | will throw an error if a grid is provided with different dimensions. 27 | """ 28 | 29 | _file_name = "rchindex2svat.dxc" 30 | _metadata_dict = { 31 | "rch_id": VariableMetaData(10, 1, 9999999, int), 32 | "free": VariableMetaData(2, None, None, str), 33 | "svat": VariableMetaData(10, 1, 9999999, int), 34 | "layer": VariableMetaData(5, 0, 9999, int), 35 | } 36 | 37 | _with_subunit = ("rch_id", "svat", "layer") 38 | _to_fill = ("free",) 39 | 40 | def __init__(self, svat: xr.DataArray, recharge: mf6.Recharge, index: NDArray[Int]): 41 | super().__init__() 42 | self.index = index 43 | self.dataset["svat"] = svat 44 | self.dataset["layer"] = xr.full_like(svat, 1) 45 | rate = recharge.dataset["rate"] 46 | if "layer" in rate.dims: 47 | rate_no_layer = rate.squeeze("layer", drop=True) 48 | else: 49 | # 'layer' can be still in a coord, so ensure is also dropped 50 | rate_no_layer = rate.drop_vars("layer", errors="ignore") 51 | self.dataset["rch_active"] = rate_no_layer.notnull() 52 | self._pkgcheck() 53 | self._create_rch_id() 54 | 55 | def _create_rch_id(self) -> None: 56 | self.dataset["rch_id"] = xr.full_like( 57 | self.dataset["svat"], fill_value=0, dtype=np.int64 58 | ) 59 | 60 | n_subunit = self.dataset["subunit"].size 61 | n_rch = self.dataset["rch_active"].sum() 62 | 63 | rch_active = self.dataset["rch_active"].to_numpy() 64 | 65 | # recharge does not have a subunit dimension, so tile for n_subunits 66 | rch_id: NDArray[np.int_] = np.tile(np.arange(1, n_rch + 1), (n_subunit, 1)) 67 | 68 | self.dataset["rch_id"].to_numpy()[:, rch_active] = rch_id 69 | 70 | def _pkgcheck(self) -> None: 71 | rch_dims = self.dataset["rch_active"].dims 72 | if rch_dims != ("y", "x"): 73 | raise ValueError( 74 | "Recharge grid can only have dimensions ('y', 'x'). 
Got " 75 | f"{rch_dims} instead" 76 | ) 77 | 78 | # Check if active msw cell inactive in recharge 79 | active = self.dataset["svat"] != 0 80 | inactive_in_rch = active > self.dataset["rch_active"] 81 | 82 | if inactive_in_rch.any(): 83 | raise ValueError( 84 | "Active MetaSWAP cell detected in inactive cell in Modflow6 recharge" 85 | ) 86 | -------------------------------------------------------------------------------- /imod_coupler/drivers/metamod/config.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pathlib import Path 3 | from typing import Any 4 | 5 | from pydantic import BaseModel, FilePath, ValidationInfo, field_validator 6 | 7 | from imod_coupler.drivers.kernel_config import Metaswap, Modflow6 8 | 9 | 10 | class Kernels(BaseModel): 11 | modflow6: Modflow6 12 | metaswap: Metaswap 13 | 14 | 15 | class Coupling(BaseModel): 16 | mf6_model: str # the MODFLOW 6 model that will be coupled 17 | mf6_msw_recharge_pkg: str # the recharge package that will be used for coupling 18 | mf6_msw_well_pkg: str | None = ( 19 | None # the well package that will be used for coupling when sprinkling is active 20 | ) 21 | mf6_msw_node_map: FilePath # the path to the node map file 22 | mf6_msw_recharge_map: FilePath # the path to the recharge map file 23 | mf6_msw_sprinkling_map_groundwater: FilePath | None = ( 24 | None # the path to the sprinkling map file 25 | ) 26 | # for deprecation warning on label 27 | mf6_msw_sprinkling_map: FilePath | None = None 28 | 29 | output_config_file: FilePath | None = None 30 | 31 | @field_validator("mf6_msw_node_map", "mf6_msw_recharge_map", "output_config_file") 32 | @classmethod 33 | def resolve_file_path(cls, file_path: FilePath) -> FilePath: 34 | return file_path.resolve() 35 | 36 | @field_validator("mf6_msw_sprinkling_map_groundwater") 37 | @classmethod 38 | def validate_mf6_msw_sprinkling_map( 39 | cls, mf6_msw_sprinkling_map_groundwater: FilePath | None, info: ValidationInfo 40 | ) -> FilePath | None: 41 | assert info.data is not None 42 | if mf6_msw_sprinkling_map_groundwater is not None: 43 | if info.data.get("mf6_msw_well_pkg") is None: 44 | raise ValueError( 45 | "If 'mf6_msw_sprinkling_map_groundwater is set, then `mf6_msw_well_pkg` needs to be set." 
46 | ) 47 | return mf6_msw_sprinkling_map_groundwater.resolve() 48 | return mf6_msw_sprinkling_map_groundwater 49 | 50 | @field_validator("mf6_msw_sprinkling_map") 51 | @classmethod 52 | def validate_sprinkling_map_label( 53 | cls, mf6_msw_sprinkling_map: FilePath | None 54 | ) -> None: 55 | if mf6_msw_sprinkling_map is not None: 56 | raise ValueError( 57 | "The use of 'mf6_msw_sprinkling_map' label is depricated; now use mf6_msw_sprinkling_map_groundwater" 58 | ) 59 | 60 | 61 | class MetaModConfig(BaseModel): 62 | kernels: Kernels 63 | coupling: list[Coupling] 64 | 65 | def __init__(self, config_dir: Path, **data: Any) -> None: 66 | """Model for the MetaMod config validated by pydantic 67 | 68 | The validation expects current working directory at config file level 69 | so it is changed during initialization 70 | 71 | Args: 72 | config_dir (Path): Directory where the config file resides 73 | """ 74 | os.chdir(config_dir) 75 | super().__init__(**data) 76 | 77 | @field_validator("coupling") 78 | @classmethod 79 | def restrict_coupling_count(cls, coupling: list[Coupling]) -> list[Coupling]: 80 | if len(coupling) == 0: 81 | raise ValueError("At least one coupling has to be defined.") 82 | if len(coupling) > 1: 83 | raise ValueError("Multi-model coupling is not yet supported.") 84 | return coupling 85 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/msw/sel_key_svat_per.inp: -------------------------------------------------------------------------------- 1 | decSic 0 2 | decSpdmac 0 3 | decSpdmic 0 4 | decS01 0 5 | decS02 0 6 | decS03 0 7 | decS04 0 8 | decS05 0 9 | decS06 0 10 | decS07 0 11 | decS08 0 12 | decS09 0 13 | decS10 0 14 | decS11 0 15 | decS12 0 16 | decS13 0 17 | decS14 0 18 | decS15 0 19 | decS16 0 20 | decS17 0 21 | decS18 0 22 | Pm 1 23 | Psgw 1 24 | Pssw 1 25 | Esp 0 26 | Eic 0 27 | Epd 0 28 | Ebs 0 29 | Tact 0 30 | qrun 1 31 | qdr 1 32 | qspgw 1 33 | qmodf 1 34 | vcr 1 35 | qmodfbot 0 36 | qsim 0 37 | qsimcorrmf 0 38 | dprzvg 0 39 | dprztb 0 40 | htvg 0 41 | lai 0 42 | slcv 0 43 | Siccap 0 44 | fT 0 45 | fEic 0 46 | fEbs 0 47 | fEpd 0 48 | ETref 0 49 | Ebspot 0 50 | Tpot 0 51 | Trel 0 52 | ETact 1 53 | Psswdem 0 54 | qinf 0 55 | qmr 0 56 | qm02 0 57 | qm03 0 58 | qm04 0 59 | qm05 0 60 | qm06 0 61 | qm07 0 62 | qm08 0 63 | qm09 0 64 | qm10 0 65 | qm11 0 66 | qm12 0 67 | qm13 0 68 | qm14 0 69 | qm15 0 70 | qm16 0 71 | qm17 0 72 | qm18 0 73 | Sic 0 74 | Spdmac 0 75 | Spdmic 0 76 | S01 0 77 | Ssd01 0 78 | Ssd02 0 79 | Ssd03 0 80 | Ssd04 0 81 | Ssd05 0 82 | Ssd06 0 83 | Ssd07 0 84 | Ssd08 0 85 | Ssd09 0 86 | Ssd10 0 87 | Ssd11 0 88 | Ssd12 0 89 | Ssd13 0 90 | Ssd14 0 91 | Ssd15 0 92 | Ssd16 0 93 | Ssd17 0 94 | Ssd18 0 95 | Ssdtot 0 96 | decStot 1 97 | phrz01 0 98 | phrz02 0 99 | phrz03 0 100 | phrz04 0 101 | phrz05 0 102 | phrz06 0 103 | phrz07 0 104 | phrz08 0 105 | phrz09 0 106 | phrz10 0 107 | phrz11 0 108 | phrz12 0 109 | phrz13 0 110 | phrz14 0 111 | phrz15 0 112 | phrz16 0 113 | phrz17 0 114 | phrz18 0 115 | Hpd 0 116 | Hgw 1 117 | dHgw 0 118 | sc1 1 119 | Hgwmodf 0 120 | dHgwmodf 0 121 | Hsw 0 122 | TempCmnday 0 123 | TempCmxday 0 124 | TempC 0 125 | Nrel 0 126 | Rad 0 127 | Hum 0 128 | Wind 0 129 | Rnt 0 130 | HG 0 131 | -------------------------------------------------------------------------------- /tests/data/bucket_model/metaswap/sel_key_svat_per.inp: -------------------------------------------------------------------------------- 1 | decSic 0 2 | decSpdmac 0 3 | decSpdmic 0 4 | decS01 0 5 | decS02 0 6 | 
decS03 0 7 | decS04 0 8 | decS05 0 9 | decS06 0 10 | decS07 0 11 | decS08 0 12 | decS09 0 13 | decS10 0 14 | decS11 0 15 | decS12 0 16 | decS13 0 17 | decS14 0 18 | decS15 0 19 | decS16 0 20 | decS17 0 21 | decS18 0 22 | Pm 1 23 | Psgw 0 24 | Pssw 0 25 | Esp 0 26 | Eic 0 27 | Epd 0 28 | Ebs 0 29 | Tact 0 30 | qrun 1 31 | qdr 0 32 | qspgw 0 33 | qmodf 0 34 | vcr 0 35 | qmodfbot 0 36 | qsim 1 37 | qsimcorrmf 0 38 | dprzvg 0 39 | dprztb 0 40 | htvg 0 41 | lai 0 42 | slcv 0 43 | Siccap 0 44 | fT 0 45 | fEic 0 46 | fEbs 0 47 | fEpd 0 48 | ETref 1 49 | Ebspot 0 50 | Tpot 0 51 | Trel 0 52 | ETact 1 53 | Psswdem 0 54 | qinf 0 55 | qmr 0 56 | qm02 0 57 | qm03 0 58 | qm04 0 59 | qm05 0 60 | qm06 0 61 | qm07 0 62 | qm08 0 63 | qm09 0 64 | qm10 0 65 | qm11 0 66 | qm12 0 67 | qm13 0 68 | qm14 0 69 | qm15 0 70 | qm16 0 71 | qm17 0 72 | qm18 0 73 | Sic 0 74 | Spdmac 0 75 | Spdmic 0 76 | S01 0 77 | Ssd01 0 78 | Ssd02 0 79 | Ssd03 0 80 | Ssd04 0 81 | Ssd05 0 82 | Ssd06 0 83 | Ssd07 0 84 | Ssd08 0 85 | Ssd09 0 86 | Ssd10 0 87 | Ssd11 0 88 | Ssd12 0 89 | Ssd13 0 90 | Ssd14 0 91 | Ssd15 0 92 | Ssd16 0 93 | Ssd17 0 94 | Ssd18 0 95 | Ssdtot 0 96 | decStot 0 97 | phrz01 0 98 | phrz02 0 99 | phrz03 0 100 | phrz04 0 101 | phrz05 0 102 | phrz06 0 103 | phrz07 0 104 | phrz08 0 105 | phrz09 0 106 | phrz10 0 107 | phrz11 0 108 | phrz12 0 109 | phrz13 0 110 | phrz14 0 111 | phrz15 0 112 | phrz16 0 113 | phrz17 0 114 | phrz18 0 115 | Hpd 0 116 | Hgw 1 117 | dHgw 0 118 | sc1 0 119 | Hgwmodf 0 120 | dHgwmodf 0 121 | Hsw 0 122 | TempCmnday 0 123 | TempCmxday 0 124 | TempC 0 125 | Nrel 0 126 | Rad 0 127 | Hum 0 128 | Wind 0 129 | Rnt 0 130 | HG 0 131 | -------------------------------------------------------------------------------- /.teamcity/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4.0.0 4 | iMOD6_Coupler Config DSL Script 5 | iMOD6_Coupler 6 | iMOD6_Coupler_dsl 7 | 1.0-SNAPSHOT 8 | 9 | 10 | org.jetbrains.teamcity 11 | configs-dsl-kotlin-parent 12 | 1.0-SNAPSHOT 13 | 14 | 15 | 16 | 17 | jetbrains-all 18 | https://download.jetbrains.com/teamcity-repository 19 | 20 | true 21 | 22 | 23 | 24 | teamcity-server 25 | https://dpcbuild.deltares.nl/app/dsl-plugins-repository 26 | 27 | true 28 | 29 | 30 | 31 | 32 | 33 | 34 | JetBrains 35 | https://download.jetbrains.com/teamcity-repository 36 | 37 | 38 | 39 | 40 | ${basedir} 41 | 42 | 43 | kotlin-maven-plugin 44 | org.jetbrains.kotlin 45 | ${kotlin.version} 46 | 47 | 48 | 49 | 50 | compile 51 | process-sources 52 | 53 | compile 54 | 55 | 56 | 57 | test-compile 58 | process-test-sources 59 | 60 | test-compile 61 | 62 | 63 | 64 | 65 | 66 | org.jetbrains.teamcity 67 | teamcity-configs-maven-plugin 68 | ${teamcity.dsl.version} 69 | 70 | kotlin 71 | target/generated-configs 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | org.jetbrains.teamcity 80 | configs-dsl-kotlin-latest 81 | ${teamcity.dsl.version} 82 | compile 83 | 84 | 85 | org.jetbrains.teamcity 86 | configs-dsl-kotlin-plugins-latest 87 | 1.0-SNAPSHOT 88 | pom 89 | compile 90 | 91 | 92 | org.jetbrains.kotlin 93 | kotlin-stdlib-jdk8 94 | ${kotlin.version} 95 | compile 96 | 97 | 98 | org.jetbrains.kotlin 99 | kotlin-script-runtime 100 | ${kotlin.version} 101 | compile 102 | 103 | 104 | -------------------------------------------------------------------------------- /pre-processing/primod/mapping/wel_svat_mapping.py: -------------------------------------------------------------------------------- 1 | from io import TextIOWrapper 2 | from typing import Any 3 | 4 | import numpy as np 
5 | import pandas as pd 6 | import xarray as xr 7 | from imod.mf6.mf6_wel_adapter import Mf6Wel 8 | from imod.msw.fixed_format import VariableMetaData 9 | from numpy.typing import NDArray 10 | 11 | from primod.mapping.mappingbase import MetaModMapping 12 | from primod.typing import Int 13 | 14 | 15 | class WellSvatMapping(MetaModMapping): 16 | """ 17 | This contains the data to connect MODFLOW 6 well cells to MetaSWAP svats. 18 | 19 | This class is responsible for the file `wellindex2svat.dxc`. 20 | 21 | Parameters 22 | ---------- 23 | svat: array of floats (xr.DataArray) 24 | SVAT units. This array must have a subunit coordinate to describe 25 | different land uses. 26 | well: mf6.Well 27 | Modflow 6 Well package to connect to. 28 | """ 29 | 30 | _file_name = "wellindex2svat.dxc" 31 | _metadata_dict = { 32 | "wel_id": VariableMetaData(10, 1, 9999999, int), 33 | "free": VariableMetaData(2, None, None, str), 34 | "svat": VariableMetaData(10, 1, 9999999, int), 35 | "layer": VariableMetaData(5, 0, 9999, int), 36 | } 37 | 38 | _with_subunit = ("wel_id", "svat", "layer") 39 | _to_fill = ("free",) 40 | 41 | def __init__(self, svat: xr.DataArray, well: Mf6Wel, index: NDArray[Int]): 42 | super().__init__() 43 | self.index = index 44 | self.well = well 45 | well_mod_id, well_svat, layer = self._create_well_id(svat) 46 | self.dataset["wel_id"] = well_mod_id 47 | self.dataset["svat"] = well_svat 48 | self.dataset["layer"] = layer 49 | 50 | def _create_well_id( 51 | self, svat: pd.DataFrame 52 | ) -> tuple[NDArray[Any], NDArray[Any], NDArray[Any]]: 53 | """ 54 | Get modflow indices, svats, and layer number for the wells 55 | """ 56 | well_cellid = self.well["cellid"] 57 | if len(well_cellid.coords["dim_cellid"]) != 3: 58 | raise TypeError("Coupling to unstructured grids is not supported.") 59 | 60 | # Convert to Python's 0-based index 61 | well_layer = well_cellid.sel(dim_cellid="layer").data 62 | well_row = well_cellid.sel(dim_cellid="row").data - 1 63 | well_column = well_cellid.sel(dim_cellid="column").data - 1 64 | 65 | n_subunit = svat["subunit"].size 66 | 67 | well_svat = svat.to_numpy()[:, well_row, well_column] 68 | well_active = well_svat != 0 69 | 70 | well_svat_1d = well_svat[well_active] 71 | 72 | # Tile well_layers for each subunit 73 | layer = np.tile(well_layer, (n_subunit, 1)) 74 | layer_1d = layer[well_active] 75 | 76 | well_id = well_cellid.coords["ncellid"] + 1 77 | well_id_1d = np.tile(well_id, (n_subunit, 1))[well_active] 78 | 79 | return (well_id_1d, well_svat_1d, layer_1d) 80 | 81 | def _render(self, file: TextIOWrapper, *args: Any, **kwargs: Any) -> None: 82 | data_dict: dict[str, Any] = {} 83 | data_dict["svat"] = self.dataset["svat"].to_numpy() 84 | data_dict["layer"] = self.dataset["layer"].to_numpy() 85 | data_dict["wel_id"] = self.dataset["wel_id"].to_numpy() 86 | 87 | for var in self._to_fill: 88 | data_dict[var] = "" 89 | 90 | dataframe = pd.DataFrame( 91 | data=data_dict, columns=list(self._metadata_dict.keys()) 92 | ) 93 | 94 | self._check_range(dataframe) 95 | self.write_dataframe_fixed_width(file, dataframe) 96 | -------------------------------------------------------------------------------- /tests/data/bucket_model/metaswap/MOD-SIM.TXT: -------------------------------------------------------------------------------- 1 | 1 1 1 2 | 2 2 1 3 | 3 3 1 4 | 4 4 1 5 | 5 5 1 6 | 6 6 1 7 | 7 7 1 8 | 8 8 1 9 | 9 9 1 10 | 10 10 1 11 | 11 11 1 12 | 12 12 1 13 | 13 13 1 14 | 14 14 1 15 | 15 15 1 16 | 16 16 1 17 | 17 17 1 18 | 18 18 1 19 | 19 19 1 20 | 20 20 1 21 | 21 21 1 22 | 22 
22 1 23 | 23 23 1 24 | 24 24 1 25 | 25 25 1 26 | 26 26 1 27 | 27 27 1 28 | 28 28 1 29 | 29 29 1 30 | 30 30 1 31 | 31 31 1 32 | 32 32 1 33 | 33 33 1 34 | 34 34 1 35 | 35 35 1 36 | 36 36 1 37 | 37 37 1 38 | 38 38 1 39 | 39 39 1 40 | 40 40 1 41 | 41 41 1 42 | 42 42 1 43 | 43 43 1 44 | 44 44 1 45 | 45 45 1 46 | 46 46 1 47 | 47 47 1 48 | 48 48 1 49 | 49 49 1 50 | 50 50 1 51 | 51 51 1 52 | 52 52 1 53 | 53 53 1 54 | 54 54 1 55 | 55 55 1 56 | 56 56 1 57 | 57 57 1 58 | 58 58 1 59 | 59 59 1 60 | 60 60 1 61 | 61 61 1 62 | 62 62 1 63 | 63 63 1 64 | 64 64 1 65 | 65 65 1 66 | 66 66 1 67 | 67 67 1 68 | 68 68 1 69 | 69 69 1 70 | 70 70 1 71 | 71 71 1 72 | 72 72 1 73 | 73 73 1 74 | 74 74 1 75 | 75 75 1 76 | 76 76 1 77 | 77 77 1 78 | 78 78 1 79 | 79 79 1 80 | 80 80 1 81 | 81 81 1 82 | 82 82 1 83 | 83 83 1 84 | 84 84 1 85 | 85 85 1 86 | 86 86 1 87 | 87 87 1 88 | 88 88 1 89 | 89 89 1 90 | 90 90 1 91 | 91 91 1 92 | 92 92 1 93 | 93 93 1 94 | 94 94 1 95 | 95 95 1 96 | 96 96 1 97 | 97 97 1 98 | 98 98 1 99 | 99 99 1 100 | 100 100 1 101 | 101 101 1 102 | 102 102 1 103 | 103 103 1 104 | 104 104 1 105 | 105 105 1 106 | 106 106 1 107 | 107 107 1 108 | 108 108 1 109 | 109 109 1 110 | 110 110 1 111 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # iMOD Coupler 2 | 3 | The `imod_coupler` is used to couple hydrological kernels. 4 | It currently focuses on groundwater and supports coupling between MetaSWAP and Modflow6. 5 | 6 | It is a command-line app that can be run via 7 | 8 | ```sh 9 | imodc /path/to/imod_coupler.toml 10 | ``` 11 | 12 | To get help on its usage, run 13 | 14 | ```sh 15 | imodc --help 16 | ``` 17 | 18 | ## Issues 19 | 20 | Deltares colleagues can find the issue tracker at [Jira](https://issuetracker.deltares.nl/secure/RapidBoard.jspa?rapidView=469&projectKey=IMOD6&view=planning&selectedIssue=IMOD6-840) 21 | 22 | ## Contributing 23 | 24 | In order to develop on `imod_coupler` locally, please follow these steps: 25 | 26 | - Create an access token at the [TeamCity build server](https://dpcbuild.deltares.nl/profile.html?item=accessTokens#). 27 | Choose permission scope: ``. 28 | - Store the token in your local user environment as `TEAMCITY_TOKEN`. 29 | This token will be used to download artifacts from TeamCity, so make sure to store it safely. 30 | - Download and install [pixi](https://pixi.sh). 31 | - Download and install [svn](https://tortoisesvn.net/downloads.html). 32 | Make sure to install the svn command line tools as well. 33 | - Download the Git repository of `imod_coupler` and navigate to the root of the project. 34 | - Create the environment by executing the following in your terminal: 35 | 36 | ```sh 37 | pixi install --environment=dev 38 | ``` 39 | 40 | - Install the test dependencies by executing the following in your terminal. 41 | It automatically downloads the [latest imod_collector](https://dpcbuild.deltares.nl/buildConfiguration/iMOD6_IMOD6collectorDaily_ReleaseX64?branch=%3Cdefault%3E&mode=builds) and [regression imod_collector](https://dpcbuild.deltares.nl/buildConfiguration/iMOD6_IMOD6collectorDaily_ReleaseX64?branch=%3Cdefault%3E&mode=builds&tag=regression) from the build server. 42 | It downloads the [MetaSWAP lookup table](https://repos.deltares.nl/repos/DSCTestbench/trunk/cases/e150_metaswap/f00_common/c00_common/LHM2016_v01vrz). 43 | It also generates a `.env` file that contains the paths to the downloaded imod_collectors.
44 | 45 | ```sh 46 | pixi run install-test-dependencies 47 | ``` 48 | 49 | `install-test-dependencies` creates a `.env` file in the root of the project with the required environment variables, pointing to the imod_collector paths that can be found in the `.pixi` folder. 50 | 51 | - The tests can then be run with: 52 | 53 | ```sh 54 | pixi run tests 55 | ``` 56 | 57 | - Lint the codebase with: 58 | 59 | ```sh 60 | pixi run lint 61 | ``` 62 | 63 | - When developing with Visual Studio Code, it is recommended to open the project via `open-vscode.bat`. 64 | This will open the project in a new VS Code window with the correct environment variables set. 65 | 66 | ### Debugging 67 | 68 | When debugging the unit tests in Visual Studio Code with the test explorer, you can encounter some problems. 69 | Both MODFLOW 6 and MetaSWAP might behave unpredictably when they are initialized and finalized multiple times. 70 | 71 | When you only run, rather than debug, the unit tests, this is not an issue, since a switch statement determines whether we should call `subprocess.run()` or stay within the main thread. 72 | See the fixture for `run_coupler_function` for more information. 73 | 74 | ### Troubleshooting 75 | 76 | If you encounter errors while running the tests, your pip dependencies might be outdated. 77 | This can happen when you have pulled the latest changes of imod_coupler. 78 | In that case, you need to update the pip dependencies as well. 79 | Try running: 80 | 81 | ```sh 82 | pixi run update-git-dependencies 83 | ``` 84 | -------------------------------------------------------------------------------- /imod_coupler/logging/exchange_collector.py: -------------------------------------------------------------------------------- 1 | import abc 2 | from pathlib import Path 3 | from typing import Any 4 | 5 | import netCDF4 as nc 6 | import numpy as np 7 | import tomli 8 | from numpy.typing import NDArray 9 | from typing_extensions import Self 10 | 11 | 12 | class AbstractExchange(abc.ABC): 13 | @abc.abstractmethod 14 | def write_exchange(self, exchange: NDArray[Any], time: float) -> None: 15 | pass 16 | 17 | @abc.abstractmethod 18 | def finalize(self) -> None: 19 | pass 20 | 21 | 22 | class NetcdfExchangeLogger(AbstractExchange): 23 | output_file: Path 24 | name: str 25 | 26 | def __init__(self, name: str, output_dir: Path, properties: dict[str, Any]): 27 | if not (Path.is_dir(output_dir)): 28 | Path.mkdir(output_dir) 29 | output_file = Path.joinpath(output_dir, name + ".nc") 30 | self.ds = nc.Dataset(output_file, "w") 31 | self.name = name 32 | 33 | def initfile(self, ndx: int) -> None: 34 | self.nodedim = self.ds.createDimension("id", ndx) 35 | self.timedim = self.ds.createDimension("time", None) 36 | self.timevar = self.ds.createVariable("time", "f8", ("time",)) 37 | self.datavar = self.ds.createVariable( 38 | "xchg", 39 | "f8", 40 | ( 41 | "time", 42 | "id", 43 | ), 44 | ) 45 | self.pos = 0 46 | 47 | def write_exchange( 48 | self, exchange: NDArray[Any], time: float, sync: bool = False 49 | ) -> None: 50 | if len(self.ds.dimensions) == 0: 51 | self.initfile(len(exchange)) 52 | loc = np.where(self.timevar[:] == time) 53 | if np.size(loc) > 0: 54 | first = int(loc[0]) 55 | self.datavar[first, :] = exchange[:] 56 | else: 57 | self.timevar[self.pos] = time 58 | self.datavar[self.pos, :] = exchange[:] 59 | self.pos += 1 60 | if sync: 61 | self.ds.sync() 62 | 63 | def finalize(self) -> None: 64 | self.ds.close() 65 | 66 | 67 | class ExchangeCollector: 68 | exchanges: dict[str,
AbstractExchange] 69 | output_dir: Path 70 | 71 | def __init__(self, config: dict[str, dict[str, Any]] | None = None): 72 | self.exchanges = {} 73 | 74 | @classmethod 75 | def from_file(cls, output_toml_file: Path) -> Self: 76 | with open(output_toml_file, "rb") as f: 77 | toml_dict = tomli.load(f) 78 | return cls.from_config(toml_dict) 79 | 80 | @classmethod 81 | def from_config(cls, config: dict[str, dict[str, Any]]) -> Self: 82 | new_instance = cls() 83 | general_settings = config["general"] 84 | new_instance.output_dir = Path(general_settings["output_dir"]) 85 | 86 | exchanges_config = config["exchanges"] 87 | 88 | for exchange_name, dict_def in exchanges_config.items(): 89 | new_instance.exchanges[exchange_name] = new_instance.create_exchange_object( 90 | exchange_name, dict_def 91 | ) 92 | return new_instance 93 | 94 | def log_exchange(self, name: str, exchange: NDArray[Any], time: float) -> None: 95 | if name in self.exchanges.keys(): 96 | self.exchanges[name].write_exchange(exchange, time) 97 | 98 | def create_exchange_object( 99 | self, flux_name: str, dict_def: dict[str, Any] 100 | ) -> AbstractExchange: 101 | typename = dict_def["type"] 102 | if typename == "netcdf": 103 | return NetcdfExchangeLogger(flux_name, self.output_dir, dict_def) 104 | raise ValueError("unknown type of exchange logger") 105 | 106 | def finalize(self) -> None: 107 | for exchange in self.exchanges.values(): 108 | exchange.finalize() 109 | -------------------------------------------------------------------------------- /imod_coupler/drivers/driver.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import sys 5 | from abc import ABC, abstractmethod 6 | from pathlib import Path 7 | from typing import Any 8 | 9 | from loguru import logger 10 | 11 | from imod_coupler.config import BaseConfig 12 | 13 | 14 | def resolve_path(libname: str) -> str: 15 | match sys.platform.lower(): 16 | case "win32": 17 | env_var = "PATH" 18 | case "linux" | "linux2" | "darwin": 19 | env_var = "LD_LIBRARY_PATH" 20 | case _: 21 | return libname 22 | 23 | if os.path.isfile(libname): 24 | return libname 25 | if env_var in os.environ: 26 | pathdef: str = os.environ[env_var] 27 | for dir in pathdef.split(os.pathsep): 28 | full_path = Path(dir) / libname 29 | if full_path.is_file(): 30 | return str(full_path) 31 | return libname # if resolution failed, give it back to the call site 32 | 33 | 34 | class Driver(ABC): 35 | """Driver base class 36 | 37 | Inherit from this class when creating a new driver 38 | """ 39 | 40 | def execute(self) -> None: 41 | """Execute the driver""" 42 | 43 | # This will initialize and couple the kernels 44 | self.initialize() 45 | 46 | # Run the time loop 47 | while self.get_current_time() < self.get_end_time(): 48 | self.update() 49 | 50 | logger.info("New simulation terminated normally") 51 | 52 | self.finalize() 53 | 54 | @abstractmethod 55 | def initialize(self) -> None: 56 | """Initialize the coupled models""" 57 | ... 58 | 59 | @abstractmethod 60 | def update(self) -> None: 61 | """Perform a single time step""" 62 | ... 63 | 64 | @abstractmethod 65 | def finalize(self) -> None: 66 | """Cleanup the resources""" 67 | ... 68 | 69 | @abstractmethod 70 | def get_current_time(self) -> float: 71 | """Return current time""" 72 | ... 73 | 74 | @abstractmethod 75 | def get_end_time(self) -> float: 76 | """Return end time""" 77 | ...
78 | 79 | @abstractmethod 80 | def report_timing_totals(self) -> None: 81 | """Report total time spent on coupling""" 82 | ... 83 | 84 | 85 | def get_driver( 86 | config_dict: dict[str, Any], config_dir: Path, base_config: BaseConfig 87 | ) -> Driver: 88 | from imod_coupler.drivers.metamod.config import MetaModConfig 89 | from imod_coupler.drivers.metamod.metamod import MetaMod 90 | from imod_coupler.drivers.ribametamod.config import RibaMetaModConfig 91 | from imod_coupler.drivers.ribametamod.ribametamod import RibaMetaMod 92 | from imod_coupler.drivers.ribamod.config import RibaModConfig 93 | from imod_coupler.drivers.ribamod.ribamod import RibaMod 94 | 95 | # resolve library locations by searching the platform's library search path 96 | for kernel in config_dict["driver"]["kernels"].values(): 97 | if "dll" in kernel: 98 | kernel["dll"] = resolve_path(kernel["dll"]) 99 | 100 | if base_config.driver_type == "metamod": 101 | metamod_config = MetaModConfig(config_dir=config_dir, **config_dict["driver"]) 102 | return MetaMod(base_config, metamod_config) 103 | elif base_config.driver_type == "ribamod": 104 | ribamod_config = RibaModConfig(config_dir=config_dir, **config_dict["driver"]) 105 | return RibaMod(base_config, ribamod_config) 106 | elif base_config.driver_type == "ribametamod": 107 | ribametamod_config = RibaMetaModConfig( 108 | config_dir=config_dir, **config_dict["driver"] 109 | ) 110 | return RibaMetaMod(base_config, ribametamod_config) 111 | else: 112 | raise ValueError(f"Driver type {base_config.driver_type} is not supported.") 113 | -------------------------------------------------------------------------------- /tests/test_imod_coupler/test_config.py: -------------------------------------------------------------------------------- 1 | import operator 2 | from functools import reduce 3 | from pathlib import Path 4 | from typing import Any 5 | 6 | import pydantic 7 | import pytest 8 | import tomli 9 | import tomli_w 10 | from primod.metamod import MetaMod 11 | from pytest_cases import parametrize_with_cases 12 | 13 | from imod_coupler.__main__ import run_coupler 14 | 15 | 16 | def get_from_container(data_dict: dict[Any, Any], map_list: list[Any]) -> Any: 17 | """Gets the nested value of a container 18 | Adapted from https://stackoverflow.com/a/14692747/11038610""" 19 | return reduce(operator.getitem, map_list, data_dict) 20 | 21 | 22 | def set_container(data_dict: dict[Any, Any], map_list: list[Any], value: Any) -> None: 23 | """Sets the nested value of a container 24 | Adapted from https://stackoverflow.com/a/14692747/11038610 25 | """ 26 | get_from_container(data_dict, map_list[:-1])[map_list[-1]] = value 27 | 28 | 29 | cases_missing_files = [ 30 | ["driver", "kernels", "modflow6", "dll"], 31 | ["driver", "kernels", "metaswap", "dll"], 32 | ["driver", "coupling", 0, "mf6_msw_node_map"], 33 | ["driver", "coupling", 0, "mf6_msw_recharge_map"], 34 | ["driver", "coupling", 0, "mf6_msw_sprinkling_map_groundwater"], 35 | ] 36 | 37 | 38 | @pytest.mark.parametrize( 39 | "map_list", 40 | cases_missing_files, 41 | ) 42 | @parametrize_with_cases("metamod_sprinkling") 43 | def test_missing_files( 44 | metamod_sprinkling: MetaMod, 45 | map_list: list[Any], 46 | tmp_path: Path, 47 | modflow_dll_devel: Path, 48 | metaswap_dll_devel: Path, 49 | metaswap_dll_dep_dir_devel: Path, 50 | ) -> None: 51 | """This test ensures that missing files result in a ValidationError""" 52 | 53 | metamod_sprinkling.write( 54 | tmp_path, 55 | modflow6_dll=modflow_dll_devel, 56 | metaswap_dll=metaswap_dll_devel, 57 |
metaswap_dll_dependency=metaswap_dll_dep_dir_devel, 58 | ) 59 | 60 | config_path = tmp_path / metamod_sprinkling._toml_name 61 | 62 | with open(config_path, "rb") as f: 63 | config_dict = tomli.load(f) 64 | 65 | # Create temp file 66 | tmp_file = tmp_path / "tmp_file" 67 | tmp_file.touch() 68 | 69 | # Let the dict value point to the file 70 | set_container(config_dict, map_list, str(tmp_file)) 71 | 72 | # Write the config file 73 | with open(config_path, "wb") as f: 74 | tomli_w.dump(config_dict, f) 75 | 76 | # Delete the tmp_file 77 | tmp_file.unlink() 78 | 79 | with pytest.raises(pydantic.ValidationError): 80 | run_coupler(config_path) 81 | 82 | 83 | @parametrize_with_cases("metamod_sprinkling") 84 | def test_sprinkling_requires_files( 85 | metamod_sprinkling: MetaMod, 86 | tmp_path: Path, 87 | modflow_dll_devel: Path, 88 | metaswap_dll_devel: Path, 89 | metaswap_dll_dep_dir_devel: Path, 90 | ) -> None: 91 | """This test assures that if sprinkling is activated, 92 | sprinkling files must be there as well. 93 | If not it must raise a ValueError.""" 94 | 95 | metamod_sprinkling.write( 96 | tmp_path, 97 | modflow6_dll=modflow_dll_devel, 98 | metaswap_dll=metaswap_dll_devel, 99 | metaswap_dll_dependency=metaswap_dll_dep_dir_devel, 100 | ) 101 | 102 | config_path = tmp_path / metamod_sprinkling._toml_name 103 | 104 | with open(config_path, "rb") as f: 105 | config_dict = tomli.load(f) 106 | 107 | # Get the path of `mf6_msw_sprinkling_map` 108 | sprinkling_map = config_path.parent / get_from_container( 109 | config_dict, ["driver", "coupling", 0, "mf6_msw_sprinkling_map_groundwater"] 110 | ) 111 | # Delete `mf6_msw_sprinkling_map` 112 | 113 | sprinkling_map.unlink() 114 | 115 | with pytest.raises(pydantic.ValidationError): 116 | run_coupler(config_path) 117 | -------------------------------------------------------------------------------- /tests/fixtures/fixture_ribasim.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | 3 | import numpy as np 4 | import pandas as pd 5 | import pytest 6 | import pytest_cases 7 | import ribasim 8 | import ribasim_testmodels 9 | 10 | from imod_coupler.kernelwrappers.ribasim_wrapper import RibasimWrapper 11 | 12 | solver_algorithm: str = "QNDF" 13 | 14 | 15 | def add_subgrid(model: ribasim.Model) -> ribasim.Model: 16 | """Add 1:1 subgrid levels to model""" 17 | 18 | profile_df = model.basin.profile.df 19 | _, basin_id = np.unique(profile_df["node_id"], return_inverse=True) 20 | geometry = model.basin.node.df["geometry"] 21 | x = geometry.x.iloc[basin_id].to_numpy() 22 | y = geometry.y.iloc[basin_id].to_numpy() 23 | subgrid_df = pd.DataFrame( 24 | data={ 25 | "node_id": profile_df["node_id"], 26 | "subgrid_id": basin_id, 27 | "basin_level": profile_df["level"], 28 | "subgrid_level": profile_df["level"], 29 | "meta_x": x, 30 | "meta_y": y, 31 | } 32 | ) 33 | model.basin.subgrid.df = subgrid_df 34 | return model 35 | 36 | 37 | @pytest_cases.fixture(scope="function") 38 | def ribasim_bucket_model() -> ribasim.Model: 39 | bucket = ribasim_testmodels.bucket_model() 40 | bucket.endtime = datetime(2023, 1, 1, 0, 0) 41 | bucket.solver.algorithm = solver_algorithm 42 | return add_subgrid(bucket) 43 | 44 | 45 | @pytest_cases.fixture(scope="function") 46 | def ribasim_bucket_model_no_subgrid() -> ribasim.Model: 47 | bucket = ribasim_testmodels.bucket_model() 48 | bucket.endtime = datetime(2023, 1, 1, 0, 0) 49 | bucket.solver.algorithm = solver_algorithm 50 | return bucket 51 | 52 | 53 | 
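# A small illustration of what ``add_subgrid`` (defined above) produces, kept as a
# comment so that it does not interfere with the fixtures in this module. The
# profile values are purely illustrative and not taken from an actual test model:
#
#     profile_df = pd.DataFrame({"node_id": [1, 1], "level": [0.0, 1.0]})
#     _, basin_id = np.unique(profile_df["node_id"], return_inverse=True)
#     # basin_id == array([0, 0])
#
# Every profile row then becomes one subgrid row: ``subgrid_id`` equals the
# 0-based basin index, ``basin_level`` and ``subgrid_level`` both copy the profile
# ``level``, and the basin node coordinates are stored as ``meta_x``/``meta_y``.
# This is what makes the coupling between basins and subgrid elements 1:1.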
@pytest_cases.fixture(scope="function") 54 | def ribasim_backwater_model() -> ribasim.Model: 55 | backwater = ribasim_testmodels.backwater_model() 56 | backwater.solver.algorithm = solver_algorithm 57 | backwater.solver.reltol = 1e-08 58 | backwater.solver.abstol = 1e-08 59 | return add_subgrid(backwater) 60 | 61 | 62 | @pytest_cases.fixture(scope="function") 63 | def ribasim_two_basin_model() -> ribasim.Model: 64 | twobasin = ribasim_testmodels.two_basin_model() 65 | twobasin.solver.algorithm = solver_algorithm 66 | return twobasin 67 | 68 | 69 | @pytest_cases.fixture(scope="function") 70 | def ribasim_two_basin_model_dbg() -> ribasim.Model: 71 | model = ribasim_testmodels.two_basin_model() 72 | model.solver.algorithm = solver_algorithm 73 | return model 74 | 75 | 76 | @pytest.fixture(scope="session") 77 | def ribasim_basic_model() -> ribasim.Model: 78 | return ribasim_testmodels.basic_model() 79 | 80 | 81 | @pytest.fixture(scope="session") 82 | def ribasim_basic_transient_model(ribasim_basic_model) -> ribasim.Model: 83 | return ribasim_testmodels.basic_transient_model(ribasim_basic_model) 84 | 85 | 86 | @pytest.fixture(scope="session") 87 | def ribasim_leaky_bucket_model() -> ribasim.Model: 88 | return ribasim_testmodels.leaky_bucket_model() 89 | 90 | 91 | @pytest.fixture(scope="session") 92 | def ribasim_user_demand_model() -> ribasim.Model: 93 | return ribasim_testmodels.user_demand_model() 94 | 95 | 96 | @pytest.fixture(scope="session", autouse=True) 97 | def load_julia( 98 | ribasim_dll_devel, 99 | ribasim_dll_dep_dir_devel, 100 | ) -> None: 101 | libribasim = RibasimWrapper(ribasim_dll_devel, ribasim_dll_dep_dir_devel) 102 | libribasim.init_julia() 103 | 104 | 105 | @pytest.fixture(scope="function") 106 | def libribasim(ribasim_dll_devel, ribasim_dll_dep_dir_devel, request) -> RibasimWrapper: 107 | # lib_path, lib_folder = libribasim_paths 108 | libribasim = RibasimWrapper(ribasim_dll_devel, ribasim_dll_dep_dir_devel) 109 | 110 | # If initialized, call finalize() at end of use 111 | request.addfinalizer(libribasim.__del__) 112 | return libribasim 113 | -------------------------------------------------------------------------------- /tests/data/modstrip/input/msw/para_sim_template.inp: -------------------------------------------------------------------------------- 1 | vegetation_mdl = 1 ! Vegetation model (1/2/3) 1= simple, 2= WOFOST, 3=2 + feedback 2 | evapotranspiration_mdl = 1 ! Evapotranspiration model (1/2/3) 1= simple, 2= PenMon as ETref, 3= PenMon full 3 | saltstress_mdl = 0 ! Salt stress model (0/1) 0= none 1 = Maas-Hoffman 4 | surfacewater_mdl = 0 ! Surface water model (0/1/2/5) 0= none 1 = Surfw 2= + Sobek 5 = +SWQN 5 | infilimsat_opt = 0 6 | netcdf_per = 0 7 | postmsw_opt = 0 ! Online postmsw (0/1) 0= not 1 = online 8 | save&restore_opt = 0 ! Enablement online restore (0/1/2) 0= not 1 = normal accuracy 2 = testing accuracy 9 | unsa_svat_path = "{{unsat_path}}" ! Location of unsa-database 10 | dtgw = 1.000000 ! Groundwater/soil water time step 11 | dtsw = 1.000000 ! Time step fast processes 12 | * 13 | * Parameters for processes 14 | * 15 | ipstep = 2 16 | nxlvage_dim = 366 17 | infilimsat_opt = 0 18 | co2 = 404.21 19 | fact_beta2 = 1.0 ! Calibration factor of beta2 (Boesten) for rainfall time step 20 | rcsoil = 0.15 ! refelection coefficient of soil 21 | * 22 | iterur1 = 3 ! Outer cycle iteration for start of smoothing 23 | iterur2 = 5 ! Outer cycle iteration with full smoothing 24 | idbg = 0.000000 ! Starting julian day of simulation 25 | iybg = 1971 ! 
Starting year of simulation 26 | tdbgsm = 0.0 ! Beginning of summer water management period 27 | tdedsm = 366.0 ! End of summer water management period 28 | clocktime = 0 ! Produces files for clocking cpu/realtime used 29 | svat_csvdigits = 1 30 | svat_gt = 1 ! File with 14-day gw. levels 31 | svat_per = 0 ! File with period-info SVATs 32 | svat_per_csv = 0 ! Files with period-info selected SVATs 33 | svat_dtgw = 0 ! File with dtgw-info SVATs 34 | svat_dtgw_csv = 1 ! Files with dtgw-info selected SVATs 35 | svat2gw_dtgw = 0 ! File with dtgw-info flow to GW as system volume 36 | svat_vg_per = 0 ! File with period-info vegetation model 37 | svat_vg_per_csv = 0 ! File with period-info vegetation model selected SVATs 38 | svat_vg_day = 0 ! File with day-info vegetation model 39 | svat_vg_day_csv = 1 ! File with day-info vegetation model selected SVATs 40 | drng_per = 0 ! File with period-info drainage links 41 | sw_per = 0 ! File with period-info Surfw 42 | sw_per_csv = 0 ! Files with period-info selected Surfw nr's 43 | sw_dtgw = 0 ! File with dtgw-info Surfw 44 | sw_dtgw_csv = 0 ! Files with dtgw-info selected Surfw nr's 45 | sw_hq_dtgw = 0 ! File with dtgw-info H,Q of Surfw 46 | sw_dtsw = 0 ! File with dtsw-info Surfw 47 | sw_hq_dtsw = 0 ! File with dtsw-info H,Q of Surfw 48 | svat_per_unf = 0 ! File with period-info SVAT for postmetaswap 49 | modf_per_unf = 0 ! File with period-info MODFLOW for postmetaswap 50 | sw_dtgw_unf = 0 ! File with dtgw-info Surfw for water quality 51 | * 52 | * 53 | -------------------------------------------------------------------------------- /tests/data/bucket_model/modflow6/time_discretization.tdis: -------------------------------------------------------------------------------- 1 | begin options 2 | time_units days 3 | start_date_time 1971-01-01T00:00:00.000000000 4 | end options 5 | 6 | begin dimensions 7 | nper 212 8 | end dimensions 9 | 10 | begin perioddata 11 | 1.0 1 1.0 12 | 1.0 1 1.0 13 | 1.0 1 1.0 14 | 1.0 1 1.0 15 | 1.0 1 1.0 16 | 1.0 1 1.0 17 | 1.0 1 1.0 18 | 1.0 1 1.0 19 | 1.0 1 1.0 20 | 1.0 1 1.0 21 | 1.0 1 1.0 22 | 1.0 1 1.0 23 | 1.0 1 1.0 24 | 1.0 1 1.0 25 | 1.0 1 1.0 26 | 1.0 1 1.0 27 | 1.0 1 1.0 28 | 1.0 1 1.0 29 | 1.0 1 1.0 30 | 1.0 1 1.0 31 | 1.0 1 1.0 32 | 1.0 1 1.0 33 | 1.0 1 1.0 34 | 1.0 1 1.0 35 | 1.0 1 1.0 36 | 1.0 1 1.0 37 | 1.0 1 1.0 38 | 1.0 1 1.0 39 | 1.0 1 1.0 40 | 1.0 1 1.0 41 | 1.0 1 1.0 42 | 1.0 1 1.0 43 | 1.0 1 1.0 44 | 1.0 1 1.0 45 | 1.0 1 1.0 46 | 1.0 1 1.0 47 | 1.0 1 1.0 48 | 1.0 1 1.0 49 | 1.0 1 1.0 50 | 1.0 1 1.0 51 | 1.0 1 1.0 52 | 1.0 1 1.0 53 | 1.0 1 1.0 54 | 1.0 1 1.0 55 | 1.0 1 1.0 56 | 1.0 1 1.0 57 | 1.0 1 1.0 58 | 1.0 1 1.0 59 | 1.0 1 1.0 60 | 1.0 1 1.0 61 | 1.0 1 1.0 62 | 1.0 1 1.0 63 | 1.0 1 1.0 64 | 1.0 1 1.0 65 | 1.0 1 1.0 66 | 1.0 1 1.0 67 | 1.0 1 1.0 68 | 1.0 1 1.0 69 | 1.0 1 1.0 70 | 1.0 1 1.0 71 | 1.0 1 1.0 72 | 1.0 1 1.0 73 | 1.0 1 1.0 74 | 1.0 1 1.0 75 | 1.0 1 1.0 76 | 1.0 1 1.0 77 | 1.0 1 1.0 78 | 1.0 1 1.0 79 | 1.0 1 1.0 80 | 1.0 1 1.0 81 | 1.0 1 1.0 82 | 1.0 1 1.0 83 | 1.0 1 1.0 84 | 1.0 1 1.0 85 | 1.0 1 1.0 86 | 1.0 1 1.0 87 | 1.0 1 1.0 88 | 1.0 1 1.0 89 | 1.0 1 1.0 90 | 1.0 1 1.0 91 | 1.0 1 1.0 92 | 1.0 1 1.0 93 | 1.0 1 1.0 94 | 1.0 1 1.0 95 | 1.0 1 1.0 96 | 1.0 1 1.0 97 | 1.0 1 1.0 98 | 1.0 1 1.0 99 | 1.0 1 1.0 100 | 1.0 1 1.0 101 | 1.0 1 1.0 102 | 1.0 1 1.0 103 | 1.0 1 1.0 104 | 1.0 1 1.0 105 | 1.0 1 1.0 106 | 1.0 1 1.0 107 | 1.0 1 1.0 108 | 1.0 1 1.0 109 | 1.0 1 1.0 110 | 1.0 1 1.0 111 | 1.0 1 1.0 112 | 1.0 1 1.0 113 | 1.0 1 1.0 114 | 1.0 1 1.0 115 | 1.0 1 1.0 116 | 1.0 1 1.0 117 | 1.0 1 1.0 118 | 
1.0 1 1.0 119 | 1.0 1 1.0 120 | 1.0 1 1.0 121 | 1.0 1 1.0 122 | 1.0 1 1.0 123 | 1.0 1 1.0 124 | 1.0 1 1.0 125 | 1.0 1 1.0 126 | 1.0 1 1.0 127 | 1.0 1 1.0 128 | 1.0 1 1.0 129 | 1.0 1 1.0 130 | 1.0 1 1.0 131 | 1.0 1 1.0 132 | 1.0 1 1.0 133 | 1.0 1 1.0 134 | 1.0 1 1.0 135 | 1.0 1 1.0 136 | 1.0 1 1.0 137 | 1.0 1 1.0 138 | 1.0 1 1.0 139 | 1.0 1 1.0 140 | 1.0 1 1.0 141 | 1.0 1 1.0 142 | 1.0 1 1.0 143 | 1.0 1 1.0 144 | 1.0 1 1.0 145 | 1.0 1 1.0 146 | 1.0 1 1.0 147 | 1.0 1 1.0 148 | 1.0 1 1.0 149 | 1.0 1 1.0 150 | 1.0 1 1.0 151 | 1.0 1 1.0 152 | 1.0 1 1.0 153 | 1.0 1 1.0 154 | 1.0 1 1.0 155 | 1.0 1 1.0 156 | 1.0 1 1.0 157 | 1.0 1 1.0 158 | 1.0 1 1.0 159 | 1.0 1 1.0 160 | 1.0 1 1.0 161 | 1.0 1 1.0 162 | 1.0 1 1.0 163 | 1.0 1 1.0 164 | 1.0 1 1.0 165 | 1.0 1 1.0 166 | 1.0 1 1.0 167 | 1.0 1 1.0 168 | 1.0 1 1.0 169 | 1.0 1 1.0 170 | 1.0 1 1.0 171 | 1.0 1 1.0 172 | 1.0 1 1.0 173 | 1.0 1 1.0 174 | 1.0 1 1.0 175 | 1.0 1 1.0 176 | 1.0 1 1.0 177 | 1.0 1 1.0 178 | 1.0 1 1.0 179 | 1.0 1 1.0 180 | 1.0 1 1.0 181 | 1.0 1 1.0 182 | 1.0 1 1.0 183 | 1.0 1 1.0 184 | 1.0 1 1.0 185 | 1.0 1 1.0 186 | 1.0 1 1.0 187 | 1.0 1 1.0 188 | 1.0 1 1.0 189 | 1.0 1 1.0 190 | 1.0 1 1.0 191 | 1.0 1 1.0 192 | 1.0 1 1.0 193 | 1.0 1 1.0 194 | 1.0 1 1.0 195 | 1.0 1 1.0 196 | 1.0 1 1.0 197 | 1.0 1 1.0 198 | 1.0 1 1.0 199 | 1.0 1 1.0 200 | 1.0 1 1.0 201 | 1.0 1 1.0 202 | 1.0 1 1.0 203 | 1.0 1 1.0 204 | 1.0 1 1.0 205 | 1.0 1 1.0 206 | 1.0 1 1.0 207 | 1.0 1 1.0 208 | 1.0 1 1.0 209 | 1.0 1 1.0 210 | 1.0 1 1.0 211 | 1.0 1 1.0 212 | 1.0 1 1.0 213 | 1.0 1 1.0 214 | 1.0 1 1.0 215 | 1.0 1 1.0 216 | 1.0 1 1.0 217 | 1.0 1 1.0 218 | 1.0 1 1.0 219 | 1.0 1 1.0 220 | 1.0 1 1.0 221 | 1.0 1 1.0 222 | 1.0 1 1.0 223 | end perioddata 224 | -------------------------------------------------------------------------------- /tests/fixtures/fixture_ribametamod.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import numpy as np 4 | import pytest_cases 5 | import xarray as xr 6 | from imod import mf6, msw 7 | 8 | from .fixture_metaswap import metaswap_model 9 | 10 | 11 | def make_msw_model( 12 | gwf: mf6.GroundwaterFlowModel, nsubunits: xr.DataArray | None = None 13 | ) -> msw.MetaSwapModel: 14 | times = gwf["time_discretization"]["time"] 15 | unsaturated_database = "./unsat_database" 16 | 17 | idomain = gwf["GWF_1"]["dis"]["idomain"] 18 | if "layer" in idomain.dims: 19 | idomain_layer1 = idomain.sel(layer=1, drop=True) 20 | if nsubunits is None: 21 | nsubunits = xr.ones_like(idomain) 22 | 23 | isubunits = np.arange(nsubunits.max()) 24 | area = xr.ones_like(idomain_layer1, dtype=np.float64) * ( 25 | np.diff(idomain.x)[0] * -np.diff(idomain.y)[0] 26 | ) 27 | 28 | # no svats where river is defined 29 | no_river = xr.full_like(area, fill_value=True, dtype=np.bool_) 30 | for sys in gwf["GWF_1"].keys(): 31 | package = gwf["GWF_1"][sys] 32 | if isinstance(package, mf6.River): 33 | cond = package["conductance"] 34 | if "layer" in cond.dims: 35 | cond = cond.isel(layer=0, drop=True) 36 | no_river.values = np.logical_and( 37 | cond.isnull().to_numpy(), # noqa: PD003 38 | no_river.notnull().to_numpy(), 39 | ) 40 | area = (area.assign_coords(subunit=0).expand_dims(subunit=isubunits)) / nsubunits 41 | area = area.where((area.subunit < nsubunits) & no_river) 42 | 43 | active = (xr.ones_like(idomain_layer1) == 1).where( 44 | nsubunits.notnull() & no_river, other=False 45 | ) 46 | 47 | # Clip off 48 | modflow_active = idomain.sel(layer=1, drop=True).astype(bool) 49 | 50 | area = area.where(modflow_active) 51 
| active = active & modflow_active 52 | 53 | msw_model = metaswap_model( 54 | times, area, active, gwf["GWF_1"]["dis"], unsaturated_database 55 | ) 56 | # remove bizar large et-flux 57 | evaporation = msw_model["meteo_grid"].dataset["evapotranspiration"] 58 | msw_model["meteo_grid"].dataset["evapotranspiration"] = evaporation * 0.01 59 | return msw_model 60 | 61 | 62 | def ad_msw_model( 63 | mf6_model: mf6.GroundwaterFlowModel, 64 | metaswap_lookup_table: Path, 65 | ) -> msw.MetaSwapModel: 66 | # for now only 1-1 svat-mf6node coupling 67 | nsubunits = xr.ones_like( 68 | mf6_model["GWF_1"]["dis"]["idomain"].sel(layer=1, drop=True), 69 | dtype=np.int32, 70 | ) 71 | msw_model = make_msw_model(mf6_model, nsubunits) 72 | # Override unsat_svat_path with path from environment 73 | msw_model.simulation_settings["unsa_svat_path"] = metaswap_lookup_table 74 | return msw_model 75 | 76 | 77 | @pytest_cases.fixture(scope="function") 78 | def msw_bucket_model( 79 | mf6_bucket_model: mf6.GroundwaterFlowModel, 80 | metaswap_lookup_table: Path, 81 | ) -> msw.MetaSwapModel: 82 | return ad_msw_model(mf6_bucket_model, metaswap_lookup_table) 83 | 84 | 85 | @pytest_cases.fixture(scope="function") 86 | def msw_backwater_model( 87 | mf6_backwater_model: mf6.GroundwaterFlowModel, 88 | metaswap_lookup_table: Path, 89 | ) -> msw.MetaSwapModel: 90 | return ad_msw_model(mf6_backwater_model, metaswap_lookup_table) 91 | 92 | 93 | @pytest_cases.fixture(scope="function") 94 | def msw_two_basin_model( 95 | mf6_two_basin_model: mf6.GroundwaterFlowModel, 96 | metaswap_lookup_table: Path, 97 | ) -> msw.MetaSwapModel: 98 | return ad_msw_model(mf6_two_basin_model, metaswap_lookup_table) 99 | 100 | 101 | @pytest_cases.fixture(scope="function") 102 | def msw_two_basin_model_3layer( 103 | mf6_two_basin_model_3layer: mf6.GroundwaterFlowModel, 104 | metaswap_lookup_table: Path, 105 | ) -> msw.MetaSwapModel: 106 | return ad_msw_model(mf6_two_basin_model_3layer, metaswap_lookup_table) 107 | -------------------------------------------------------------------------------- /.teamcity/IMODCollector/buildTypes/IMODCollector_X64development.kt: -------------------------------------------------------------------------------- 1 | package IMODCollector.buildTypes 2 | 3 | import _Self.vcsRoots.ImodCoupler 4 | import jetbrains.buildServer.configs.kotlin.* 5 | import jetbrains.buildServer.configs.kotlin.buildFeatures.PullRequests 6 | import jetbrains.buildServer.configs.kotlin.buildFeatures.commitStatusPublisher 7 | import jetbrains.buildServer.configs.kotlin.buildFeatures.pullRequests 8 | import jetbrains.buildServer.configs.kotlin.buildSteps.script 9 | 10 | object IMODCollector_X64development : BuildType({ 11 | name = "x64_development" 12 | description = "Collect all Release_x64 kernels in the iMOD6 suite" 13 | 14 | artifactRules = """ 15 | coupler/dist/ => imod_collector.zip!/ 16 | modflow6/ => imod_collector.zip!/modflow6/ 17 | metaswap/ => imod_collector.zip!/metaswap/ 18 | ribasim/ => imod_collector.zip!/ribasim/ 19 | """.trimIndent() 20 | 21 | params { 22 | param("conda_env_path", "%system.teamcity.build.checkoutDir%/imod_collector_env") 23 | param("reverse.dep.Modflow_Modflow6Release.MODFLOW6_Version", "6.6.3") 24 | param("reverse.dep.Modflow_Modflow6Release.MODFLOW6_Platform", "win64") 25 | } 26 | 27 | vcs { 28 | root(_Self.vcsRoots.ImodCoupler, "+:. 
=> ./coupler") 29 | 30 | cleanCheckout = true 31 | } 32 | 33 | steps { 34 | script { 35 | name = "Download Release Ribasim" 36 | scriptContent = """ 37 | curl -L -o ribasim_windows.zip https://github.com/Deltares/Ribasim/releases/download/v2025.6.0/ribasim_windows.zip 38 | unzip "ribasim_windows.zip" 39 | """.trimIndent() 40 | } 41 | script { 42 | name = "Install iMOD Coupler" 43 | enabled = false 44 | workingDir = "coupler" 45 | scriptContent = """ 46 | call conda activate %conda_env_path% 47 | call pip install -e . 48 | """.trimIndent() 49 | } 50 | script { 51 | name = "Create executable with pyinstaller" 52 | workingDir = "coupler" 53 | scriptContent = """ 54 | rmdir dist /s /q 55 | pixi run -e dev pyinstaller --onefile imod_coupler/__main__.py --name imodc 56 | """.trimIndent() 57 | } 58 | script { 59 | name = "Get version from imod coupler" 60 | workingDir = "coupler" 61 | scriptContent = """call dist\imodc --version""" 62 | } 63 | } 64 | 65 | features { 66 | commitStatusPublisher { 67 | vcsRootExtId = "${ImodCoupler.id}" 68 | publisher = github { 69 | githubUrl = "https://api.github.com" 70 | authType = personalToken { 71 | token = "credentialsJSON:6b37af71-1f2f-4611-8856-db07965445c0" 72 | } 73 | } 74 | } 75 | pullRequests { 76 | vcsRootExtId = "${ImodCoupler.id}" 77 | provider = github { 78 | authType = token { 79 | token = "credentialsJSON:71420214-373c-4ccd-ba32-2ea886843f62" 80 | } 81 | filterAuthorRole = PullRequests.GitHubRoleFilter.MEMBER 82 | } 83 | } 84 | } 85 | 86 | dependencies { 87 | dependency(AbsoluteId("Modflow_Modflow6Release")) { 88 | snapshot { 89 | onDependencyFailure = FailureAction.FAIL_TO_START 90 | } 91 | artifacts { 92 | artifactRules = "+:MODFLOW6.zip!** => modflow6" 93 | } 94 | } 95 | 96 | artifacts(AbsoluteId("MSWMOD_MetaSWAP_MetaSWAPBuildWin64")) { 97 | cleanDestination = true 98 | buildRule = lastSuccessful("+::branches/update_4210") 99 | artifactRules = "MetaSWAP.zip!/x64/Release => metaswap" 100 | } 101 | } 102 | 103 | requirements { 104 | equals("env.OS", "Windows_NT") 105 | } 106 | }) 107 | -------------------------------------------------------------------------------- /tests/fixtures/fixture_paths.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pathlib import Path 3 | 4 | import dotenv 5 | import pytest 6 | from pytest import FixtureRequest 7 | 8 | 9 | @pytest.fixture(scope="function") 10 | def tmp_path_dev( 11 | tmp_path: Path, 12 | ) -> Path: 13 | return tmp_path / "develop" 14 | 15 | 16 | @pytest.fixture(scope="function") 17 | def tmp_path_reg( 18 | tmp_path: Path, 19 | ) -> Path: 20 | return tmp_path / "regression" 21 | 22 | 23 | @pytest.fixture(scope="session", autouse=True) 24 | def load_dotenv() -> None: 25 | dotenv.load_dotenv() 26 | 27 | 28 | @pytest.fixture(scope="session") 29 | def imod_coupler_exec_devel() -> Path: 30 | return Path(os.environ["IMOD_COUPLER_EXEC_DEVEL"]) 31 | 32 | 33 | @pytest.fixture(scope="session") 34 | def imod_coupler_exec_regression() -> Path: 35 | return Path(os.environ["IMOD_COUPLER_EXEC_REGRESSION"]) 36 | 37 | 38 | @pytest.fixture(scope="session") 39 | def metaswap_dll_dep_dir_devel() -> Path: 40 | return Path(os.environ["METASWAP_DLL_DEP_DIR_DEVEL"]) 41 | 42 | 43 | @pytest.fixture(scope="session") 44 | def metaswap_dll_dep_dir_regression() -> Path: 45 | return Path(os.environ["METASWAP_DLL_DEP_DIR_REGRESSION"]) 46 | 47 | 48 | @pytest.fixture(scope="session") 49 | def metaswap_dll_devel() -> Path: 50 | return Path(os.environ["METASWAP_DLL_DEVEL"]) 51 | 
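# NOTE: the path fixtures in this module resolve all kernel locations from
# environment variables, which are typically provided by the ``.env`` file that
# ``pixi run install-test-dependencies`` generates (see the README). A minimal
# sketch of such a file is shown below; the paths are illustrative only, not the
# actual generated content:
#
#     IMOD_COUPLER_EXEC_DEVEL="<...>/imod_collector_devel/imodc.exe"
#     MODFLOW_DLL_DEVEL="<...>/imod_collector_devel/modflow6/libmf6.dll"
#     METASWAP_DLL_DEVEL="<...>/imod_collector_devel/metaswap/MetaSWAP.dll"
#     METASWAP_DLL_DEP_DIR_DEVEL="<...>/imod_collector_devel/metaswap"
#     METASWAP_LOOKUP_TABLE="<...>/lookup_table"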
52 | 53 | @pytest.fixture(scope="session") 54 | def metaswap_dll_regression() -> Path: 55 | return Path(os.environ["METASWAP_DLL_REGRESSION"]) 56 | 57 | 58 | @pytest.fixture(scope="session") 59 | def metaswap_lookup_table() -> Path: 60 | return Path(os.environ["METASWAP_LOOKUP_TABLE"]) 61 | 62 | 63 | @pytest.fixture(scope="session") 64 | def modflow_dll_devel() -> Path: 65 | return Path(os.environ["MODFLOW_DLL_DEVEL"]) 66 | 67 | 68 | @pytest.fixture(scope="session") 69 | def modflow_dll_regression() -> Path: 70 | return Path(os.environ["MODFLOW_DLL_REGRESSION"]) 71 | 72 | 73 | @pytest.fixture(scope="session") 74 | def ribasim_dll_dep_dir_devel() -> Path: 75 | return Path(os.environ["RIBASIM_DLL_DEP_DIR_DEVEL"]) 76 | 77 | 78 | @pytest.fixture(scope="session") 79 | def ribasim_dll_dep_dir_regression() -> Path: 80 | return Path(os.environ["RIBASIM_DLL_DEP_DIR_REGRESSION"]) 81 | 82 | 83 | @pytest.fixture(scope="session") 84 | def ribasim_dll_devel() -> Path: 85 | return Path(os.environ["RIBASIM_DLL_DEVEL"]) 86 | 87 | 88 | @pytest.fixture(scope="session") 89 | def ribasim_dll_regression() -> Path: 90 | return Path(os.environ["RIBASIM_DLL_REGRESSION"]) 91 | 92 | 93 | @pytest.fixture(scope="function") 94 | def modstrip_loc() -> Path: 95 | return Path(__file__).parent.parent.absolute() / "data" / "modstrip" 96 | 97 | 98 | @pytest.fixture(scope="function") 99 | def test_data_folder() -> Path: 100 | return Path(__file__).parent.parent.absolute() / "data" 101 | 102 | 103 | @pytest.fixture(scope="function") 104 | def reference_result_folder() -> Path: 105 | return Path(__file__).parent.parent.absolute() / "reference_output" 106 | 107 | 108 | @pytest.fixture(scope="function") 109 | def bucket_ribametamod_loc() -> Path: 110 | return Path(__file__).parent.parent.absolute() / "data" / "bucket_model" 111 | 112 | 113 | @pytest.fixture(scope="function") 114 | def ribametamod_backwater_tot_svat_ref(request: FixtureRequest) -> Path: 115 | return ( 116 | request.path.parent 117 | / "reference_output" 118 | / "test_ribametamod_backwater" 119 | / "tot_svat_per.csv" 120 | ) 121 | 122 | 123 | @pytest.fixture(scope="function") 124 | def ribametamod_bucket_tot_svat_ref(request: FixtureRequest) -> Path: 125 | return ( 126 | request.path.parent 127 | / "reference_output" 128 | / "test_ribametamod_bucket" 129 | / "tot_svat_per.csv" 130 | ) 131 | 132 | 133 | @pytest.fixture(scope="function") 134 | def ribametamod_two_basin_tot_svat_ref(request: FixtureRequest) -> Path: 135 | return ( 136 | request.path.parent 137 | / "reference_output" 138 | / "test_ribametamod_two_basin" 139 | / "tot_svat_per.csv" 140 | ) 141 | -------------------------------------------------------------------------------- /imod_coupler/drivers/ribametamod/config.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pathlib import Path 3 | from typing import Any 4 | 5 | from pydantic import BaseModel, FilePath, ValidationInfo, field_validator 6 | 7 | from imod_coupler.drivers.kernel_config import Metaswap, Modflow6, Ribasim 8 | 9 | 10 | class Kernels(BaseModel): 11 | modflow6: Modflow6 12 | ribasim: Ribasim | None 13 | metaswap: Metaswap | None 14 | 15 | 16 | class Coupling(BaseModel): 17 | mf6_model: str # the MODFLOW 6 model that will be coupled 18 | mf6_active_river_packages: dict[str, str] 19 | mf6_active_drainage_packages: dict[str, str] 20 | mf6_passive_river_packages: dict[str, str] 21 | mf6_passive_drainage_packages: dict[str, str] 22 | 23 | mf6_msw_recharge_pkg: str = ( 24 | "" # the 
recharge package that will be used for coupling 25 | ) 26 | mf6_msw_well_pkg: str | None = ( 27 | None # the well package that will be used for coupling when sprinkling is active 28 | ) 29 | mf6_msw_node_map: FilePath | None = None # the path to the node map file 30 | mf6_msw_recharge_map: FilePath | None = None # the path to the recharge map file 31 | mf6_msw_sprinkling_map_groundwater: FilePath | None = ( 32 | None # the path to the sprinkling map file (optional) 33 | ) 34 | # for deprecation warning on label 35 | mf6_msw_sprinkling_map: FilePath | None = None 36 | 37 | mf6_msw_ponding_map_groundwater: FilePath | None = ( 38 | None # the path to the ponding map file (optional) 39 | ) 40 | output_config_file: FilePath | None = None 41 | 42 | rib_msw_sprinkling_map_surface_water: FilePath | None = ( 43 | None # the path to the sprinkling map file 44 | ) 45 | rib_msw_ponding_map_surface_water: FilePath | None = ( 46 | None # the path to the ponding map file 47 | ) 48 | 49 | @field_validator( 50 | "output_config_file", 51 | "mf6_msw_node_map", 52 | "mf6_msw_recharge_map", 53 | "output_config_file", 54 | ) 55 | @classmethod 56 | def resolve_file_path(cls, file_path: FilePath) -> FilePath: 57 | return file_path.resolve() 58 | 59 | @field_validator("mf6_msw_sprinkling_map_groundwater") 60 | @classmethod 61 | def validate_mf6_msw_sprinkling_map( 62 | cls, mf6_msw_sprinkling_map_groundwater: FilePath | None, info: ValidationInfo 63 | ) -> FilePath | None: 64 | assert info.data is not None 65 | if mf6_msw_sprinkling_map_groundwater is not None: 66 | if info.data.get("mf6_msw_well_pkg") is None: 67 | raise ValueError( 68 | "If 'mf6_msw_sprinkling_map_groundwater' is set, then `mf6_msw_well_pkg` needs to be set." 69 | ) 70 | return mf6_msw_sprinkling_map_groundwater.resolve() 71 | return mf6_msw_sprinkling_map_groundwater 72 | 73 | @field_validator("mf6_msw_sprinkling_map") 74 | @classmethod 75 | def validate_sprinkling_map_label( 76 | cls, mf6_msw_sprinkling_map: FilePath | None 77 | ) -> None: 78 | if mf6_msw_sprinkling_map is not None: 79 | raise ValueError( 80 | "The use of the 'mf6_msw_sprinkling_map' label is deprecated; use 'mf6_msw_sprinkling_map_groundwater' instead" 81 | ) 82 | 83 | 84 | class RibaMetaModConfig(BaseModel): 85 | kernels: Kernels 86 | coupling: list[Coupling] 87 | 88 | def __init__(self, config_dir: Path, **data: Any) -> None: 89 | """Model for the RibaMetaMod config validated by pydantic 90 | 91 | The validation expects the current working directory to be at the config 92 | file level, so it is changed during initialization 93 | 94 | Args: 95 | config_dir (Path): Directory where the config file resides 96 | """ 97 | os.chdir(config_dir) 98 | super().__init__(**data) 99 | 100 | @field_validator("coupling") 101 | @classmethod 102 | def restrict_coupling_count(cls, coupling: list[Coupling]) -> list[Coupling]: 103 | if len(coupling) == 0: 104 | raise ValueError("At least one coupling has to be defined.") 105 | if len(coupling) > 1: 106 | raise ValueError("Multi-model coupling is not yet supported.") 107 | return coupling 108 | -------------------------------------------------------------------------------- /tests/test_imod_coupler/test_mf6_wrapper.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import numpy as np 4 | from imod import mf6 5 | 6 | from imod_coupler.kernelwrappers.mf6_wrapper import Mf6Drainage, Mf6River, Mf6Wrapper 7 | 8 | 9 | def test_mf6_river( 10 | mf6_model_with_river: mf6.Modflow6Simulation, 11 | modflow_dll_devel:
Path, 12 | tmp_path_dev: Path, 13 | ) -> None: 14 | mf6_model_with_river.write(tmp_path_dev) 15 | 16 | mf6wrapper = Mf6Wrapper( 17 | lib_path=modflow_dll_devel, 18 | working_directory=tmp_path_dev, 19 | ) 20 | mf6wrapper.initialize() 21 | mf6wrapper.prepare_time_step(0.0) 22 | 23 | mf6_river = Mf6River( 24 | mf6_wrapper=mf6wrapper, 25 | mf6_flowmodel_key="GWF_1", 26 | mf6_pkg_key="Oosterschelde", 27 | ) 28 | # The nodelist should be set properly after prepare_time_step. 29 | mf6_river.set_private_nodelist() 30 | 31 | # The nodelist should be set after 32 | assert (mf6_river.nodelist != -1).any() 33 | 34 | # This guards against setting below elevation. 35 | mf6_river.set_water_level(np.full_like(mf6_river.water_level, -123.0)) 36 | stage_address = mf6wrapper.get_var_address("STAGE", "GWF_1", "Oosterschelde") 37 | stage = mf6wrapper.get_value_ptr(stage_address) 38 | assert (stage > -10.0).all() 39 | 40 | # This accesses directly. 41 | mf6_river.stage[:] = -123.0 42 | stage_address = mf6wrapper.get_var_address("STAGE", "GWF_1", "Oosterschelde") 43 | stage = mf6wrapper.get_value_ptr(stage_address) 44 | np.testing.assert_allclose(stage, -123.0) 45 | mf6wrapper.finalize() 46 | 47 | 48 | def test_mf6_drainage( 49 | mf6_model_with_river: mf6.Modflow6Simulation, 50 | modflow_dll_devel: Path, 51 | tmp_path_dev: Path, 52 | ) -> None: 53 | mf6_model_with_river.write(tmp_path_dev) 54 | 55 | mf6wrapper = Mf6Wrapper( 56 | lib_path=modflow_dll_devel, 57 | working_directory=tmp_path_dev, 58 | ) 59 | mf6wrapper.initialize() 60 | mf6wrapper.prepare_time_step(0.0) 61 | 62 | mf6_drainage = Mf6Drainage( 63 | mf6_wrapper=mf6wrapper, 64 | mf6_flowmodel_key="GWF_1", 65 | mf6_pkg_key="Drainage", 66 | ) 67 | mf6_drainage.set_private_nodelist() 68 | 69 | # The nodelist should be set properly after prepare_time_step. 70 | assert (mf6_drainage.private_nodelist != -1).any() 71 | 72 | # This guards against setting below elevation. 73 | mf6_drainage.set_water_level(np.full_like(mf6_drainage.water_level, -123.0)) 74 | elev_address = mf6wrapper.get_var_address("ELEV", "GWF_1", "Drainage") 75 | elev = mf6wrapper.get_value_ptr(elev_address) 76 | assert (elev > -10.0).all() 77 | 78 | # This accesses directly. 79 | mf6_drainage.elevation[:] = -123.0 80 | elev_address = mf6wrapper.get_var_address("ELEV", "GWF_1", "Drainage") 81 | elev = mf6wrapper.get_value_ptr(elev_address) 82 | np.testing.assert_allclose(elev, -123.0) 83 | mf6wrapper.finalize() 84 | 85 | 86 | def test_mf6_get_river_flux( 87 | mf6_model_with_river: mf6.Modflow6Simulation, 88 | modflow_dll_devel: Path, 89 | tmp_path_dev: Path, 90 | ) -> None: 91 | mf6_model_with_river.write(tmp_path_dev) 92 | mf6wrapper = Mf6Wrapper( 93 | lib_path=modflow_dll_devel, 94 | working_directory=tmp_path_dev, 95 | ) 96 | mf6wrapper.initialize() 97 | mf6_river = Mf6River( 98 | mf6_wrapper=mf6wrapper, 99 | mf6_flowmodel_key="GWF_1", 100 | mf6_pkg_key="Oosterschelde", 101 | ) 102 | 103 | mf6wrapper.prepare_time_step(0.0) 104 | mf6wrapper.prepare_solve(1) 105 | 106 | # now first solve, because "get_river_drain_flux" needs the actual solution to be formulated. 
107 | max_iter = mf6wrapper.get_value_ptr("SLN_1/MXITER")[0] 108 | for _ in range(1, max_iter + 1): 109 | has_converged = mf6wrapper.solve(1) 110 | if has_converged: 111 | break 112 | heads = mf6wrapper.get_head("GWF_1") 113 | q = mf6_river.get_flux(heads) 114 | q_expected = np.array( 115 | [ 116 | -0.0, 117 | 10.654179, 118 | 10.402491, 119 | 10.396607, 120 | 10.396469, 121 | -0.0, 122 | 10.654179, 123 | 10.402491, 124 | 10.396607, 125 | 10.396469, 126 | -0.0, 127 | 10.654179, 128 | 10.402491, 129 | 10.396607, 130 | 10.396469, 131 | ] 132 | ) 133 | np.testing.assert_allclose(q, q_expected) 134 | -------------------------------------------------------------------------------- /.teamcity/Primod/buildTypes/Primod_TestPrimodWin64.kt: -------------------------------------------------------------------------------- 1 | package Primod.buildTypes 2 | 3 | import _Self.vcsRoots.ImodCoupler 4 | import jetbrains.buildServer.configs.kotlin.* 5 | import jetbrains.buildServer.configs.kotlin.buildFeatures.XmlReport 6 | import jetbrains.buildServer.configs.kotlin.buildFeatures.commitStatusPublisher 7 | import jetbrains.buildServer.configs.kotlin.buildFeatures.xmlReport 8 | import jetbrains.buildServer.configs.kotlin.buildSteps.script 9 | import jetbrains.buildServer.configs.kotlin.triggers.vcs 10 | 11 | object Primod_TestPrimodWin64 : Template({ 12 | name = "Test Primod Win64" 13 | 14 | publishArtifacts = PublishMode.ALWAYS 15 | 16 | params { 17 | param("env.METASWAP_DLL_REGRESSION", "%system.teamcity.build.checkoutDir%/imod_collector_regression/metaswap/MetaSWAP.dll") 18 | param("env.IMOD_COUPLER_EXEC_REGRESSION", "%system.teamcity.build.checkoutDir%/imod_collector_regression/imod_coupler/imodc.exe") 19 | param("env.MODFLOW_DLL_DEVEL", "%system.teamcity.build.checkoutDir%/imod_collector_devel/modflow6/libmf6.dll") 20 | param("env.MODFLOW_DLL_REGRESSION", "%system.teamcity.build.checkoutDir%/imod_collector_regression/modflow6/libmf6.dll") 21 | param("env.RIBASIM_DLL_DEP_DIR_DEVEL", "%system.teamcity.build.checkoutDir%/imod_collector_devel/ribasim/bin") 22 | param("env.RIBASIM_DLL_DEVEL", "%system.teamcity.build.checkoutDir%/imod_collector_devel/ribasim/bin/libribasim.dll") 23 | param("conda_env_path", "%system.teamcity.build.checkoutDir%/imod_coupler_testbench_env") 24 | param("env.METASWAP_DLL_DEP_DIR_REGRESSION", "%system.teamcity.build.checkoutDir%/imod_collector_regression/metaswap") 25 | param("env.METASWAP_DLL_DEVEL", "%system.teamcity.build.checkoutDir%/imod_collector_devel/metaswap/MetaSWAP.dll") 26 | param("env.METASWAP_DLL_DEP_DIR_DEVEL", "%system.teamcity.build.checkoutDir%/imod_collector_devel/metaswap") 27 | param("env.IMOD_COUPLER_EXEC_DEVEL", "%system.teamcity.build.checkoutDir%/imod_collector_devel/imodc.exe") 28 | param("env.RIBASIM_DLL_DEP_DIR_REGRESSION", "%system.teamcity.build.checkoutDir%/imod_collector_regression/ribasim/bin") 29 | param("env.METASWAP_LOOKUP_TABLE", "%system.teamcity.build.checkoutDir%/lookup_table") 30 | param("env.RIBASIM_DLL_REGRESSION", "%system.teamcity.build.checkoutDir%/imod_collector_regression/ribasim/bin/libribasim.dll") 31 | } 32 | 33 | vcs { 34 | root(_Self.vcsRoots.ImodCoupler, ". => imod_coupler") 35 | root(_Self.vcsRoots.MetaSwapLookupTable, ". 
=> lookup_table") 36 | 37 | cleanCheckout = true 38 | branchFilter = """ 39 | +:* 40 | -:release_imod56 41 | """.trimIndent() 42 | } 43 | 44 | steps { 45 | script { 46 | name = "Run tests" 47 | id = "RUNNER_1503" 48 | workingDir = "imod_coupler" 49 | scriptContent = "pixi run --environment %pixi-environment% test-primod" 50 | } 51 | } 52 | 53 | triggers { 54 | vcs { 55 | id = "TRIGGER_340" 56 | triggerRules = "+:root=${ImodCoupler.id}:**" 57 | 58 | } 59 | } 60 | 61 | features { 62 | commitStatusPublisher { 63 | id = "BUILD_EXT_142" 64 | vcsRootExtId = "${ImodCoupler.id}" 65 | publisher = github { 66 | githubUrl = "https://api.github.com" 67 | authType = personalToken { 68 | token = "credentialsJSON:6b37af71-1f2f-4611-8856-db07965445c0" 69 | } 70 | } 71 | } 72 | xmlReport { 73 | id = "BUILD_EXT_145" 74 | reportType = XmlReport.XmlReportType.JUNIT 75 | rules = "imod_coupler/report.xml" 76 | verbose = true 77 | } 78 | } 79 | 80 | dependencies { 81 | dependency(IMODCollector.buildTypes.IMODCollector_X64development) { 82 | snapshot { 83 | onDependencyFailure = FailureAction.FAIL_TO_START 84 | } 85 | 86 | artifacts { 87 | cleanDestination = true 88 | artifactRules = """ 89 | imod_collector.zip!** => imod_collector_devel 90 | """.trimIndent() 91 | } 92 | artifacts { 93 | buildRule = tag("regression") 94 | cleanDestination = true 95 | artifactRules = "imod_coupler_windows.zip!** => imod_collector_regression" 96 | } 97 | } 98 | } 99 | 100 | requirements { 101 | equals("env.OS", "Windows_NT", "RQ_195") 102 | } 103 | }) 104 | -------------------------------------------------------------------------------- /tests/test_primod/test_node_svat_mapping.py: -------------------------------------------------------------------------------- 1 | import tempfile 2 | from pathlib import Path 3 | 4 | import numpy as np 5 | import pytest 6 | import xarray as xr 7 | from imod import mf6 8 | from numpy.testing import assert_equal 9 | from primod import mapping 10 | 11 | 12 | def test_simple_model(fixed_format_parser): 13 | x = [1.0, 2.0, 3.0] 14 | y = [3.0, 2.0, 1.0] 15 | subunit = [0, 1] 16 | dx = 1.0 17 | dy = -1.0 18 | # fmt: off 19 | svat = xr.DataArray( 20 | np.array( 21 | [ 22 | [[0, 1, 0], 23 | [0, 0, 0], 24 | [0, 2, 0]], 25 | 26 | [[0, 3, 0], 27 | [0, 4, 0], 28 | [0, 0, 0]], 29 | ] 30 | ), 31 | dims=("subunit", "y", "x"), 32 | coords={"subunit": subunit, "y": y, "x": x, "dx": dx, "dy": dy} 33 | ) 34 | # fmt: on 35 | index = (svat != 0).to_numpy().ravel() 36 | 37 | like = xr.full_like(svat.sel(subunit=1, drop=True), 1.0, dtype=float).expand_dims( 38 | layer=[1, 2, 3] 39 | ) 40 | 41 | dis = mf6.StructuredDiscretization( 42 | top=1.0, 43 | bottom=xr.full_like(like, 0.0), 44 | idomain=xr.full_like(like, 1, dtype=int), 45 | ) 46 | 47 | grid_data = mapping.node_svat_mapping.NodeSvatMapping(svat, dis, index=index) 48 | 49 | with tempfile.TemporaryDirectory() as output_dir: 50 | output_dir = Path(output_dir) 51 | grid_data.write(output_dir) 52 | 53 | results = fixed_format_parser( 54 | output_dir / mapping.node_svat_mapping.NodeSvatMapping._file_name, 55 | mapping.node_svat_mapping.NodeSvatMapping._metadata_dict, 56 | ) 57 | 58 | assert_equal(results["mod_id"], np.array([2, 8, 2, 5])) 59 | assert_equal(results["svat"], np.array([1, 2, 3, 4])) 60 | assert_equal(results["layer"], np.array([1, 1, 1, 1])) 61 | 62 | 63 | def test_simple_model_1_subunit(fixed_format_parser): 64 | x = [1.0, 2.0, 3.0] 65 | y = [3.0, 2.0, 1.0] 66 | subunit = [0] 67 | dx = 1.0 68 | dy = -1.0 69 | # fmt: off 70 | svat = xr.DataArray( 71 | 
np.array( 72 | [ 73 | [[0, 1, 0], 74 | [0, 0, 0], 75 | [0, 2, 0]], 76 | ] 77 | ), 78 | dims=("subunit", "y", "x"), 79 | coords={"subunit": subunit, "y": y, "x": x, "dx": dx, "dy": dy} 80 | ) 81 | # fmt: on 82 | index = (svat != 0).to_numpy().ravel() 83 | 84 | like = xr.full_like(svat.sel(subunit=0, drop=True), 1.0, dtype=float).expand_dims( 85 | layer=[1, 2, 3] 86 | ) 87 | 88 | dis = mf6.StructuredDiscretization( 89 | top=1.0, 90 | bottom=xr.full_like(like, 0.0), 91 | idomain=xr.full_like(like, 1, dtype=int), 92 | ) 93 | 94 | grid_data = mapping.node_svat_mapping.NodeSvatMapping(svat, dis, index=index) 95 | 96 | with tempfile.TemporaryDirectory() as output_dir: 97 | output_dir = Path(output_dir) 98 | grid_data.write(output_dir) 99 | 100 | results = fixed_format_parser( 101 | output_dir / mapping.node_svat_mapping.NodeSvatMapping._file_name, 102 | mapping.node_svat_mapping.NodeSvatMapping._metadata_dict, 103 | ) 104 | 105 | assert_equal(results["mod_id"], np.array([2, 8])) 106 | assert_equal(results["svat"], np.array([1, 2])) 107 | assert_equal(results["layer"], np.array([1, 1])) 108 | 109 | 110 | def test_inactive_idomain_in_svat(): 111 | x = [1.0, 2.0, 3.0] 112 | y = [3.0, 2.0, 1.0] 113 | subunit = [0, 1] 114 | dx = 1.0 115 | dy = -1.0 116 | # fmt: off 117 | svat = xr.DataArray( 118 | np.array( 119 | [ 120 | [[0, 1, 0], 121 | [0, 0, 0], 122 | [0, 2, 0]], 123 | 124 | [[0, 3, 0], 125 | [0, 4, 0], 126 | [0, 0, 0]], 127 | ] 128 | ), 129 | dims=("subunit", "y", "x"), 130 | coords={"subunit": subunit, "y": y, "x": x, "dx": dx, "dy": dy} 131 | ) 132 | # fmt: on 133 | 134 | like = xr.full_like(svat.sel(subunit=1, drop=True), 1.0, dtype=float).expand_dims( 135 | layer=[1, 2, 3] 136 | ) 137 | 138 | idomain = xr.full_like(like, 1, dtype=int) 139 | idomain[:, 1, :] = 0 140 | 141 | dis = mf6.StructuredDiscretization( 142 | top=1.0, 143 | bottom=xr.full_like(like, 0.0), 144 | idomain=idomain, 145 | ) 146 | index = (svat != 0).to_numpy().ravel() 147 | 148 | with pytest.raises(ValueError): 149 | mapping.node_svat_mapping.NodeSvatMapping(svat, dis, index=index) 150 | -------------------------------------------------------------------------------- /pre-processing/primod/driver_coupling/metamod.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import Any 3 | 4 | from imod.mf6 import GroundwaterFlowModel 5 | from imod.msw import GridData, MetaSwapModel, Sprinkling 6 | 7 | from primod.driver_coupling.driver_coupling_base import DriverCoupling 8 | from primod.mapping.node_svat_mapping import NodeSvatMapping 9 | from primod.mapping.rch_svat_mapping import RechargeSvatMapping 10 | from primod.mapping.wel_svat_mapping import WellSvatMapping 11 | 12 | 13 | class MetaModDriverCoupling(DriverCoupling): 14 | """ 15 | Attributes 16 | ---------- 17 | mf6_model : str 18 | The model of the driver. 19 | mf6_recharge_package: str 20 | Key of Modflow 6 recharge package to which MetaSWAP is coupled. 21 | mf6_wel_package: str or None 22 | Optional key of Modflow 6 well package to which MetaSWAP sprinkling is 23 | coupled. 
24 | """ 25 | 26 | mf6_model: str 27 | mf6_recharge_package: str 28 | mf6_wel_package: str | None = None 29 | 30 | def _check_sprinkling( 31 | self, msw_model: MetaSwapModel, gwf_model: GroundwaterFlowModel 32 | ) -> bool: 33 | sprinkling_key = msw_model.get_pkgkey(Sprinkling, optional_package=True) 34 | sprinkling_in_msw = sprinkling_key is not None 35 | sprinkling_in_mf6 = self.mf6_wel_package in gwf_model.keys() 36 | 37 | value = False 38 | match (sprinkling_in_msw, sprinkling_in_mf6): 39 | case (True, False): 40 | raise ValueError( 41 | f"No package named {self.mf6_wel_package} found in Modflow 6 model, " 42 | "but Sprinkling package found in MetaSWAP. " 43 | "iMOD Coupler requires a Well Package " 44 | "to couple wells." 45 | ) 46 | case (False, True): 47 | raise ValueError( 48 | f"Modflow 6 Well package {self.mf6_wel_package} specified for sprinkling, " 49 | "but no Sprinkling package found in MetaSWAP model." 50 | ) 51 | case (True, True): 52 | value = True 53 | case (False, False): 54 | value = False 55 | 56 | return value 57 | 58 | def derive_mapping( 59 | self, msw_model: MetaSwapModel, gwf_model: GroundwaterFlowModel 60 | ) -> tuple[NodeSvatMapping, RechargeSvatMapping, WellSvatMapping | None]: 61 | if self.mf6_recharge_package not in gwf_model.keys(): 62 | raise ValueError( 63 | f"No package named {self.mf6_recharge_package} detected in Modflow 6 model. " 64 | "iMOD_coupler requires a Recharge package." 65 | ) 66 | 67 | grid_data_key = [ 68 | pkgname for pkgname, pkg in msw_model.items() if isinstance(pkg, GridData) 69 | ][0] 70 | 71 | dis = gwf_model[gwf_model.get_diskey()] 72 | 73 | index, svat = msw_model[grid_data_key].generate_index_array() 74 | grid_mapping = NodeSvatMapping(svat=svat, modflow_dis=dis, index=index) 75 | 76 | recharge = gwf_model[self.mf6_recharge_package] 77 | 78 | rch_mapping = RechargeSvatMapping(svat, recharge, index=index) 79 | 80 | if self._check_sprinkling(msw_model=msw_model, gwf_model=gwf_model): 81 | well = gwf_model.prepare_wel_for_mf6(self.mf6_wel_package, True, True) 82 | well_mapping = WellSvatMapping(svat, well, index=index) 83 | return grid_mapping, rch_mapping, well_mapping 84 | else: 85 | return grid_mapping, rch_mapping, None 86 | 87 | def write_exchanges(self, directory: Path, coupled_model: Any) -> dict[str, Any]: 88 | mf6_simulation = coupled_model.mf6_simulation 89 | gwf_model = mf6_simulation[self.mf6_model] 90 | msw_model = coupled_model.msw_model 91 | 92 | grid_mapping, rch_mapping, well_mapping = self.derive_mapping( 93 | msw_model=msw_model, 94 | gwf_model=gwf_model, 95 | ) 96 | 97 | coupling_dict: dict[str, Any] = {} 98 | coupling_dict["mf6_model"] = self.mf6_model 99 | 100 | coupling_dict["mf6_msw_node_map"] = grid_mapping.write(directory) 101 | coupling_dict["mf6_msw_recharge_pkg"] = self.mf6_recharge_package 102 | coupling_dict["mf6_msw_recharge_map"] = rch_mapping.write(directory) 103 | 104 | if well_mapping is not None: 105 | coupling_dict["mf6_msw_well_pkg"] = self.mf6_wel_package 106 | coupling_dict["mf6_msw_sprinkling_map_groundwater"] = well_mapping.write( 107 | directory 108 | ) 109 | 110 | return coupling_dict 111 | -------------------------------------------------------------------------------- /pre-processing/primod/mapping/mappingbase.py: -------------------------------------------------------------------------------- 1 | import abc 2 | from io import TextIOWrapper 3 | from pathlib import Path 4 | from typing import Any 5 | 6 | import pandas as pd 7 | import xarray as xr 8 | from imod.msw.fixed_format import 
VariableMetaData, format_fixed_width 9 | from numpy.typing import NDArray 10 | 11 | 12 | class GenericMapping(abc.ABC): 13 | name: str 14 | dataframe: pd.DataFrame 15 | 16 | def write(self, directory: Path) -> str: 17 | """ 18 | Write mapping to .tsv file 19 | 20 | Parameters 21 | ---------- 22 | directory: str or Path 23 | directory in which exchange file should be written 24 | 25 | """ 26 | filename = f"{self.name}.tsv" 27 | self.dataframe.to_csv(directory / filename, sep="\t", index=False) 28 | return f"./{directory.name}/{filename}" 29 | 30 | 31 | class MetaModMapping(abc.ABC): 32 | """ 33 | MetaModMapping is used to share methods for specific packages with no time 34 | component across multiple two-component mappings. 35 | 36 | It is intended as an abstract base class, only to inherit from, to implement new 37 | packages. 38 | """ 39 | 40 | __slots__ = "_pkg_id", "dataset", "index" 41 | _metadata_dict: dict[str, VariableMetaData] 42 | _with_subunit: tuple[str, str, str] 43 | _to_fill: tuple[str] 44 | _file_name: str 45 | 46 | def __init__(self) -> None: 47 | self.dataset = xr.Dataset() 48 | 49 | def __getitem__(self, key: str) -> Any: 50 | return self.dataset.__getitem__(key) 51 | 52 | def __setitem__(self, key: str, value: Any) -> None: 53 | self.dataset.__setitem__(key, value) 54 | 55 | def isel(self) -> None: 56 | raise NotImplementedError( 57 | "Selection on packages not yet supported. " 58 | "To make a selection on the xr.Dataset, call dataset.isel instead. " 59 | "You can create a new package with a selection by calling (**dataset.isel(**selection))" 60 | ) 61 | 62 | def sel(self) -> None: 63 | raise NotImplementedError( 64 | "Selection on packages not yet supported. " 65 | "To make a selection on the xr.Dataset, call dataset.sel instead. " 66 | "You can create a new package with a selection by calling (**dataset.sel(**selection))" 67 | ) 68 | 69 | def _check_range(self, dataframe: pd.DataFrame) -> None: 70 | for varname in dataframe: 71 | min_value = self._metadata_dict[varname].min_value 72 | max_value = self._metadata_dict[varname].max_value 73 | if (dataframe[varname] < min_value).any() or ( 74 | dataframe[varname] > max_value 75 | ).any(): 76 | raise ValueError( 77 | f"{varname}: not all values are within range ({min_value}-{max_value})." 78 | ) 79 | 80 | def write_dataframe_fixed_width( 81 | self, file: TextIOWrapper, dataframe: pd.DataFrame 82 | ) -> None: 83 | for row in dataframe.itertuples(): 84 | for index, metadata in enumerate(self._metadata_dict.values()): 85 | content = format_fixed_width(row[index + 1], metadata) 86 | file.write(content) 87 | file.write("\n") 88 | 89 | def _index_da(self, da: pd.DataFrame, index: NDArray[Any]) -> Any: 90 | return da.to_numpy().ravel()[index] 91 | 92 | def _render( 93 | self, file: TextIOWrapper, index: NDArray[Any], svat: pd.DataFrame 94 | ) -> None: 95 | data_dict = {"svat": svat.to_numpy().ravel()[index]} 96 | 97 | for var in self._with_subunit: 98 | data_dict[var] = self._index_da(self.dataset[var], index) 99 | 100 | for var in self._to_fill: 101 | data_dict[var] = "" 102 | 103 | dataframe = pd.DataFrame( 104 | data=data_dict, columns=list(self._metadata_dict.keys()) 105 | ) 106 | 107 | self._check_range(dataframe) 108 | self.write_dataframe_fixed_width(file, dataframe) 109 | 110 | def write(self, directory: str | Path) -> str: 111 | """ 112 | Write mapping to .dxc file.
113 | 114 | Parameters 115 | ---------- 116 | directory: str or Path 117 | directory in which exchange file should be written 118 | index: np.array 119 | 120 | """ 121 | # Force to Path 122 | directory = Path(directory) 123 | # TODO: figure out how to please mypy with the slots here? 124 | index = self.index # type: ignore 125 | 126 | with open(directory / self._file_name, "w") as f: 127 | self._render(f, index=index, svat=self.dataset["svat"]) 128 | return f"./{directory.name}/{self._file_name}" 129 | -------------------------------------------------------------------------------- /tests/test_imod_coupler/test_ribamod_cases.py: -------------------------------------------------------------------------------- 1 | import datetime as dt 2 | 3 | import geopandas as gpd 4 | import ribasim 5 | from imod.mf6 import Drainage, GroundwaterFlowModel, Modflow6Simulation, River 6 | from primod import RibaMod, RibaModActiveDriverCoupling 7 | 8 | 9 | def create_basin_definition( 10 | ribasim_model: ribasim.Model, buffersize: float 11 | ) -> gpd.GeoDataFrame: 12 | basin_geometry = ribasim_model.basin.node.df["geometry"] 13 | basin_ids = ribasim_model.basin.node.df.index 14 | # Call to_numpy() to get rid of the index 15 | basin_definition = gpd.GeoDataFrame( 16 | data={"node_id": basin_ids.to_numpy()}, 17 | geometry=basin_geometry.buffer(buffersize).to_numpy(), 18 | ) 19 | return basin_definition 20 | 21 | 22 | def case_bucket_model( 23 | mf6_bucket_model: Modflow6Simulation, 24 | ribasim_bucket_model: ribasim.Model, 25 | ) -> RibaMod: 26 | mf6_modelname, mf6_model = get_mf6_gwf_modelnames(mf6_bucket_model)[0] 27 | mf6_active_river_packages = get_mf6_river_packagenames(mf6_model) 28 | 29 | ribasim_bucket_model.endtime = dt.datetime(2023, 1, 1, 0, 0) 30 | basin_definition = create_basin_definition(ribasim_bucket_model, buffersize=100.0) 31 | 32 | driver_coupling = RibaModActiveDriverCoupling( 33 | mf6_model=mf6_modelname, 34 | ribasim_basin_definition=basin_definition, 35 | mf6_packages=mf6_active_river_packages, 36 | ) 37 | 38 | return RibaMod( 39 | ribasim_model=ribasim_bucket_model, 40 | mf6_simulation=mf6_bucket_model, 41 | coupling_list=[driver_coupling], 42 | ) 43 | 44 | 45 | def case_backwater_model( 46 | mf6_backwater_model: Modflow6Simulation, 47 | ribasim_backwater_model: ribasim.Model, 48 | ) -> RibaMod: 49 | mf6_modelname, mf6_model = get_mf6_gwf_modelnames(mf6_backwater_model)[0] 50 | mf6_active_river_packages = get_mf6_river_packagenames(mf6_model) 51 | mf6_active_drainage_packages = get_mf6_drainage_packagenames(mf6_model) 52 | 53 | basin_definition = create_basin_definition(ribasim_backwater_model, buffersize=5.0) 54 | 55 | driver_coupling = RibaModActiveDriverCoupling( 56 | mf6_model=mf6_modelname, 57 | ribasim_basin_definition=basin_definition, 58 | mf6_packages=mf6_active_river_packages + mf6_active_drainage_packages, 59 | ) 60 | 61 | return RibaMod( 62 | ribasim_model=ribasim_backwater_model, 63 | mf6_simulation=mf6_backwater_model, 64 | coupling_list=[driver_coupling], 65 | ) 66 | 67 | 68 | def two_basin_variation( 69 | mf6_two_basin_model: Modflow6Simulation, 70 | ribasim_two_basin_model: ribasim.Model, 71 | ) -> RibaMod: 72 | """Utility to setup two_basin models.""" 73 | mf6_modelname, mf6_model = get_mf6_gwf_modelnames(mf6_two_basin_model)[0] 74 | mf6_active_river_packages = get_mf6_river_packagenames(mf6_model) 75 | 76 | basin_definition = create_basin_definition( 77 | ribasim_two_basin_model, buffersize=250.0 78 | ) 79 | 80 | driver_coupling = RibaModActiveDriverCoupling( 81 | 
mf6_model=mf6_modelname, 82 | ribasim_basin_definition=basin_definition, 83 | mf6_packages=mf6_active_river_packages, 84 | ) 85 | 86 | return RibaMod( 87 | ribasim_model=ribasim_two_basin_model, 88 | mf6_simulation=mf6_two_basin_model, 89 | coupling_list=[driver_coupling], 90 | ) 91 | 92 | 93 | def case_two_basin_model( 94 | mf6_two_basin_model: Modflow6Simulation, 95 | ribasim_two_basin_model: ribasim.Model, 96 | ) -> RibaMod: 97 | return two_basin_variation(mf6_two_basin_model, ribasim_two_basin_model) 98 | 99 | 100 | def case_partial_two_basin_model( 101 | mf6_partial_two_basin_model: Modflow6Simulation, 102 | ribasim_two_basin_model: ribasim.Model, 103 | ) -> RibaMod: 104 | return two_basin_variation(mf6_partial_two_basin_model, ribasim_two_basin_model) 105 | 106 | 107 | def get_mf6_gwf_modelnames( 108 | mf6_simulation: Modflow6Simulation, 109 | ) -> list[tuple[str, GroundwaterFlowModel]]: 110 | """ 111 | Get names of gwf models in mf6 simulation 112 | """ 113 | return [ 114 | (key, value) 115 | for key, value in mf6_simulation.items() 116 | if isinstance(value, GroundwaterFlowModel) 117 | ] 118 | 119 | 120 | def get_mf6_river_packagenames(mf6_model: GroundwaterFlowModel) -> list[str]: 121 | """ 122 | Get names of river packages in mf6 simulation 123 | """ 124 | return [key for key, value in mf6_model.items() if isinstance(value, River)] 125 | 126 | 127 | def get_mf6_drainage_packagenames(mf6_model: GroundwaterFlowModel) -> list[str]: 128 | """ 129 | Get names of drainage packages in mf6 simulation 130 | """ 131 | return [key for key, value in mf6_model.items() if isinstance(value, Drainage)] 132 | -------------------------------------------------------------------------------- /tests/data/bucket_model/metaswap/PreMetaSWAP.key: -------------------------------------------------------------------------------- 1 | 0.3827989 2 | 181 3 | 1.000 1.000 1.000 1.000 1.000 1.000 1.000 1.000 4 | 1.000 1.000 1.000 1.000 1.000 1.000 1.000 5.000 5 | 5.000 5.000 5.000 5.000 5.000 5.000 5.000 5.000 6 | 5.000 5.000 5.000 5.000 5.000 5.000 5.000 5.000 7 | 5.000 5.000 5.000 5.000 5.000 5.000 5.000 5.000 8 | 5.000 5.000 5.000 5.000 5.000 5.000 5.000 5.000 9 | 5.000 5.000 5.000 5.000 5.000 5.000 5.000 5.000 10 | 5.000 5.000 5.000 5.000 5.000 5.000 5.000 5.000 11 | 5.000 5.000 5.000 5.000 5.000 5.000 5.000 5.000 12 | 5.000 5.000 5.000 5.000 5.000 5.000 5.000 5.000 13 | 5.000 5.000 5.000 5.000 5.000 5.000 5.000 5.000 14 | 5.000 5.000 5.000 5.000 5.000 5.000 5.000 5.000 15 | 5.000 5.000 5.000 5.000 5.000 5.000 5.000 5.000 16 | 5.000 5.000 5.000 5.000 5.000 5.000 5.000 5.000 17 | 25.000 25.000 25.000 25.000 25.000 25.000 25.000 25.000 18 | 50.000 50.000 50.000 50.000 50.000 50.000 50.000 50.000 19 | 50.000 50.000 50.000 50.000 50.000 50.000 50.000 50.000 20 | 50.000 50.000 50.000 50.000 50.000 50.000 50.000 50.000 21 | 50.000 50.000 50.000 50.000 50.000 50.000 50.000 50.000 22 | 50.000 50.000 50.000 50.000 50.000 50.000 50.000 50.000 23 | 50.000 50.000 50.000 50.000 50.000 50.000 100.000 100.000 24 | 100.000 100.000 100.000 500.000 500.000 500.000 500.000 500.000 25 | 1000.000 1000.000 1000.000 1000.000 100.000 26 | 2.0 2.0 2.0 -8.0 -10.0 -170.0 0.0 0.0 27 | 2 28 | 0.0 1.0 1.0 1.0 29 | 1 30 | 1 BOFEK2012_1 0.000 -9999.000 -9999.000 -9999.000 31 | 3 32 | 15 22 181 33 | 0.0000 0.7516 0.0000 0.0915 0.0000 0.0000 0.0000 51.0 0.0 49.0 31.0 starb17_M01KV03 34 | 0.0000 0.9438 0.0000 0.5776 0.0000 0.0000 0.0000 100.0 0.0 0.0 76.5 staro17_M01KV03 35 | 0.0000 0.8040 0.0000 0.5776 0.0000 0.0000 0.0000 100.0 0.0 0.0 76.5
staro17_85p_M01KV03 36 | 30 37 | 5.0 10.0 15.0 20.0 25.0 30.0 35.0 40.0 38 | 45.0 50.0 55.0 60.0 65.0 70.0 75.0 80.0 39 | 85.0 90.0 95.0 100.0 105.0 110.0 115.0 120.0 40 | 125.0 130.0 135.0 140.0 145.0 150.0 41 | 52 42 | 0.0 -10.0 -20.0 -30.0 -40.0 -50.0 -60.0 -70.0 43 | -80.0 -90.0 -100.0 -110.0 -120.0 -130.0 -140.0 -150.0 44 | -160.0 -170.0 -180.0 -190.0 -200.0 -220.0 -240.0 -260.0 45 | -280.0 -300.0 -340.0 -380.0 -420.0 -460.0 -500.0 -600.0 46 | -700.0 -800.0 -900.0 -1000.0 -1200.0 -1400.0 -1600.0 -1800.0 47 | -2000.0 -2500.0 -3000.0 -3500.0 -4000.0 -4500.0 -5000.0 -6000.0 48 | -7000.0 -8000.0 -9000.0 -10000.0 49 | 76 50 | -200.00 -180.00 -160.00 -140.00 -130.00 -120.00 -110.00 -100.00 51 | -90.00 -80.00 -70.00 -60.00 -50.00 -46.00 -42.00 -38.00 52 | -34.00 -30.00 -28.00 -26.00 -24.00 -22.00 -20.00 -19.00 53 | -18.00 -17.00 -16.00 -15.00 -14.00 -13.00 -12.00 -11.00 54 | -10.00 -9.00 -8.00 -7.00 -6.00 -5.00 -4.00 -3.00 55 | -2.50 -2.00 -1.50 -1.25 -1.00 -0.90 -0.80 -0.70 56 | -0.60 -0.50 -0.45 -0.40 -0.35 -0.30 -0.25 -0.20 57 | -0.15 -0.10 -0.08 -0.06 -0.04 -0.03 -0.02 -0.01 58 | -0.00 -0.00 -0.00 -0.00 -0.00 -0.00 -0.00 -0.00 59 | -0.00 -0.00 -0.00 0.00 60 | 83 61 | 0.000 0.000 0.000 0.000 0.000 0.000 0.000 0.000 62 | 0.000 0.000 0.000 0.001 0.002 0.005 0.007 0.010 63 | 0.013 0.015 0.018 0.020 0.025 0.030 0.035 0.040 64 | 0.045 0.050 0.055 0.060 0.065 0.070 0.075 0.080 65 | 0.085 0.090 0.095 0.100 0.110 0.120 0.130 0.140 66 | 0.150 0.160 0.170 0.180 0.190 0.200 0.210 0.220 67 | 0.230 0.240 0.250 0.260 0.270 0.280 0.290 0.300 68 | 0.320 0.340 0.360 0.380 0.400 0.420 0.440 0.460 69 | 0.480 0.500 0.525 0.550 0.575 0.600 0.650 0.700 70 | 0.750 0.800 0.850 0.900 0.950 1.000 1.100 1.200 71 | 1.300 1.400 1.500 72 | 18 73 | 1 0.20 -5.00 -7.00 -10.00 -13.00 -16.00 -20.00 -25.00 -30.00 -35.00 -40.00 -45.00 -50.00 -60.00 -70.00 -80.00 -100.00 74 | -------------------------------------------------------------------------------- /pre-processing/primod/driver_coupling/util.py: -------------------------------------------------------------------------------- 1 | import geopandas as gpd 2 | import numpy as np 3 | import pandas as pd 4 | import ribasim 5 | import xarray as xr 6 | from imod.mf6 import Drainage, GroundwaterFlowModel, Modflow6Simulation, River 7 | from numpy.typing import NDArray 8 | from ribasim.config import Basin, UserDemand 9 | 10 | from primod.typing import Int 11 | 12 | 13 | def _get_gwf_modelnames(mf6_simulation: Modflow6Simulation) -> list[str]: 14 | """ 15 | Get names of gwf models in mf6 simulation 16 | """ 17 | return [ 18 | key 19 | for key, value in mf6_simulation.items() 20 | if isinstance(value, GroundwaterFlowModel) 21 | ] 22 | 23 | 24 | def _validate_node_ids( 25 | dataframe: pd.DataFrame, definition: gpd.GeoDataFrame | xr.Dataset | xr.DataArray 26 | ) -> pd.Series: 27 | # Validate 28 | if isinstance(definition, xr.Dataset): 29 | data_vars = list(definition.data_vars.keys()) 30 | nodes = np.unique(definition.to_dataframe()[data_vars]) 31 | node_id = nodes[np.isfinite(nodes)] 32 | elif isinstance(definition, xr.DataArray): 33 | definition.name = "basin_definition" 34 | nodes = definition.to_dataset().to_dataframe()["basin_definition"] 35 | node_id = nodes[np.isfinite(nodes)] 36 | else: 37 | if "node_id" not in definition.columns: 38 | raise ValueError( 39 | 'Definition must contain "node_id" column.' 
40 | f"Columns in dataframe: {definition.columns}" 41 | ) 42 | node_id = definition["node_id"].to_numpy() 43 | 44 | basin_ids: NDArray[Int] = np.unique(dataframe.index) 45 | missing = ~np.isin(node_id, basin_ids) 46 | if missing.any(): 47 | missing_nodes = node_id[missing] 48 | raise ValueError( 49 | "The node IDs of these nodes in definition do not " 50 | f"occur in the Ribasim model: {missing_nodes}" 51 | ) 52 | return basin_ids 53 | 54 | 55 | def _validate_time_window( 56 | ribasim_model: ribasim.Model, 57 | mf6_simulation: Modflow6Simulation, 58 | ) -> None: 59 | def to_timestamp(xr_time: xr.DataArray) -> pd.Timestamp: 60 | return pd.Timestamp(xr_time.to_numpy().item()) 61 | 62 | mf6_timedis = mf6_simulation["time_discretization"].dataset 63 | mf6_start = to_timestamp(mf6_timedis["time"].isel(time=0)).to_pydatetime() 64 | time_delta = pd.to_timedelta( 65 | mf6_timedis["timestep_duration"].isel(time=-1).item(), unit="days" 66 | ) 67 | mf6_end = ( 68 | to_timestamp(mf6_timedis["time"].isel(time=-1)) + time_delta 69 | ).to_pydatetime() 70 | 71 | ribasim_start = ribasim_model.starttime 72 | ribasim_end = ribasim_model.endtime 73 | if ribasim_start != mf6_start or ribasim_end != mf6_end: 74 | raise ValueError( 75 | "Ribasim simulation time window does not match MODFLOW6.\n" 76 | f"Ribasim: {ribasim_start} to {ribasim_end}\n" 77 | f"MODFLOW6: {mf6_start} to {mf6_end}\n" 78 | ) 79 | return 80 | 81 | 82 | def _validate_keys( 83 | gwf_model: GroundwaterFlowModel, 84 | active_keys: list[str], 85 | passive_keys: list[str], 86 | expected_type: River | Drainage, 87 | ) -> None: 88 | active_keys_set = set(active_keys) 89 | passive_keys_set = set(passive_keys) 90 | intersection = active_keys_set.intersection(passive_keys_set) 91 | if intersection: 92 | raise ValueError(f"active and passive keys share members: {intersection}") 93 | present = [k for k, v in gwf_model.items() if isinstance(v, expected_type)] 94 | missing = (active_keys_set | passive_keys_set).difference(present) 95 | if missing: 96 | raise ValueError( 97 | f"keys with expected type {expected_type.__name__} are not " 98 | f"present in the model: {missing}" 99 | ) 100 | 101 | 102 | def _nullify_ribasim_exchange_input( 103 | ribasim_component: Basin | UserDemand, 104 | coupled_node_ids: NDArray[Int], 105 | columns: list[str], 106 | ) -> None: 107 | """ 108 | Set the input forcing to NoData for drainage and infiltration. 109 | 110 | Ribasim will otherwise overwrite the values set by the coupler. 111 | """ 112 | 113 | # FUTURE: in coupling to MetaSWAP, the runoff should be set nodata as well. 114 | def _nullify(df: pd.DataFrame, fill: float = np.nan) -> None: 115 | """E.g. 
set drainage, infiltration, runoff columns to nodata if present in df""" 116 | if df is not None: 117 | columns_present = list(set(columns).intersection(df.columns)) 118 | if len(columns_present) > 0: 119 | df.loc[df["node_id"].isin(coupled_node_ids), columns_present] = fill 120 | return 121 | 122 | _nullify(ribasim_component.static.df) 123 | _nullify(ribasim_component.time.df) 124 | return 125 | -------------------------------------------------------------------------------- /tests/test_primod/test_wel_svat_mapping.py: -------------------------------------------------------------------------------- 1 | import tempfile 2 | from pathlib import Path 3 | 4 | import numpy as np 5 | import xarray as xr 6 | from imod.mf6.mf6_wel_adapter import Mf6Wel, cellid_from_arrays__structured 7 | from numpy.testing import assert_equal 8 | from primod.mapping.wel_svat_mapping import WellSvatMapping 9 | 10 | 11 | def test_simple_model(fixed_format_parser): 12 | x = [1.0, 2.0, 3.0] 13 | y = [1.0, 2.0, 3.0] 14 | subunit = [0, 1] 15 | dx = 1.0 16 | dy = 1.0 17 | # fmt: off 18 | svat = xr.DataArray( 19 | np.array( 20 | [ 21 | [[0, 1, 0], 22 | [0, 0, 0], 23 | [0, 2, 0]], 24 | 25 | [[0, 3, 0], 26 | [0, 4, 0], 27 | [0, 0, 0]], 28 | ] 29 | ), 30 | dims=("subunit", "y", "x"), 31 | coords={"subunit": subunit, "y": y, "x": x, "dx": dx, "dy": dy} 32 | ) 33 | # fmt: on 34 | index = (svat != 0).to_numpy().ravel() 35 | 36 | # Well 37 | cellid = cellid_from_arrays__structured( 38 | layer=[3, 2, 1], row=[1, 2, 3], column=[2, 2, 2] 39 | ) 40 | well_rate = xr.DataArray([-5.0] * 3, coords={"index": [0, 1, 2]}, dims=("index",)) 41 | well = Mf6Wel( 42 | cellid=cellid, 43 | rate=well_rate, 44 | ) 45 | 46 | coupler_mapping = WellSvatMapping(svat, well, index=index) 47 | 48 | with tempfile.TemporaryDirectory() as output_dir: 49 | output_dir = Path(output_dir) 50 | coupler_mapping.write(output_dir) 51 | 52 | results = fixed_format_parser( 53 | output_dir / WellSvatMapping._file_name, 54 | WellSvatMapping._metadata_dict, 55 | ) 56 | 57 | assert_equal(results["wel_id"], np.array([1, 3, 1, 2])) 58 | assert_equal(results["svat"], np.array([1, 2, 3, 4])) 59 | assert_equal(results["layer"], np.array([3, 1, 3, 2])) 60 | 61 | 62 | def test_simple_model_1_subunit(fixed_format_parser): 63 | x = [1.0, 2.0, 3.0] 64 | y = [1.0, 2.0, 3.0] 65 | subunit = [0] 66 | dx = 1.0 67 | dy = 1.0 68 | # fmt: off 69 | svat = xr.DataArray( 70 | np.array( 71 | [ 72 | [[0, 1, 0], 73 | [0, 0, 0], 74 | [0, 2, 0]], 75 | ] 76 | ), 77 | dims=("subunit", "y", "x"), 78 | coords={"subunit": subunit, "y": y, "x": x, "dx": dx, "dy": dy} 79 | ) 80 | # fmt: on 81 | index = (svat != 0).to_numpy().ravel() 82 | 83 | # Well 84 | cellid = cellid_from_arrays__structured(layer=[3, 2], row=[1, 3], column=[2, 2]) 85 | well_rate = xr.DataArray([-5.0] * 2, coords={"index": [0, 1]}, dims=("index",)) 86 | well = Mf6Wel( 87 | cellid=cellid, 88 | rate=well_rate, 89 | ) 90 | 91 | coupler_mapping = WellSvatMapping(svat, well, index=index) 92 | 93 | with tempfile.TemporaryDirectory() as output_dir: 94 | output_dir = Path(output_dir) 95 | coupler_mapping.write(output_dir) 96 | 97 | results = fixed_format_parser( 98 | output_dir / WellSvatMapping._file_name, 99 | WellSvatMapping._metadata_dict, 100 | ) 101 | 102 | assert_equal(results["wel_id"], np.array([1, 2])) 103 | assert_equal(results["svat"], np.array([1, 2])) 104 | assert_equal(results["layer"], np.array([3, 2])) 105 | 106 | 107 | def test_simple_model_inactive(fixed_format_parser): 108 | """ 109 | Test with first well in inactive 
metaswap cell. This should increase the 110 | wel_id number, as the first modflow 6 well is not coupled to. 111 | """ 112 | 113 | x = [1.0, 2.0, 3.0] 114 | y = [1.0, 2.0, 3.0] 115 | subunit = [0, 1] 116 | dx = 1.0 117 | dy = 1.0 118 | # fmt: off 119 | svat = xr.DataArray( 120 | np.array( 121 | [ 122 | [[0, 1, 0], 123 | [0, 0, 0], 124 | [0, 2, 0]], 125 | 126 | [[0, 3, 0], 127 | [0, 4, 0], 128 | [0, 0, 0]], 129 | ] 130 | ), 131 | dims=("subunit", "y", "x"), 132 | coords={"subunit": subunit, "y": y, "x": x, "dx": dx, "dy": dy} 133 | ) 134 | # fmt: on 135 | index = (svat != 0).to_numpy().ravel() 136 | 137 | # Well 138 | cellid = cellid_from_arrays__structured( 139 | layer=[1, 3, 2, 1], row=[1, 1, 2, 3], column=[1, 2, 2, 2] 140 | ) 141 | well_rate = xr.DataArray( 142 | [-5.0] * 4, coords={"index": [0, 1, 2, 3]}, dims=("index",) 143 | ) 144 | well = Mf6Wel( 145 | cellid=cellid, 146 | rate=well_rate, 147 | ) 148 | 149 | coupler_mapping = WellSvatMapping(svat, well, index=index) 150 | 151 | with tempfile.TemporaryDirectory() as output_dir: 152 | output_dir = Path(output_dir) 153 | coupler_mapping.write(output_dir) 154 | 155 | results = fixed_format_parser( 156 | output_dir / WellSvatMapping._file_name, 157 | WellSvatMapping._metadata_dict, 158 | ) 159 | 160 | assert_equal(results["wel_id"], np.array([2, 4, 2, 3])) 161 | assert_equal(results["svat"], np.array([1, 2, 3, 4])) 162 | assert_equal(results["layer"], np.array([3, 1, 3, 2])) 163 | -------------------------------------------------------------------------------- /.teamcity/_Self/buildTypes/TestbenchCouplerWin64.kt: -------------------------------------------------------------------------------- 1 | package _Self.buildTypes 2 | 3 | import IMODCollector.buildTypes.IMODCollector_X64development 4 | import _Self.vcsRoots.ImodCoupler 5 | import jetbrains.buildServer.configs.kotlin.* 6 | import jetbrains.buildServer.configs.kotlin.buildFeatures.XmlReport 7 | import jetbrains.buildServer.configs.kotlin.buildFeatures.commitStatusPublisher 8 | import jetbrains.buildServer.configs.kotlin.buildFeatures.xmlReport 9 | import jetbrains.buildServer.configs.kotlin.buildSteps.script 10 | import jetbrains.buildServer.configs.kotlin.triggers.finishBuildTrigger 11 | import jetbrains.buildServer.configs.kotlin.triggers.vcs 12 | 13 | object TestbenchCouplerWin64 : BuildType({ 14 | name = "Testbench Coupler Win64" 15 | description = "Win64 Regression testbench for MODFLOW6/MetaSWAP coupler" 16 | 17 | artifactRules = """imod_coupler\tests\temp => test_output.zip""" 18 | publishArtifacts = PublishMode.ALWAYS 19 | 20 | params { 21 | param("env.PIXI_FROZEN", "true") 22 | param("env.METASWAP_DLL_REGRESSION", "%system.teamcity.build.checkoutDir%/imod_collector_regression/metaswap/MetaSWAP.dll") 23 | param("env.IMOD_COUPLER_EXEC_REGRESSION", "%system.teamcity.build.checkoutDir%/imod_collector_regression/imod_coupler/imodc.exe") 24 | param("env.MODFLOW_DLL_DEVEL", "%system.teamcity.build.checkoutDir%/imod_collector_devel/modflow6/libmf6.dll") 25 | param("env.MODFLOW_DLL_REGRESSION", "%system.teamcity.build.checkoutDir%/imod_collector_regression/modflow6/libmf6.dll") 26 | param("env.RIBASIM_DLL_DEP_DIR_DEVEL", "%system.teamcity.build.checkoutDir%/imod_collector_devel/ribasim/bin") 27 | param("env.RIBASIM_DLL_DEVEL", "%system.teamcity.build.checkoutDir%/imod_collector_devel/ribasim/bin/libribasim.dll") 28 | param("conda_env_path", "%system.teamcity.build.checkoutDir%/imod_coupler_testbench_env") 29 | param("env.METASWAP_DLL_DEP_DIR_REGRESSION", 
"%system.teamcity.build.checkoutDir%/imod_collector_regression/metaswap") 30 | param("env.METASWAP_DLL_DEVEL", "%system.teamcity.build.checkoutDir%/imod_collector_devel/metaswap/MetaSWAP.dll") 31 | param("env.METASWAP_DLL_DEP_DIR_DEVEL", "%system.teamcity.build.checkoutDir%/imod_collector_devel/metaswap") 32 | param("env.IMOD_COUPLER_EXEC_DEVEL", "%system.teamcity.build.checkoutDir%/imod_collector_devel/imodc.exe") 33 | param("env.RIBASIM_DLL_DEP_DIR_REGRESSION", "%system.teamcity.build.checkoutDir%/imod_collector_regression/ribasim/bin") 34 | param("env.METASWAP_LOOKUP_TABLE", "%system.teamcity.build.checkoutDir%/lookup_table") 35 | param("env.RIBASIM_DLL_REGRESSION", "%system.teamcity.build.checkoutDir%/imod_collector_regression/ribasim/bin/libribasim.dll") 36 | } 37 | 38 | vcs { 39 | root(_Self.vcsRoots.ImodCoupler, ". => imod_coupler") 40 | root(_Self.vcsRoots.MetaSwapLookupTable, ". => lookup_table") 41 | 42 | cleanCheckout = true 43 | branchFilter = """ 44 | +:* 45 | -:release_imod56 46 | """.trimIndent() 47 | } 48 | 49 | steps { 50 | script { 51 | name = "Set up pixi" 52 | workingDir = "imod_coupler" 53 | scriptContent = """ 54 | pixi --version 55 | pixi install -e dev 56 | pixi list -e dev 57 | """.trimIndent() 58 | } 59 | script { 60 | name = "Run tests" 61 | workingDir = "imod_coupler" 62 | scriptContent = """ 63 | pixi run -e dev test-imod-coupler 64 | """.trimIndent() 65 | } 66 | } 67 | 68 | triggers { 69 | finishBuildTrigger { 70 | buildType = "${IMODCollector_X64development.id}" 71 | successfulOnly = true 72 | } 73 | vcs { 74 | triggerRules = "+:root=${ImodCoupler.id}:**" 75 | 76 | } 77 | } 78 | 79 | features { 80 | commitStatusPublisher { 81 | vcsRootExtId = "${ImodCoupler.id}" 82 | publisher = github { 83 | githubUrl = "https://api.github.com" 84 | authType = personalToken { 85 | token = "credentialsJSON:6b37af71-1f2f-4611-8856-db07965445c0" 86 | } 87 | } 88 | } 89 | xmlReport { 90 | reportType = XmlReport.XmlReportType.JUNIT 91 | rules = "imod_coupler/report.xml" 92 | verbose = true 93 | } 94 | } 95 | 96 | failureConditions { 97 | executionTimeoutMin = 120 98 | } 99 | 100 | dependencies { 101 | dependency(IMODCollector.buildTypes.IMODCollector_X64development) { 102 | snapshot { 103 | onDependencyFailure = FailureAction.FAIL_TO_START 104 | } 105 | 106 | artifacts { 107 | cleanDestination = true 108 | artifactRules = """ 109 | imod_collector.zip!** => imod_collector_devel 110 | """.trimIndent() 111 | } 112 | artifacts { 113 | buildRule = tag("regression") 114 | cleanDestination = true 115 | artifactRules = "imod_coupler_windows.zip!** => imod_collector_regression" 116 | } 117 | } 118 | } 119 | 120 | requirements { 121 | equals("env.OS", "Windows_NT") 122 | } 123 | }) 124 | --------------------------------------------------------------------------------