├── .git_archival.txt ├── .gitattributes ├── .github └── workflows │ └── main.yml ├── .gitignore ├── .readthedocs.yml ├── .zenodo.json ├── AUTHORS.md ├── CHANGELOG.md ├── CITATION.bib ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── docs ├── Makefile └── source │ ├── _static │ └── custom.css │ ├── _templates │ ├── autosummary │ │ ├── class.rst │ │ └── module.rst │ └── layout.html │ ├── api.rst │ ├── authors.rst │ ├── changelog.rst │ ├── conf.py │ ├── contents.rst │ ├── index.rst │ ├── pics │ ├── 05_ordinary.png │ ├── 05_simple.png │ ├── 06_ensemble.png │ ├── 07_00_std.png │ ├── 07_01_lognormal.png │ ├── 07_02_binary.png │ ├── 07_03_zinnharvey.png │ ├── 07_04_arcsin.png │ ├── 07_05_combine.png │ ├── 09_cond_ens.png │ ├── 20_gstools.png │ ├── 20_pykrige.png │ ├── 2d_pgs.png │ ├── 3d_gau_field.png │ ├── 3d_pgs.png │ ├── GS_3d_vector_field.png │ ├── GS_pyvista_cut.png │ ├── cond_ens.png │ ├── cov_model_vario.png │ ├── demonstrator.png │ ├── exp_vario_fit.png │ ├── gau_field.png │ ├── gstools.ico │ ├── gstools.png │ ├── gstools_150.png │ ├── paraview.png │ ├── pyvista_export.png │ ├── srf_tut_exp_ani_rot.png │ ├── srf_tut_gau_field.png │ ├── srf_tut_gau_field_ens.png │ ├── srf_tut_merge.png │ ├── srf_tut_unstr.png │ ├── srf_vector_field.png │ ├── stab_vario_fit.png │ ├── tplstable_field.png │ ├── vario_tut_aniso_fit_exp.png │ ├── vario_tut_fit_exp.png │ ├── vario_tut_herten.png │ ├── vario_tut_new_herten.png │ ├── vec_srf_tut_exp.png │ └── vec_srf_tut_gau.png │ └── tutorials.rst ├── examples ├── 00_misc │ ├── 00_tpl_stable.py │ ├── 01_export.py │ ├── 02_check_rand_meth_sampling.py │ ├── 04_herten.py │ ├── 05_standalone_field.py │ ├── README.rst │ ├── grid_dim_origin_spacing.txt │ └── herten_transmissivity.gz ├── 01_random_field │ ├── 00_gaussian.py │ ├── 01_srf_ensemble.py │ ├── 02_fancier.py │ ├── 03_unstr_srf_export.py │ ├── 04_srf_merge.py │ ├── 05_mesh_ensemble.py │ ├── 06_pyvista_support.py │ ├── 07_higher_dimensions.py │ ├── 08_fourier.py │ ├── 
09_fourier_trans.py │ └── README.rst ├── 02_cov_model │ ├── 00_intro.py │ ├── 01_basic_methods.py │ ├── 02_aniso_rotation.py │ ├── 03_spectral_methods.py │ ├── 04_different_scales.py │ ├── 05_additional_para.py │ ├── 06_fitting_para_ranges.py │ └── README.rst ├── 03_variogram │ ├── 00_fit_variogram.py │ ├── 01_find_best_model.py │ ├── 02_multi_vario.py │ ├── 03_directional_2d.py │ ├── 04_directional_3d.py │ ├── 05_auto_fit_variogram.py │ ├── 06_auto_bin_latlon.py │ └── README.rst ├── 04_vector_field │ ├── 00_2d_vector_field.py │ ├── 01_3d_vector_field.py │ └── README.rst ├── 05_kriging │ ├── 00_simple_kriging.py │ ├── 01_ordinary_kriging.py │ ├── 02_pykrige_interface.py │ ├── 03_compare_kriging.py │ ├── 04_extdrift_kriging.py │ ├── 05_universal_kriging.py │ ├── 06_detrended_kriging.py │ ├── 07_detrended_ordinary_kriging.py │ ├── 08_measurement_errors.py │ ├── 09_pseudo_inverse.py │ └── README.rst ├── 06_conditioned_fields │ ├── 00_condition_ensemble.py │ ├── 01_2D_condition_ensemble.py │ └── README.rst ├── 07_transformations │ ├── 00_log_normal.py │ ├── 01_binary.py │ ├── 02_discrete.py │ ├── 03_zinn_harvey.py │ ├── 04_bimodal.py │ ├── 05_combinations.py │ └── README.rst ├── 08_geo_coordinates │ ├── 00_field_generation.py │ ├── 01_dwd_krige.py │ ├── README.rst │ ├── de_borders.txt │ └── temp_obs.txt ├── 09_spatio_temporal │ ├── 01_precip_1d.py │ ├── 02_precip_2d.py │ ├── 03_geographic_coordinates.py │ └── README.rst ├── 10_normalizer │ ├── 00_lognormal_kriging.py │ ├── 01_auto_fit.py │ ├── 02_compare.py │ └── README.rst ├── 11_plurigaussian │ ├── 00_simple.py │ ├── 01_pgs.py │ ├── 02_spatial_relations.py │ ├── 03_correlations.py │ ├── 04_3d_pgs.py │ ├── 05_conditioned.py │ ├── 06_periodic.py │ ├── README.rst │ └── conditional_values.npz └── 12_sum_model │ ├── 00_simple_sum_model.py │ ├── 01_fitting_sum_model.py │ └── README.rst ├── pyproject.toml ├── src └── gstools │ ├── __init__.py │ ├── config.py │ ├── covmodel │ ├── __init__.py │ ├── base.py │ ├── fit.py │ ├── 
models.py │ ├── plot.py │ ├── sum_tools.py │ ├── tools.py │ └── tpl_models.py │ ├── field │ ├── __init__.py │ ├── base.py │ ├── cond_srf.py │ ├── generator.py │ ├── pgs.py │ ├── plot.py │ ├── srf.py │ ├── tools.py │ └── upscaling.py │ ├── krige │ ├── __init__.py │ ├── base.py │ ├── methods.py │ └── tools.py │ ├── normalizer │ ├── __init__.py │ ├── base.py │ ├── methods.py │ └── tools.py │ ├── random │ ├── __init__.py │ ├── rng.py │ └── tools.py │ ├── tools │ ├── __init__.py │ ├── export.py │ ├── geometric.py │ ├── misc.py │ └── special.py │ ├── transform │ ├── __init__.py │ ├── array.py │ └── field.py │ └── variogram │ ├── __init__.py │ ├── binning.py │ └── variogram.py └── tests ├── data └── variogram.txt ├── test_condition.py ├── test_covmodel.py ├── test_export.py ├── test_field.py ├── test_fouriergen.py ├── test_incomprrandmeth.py ├── test_krige.py ├── test_latlon.py ├── test_normalizer.py ├── test_pgs.py ├── test_randmeth.py ├── test_rng.py ├── test_srf.py ├── test_sum_model.py ├── test_temporal.py ├── test_transform.py ├── test_variogram_structured.py └── test_variogram_unstructured.py /.git_archival.txt: -------------------------------------------------------------------------------- 1 | node: 1edd2b93bf429803d716a9cbab125362583575de 2 | node-date: 2025-04-28T13:31:01+02:00 3 | describe-name: v1.7.0 4 | ref-names: HEAD -> main, tag: v1.7.0 5 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | .git_archival.txt export-subst 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | 
downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | local_settings.py 56 | 57 | # Flask stuff: 58 | instance/ 59 | .webassets-cache 60 | 61 | # Scrapy stuff: 62 | .scrapy 63 | 64 | # Sphinx documentation 65 | docs/_build/ 66 | docs/output.txt 67 | 68 | # PyBuilder 69 | target/ 70 | 71 | # Jupyter Notebook 72 | .ipynb_checkpoints 73 | 74 | # pyenv 75 | .python-version 76 | 77 | # celery beat schedule file 78 | celerybeat-schedule 79 | 80 | # SageMath parsed files 81 | *.sage.py 82 | 83 | # dotenv 84 | .env 85 | 86 | # virtualenv 87 | .venv 88 | venv/ 89 | ENV/ 90 | 91 | # Spyder project settings 92 | .spyderproject 93 | .spyproject 94 | 95 | # Rope project settings 96 | .ropeproject 97 | 98 | # mkdocs documentation 99 | /site 100 | 101 | # mypy 102 | .mypy_cache/ 103 | 104 | tags 105 | /test_* 106 | 107 | # own stuff 108 | info/ 109 | 110 | # Cython generated C code 111 | *.c 112 | *.cpp 113 | 114 | # generated version file 115 | src/gstools/_version.py 116 | 117 | # generated docs 118 | docs/source/examples/ 119 | docs/source/api/ 120 | docs/source/sg_execution_times.rst 121 | 122 | # other settings 123 | .vscode/ 124 | *.DS_Store 125 | 126 | *.zip 127 | 128 | *.vtu 129 | *.vtr 130 | *.vtk 131 | -------------------------------------------------------------------------------- /.readthedocs.yml: 
-------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: ubuntu-22.04 5 | tools: 6 | python: "3.11" 7 | 8 | sphinx: 9 | configuration: docs/source/conf.py 10 | 11 | formats: [pdf] 12 | 13 | python: 14 | install: 15 | - method: pip 16 | path: . 17 | extra_requirements: 18 | - doc 19 | -------------------------------------------------------------------------------- /.zenodo.json: -------------------------------------------------------------------------------- 1 | { 2 | "license": "LGPL-3.0-or-later", 3 | "contributors": [ 4 | { 5 | "type": "Other", 6 | "name": "Bane Sullivan" 7 | }, 8 | { 9 | "orcid": "0000-0002-2547-8102", 10 | "affiliation": "Helmholtz Centre for Environmental Research - UFZ", 11 | "type": "ResearchGroup", 12 | "name": "Falk He\u00dfe" 13 | }, 14 | { 15 | "orcid": "0000-0002-8783-6198", 16 | "affiliation": "Hydrogeology Group, Department of Earth Science, Utrecht University, Netherlands", 17 | "type": "ResearchGroup", 18 | "name": "Alraune Zech" 19 | }, 20 | { 21 | "orcid": "0000-0002-7798-7080", 22 | "affiliation": "Helmholtz Centre for Environmental Research - UFZ", 23 | "type": "Supervisor", 24 | "name": "Sabine Attinger" 25 | } 26 | ], 27 | "language": "eng", 28 | "keywords": [ 29 | "geostatistics", 30 | "kriging", 31 | "random fields", 32 | "covariance models", 33 | "variogram", 34 | "Python", 35 | "GeoStat-Framework" 36 | ], 37 | "creators": [ 38 | { 39 | "orcid": "0000-0001-9060-4008", 40 | "affiliation": "Helmholtz Centre for Environmental Research - UFZ", 41 | "name": "Sebastian M\u00fcller" 42 | }, 43 | { 44 | "orcid": "0000-0001-9362-1372", 45 | "affiliation": "Helmholtz Centre for Environmental Research - UFZ", 46 | "name": "Lennart Sch\u00fcler" 47 | } 48 | ] 49 | } -------------------------------------------------------------------------------- /AUTHORS.md: -------------------------------------------------------------------------------- 1 | # Authors 2 | 3 | GSTools is 
available on [GitHub](https://github.com/GeoStat-Framework/GSTools) 4 | and was created by following people. 5 | 6 | 7 | ## Core developers 8 | 9 | - Sebastian Müller, GitHub: [@MuellerSeb](https://github.com/MuellerSeb), Email: 10 | - Lennart Schüler, GitHub: [@LSchueler](https://github.com/LSchueler), Email: 11 | 12 | 13 | ## Contributors (in order of contributions) 14 | 15 | - Falk Heße, GitHub: [@fhesze](https://github.com/fhesze), Email: 16 | - Bane Sullivan, GitHub: [@banesullivan](https://github.com/banesullivan) 17 | - Tobias Glaubach, GitHub: [@TobiasGlaubach](https://github.com/TobiasGlaubach) 18 | -------------------------------------------------------------------------------- /CITATION.bib: -------------------------------------------------------------------------------- 1 | @Article{gmd-15-3161-2022, 2 | AUTHOR = {M\"uller, S. and Sch\"uler, L. and Zech, A. and He{\ss}e, F.}, 3 | TITLE = {\texttt{GSTools} v1.3: a toolbox for geostatistical modelling in Python}, 4 | JOURNAL = {Geoscientific Model Development}, 5 | VOLUME = {15}, 6 | YEAR = {2022}, 7 | NUMBER = {7}, 8 | PAGES = {3161--3182}, 9 | URL = {https://gmd.copernicus.org/articles/15/3161/2022/}, 10 | DOI = {10.5194/gmd-15-3161-2022} 11 | } 12 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = python3 -msphinx 7 | SPHINXPROJ = GSTools 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /docs/source/_static/custom.css: -------------------------------------------------------------------------------- 1 | dl.py.property { 2 | display: block !important; 3 | } 4 | -------------------------------------------------------------------------------- /docs/source/_templates/autosummary/class.rst: -------------------------------------------------------------------------------- 1 | {{ fullname | escape | underline}} 2 | 3 | .. currentmodule:: {{ module }} 4 | 5 | .. autoclass:: {{ objname }} 6 | :members: 7 | :undoc-members: 8 | :inherited-members: 9 | :show-inheritance: 10 | 11 | .. raw:: latex 12 | 13 | \clearpage 14 | -------------------------------------------------------------------------------- /docs/source/_templates/autosummary/module.rst: -------------------------------------------------------------------------------- 1 | {{ fullname | escape | underline}} 2 | 3 | .. currentmodule:: {{ fullname }} 4 | 5 | .. automodule:: {{ fullname }} 6 | 7 | .. raw:: latex 8 | 9 | \clearpage 10 | -------------------------------------------------------------------------------- /docs/source/_templates/layout.html: -------------------------------------------------------------------------------- 1 | {% extends "!layout.html" %} 2 | {% block menu %} 3 | 4 | {{ super() }} 5 |
6 | 7 | 12 |
13 | 14 | 21 |
22 |
23 | 27 | {% endblock %} 28 | -------------------------------------------------------------------------------- /docs/source/api.rst: -------------------------------------------------------------------------------- 1 | =========== 2 | GSTools API 3 | =========== 4 | 5 | .. automodule:: gstools 6 | 7 | .. raw:: latex 8 | 9 | \clearpage 10 | -------------------------------------------------------------------------------- /docs/source/authors.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../../AUTHORS.md 2 | :parser: myst_parser.docutils_ 3 | -------------------------------------------------------------------------------- /docs/source/changelog.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../../CHANGELOG.md 2 | :parser: myst_parser.docutils_ 3 | -------------------------------------------------------------------------------- /docs/source/contents.rst: -------------------------------------------------------------------------------- 1 | ======== 2 | Contents 3 | ======== 4 | 5 | .. 
toctree:: 6 | :includehidden: 7 | :maxdepth: 3 8 | 9 | index 10 | tutorials 11 | api 12 | authors 13 | changelog 14 | -------------------------------------------------------------------------------- /docs/source/pics/05_ordinary.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/05_ordinary.png -------------------------------------------------------------------------------- /docs/source/pics/05_simple.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/05_simple.png -------------------------------------------------------------------------------- /docs/source/pics/06_ensemble.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/06_ensemble.png -------------------------------------------------------------------------------- /docs/source/pics/07_00_std.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/07_00_std.png -------------------------------------------------------------------------------- /docs/source/pics/07_01_lognormal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/07_01_lognormal.png -------------------------------------------------------------------------------- /docs/source/pics/07_02_binary.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/07_02_binary.png -------------------------------------------------------------------------------- /docs/source/pics/07_03_zinnharvey.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/07_03_zinnharvey.png -------------------------------------------------------------------------------- /docs/source/pics/07_04_arcsin.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/07_04_arcsin.png -------------------------------------------------------------------------------- /docs/source/pics/07_05_combine.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/07_05_combine.png -------------------------------------------------------------------------------- /docs/source/pics/09_cond_ens.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/09_cond_ens.png -------------------------------------------------------------------------------- /docs/source/pics/20_gstools.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/20_gstools.png -------------------------------------------------------------------------------- /docs/source/pics/20_pykrige.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/20_pykrige.png -------------------------------------------------------------------------------- /docs/source/pics/2d_pgs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/2d_pgs.png -------------------------------------------------------------------------------- /docs/source/pics/3d_gau_field.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/3d_gau_field.png -------------------------------------------------------------------------------- /docs/source/pics/3d_pgs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/3d_pgs.png -------------------------------------------------------------------------------- /docs/source/pics/GS_3d_vector_field.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/GS_3d_vector_field.png -------------------------------------------------------------------------------- /docs/source/pics/GS_pyvista_cut.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/GS_pyvista_cut.png -------------------------------------------------------------------------------- /docs/source/pics/cond_ens.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/cond_ens.png -------------------------------------------------------------------------------- /docs/source/pics/cov_model_vario.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/cov_model_vario.png -------------------------------------------------------------------------------- /docs/source/pics/demonstrator.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/demonstrator.png -------------------------------------------------------------------------------- /docs/source/pics/exp_vario_fit.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/exp_vario_fit.png -------------------------------------------------------------------------------- /docs/source/pics/gau_field.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/gau_field.png -------------------------------------------------------------------------------- /docs/source/pics/gstools.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/gstools.ico -------------------------------------------------------------------------------- /docs/source/pics/gstools.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/gstools.png -------------------------------------------------------------------------------- /docs/source/pics/gstools_150.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/gstools_150.png -------------------------------------------------------------------------------- /docs/source/pics/paraview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/paraview.png -------------------------------------------------------------------------------- /docs/source/pics/pyvista_export.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/pyvista_export.png -------------------------------------------------------------------------------- /docs/source/pics/srf_tut_exp_ani_rot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/srf_tut_exp_ani_rot.png -------------------------------------------------------------------------------- /docs/source/pics/srf_tut_gau_field.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/srf_tut_gau_field.png -------------------------------------------------------------------------------- /docs/source/pics/srf_tut_gau_field_ens.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/srf_tut_gau_field_ens.png -------------------------------------------------------------------------------- /docs/source/pics/srf_tut_merge.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/srf_tut_merge.png -------------------------------------------------------------------------------- /docs/source/pics/srf_tut_unstr.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/srf_tut_unstr.png -------------------------------------------------------------------------------- /docs/source/pics/srf_vector_field.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/srf_vector_field.png -------------------------------------------------------------------------------- /docs/source/pics/stab_vario_fit.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/stab_vario_fit.png -------------------------------------------------------------------------------- /docs/source/pics/tplstable_field.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/tplstable_field.png -------------------------------------------------------------------------------- 
/docs/source/pics/vario_tut_aniso_fit_exp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/vario_tut_aniso_fit_exp.png -------------------------------------------------------------------------------- /docs/source/pics/vario_tut_fit_exp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/vario_tut_fit_exp.png -------------------------------------------------------------------------------- /docs/source/pics/vario_tut_herten.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/vario_tut_herten.png -------------------------------------------------------------------------------- /docs/source/pics/vario_tut_new_herten.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/vario_tut_new_herten.png -------------------------------------------------------------------------------- /docs/source/pics/vec_srf_tut_exp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/vec_srf_tut_exp.png -------------------------------------------------------------------------------- /docs/source/pics/vec_srf_tut_gau.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/docs/source/pics/vec_srf_tut_gau.png 
-------------------------------------------------------------------------------- /docs/source/tutorials.rst: -------------------------------------------------------------------------------- 1 | .. _tutorials: 2 | 3 | ================= 4 | GSTools Tutorials 5 | ================= 6 | 7 | In the following you will find several Tutorials on how to use GSTools to 8 | explore its whole beauty and power. 9 | 10 | 11 | .. toctree:: 12 | :includehidden: 13 | :maxdepth: 1 14 | 15 | examples/01_random_field/index 16 | examples/02_cov_model/index 17 | examples/03_variogram/index 18 | examples/04_vector_field/index 19 | examples/05_kriging/index 20 | examples/06_conditioned_fields/index 21 | examples/07_transformations/index 22 | examples/08_geo_coordinates/index 23 | examples/09_spatio_temporal/index 24 | examples/10_normalizer/index 25 | examples/11_plurigaussian/index 26 | examples/12_sum_model/index 27 | examples/00_misc/index 28 | 29 | .. only:: html 30 | 31 | **Youtube Tutorial on GSTools** 32 | 33 | .. youtube:: qZBJ-AZXq6Q 34 | :width: 100% 35 | 36 | | 37 | 38 | Gallery 39 | ======= 40 | 41 | .. minigallery:: 42 | 43 | ../../examples/**/*.py 44 | -------------------------------------------------------------------------------- /examples/00_misc/00_tpl_stable.py: -------------------------------------------------------------------------------- 1 | r""" 2 | Truncated Power Law Variograms 3 | ------------------------------ 4 | 5 | GSTools also implements truncated power law variograms, 6 | which can be represented as a superposition of scale dependant modes 7 | in form of standard variograms, which are truncated by 8 | a lower- :math:`\ell_{\mathrm{low}}` and 9 | an upper length-scale :math:`\ell_{\mathrm{up}}`. 10 | 11 | This example shows the truncated power law (:any:`TPLStable`) based on the 12 | :any:`Stable` covariance model and is given by 13 | 14 | .. 
math:: 15 | \gamma_{\ell_{\mathrm{low}},\ell_{\mathrm{up}}}(r) = 16 | \intop_{\ell_{\mathrm{low}}}^{\ell_{\mathrm{up}}} 17 | \gamma(r,\lambda) \frac{\rm d \lambda}{\lambda} 18 | 19 | with `Stable` modes on each scale: 20 | 21 | .. math:: 22 | \gamma(r,\lambda) &= 23 | \sigma^2(\lambda)\cdot\left(1- 24 | \exp\left[- \left(\frac{r}{\lambda}\right)^{\alpha}\right] 25 | \right)\\ 26 | \sigma^2(\lambda) &= C\cdot\lambda^{2H} 27 | 28 | which gives Gaussian modes for ``alpha=2`` 29 | or Exponential modes for ``alpha=1``. 30 | 31 | For :math:`\ell_{\mathrm{low}}=0` this results in: 32 | 33 | .. math:: 34 | \gamma_{\ell_{\mathrm{up}}}(r) &= 35 | \sigma^2_{\ell_{\mathrm{up}}}\cdot\left(1- 36 | \frac{2H}{\alpha} \cdot 37 | E_{1+\frac{2H}{\alpha}} 38 | \left[\left(\frac{r}{\ell_{\mathrm{up}}}\right)^{\alpha}\right] 39 | \right) \\ 40 | \sigma^2_{\ell_{\mathrm{up}}} &= 41 | C\cdot\frac{\ell_{\mathrm{up}}^{2H}}{2H} 42 | """ 43 | 44 | import numpy as np 45 | 46 | import gstools as gs 47 | 48 | x = y = np.linspace(0, 100, 100) 49 | model = gs.TPLStable( 50 | dim=2, # spatial dimension 51 | var=1, # variance (C is calculated internally, so variance is actually 1) 52 | len_low=0, # lower truncation of the power law 53 | len_scale=10, # length scale (a.k.a. range), len_up = len_low + len_scale 54 | nugget=0.1, # nugget 55 | anis=0.5, # anisotropy between main direction and transversal ones 56 | angles=np.pi / 4, # rotation angles 57 | alpha=1.5, # shape parameter from the stable model 58 | hurst=0.7, # hurst coefficient from the power law 59 | ) 60 | srf = gs.SRF(model, mean=1.0, seed=19970221) 61 | srf.structured([x, y]) 62 | srf.plot() 63 | -------------------------------------------------------------------------------- /examples/00_misc/01_export.py: -------------------------------------------------------------------------------- 1 | """ 2 | Exporting Fields 3 | ---------------- 4 | 5 | GSTools provides simple exporting routines to convert generated fields to 6 | `VTK `__ files. 
7 | 8 | These can be viewed for example with `Paraview `__. 9 | """ 10 | 11 | # sphinx_gallery_thumbnail_path = 'pics/paraview.png' 12 | import gstools as gs 13 | 14 | x = y = range(100) 15 | model = gs.Gaussian(dim=2, var=1, len_scale=10) 16 | srf = gs.SRF(model) 17 | field = srf((x, y), mesh_type="structured") 18 | srf.vtk_export(filename="field") 19 | 20 | ############################################################################### 21 | # The result displayed with Paraview: 22 | # 23 | # .. image:: ../../pics/paraview.png 24 | # :width: 400px 25 | # :align: center 26 | -------------------------------------------------------------------------------- /examples/00_misc/02_check_rand_meth_sampling.py: -------------------------------------------------------------------------------- 1 | """ 2 | Check Random Sampling 3 | --------------------- 4 | """ 5 | 6 | import numpy as np 7 | from matplotlib import pyplot as plt 8 | from mpl_toolkits.mplot3d import Axes3D 9 | 10 | import gstools as gs 11 | 12 | 13 | def norm_rad(vec): 14 | """Direction on the unit sphere.""" 15 | vec = np.array(vec, ndmin=2) 16 | norm = np.zeros(vec.shape[1]) 17 | for i in range(vec.shape[0]): 18 | norm += vec[i] ** 2 19 | norm = np.sqrt(norm) 20 | return np.einsum("j,ij->ij", 1 / norm, vec), norm 21 | 22 | 23 | def plot_rand_meth_samples(generator): 24 | """Plot the samples of the rand meth class.""" 25 | norm, rad = norm_rad(generator._cov_sample) 26 | 27 | fig = plt.figure(figsize=(10, 4)) 28 | 29 | if generator.model.dim == 3: 30 | ax = fig.add_subplot(121, projection=Axes3D.name) 31 | u = np.linspace(0, 2 * np.pi, 100) 32 | v = np.linspace(0, np.pi, 100) 33 | x = np.outer(np.cos(u), np.sin(v)) 34 | y = np.outer(np.sin(u), np.sin(v)) 35 | z = np.outer(np.ones(np.size(u)), np.cos(v)) 36 | ax.plot_surface(x, y, z, rstride=4, cstride=4, color="b", alpha=0.1) 37 | ax.scatter(norm[0], norm[1], norm[2]) 38 | elif generator.model.dim == 2: 39 | ax = fig.add_subplot(121) 40 | u = np.linspace(0, 2 * 
# The only thing needed to instantiate the Field is the dimension.
24 | # 25 | # Afterwards we can call the instance like all other Fields 26 | # (:any:`SRF`, :any:`Krige` or :any:`CondSRF`), but with an additional field. 27 | 28 | plotter = gs.field.Field(dim=4) 29 | plotter(pos=(x0, x1, x2, x3), field=values) 30 | plotter.plot() 31 | -------------------------------------------------------------------------------- /examples/00_misc/README.rst: -------------------------------------------------------------------------------- 1 | Miscellaneous Tutorials 2 | ======================= 3 | 4 | More examples which do not really fit into other categories. Some are not more 5 | than a code snippet, while others are more complex and more than one part of 6 | GSTools is involved. 7 | 8 | Examples 9 | -------- 10 | -------------------------------------------------------------------------------- /examples/00_misc/grid_dim_origin_spacing.txt: -------------------------------------------------------------------------------- 1 | 1.000000000000000000e+03 1.000000000000000000e+03 2 | 0.000000000000000000e+00 0.000000000000000000e+00 3 | 5.000000000000000278e-02 5.000000000000000278e-02 4 | -------------------------------------------------------------------------------- /examples/00_misc/herten_transmissivity.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/examples/00_misc/herten_transmissivity.gz -------------------------------------------------------------------------------- /examples/01_random_field/00_gaussian.py: -------------------------------------------------------------------------------- 1 | r""" 2 | A Very Simple Example 3 | --------------------- 4 | 5 | We are going to start with a very simple example of a spatial random field 6 | with an isotropic Gaussian covariance model and following parameters: 7 | 8 | - variance :math:`\sigma^2=1` 9 | - correlation length :math:`\lambda=10` 10 | 11 | First, we set 
things up and create the axes for the field. We are going to 12 | need the :any:`SRF` class for the actual generation of the spatial random field. 13 | But :any:`SRF` also needs a covariance model and we will simply take the 14 | :any:`Gaussian` model. 15 | """ 16 | 17 | import gstools as gs 18 | 19 | x = y = range(100) 20 | 21 | ############################################################################### 22 | # Now we create the covariance model with the parameters :math:`\sigma^2` and 23 | # :math:`\lambda` and hand it over to :any:`SRF`. By specifying a seed, 24 | # we make sure to create reproducible results: 25 | 26 | model = gs.Gaussian(dim=2, var=1, len_scale=10) 27 | srf = gs.SRF(model, seed=20170519) 28 | 29 | ############################################################################### 30 | # With these simple steps, everything is ready to create our first random field. 31 | # We will create the field on a structured grid (as you might have guessed from 32 | # the `x` and `y`), which makes it easier to plot. 33 | 34 | field = srf.structured([x, y]) 35 | srf.plot() 36 | 37 | ############################################################################### 38 | # Wow, that was pretty easy! 39 | -------------------------------------------------------------------------------- /examples/01_random_field/01_srf_ensemble.py: -------------------------------------------------------------------------------- 1 | """ 2 | Creating an Ensemble of Fields 3 | ------------------------------ 4 | 5 | Creating an ensemble of random fields would also be 6 | a great idea. Let's reuse most of the previous code. 7 | 8 | We will set the position tuple `pos` before generation to reuse it afterwards. 
# This time, we did not provide a seed to :any:`SRF`, as the seeds will be used 24 | # during the actual computation of the fields. We will create four ensemble 25 | # members, for better visualisation, save them into the srf class and in a first 26 | # step, we will be using the loop counter as the seeds.
covariance model class :any:`Exponential`. As model parameters we are using the 10 | following
# Comparing this image to the previous one, you can see that by using the same 34 | # seed, the same field can be computed on different grids.
# The slight mismatch where the two fields were merged is merely due to 43 | # interpolation problems of the plotting routine. You can convince yourself 44 | # by increasing the resolution of the grids by a factor of 10. 45 | # 46 | # Of course, this merging could also have been done by appending the grid 47 | # point ``(x2, y2)`` to the original grid ``(x, y)`` before generating the field. 48 | # But one application scenario would be to generate huge fields, which would not 49 | # fit into memory anymore.
50 | -------------------------------------------------------------------------------- /examples/01_random_field/05_mesh_ensemble.py: -------------------------------------------------------------------------------- 1 | """ 2 | Generating Fields on Meshes 3 | --------------------------- 4 | 5 | GSTools provides an interface for meshes, to support 6 | `meshio `_ and 7 | `ogs5py `_ meshes. 8 | 9 | When using `meshio`, the generated fields will be stored immediately in the 10 | mesh container. 11 | 12 | There are two options to generate a field on a given mesh: 13 | 14 | - `points="points"` will generate a field on the mesh points 15 | - `points="centroids"` will generate a field on the cell centroids 16 | 17 | In this example, we will generate a simple mesh with the aid of 18 | `meshzoo `_. 19 | """ 20 | 21 | import matplotlib.pyplot as plt 22 | import matplotlib.tri as tri 23 | import meshio 24 | import meshzoo 25 | import numpy as np 26 | 27 | import gstools as gs 28 | 29 | # generate a triangulated hexagon with meshzoo 30 | points, cells = meshzoo.ngon(6, 4) 31 | mesh = meshio.Mesh(points, {"triangle": cells}) 32 | 33 | ############################################################################### 34 | # Now we prepare the SRF class as always. We will generate an ensemble of 35 | # fields on the generated mesh. 36 | 37 | # number of fields 38 | fields_no = 12 39 | # model setup 40 | model = gs.Gaussian(dim=2, len_scale=0.5) 41 | srf = gs.SRF(model, mean=1) 42 | 43 | ############################################################################### 44 | # To generate fields on a mesh, we provide a separate method: :any:`SRF.mesh`. 45 | # First we generate fields on the mesh-centroids controlled by a seed. 46 | # You can specify the field name by the keyword `name`. 
# Last but not least, `meshio` can be used for what it does best: Exporting.
93 | 94 | mesh.write("mesh_ensemble.vtk") 95 | -------------------------------------------------------------------------------- /examples/01_random_field/06_pyvista_support.py: -------------------------------------------------------------------------------- 1 | """ 2 | Using PyVista meshes 3 | -------------------- 4 | 5 | `PyVista `__ is a helper module for the 6 | Visualization Toolkit (VTK) that takes a different approach on interfacing with 7 | VTK through NumPy and direct array access. 8 | 9 | It provides mesh data structures and filtering methods for spatial datasets, 10 | makes 3D plotting simple and is built for large/complex data geometries. 11 | 12 | The :any:`Field.mesh` method enables easy field creation on PyVista meshes 13 | used by the :any:`SRF` or :any:`Krige` class. 14 | """ 15 | 16 | # sphinx_gallery_thumbnail_path = 'pics/GS_pyvista_cut.png' 17 | import pyvista as pv 18 | 19 | import gstools as gs 20 | 21 | ############################################################################### 22 | # We create a structured grid with PyVista containing 50 segments on all three 23 | # axes each with a length of 2 (whatever unit). 24 | 25 | dims, spacing = (50, 50, 50), (2, 2, 2) 26 | grid = pv.ImageData(dimensions=dims, spacing=spacing) 27 | 28 | ############################################################################### 29 | # Now we set up the SRF class as always. We'll use an anisotropic model. 30 | 31 | model = gs.Gaussian(dim=3, len_scale=[16, 8, 4], angles=(0.8, 0.4, 0.2)) 32 | srf = gs.SRF(model, seed=19970221) 33 | 34 | ############################################################################### 35 | # The PyVista mesh can now be directly passed to the :any:`SRF.mesh` method. 36 | # When dealing with meshes, one can choose if the field should be generated 37 | # on the mesh-points (`"points"`) or the cell-centroids (`"centroids"`). 38 | # 39 | # In addition we can set a name, under which the resulting field is stored 40 | # in the mesh. 
With increasing dimensions more and more rotation angles are added in order 13 | to properly describe the rotated axes of anisotropy.
35 | 36 | The rotation direction in these planes have alternating signs 37 | in order to match Tait-Bryan in 3D. 38 | 39 | Let's have a look at a 4D example, where we naively add a 4th dimension. 40 | """ 41 | 42 | import matplotlib.pyplot as plt 43 | 44 | import gstools as gs 45 | 46 | dim = 4 47 | size = 20 48 | pos = [range(size)] * dim 49 | model = gs.Exponential(dim=dim, len_scale=5) 50 | srf = gs.SRF(model, seed=20170519) 51 | field = srf.structured(pos) 52 | 53 | ############################################################################### 54 | # In order to "prove" correctness, we can calculate an empirical variogram 55 | # of the generated field and fit our model to it. 56 | 57 | bin_center, vario = gs.vario_estimate( 58 | pos, field, sampling_size=2000, mesh_type="structured" 59 | ) 60 | model.fit_variogram(bin_center, vario) 61 | print(model) 62 | 63 | ############################################################################### 64 | # As you can see, the estimated variance and length scale match our input 65 | # quite well. 66 | # 67 | # Let's have a look at the fit and a x-y cross-section of the 4D field: 68 | 69 | f, a = plt.subplots(1, 2, gridspec_kw={"width_ratios": [2, 1]}, figsize=[9, 3]) 70 | model.plot(x_max=max(bin_center), ax=a[0]) 71 | a[0].scatter(bin_center, vario) 72 | a[1].imshow(field[:, :, 0, 0].T, origin="lower") 73 | a[0].set_title("isotropic empirical variogram with fitted model") 74 | a[1].set_title("x-y cross-section") 75 | f.show() 76 | 77 | ############################################################################### 78 | # GSTools also provides plotting routines for higher dimensions. 79 | # Fields are shown by 2D cross-sections, where other dimensions can be 80 | # controlled via sliders. 
81 | 82 | srf.plot() 83 | -------------------------------------------------------------------------------- /examples/01_random_field/08_fourier.py: -------------------------------------------------------------------------------- 1 | """ 2 | Generating a Simple Periodic Random Field 3 | ----------------------------------------- 4 | 5 | In this simple example we are going to learn how to generate periodic spatial 6 | random fields. The Fourier method comes naturally with the property of 7 | periodicity, so we'll use it to create the random field. 8 | """ 9 | 10 | import numpy as np 11 | 12 | import gstools as gs 13 | 14 | # We start off by defining the spatial grid. For the sake of simplicity, we 15 | # use a square domain. We set the optional argument `endpoint` to `False`, to 16 | # not make the domain in each dimension one grid cell larger than the 17 | # periodicity. 18 | L = 500.0 19 | x = np.linspace(0, L, 256, endpoint=False) 20 | y = np.linspace(0, L, 128, endpoint=False) 21 | 22 | # Now, we create a Gaussian covariance model with a correlation length which is 23 | # roughly half the size of the grid. 24 | model = gs.Gaussian(dim=2, var=1, len_scale=200) 25 | 26 | # Next, we hand the cov. model to the spatial random field class `SRF` 27 | # and set the generator to `"Fourier"`. The argument `period` is set to the 28 | # domain size. If only a single number is given, the same periodicity is 29 | # applied in each dimension, as shown in this example. The `mode_no` argument 30 | # sets the number of Fourier modes. If only an integer is given, that number 31 | # of modes is used for all dimensions. 32 | srf = gs.SRF( 33 | model, 34 | generator="Fourier", 35 | period=L, 36 | mode_no=32, 37 | seed=1681903, 38 | ) 39 | 40 | # Now, we can calculate the field with the given parameters. 41 | srf((x, y), mesh_type="structured") 42 | 43 | # GSTools has a few simple visualization methods built in. 
Building on the previous example, we are now going to generate periodic 6 | spatial random fields with a transformation applied, resulting in a level set.
28 | srf = gs.SRF( 29 | model, 30 | generator="Fourier", 31 | period=[L[0], L[1] / 2], 32 | mode_no=[30, 20], 33 | seed=1681903, 34 | ) 35 | # and compute it on our spatial domain 36 | srf((x, y), mesh_type="structured") 37 | 38 | # With the field generated, we can now apply transformations starting with a 39 | # discretization of the field into 4 different values 40 | thresholds = np.linspace(np.min(srf.field), np.max(srf.field), 4) 41 | srf.transform("discrete", store="transform_discrete", values=thresholds) 42 | srf.plot("transform_discrete") 43 | 44 | # This is already a nice result, but we want to pronounce the peaks of the 45 | # field. We can do this by applying a log-normal transformation on top 46 | srf.transform( 47 | "lognormal", field="transform_discrete", store="transform_lognormal" 48 | ) 49 | srf.plot("transform_lognormal") 50 | -------------------------------------------------------------------------------- /examples/01_random_field/README.rst: -------------------------------------------------------------------------------- 1 | Random Field Generation 2 | ======================= 3 | 4 | The main feature of GSTools is the spatial random field generator :any:`SRF`, 5 | which can generate random fields following a given covariance model. 6 | The generator provides a lot of nice features, which will be explained in 7 | the following 8 | 9 | GSTools generates spatial random fields with a given covariance model or 10 | semi-variogram. This is done by using the so-called randomization method. 11 | The spatial random field is represented by a stochastic Fourier integral 12 | and its discretised modes are evaluated at random frequencies. 13 | 14 | In case you want to generate spatial random fields with periodic boundaries, 15 | you can use the so-called Fourier method. See the corresponding examples for 16 | how to do that. 
The spatial random field is represented by a stochastic 17 | Fourier integral and its discretised modes are evaluated at equidistant 18 | frequencies. 19 | 20 | GSTools supports arbitrary and non-isotropic covariance models. 21 | 22 | Examples 23 | -------- 24 | -------------------------------------------------------------------------------- /examples/02_cov_model/00_intro.py: -------------------------------------------------------------------------------- 1 | """ 2 | Introductory example 3 | ==================== 4 | 5 | Let us start with a short example of a self defined model (Of course, we 6 | provide a lot of predefined models [See: :any:`gstools.covmodel`], 7 | but they all work the same way). 8 | Therefore we reimplement the Gaussian covariance model 9 | by defining just the "normalized" 10 | `correlation `_ 11 | function: 12 | """ 13 | 14 | import numpy as np 15 | 16 | import gstools as gs 17 | 18 | 19 | # use CovModel as the base-class 20 | class Gau(gs.CovModel): 21 | def cor(self, h): 22 | return np.exp(-(h**2)) 23 | 24 | 25 | ############################################################################### 26 | # Here the parameter ``h`` stands for the normalized range ``r / len_scale``. 27 | # Now we can instantiate this model: 28 | 29 | model = Gau(dim=2, var=2.0, len_scale=10) 30 | 31 | ############################################################################### 32 | # To have a look at the variogram, let's plot it: 33 | 34 | model.plot() 35 | 36 | ############################################################################### 37 | # This is almost identical to the already provided :any:`Gaussian` model. 
38 | # There, a scaling factor is implemented so the len_scale coincides with the 39 | # integral scale: 40 | 41 | gau_model = gs.Gaussian(dim=2, var=2.0, len_scale=10) 42 | gau_model.plot() 43 | 44 | 45 | ############################################################################### 46 | # Parameters 47 | # ---------- 48 | # 49 | # We already used some parameters, which every covariance models has. 50 | # The basic ones are: 51 | # 52 | # - **dim** : dimension of the model 53 | # - **var** : variance of the model (on top of the subscale variance) 54 | # - **len_scale** : length scale of the model 55 | # - **nugget** : nugget (subscale variance) of the model 56 | # 57 | # These are the common parameters used to characterize 58 | # a covariance model and are therefore used by every model in GSTools. 59 | # You can also access and reset them: 60 | 61 | print("old model:", model) 62 | model.dim = 3 63 | model.var = 1 64 | model.len_scale = 15 65 | model.nugget = 0.1 66 | print("new model:", model) 67 | 68 | 69 | ############################################################################### 70 | # .. note:: 71 | # 72 | # - The sill of the variogram is calculated by ``sill = variance + nugget`` 73 | # So we treat the variance as everything **above** the nugget, 74 | # which is sometimes called **partial sill**. 75 | # - A covariance model can also have additional parameters. 76 | -------------------------------------------------------------------------------- /examples/02_cov_model/01_basic_methods.py: -------------------------------------------------------------------------------- 1 | r""" 2 | Basic Methods 3 | ============= 4 | 5 | The covariance model class :any:`CovModel` of GSTools provides a set of handy 6 | methods. 7 | 8 | One of the following functions defines the main characterization of the 9 | variogram: 10 | 11 | - ``CovModel.variogram`` : The variogram of the model given by 12 | 13 | .. 
math:: 14 | \gamma\left(r\right)= 15 | \sigma^2\cdot\left(1-\rho\left(r\right)\right)+n 16 | 17 | - ``CovModel.covariance`` : The (auto-)covariance of the model given by 18 | 19 | .. math:: 20 | C\left(r\right)= \sigma^2\cdot\rho\left(r\right) 21 | 22 | - ``CovModel.correlation`` : The (auto-)correlation 23 | (or normalized covariance) of the model given by 24 | 25 | .. math:: 26 | \rho\left(r\right) 27 | 28 | - ``CovModel.cor`` : The normalized correlation taking a 29 | normalized range given by: 30 | 31 | .. math:: 32 | \mathrm{cor}\left(\frac{r}{\ell}\right) = \rho\left(r\right) 33 | 34 | 35 | As you can see, it is the easiest way to define a covariance model by giving a 36 | correlation function as demonstrated in the introductory example. 37 | If one of the above functions is given, the others will be determined: 38 | """ 39 | 40 | import gstools as gs 41 | 42 | model = gs.Exponential(dim=3, var=2.0, len_scale=10, nugget=0.5) 43 | ax = model.plot("variogram") 44 | model.plot("covariance", ax=ax) 45 | model.plot("correlation", ax=ax) 46 | -------------------------------------------------------------------------------- /examples/02_cov_model/02_aniso_rotation.py: -------------------------------------------------------------------------------- 1 | """ 2 | Anisotropy and Rotation 3 | ======================= 4 | 5 | The internally used (semi-) variogram 6 | represents the isotropic case for the model. 7 | Nevertheless, you can provide anisotropy ratios by: 8 | """ 9 | 10 | import gstools as gs 11 | 12 | model = gs.Gaussian(dim=3, var=2.0, len_scale=10, anis=0.5) 13 | print(model.anis) 14 | print(model.len_scale_vec) 15 | 16 | 17 | ############################################################################### 18 | # As you can see, we defined just one anisotropy-ratio 19 | # and the second transversal direction was filled up with ``1.``. 20 | # You can get the length-scales in each direction by 21 | # the attribute :any:`CovModel.len_scale_vec`. 
For full control you can set 22 | # a list of anisotropy ratios: ``anis=[0.5, 0.4]``. 23 | # 24 | # Alternatively you can provide a list of length-scales: 25 | 26 | model = gs.Gaussian(dim=3, var=2.0, len_scale=[10, 5, 4]) 27 | model.plot("vario_spatial") 28 | print("Anisotropy representations:") 29 | print("Anis. ratios:", model.anis) 30 | print("Main length scale", model.len_scale) 31 | print("All length scales", model.len_scale_vec) 32 | 33 | 34 | ############################################################################### 35 | # Rotation Angles 36 | # --------------- 37 | # 38 | # The main directions of the field don't have to coincide with the spatial 39 | # directions :math:`x`, :math:`y` and :math:`z`. Therefore you can provide 40 | # rotation angles for the model: 41 | 42 | model = gs.Gaussian(dim=3, var=2.0, len_scale=[10, 2], angles=2.5) 43 | model.plot("vario_spatial") 44 | print("Rotation angles", model.angles) 45 | 46 | ############################################################################### 47 | # Again, the angles were filled up with ``0.`` to match the dimension and you 48 | # could also provide a list of angles. The number of angles depends on the 49 | # given dimension: 50 | # 51 | # - in 1D: no rotation performable 52 | # - in 2D: given as rotation around z-axis 53 | # - in 3D: given by yaw, pitch, and roll (known as 54 | # `Tait–Bryan `_ 55 | # angles) 56 | # - in nD: See the random field example about higher dimensions 57 | -------------------------------------------------------------------------------- /examples/02_cov_model/03_spectral_methods.py: -------------------------------------------------------------------------------- 1 | r""" 2 | Spectral methods 3 | ================ 4 | 5 | The spectrum of a covariance model is given by: 6 | 7 | ..
math:: S(\mathbf{k}) = \left(\frac{1}{2\pi}\right)^n 8 | \int C(\Vert\mathbf{r}\Vert) e^{i b\mathbf{k}\cdot\mathbf{r}} d^n\mathbf{r} 9 | 10 | Since the covariance function :math:`C(r)` is radially symmetric, we can 11 | calculate this by the 12 | `hankel-transformation `_: 13 | 14 | .. math:: S(k) = \left(\frac{1}{2\pi}\right)^n \cdot 15 | \frac{(2\pi)^{n/2}}{(bk)^{n/2-1}} 16 | \int_0^\infty r^{n/2-1} C(r) J_{n/2-1}(bkr) r dr 17 | 18 | Where :math:`k=\left\Vert\mathbf{k}\right\Vert`. 19 | 20 | Depending on the spectrum, the spectral-density is defined by: 21 | 22 | .. math:: \tilde{S}(k) = \frac{S(k)}{\sigma^2} 23 | 24 | You can access these methods by: 25 | """ 26 | 27 | import gstools as gs 28 | 29 | model = gs.Gaussian(dim=3, var=2.0, len_scale=10) 30 | ax = model.plot("spectrum") 31 | model.plot("spectral_density", ax=ax) 32 | 33 | ############################################################################### 34 | # .. note:: 35 | # The spectral-density is given by the radius of the input phase. But it is 36 | # **not** a probability density function for the radius of the phase. 37 | # To obtain the pdf for the phase-radius, you can use the methods 38 | # :any:`CovModel.spectral_rad_pdf` 39 | # or :any:`CovModel.ln_spectral_rad_pdf` for the logarithm. 40 | # 41 | # The user can also provide a cdf (cumulative distribution function) by 42 | # defining a method called ``spectral_rad_cdf`` 43 | # and/or a ppf (percent-point function) 44 | # by ``spectral_rad_ppf``. 45 | # 46 | # The attributes :any:`CovModel.has_cdf` 47 | # and :any:`CovModel.has_ppf` will check for that. 48 | -------------------------------------------------------------------------------- /examples/02_cov_model/04_different_scales.py: -------------------------------------------------------------------------------- 1 | r""" 2 | Different scales 3 | ================ 4 | 5 | Besides the length-scale, there are many other ways of characterizing a certain 6 | scale of a covariance model. 
We provide two common scales with the covariance 7 | model. 8 | 9 | Integral scale 10 | -------------- 11 | 12 | The `integral scale `_ 13 | of a covariance model is calculated by: 14 | 15 | .. math:: I = \int_0^\infty \rho(r) dr 16 | 17 | You can access it by: 18 | """ 19 | 20 | import gstools as gs 21 | 22 | model = gs.Stable(dim=3, var=2.0, len_scale=10) 23 | print("Main integral scale:", model.integral_scale) 24 | print("All integral scales:", model.integral_scale_vec) 25 | 26 | 27 | ############################################################################### 28 | # You can also specify integral length scales like the ordinary length scale, 29 | # and len_scale/anis will be recalculated: 30 | 31 | model = gs.Stable(dim=3, var=2.0, integral_scale=[10, 4, 2]) 32 | print("Anisotropy ratios:", model.anis) 33 | print("Main length scale:", model.len_scale) 34 | print("All length scales:", model.len_scale_vec) 35 | print("Main integral scale:", model.integral_scale) 36 | print("All integral scales:", model.integral_scale_vec) 37 | 38 | 39 | ############################################################################### 40 | # Percentile scale 41 | # ---------------- 42 | # 43 | # Another scale characterizing the covariance model, is the percentile scale. 44 | # It is the distance, where the normalized 45 | # variogram reaches a certain percentage of its sill. 46 | 47 | model = gs.Stable(dim=3, var=2.0, len_scale=10) 48 | per_scale = model.percentile_scale(0.9) 49 | int_scale = model.integral_scale 50 | len_scale = model.len_scale 51 | print("90% Percentile scale:", per_scale) 52 | print("Integral scale:", int_scale) 53 | print("Length scale:", len_scale) 54 | 55 | ############################################################################### 56 | # .. note:: 57 | # 58 | # The nugget is neglected by the percentile scale. 
59 | # 60 | # 61 | # Comparison 62 | # ---------- 63 | 64 | ax = model.plot() 65 | ax.axhline(1.8, color="k", label=r"90% percentile") 66 | ax.axvline(per_scale, color="k", linestyle="--", label=r"90% percentile scale") 67 | ax.axvline(int_scale, color="k", linestyle="-.", label=r"integral scale") 68 | ax.axvline(len_scale, color="k", linestyle=":", label=r"length scale") 69 | ax.legend() 70 | -------------------------------------------------------------------------------- /examples/02_cov_model/05_additional_para.py: -------------------------------------------------------------------------------- 1 | r""" 2 | Additional Parameters 3 | ===================== 4 | 5 | Let's pimp our self-defined model ``Gau`` from the introductory example 6 | by setting the exponent as an additional parameter: 7 | 8 | .. math:: 9 | \rho(r) := \exp\left(-\left(\frac{r}{\ell}\right)^{\alpha}\right) 10 | 11 | This leads to the so called **stable** covariance model and we can define it by 12 | """ 13 | 14 | import numpy as np 15 | 16 | import gstools as gs 17 | 18 | 19 | class Stab(gs.CovModel): 20 | def default_opt_arg(self): 21 | return {"alpha": 1.5} 22 | 23 | def cor(self, h): 24 | return np.exp(-(h**self.alpha)) 25 | 26 | 27 | ############################################################################### 28 | # As you can see, we override the method :any:`CovModel.default_opt_arg` 29 | # to provide a standard value for the optional argument ``alpha``. 
30 | # We can access it in the correlation function by ``self.alpha`` 31 | # 32 | # Now we can instantiate this model by either setting alpha implicitly with 33 | # the default value or explicitly: 34 | 35 | model1 = Stab(dim=2, var=2.0, len_scale=10) 36 | model2 = Stab(dim=2, var=2.0, len_scale=10, alpha=0.5) 37 | ax = model1.plot() 38 | model2.plot(ax=ax) 39 | 40 | ############################################################################### 41 | # Apparently, the parameter alpha controls the slope of the variogram 42 | # and consequently the roughness of a generated random field. 43 | # 44 | # .. note:: 45 | # 46 | # You don't have to override the :any:`CovModel.default_opt_arg`, 47 | # but you will get a ValueError if you don't set it on creation. 48 | -------------------------------------------------------------------------------- /examples/02_cov_model/06_fitting_para_ranges.py: -------------------------------------------------------------------------------- 1 | """ 2 | Fitting variogram data 3 | ====================== 4 | 5 | The model class comes with a routine to fit the model-parameters to given 6 | variogram data. In the following we will use the self defined stable model 7 | from a previous example. 8 | """ 9 | 10 | import numpy as np 11 | 12 | import gstools as gs 13 | 14 | 15 | class Stab(gs.CovModel): 16 | def default_opt_arg(self): 17 | return {"alpha": 1.5} 18 | 19 | def cor(self, h): 20 | return np.exp(-(h**self.alpha)) 21 | 22 | 23 | # Exemplary variogram data (e.g. 
estimated from field observations) 24 | bins = [1.0, 3.0, 5.0, 7.0, 9.0, 11.0] 25 | est_vario = [0.2, 0.5, 0.6, 0.8, 0.8, 0.9] 26 | # fitting model 27 | model = Stab(dim=2) 28 | # we have to provide boundaries for the parameters 29 | model.set_arg_bounds(alpha=[0, 3]) 30 | results, pcov = model.fit_variogram(bins, est_vario, nugget=False) 31 | print("Results:", results) 32 | 33 | ############################################################################### 34 | 35 | ax = model.plot() 36 | ax.scatter(bins, est_vario, color="k", label="sample variogram") 37 | ax.legend() 38 | 39 | 40 | ############################################################################### 41 | # As you can see, we have to provide boundaries for the parameters. 42 | # As a default, the following bounds are set: 43 | # 44 | # - additional parameters: ``[-np.inf, np.inf]`` 45 | # - variance: ``[0.0, np.inf]`` 46 | # - len_scale: ``[0.0, np.inf]`` 47 | # - nugget: ``[0.0, np.inf]`` 48 | # 49 | # Also, you can deselect parameters from fitting, so their predefined values 50 | # will be kept. In our case, we fixed a ``nugget`` of ``0.0``, which was set 51 | # by default. You can deselect any standard or 52 | # optional argument of the covariance model. 53 | # The second return value ``pcov`` is the estimated covariance of ``popt`` from 54 | # the used scipy routine :any:`scipy.optimize.curve_fit`. 55 | # 56 | # You can use the following methods to manipulate the used bounds: 57 | # 58 | # .. currentmodule:: gstools.covmodel 59 | # 60 | # .. autosummary:: 61 | # CovModel.default_opt_arg_bounds 62 | # CovModel.default_arg_bounds 63 | # CovModel.set_arg_bounds 64 | # CovModel.check_arg_bounds 65 | # 66 | # You can override the :any:`CovModel.default_opt_arg_bounds` 67 | # to provide standard bounds for your additional parameters. 68 | # 69 | # To access the bounds you can use: 70 | # 71 | # .. 
autosummary:: 72 | # CovModel.var_bounds 73 | # CovModel.len_scale_bounds 74 | # CovModel.nugget_bounds 75 | # CovModel.opt_arg_bounds 76 | # CovModel.arg_bounds 77 | -------------------------------------------------------------------------------- /examples/02_cov_model/README.rst: -------------------------------------------------------------------------------- 1 | .. _tutorial_02_cov: 2 | 3 | The Covariance Model 4 | ==================== 5 | 6 | One of the core-features of GSTools is the powerful :any:`CovModel` 7 | class, which allows you to easily define arbitrary covariance models by 8 | yourself. The resulting models provide a bunch of nice features to explore the 9 | covariance models. 10 | 11 | A covariance model is used to characterize the 12 | `semi-variogram `_, 13 | denoted by :math:`\gamma`, of a spatial random field. 14 | In GSTools, we use the following form for an isotropic and stationary field: 15 | 16 | .. math:: 17 | \gamma\left(r\right)= 18 | \sigma^2\cdot\left(1-\mathrm{cor}\left(s\cdot\frac{r}{\ell}\right)\right)+n 19 | 20 | Where: 21 | 22 | - :math:`r` is the lag distance 23 | - :math:`\ell` is the main correlation length 24 | - :math:`s` is a scaling factor for unit conversion or normalization 25 | - :math:`\sigma^2` is the variance 26 | - :math:`n` is the nugget (subscale variance) 27 | - :math:`\mathrm{cor}(h)` is the normalized correlation function depending on 28 | the non-dimensional distance :math:`h=s\cdot\frac{r}{\ell}` 29 | 30 | Depending on the normalized correlation function, all covariance models in 31 | GSTools are providing the following functions: 32 | 33 | - :math:`\rho(r)=\mathrm{cor}\left(s\cdot\frac{r}{\ell}\right)` 34 | is the so called 35 | `correlation `_ 36 | function 37 | - :math:`C(r)=\sigma^2\cdot\rho(r)` is the so called 38 | `covariance `_ 39 | function, which gives the name for our GSTools class 40 | 41 | .. note:: 42 | 43 | We are not limited to isotropic models. 
GSTools supports anisotropy ratios 44 | for length scales in orthogonal transversal directions like: 45 | 46 | - :math:`x_0` (main direction) 47 | - :math:`x_1` (1. transversal direction) 48 | - :math:`x_2` (2. transversal direction) 49 | - ... 50 | 51 | These main directions can also be rotated. 52 | Just have a look at the corresponding examples. 53 | 54 | Provided Covariance Models 55 | -------------------------- 56 | 57 | .. currentmodule:: gstools.covmodel 58 | 59 | The following standard covariance models are provided by GSTools 60 | 61 | .. autosummary:: 62 | Gaussian 63 | Exponential 64 | Matern 65 | Integral 66 | Stable 67 | Rational 68 | Cubic 69 | Linear 70 | Circular 71 | Spherical 72 | HyperSpherical 73 | SuperSpherical 74 | JBessel 75 | TPLSimple 76 | 77 | As a special feature, we also provide truncated power law (TPL) covariance models 78 | 79 | .. autosummary:: 80 | TPLGaussian 81 | TPLExponential 82 | TPLStable 83 | 84 | These models provide a lower and upper length scale truncation 85 | for superpositioned models. 86 | 87 | Examples 88 | -------- 89 | -------------------------------------------------------------------------------- /examples/03_variogram/00_fit_variogram.py: -------------------------------------------------------------------------------- 1 | """ 2 | Fit Variogram 3 | ------------- 4 | """ 5 | 6 | import numpy as np 7 | 8 | import gstools as gs 9 | 10 | ############################################################################### 11 | # Generate a synthetic field with an exponential model. 12 | 13 | x = np.random.RandomState(19970221).rand(1000) * 100.0 14 | y = np.random.RandomState(20011012).rand(1000) * 100.0 15 | model = gs.Exponential(dim=2, var=2, len_scale=8) 16 | srf = gs.SRF(model, mean=0, seed=19970221) 17 | field = srf((x, y)) 18 | 19 | ############################################################################### 20 | # Estimate the variogram of the field with 40 bins. 
21 | 22 | bins = np.arange(40) 23 | bin_center, gamma = gs.vario_estimate((x, y), field, bins) 24 | 25 | ############################################################################### 26 | # Fit the variogram with a stable model (no nugget fitted). 27 | 28 | fit_model = gs.Stable(dim=2) 29 | fit_model.fit_variogram(bin_center, gamma, nugget=False) 30 | 31 | ############################################################################### 32 | # Plot the fitting result. 33 | 34 | ax = fit_model.plot(x_max=40) 35 | ax.scatter(bin_center, gamma) 36 | print(fit_model) 37 | -------------------------------------------------------------------------------- /examples/03_variogram/01_find_best_model.py: -------------------------------------------------------------------------------- 1 | """ 2 | Finding the best fitting variogram model 3 | ---------------------------------------- 4 | """ 5 | 6 | import numpy as np 7 | from matplotlib import pyplot as plt 8 | 9 | import gstools as gs 10 | 11 | ############################################################################### 12 | # Generate a synthetic field with an exponential model. 13 | 14 | x = np.random.RandomState(19970221).rand(1000) * 100.0 15 | y = np.random.RandomState(20011012).rand(1000) * 100.0 16 | model = gs.Exponential(dim=2, var=2, len_scale=8) 17 | srf = gs.SRF(model, mean=0, seed=19970221) 18 | field = srf((x, y)) 19 | 20 | ############################################################################### 21 | # Estimate the variogram of the field with 40 bins and plot the result. 22 | 23 | bins = np.arange(40) 24 | bin_center, gamma = gs.vario_estimate((x, y), field, bins) 25 | 26 | ############################################################################### 27 | # Define a set of models to test. 
28 | 29 | models = { 30 | "Gaussian": gs.Gaussian, 31 | "Exponential": gs.Exponential, 32 | "Matern": gs.Matern, 33 | "Stable": gs.Stable, 34 | "Rational": gs.Rational, 35 | "Circular": gs.Circular, 36 | "Spherical": gs.Spherical, 37 | "SuperSpherical": gs.SuperSpherical, 38 | "JBessel": gs.JBessel, 39 | } 40 | scores = {} 41 | 42 | ############################################################################### 43 | # Iterate over all models, fit their variogram and calculate the r2 score. 44 | 45 | # plot the estimated variogram 46 | plt.scatter(bin_center, gamma, color="k", label="data") 47 | ax = plt.gca() 48 | 49 | # fit all models to the estimated variogram 50 | for model in models: 51 | fit_model = models[model](dim=2) 52 | para, pcov, r2 = fit_model.fit_variogram(bin_center, gamma, return_r2=True) 53 | fit_model.plot(x_max=40, ax=ax) 54 | scores[model] = r2 55 | 56 | ############################################################################### 57 | # Create a ranking based on the score and determine the best models 58 | 59 | ranking = sorted(scores.items(), key=lambda item: item[1], reverse=True) 60 | print("RANKING by Pseudo-r2 score") 61 | for i, (model, score) in enumerate(ranking, 1): 62 | print(f"{i:>6}. {model:>15}: {score:.5}") 63 | 64 | plt.show() 65 | -------------------------------------------------------------------------------- /examples/03_variogram/02_multi_vario.py: -------------------------------------------------------------------------------- 1 | """ 2 | Multi-field variogram estimation 3 | -------------------------------- 4 | 5 | In this example, we demonstrate how to estimate a variogram from multiple 6 | fields on the same point-set that should have the same statistical properties. 
7 | """ 8 | 9 | import matplotlib.pyplot as plt 10 | import numpy as np 11 | 12 | import gstools as gs 13 | 14 | x = np.random.RandomState(19970221).rand(1000) * 100.0 15 | y = np.random.RandomState(20011012).rand(1000) * 100.0 16 | model = gs.Exponential(dim=2, var=2, len_scale=8) 17 | srf = gs.SRF(model, mean=0) 18 | 19 | ############################################################################### 20 | # Generate two synthetic fields with an exponential model. 21 | 22 | field1 = srf((x, y), seed=19970221) 23 | field2 = srf((x, y), seed=20011012) 24 | fields = [field1, field2] 25 | 26 | ############################################################################### 27 | # Now we estimate the variograms for both fields individually and then again 28 | # simultaneously with only one call. 29 | 30 | bins = np.arange(40) 31 | bin_center, gamma1 = gs.vario_estimate((x, y), field1, bins) 32 | bin_center, gamma2 = gs.vario_estimate((x, y), field2, bins) 33 | bin_center, gamma = gs.vario_estimate((x, y), fields, bins) 34 | 35 | ############################################################################### 36 | # Now we demonstrate that the mean variogram from both fields coincides 37 | # with the joined estimated one. 38 | 39 | plt.plot(bin_center, gamma1, label="field 1") 40 | plt.plot(bin_center, gamma2, label="field 2") 41 | plt.plot(bin_center, gamma, label="joined fields") 42 | plt.plot(bin_center, 0.5 * (gamma1 + gamma2), ":", label="field 1+2 mean") 43 | plt.legend() 44 | plt.show() 45 | -------------------------------------------------------------------------------- /examples/03_variogram/03_directional_2d.py: -------------------------------------------------------------------------------- 1 | """ 2 | Directional variogram estimation and fitting in 2D 3 | -------------------------------------------------- 4 | 5 | In this example, we demonstrate how to estimate a directional variogram by 6 | setting the direction angles in 2D. 
7 | 8 | Afterwards we will fit a model to this estimated variogram and show the result. 9 | """ 10 | 11 | import numpy as np 12 | from matplotlib import pyplot as plt 13 | 14 | import gstools as gs 15 | 16 | ############################################################################### 17 | # Generating synthetic field with anisotropy and a rotation of 22.5 degree. 18 | 19 | angle = np.pi / 8 20 | model = gs.Exponential(dim=2, len_scale=[10, 5], angles=angle) 21 | x = y = range(101) 22 | srf = gs.SRF(model, seed=123456) 23 | field = srf((x, y), mesh_type="structured") 24 | 25 | ############################################################################### 26 | # Now we are going to estimate a directional variogram with an angular 27 | # tolerance of 11.25 degree and a bandwidth of 8. 28 | 29 | bins = range(0, 40, 2) 30 | bin_center, dir_vario, counts = gs.vario_estimate( 31 | *((x, y), field, bins), 32 | direction=gs.rotated_main_axes(dim=2, angles=angle), 33 | angles_tol=np.pi / 16, 34 | bandwidth=8, 35 | mesh_type="structured", 36 | return_counts=True, 37 | ) 38 | 39 | ############################################################################### 40 | # Afterwards we can use the estimated variogram to fit a model to it: 41 | 42 | print("Original:") 43 | print(model) 44 | model.fit_variogram(bin_center, dir_vario) 45 | print("Fitted:") 46 | print(model) 47 | 48 | ############################################################################### 49 | # Plotting. 50 | 51 | fig, (ax1, ax2) = plt.subplots(1, 2, figsize=[10, 5]) 52 | 53 | ax1.scatter(bin_center, dir_vario[0], label="emp. vario: pi/8") 54 | ax1.scatter(bin_center, dir_vario[1], label="emp.
vario: pi*5/8") 55 | ax1.legend(loc="lower right") 56 | 57 | model.plot("vario_axis", axis=0, ax=ax1, x_max=40, label="fit on axis 0") 58 | model.plot("vario_axis", axis=1, ax=ax1, x_max=40, label="fit on axis 1") 59 | ax1.set_title("Fitting an anisotropic model") 60 | 61 | srf.plot(ax=ax2) 62 | plt.show() 63 | 64 | ############################################################################### 65 | # Without fitting a model, we see that the correlation length in the main 66 | # direction is greater than the transversal one. 67 | -------------------------------------------------------------------------------- /examples/03_variogram/04_directional_3d.py: -------------------------------------------------------------------------------- 1 | """ 2 | Directional variogram estimation and fitting in 3D 3 | -------------------------------------------------- 4 | 5 | In this example, we demonstrate how to estimate a directional variogram by 6 | setting the estimation directions in 3D. 7 | 8 | Afterwards we will fit a model to this estimated variogram and show the result. 9 | """ 10 | 11 | import matplotlib.pyplot as plt 12 | import numpy as np 13 | from mpl_toolkits.mplot3d import Axes3D 14 | 15 | import gstools as gs 16 | 17 | ############################################################################### 18 | # Generating synthetic field with anisotropy and rotation by Tait-Bryan angles. 19 | 20 | dim = 3 21 | # rotation around z, y, x 22 | angles = [np.deg2rad(90), np.deg2rad(45), np.deg2rad(22.5)] 23 | model = gs.Gaussian(dim=3, len_scale=[16, 8, 4], angles=angles) 24 | x = y = z = range(50) 25 | pos = (x, y, z) 26 | srf = gs.SRF(model, seed=1001) 27 | field = srf.structured(pos) 28 | 29 | ############################################################################### 30 | # Here we generate the axes of the rotated coordinate system 31 | # to get an impression what the rotation angles do. 
32 | 33 | # All 3 axes of the rotated coordinate-system 34 | main_axes = gs.rotated_main_axes(dim, angles) 35 | axis1, axis2, axis3 = main_axes 36 | 37 | ############################################################################### 38 | # Now we estimate the variogram along the main axes. When the main axes are 39 | # unknown, one would need to sample multiple directions and look for the one 40 | # with the longest correlation length (flattest gradient). 41 | # Then check the transversal directions and so on. 42 | 43 | bin_center, dir_vario, counts = gs.vario_estimate( 44 | pos, 45 | field, 46 | direction=main_axes, 47 | bandwidth=10, 48 | sampling_size=2000, 49 | sampling_seed=1001, 50 | mesh_type="structured", 51 | return_counts=True, 52 | ) 53 | 54 | ############################################################################### 55 | # Afterwards we can use the estimated variogram to fit a model to it. 56 | # Note, that the rotation angles need to be set beforehand. 57 | 58 | print("Original:") 59 | print(model) 60 | model.fit_variogram(bin_center, dir_vario) 61 | print("Fitted:") 62 | print(model) 63 | 64 | ############################################################################### 65 | # Plotting main axes and the fitted directional variogram. 66 | 67 | fig = plt.figure(figsize=[10, 5]) 68 | ax1 = fig.add_subplot(121, projection=Axes3D.name) 69 | ax2 = fig.add_subplot(122) 70 | 71 | ax1.plot([0, axis1[0]], [0, axis1[1]], [0, axis1[2]], label="0.") 72 | ax1.plot([0, axis2[0]], [0, axis2[1]], [0, axis2[2]], label="1.") 73 | ax1.plot([0, axis3[0]], [0, axis3[1]], [0, axis3[2]], label="2.") 74 | ax1.set_xlim(-1, 1) 75 | ax1.set_ylim(-1, 1) 76 | ax1.set_zlim(-1, 1) 77 | ax1.set_xlabel("X") 78 | ax1.set_ylabel("Y") 79 | ax1.set_zlabel("Z") 80 | ax1.set_title("Tait-Bryan main axis") 81 | ax1.legend(loc="lower left") 82 | 83 | x_max = max(bin_center) 84 | ax2.scatter(bin_center, dir_vario[0], label="0. axis") 85 | ax2.scatter(bin_center, dir_vario[1], label="1. 
axis") 86 | ax2.scatter(bin_center, dir_vario[2], label="2. axis") 87 | model.plot("vario_axis", axis=0, ax=ax2, x_max=x_max, label="fit on axis 0") 88 | model.plot("vario_axis", axis=1, ax=ax2, x_max=x_max, label="fit on axis 1") 89 | model.plot("vario_axis", axis=2, ax=ax2, x_max=x_max, label="fit on axis 2") 90 | ax2.set_title("Fitting an anisotropic model") 91 | ax2.legend() 92 | 93 | plt.show() 94 | 95 | ############################################################################### 96 | # Also, let's have a look at the field. 97 | 98 | srf.plot() 99 | -------------------------------------------------------------------------------- /examples/03_variogram/05_auto_fit_variogram.py: -------------------------------------------------------------------------------- 1 | """ 2 | Fit Variogram with automatic binning 3 | ------------------------------------ 4 | """ 5 | 6 | import numpy as np 7 | 8 | import gstools as gs 9 | 10 | ############################################################################### 11 | # Generate a synthetic field with an exponential model. 12 | 13 | x = np.random.RandomState(19970221).rand(1000) * 100.0 14 | y = np.random.RandomState(20011012).rand(1000) * 100.0 15 | model = gs.Exponential(dim=2, var=2, len_scale=8) 16 | srf = gs.SRF(model, mean=0, seed=19970221) 17 | field = srf((x, y)) 18 | print(field.var()) 19 | ############################################################################### 20 | # Estimate the variogram of the field with automatic binning. 21 | 22 | bin_center, gamma = gs.vario_estimate((x, y), field) 23 | print("estimated bin number:", len(bin_center)) 24 | print("maximal bin distance:", max(bin_center)) 25 | 26 | ############################################################################### 27 | # Fit the variogram with a stable model (no nugget fitted). 
28 | 29 | fit_model = gs.Stable(dim=2) 30 | fit_model.fit_variogram(bin_center, gamma, nugget=False) 31 | print(fit_model) 32 | 33 | ############################################################################### 34 | # Plot the fitting result. 35 | 36 | ax = fit_model.plot(x_max=max(bin_center)) 37 | ax.scatter(bin_center, gamma) 38 | -------------------------------------------------------------------------------- /examples/03_variogram/06_auto_bin_latlon.py: -------------------------------------------------------------------------------- 1 | """ 2 | Automatic binning with lat-lon data 3 | ----------------------------------- 4 | 5 | In this example we demonstrate automatic binning for a tiny data set 6 | containing temperature records from Germany 7 | (See the detailed DWD example for more information on the data). 8 | 9 | We use a data set from 20 meteo-stations choosen randomly. 10 | """ 11 | 12 | import numpy as np 13 | 14 | import gstools as gs 15 | 16 | # lat, lon, temperature 17 | data = np.array( 18 | [ 19 | [52.9336, 8.237, 15.7], 20 | [48.6159, 13.0506, 13.9], 21 | [52.4853, 7.9126, 15.1], 22 | [50.7446, 9.345, 17.0], 23 | [52.9437, 12.8518, 21.9], 24 | [53.8633, 8.1275, 11.9], 25 | [47.8342, 10.8667, 11.4], 26 | [51.0881, 12.9326, 17.2], 27 | [48.406, 11.3117, 12.9], 28 | [49.7273, 8.1164, 17.2], 29 | [49.4691, 11.8546, 13.4], 30 | [48.0197, 12.2925, 13.9], 31 | [50.4237, 7.4202, 18.1], 32 | [53.0316, 13.9908, 21.3], 33 | [53.8412, 13.6846, 21.3], 34 | [54.6792, 13.4343, 17.4], 35 | [49.9694, 9.9114, 18.6], 36 | [51.3745, 11.292, 20.2], 37 | [47.8774, 11.3643, 12.7], 38 | [50.5908, 12.7139, 15.8], 39 | ] 40 | ) 41 | pos = data.T[:2] # lat, lon 42 | field = data.T[2] # temperature 43 | 44 | ############################################################################### 45 | # Since the overall range of these meteo-stations is too low, we can use the 46 | # data-variance as additional information during the fit of the variogram. 
47 | 48 | emp_v = gs.vario_estimate(pos, field, latlon=True, geo_scale=gs.KM_SCALE) 49 | sph = gs.Spherical(latlon=True, geo_scale=gs.KM_SCALE) 50 | sph.fit_variogram(*emp_v, sill=np.var(field)) 51 | ax = sph.plot("vario_yadrenko", x_max=2 * np.max(emp_v[0])) 52 | ax.scatter(*emp_v, label="Empirical variogram") 53 | ax.legend() 54 | print(sph) 55 | 56 | ############################################################################### 57 | # As we can see, the variogram fitting was successful and providing the data 58 | # variance helped finding the right length-scale. 59 | # 60 | # Now, we'll use this covariance model to interpolate the given data with 61 | # ordinary kriging. 62 | 63 | # enclosing box for data points 64 | grid_lat = np.linspace(np.min(pos[0]), np.max(pos[0])) 65 | grid_lon = np.linspace(np.min(pos[1]), np.max(pos[1])) 66 | # ordinary kriging 67 | krige = gs.krige.Ordinary(sph, pos, field) 68 | krige((grid_lat, grid_lon), mesh_type="structured") 69 | ax = krige.plot() 70 | # plotting lat on y-axis and lon on x-axis 71 | ax.scatter(pos[1], pos[0], 50, c=field, edgecolors="k", label="input") 72 | ax.legend() 73 | 74 | ############################################################################### 75 | # Looks good, doesn't it? 76 | # 77 | # This workflow is also implemented in the :any:`Krige` class, by setting 78 | # ``fit_variogram=True``. Then the whole procedure shortens: 79 | 80 | krige = gs.krige.Ordinary(sph, pos, field, fit_variogram=True) 81 | krige.structured((grid_lat, grid_lon)) 82 | 83 | # plot the result 84 | krige.plot() 85 | # show the fitting results 86 | print(krige.model) 87 | 88 | ############################################################################### 89 | # This example shows, that setting up variogram estimation and kriging routines 90 | # is straight forward with GSTools! 
91 | -------------------------------------------------------------------------------- /examples/03_variogram/README.rst: -------------------------------------------------------------------------------- 1 | Variogram Estimation 2 | ==================== 3 | 4 | Estimating the spatial correlations is an important part of geostatistics. 5 | These spatial correlations can be expressed by the variogram, which can be 6 | estimated with the subpackage :any:`gstools.variogram`. The variograms can be 7 | estimated on structured and unstructured grids. 8 | 9 | The same `(semi-)variogram `_ as 10 | :ref:`tutorial_02_cov` is being used 11 | by this subpackage. 12 | 13 | Examples 14 | -------- 15 | -------------------------------------------------------------------------------- /examples/04_vector_field/00_2d_vector_field.py: -------------------------------------------------------------------------------- 1 | """ 2 | Generating a Random 2D Vector Field 3 | ----------------------------------- 4 | 5 | As a first example we are going to generate a 2d vector field with a Gaussian 6 | covariance model on a structured grid: 7 | """ 8 | 9 | import numpy as np 10 | 11 | import gstools as gs 12 | 13 | # the grid 14 | x = np.arange(100) 15 | y = np.arange(100) 16 | 17 | # a smooth Gaussian covariance model 18 | model = gs.Gaussian(dim=2, var=1, len_scale=10) 19 | srf = gs.SRF(model, generator="VectorField", seed=19841203) 20 | srf((x, y), mesh_type="structured") 21 | srf.plot() 22 | 23 | ############################################################################### 24 | # Let us have a look at the influence of the covariance model. 
Choosing the 25 | # exponential model and keeping all other parameters the same 26 | 27 | # a rougher exponential covariance model 28 | model2 = gs.Exponential(dim=2, var=1, len_scale=10) 29 | srf.model = model2 30 | srf((x, y), mesh_type="structured", seed=19841203) 31 | srf.plot() 32 | 33 | ############################################################################### 34 | # and we see, that the wiggles are much "rougher" than the smooth Gaussian ones. 35 | 36 | 37 | ############################################################################### 38 | # Applications 39 | # ~~~~~~~~~~~~ 40 | # 41 | # One great advantage of the Kraichnan method is, that after some initializations, 42 | # one can compute the velocity field at arbitrary points, online, with hardly any 43 | # overhead. 44 | # This means, that for a Lagrangian transport simulation for example, the velocity 45 | # can be evaluated at each particle position very efficiently and without any 46 | # interpolation. These field interpolations are a common problem for Lagrangian 47 | # methods. 48 | -------------------------------------------------------------------------------- /examples/04_vector_field/01_3d_vector_field.py: -------------------------------------------------------------------------------- 1 | """ 2 | Generating a Random 3D Vector Field 3 | ----------------------------------- 4 | 5 | In this example we are going to generate a random 3D vector field with a 6 | Gaussian covariance model. The mesh on which we generate the field will be 7 | externally defined and it will be generated by PyVista. 
8 | """ 9 | 10 | # sphinx_gallery_thumbnail_path = 'pics/GS_3d_vector_field.png' 11 | import pyvista as pv 12 | 13 | import gstools as gs 14 | 15 | # mainly for setting a white background 16 | pv.set_plot_theme("document") 17 | 18 | ############################################################################### 19 | # create a uniform grid with PyVista 20 | dims, spacing, origin = (40, 30, 10), (1, 1, 1), (-10, 0, 0) 21 | mesh = pv.ImageData(dimensions=dims, spacing=spacing, origin=origin) 22 | 23 | ############################################################################### 24 | # create an incompressible random 3d velocity field on the given mesh 25 | # with added mean velocity in x-direction 26 | model = gs.Gaussian(dim=3, var=3, len_scale=1.5) 27 | srf = gs.SRF(model, mean=(0.5, 0, 0), generator="VectorField", seed=198412031) 28 | srf.mesh(mesh, points="points", name="Velocity") 29 | 30 | ############################################################################### 31 | # Now, we can do the plotting 32 | streamlines = mesh.streamlines( 33 | "Velocity", 34 | terminal_speed=0.0, 35 | n_points=800, 36 | source_radius=2.5, 37 | ) 38 | 39 | # set a fancy camera position 40 | cpos = [(25, 23, 17), (0, 10, 0), (0, 0, 1)] 41 | 42 | p = pv.Plotter() 43 | # adding an outline might help navigating in 3D space 44 | # p.add_mesh(mesh.outline(), color="k") 45 | p.add_mesh( 46 | streamlines.tube(radius=0.005), 47 | show_scalar_bar=False, 48 | diffuse=0.5, 49 | ambient=0.5, 50 | ) 51 | 52 | ############################################################################### 53 | # .. note:: 54 | # PyVista is not working on readthedocs, but you can try it out yourself by 55 | # uncommenting the following line of code. 56 | 57 | # p.show(cpos=cpos) 58 | 59 | ############################################################################### 60 | # The result should look like this: 61 | # 62 | # .. 
image:: ../../pics/GS_3d_vector_field.png 63 | # :width: 400px 64 | # :align: center 65 | -------------------------------------------------------------------------------- /examples/04_vector_field/README.rst: -------------------------------------------------------------------------------- 1 | Random Vector Field Generation 2 | ============================== 3 | 4 | In 1970, Kraichnan was the first to suggest a randomization method. 5 | For studying the diffusion of single particles in a random incompressible 6 | velocity field, he came up with a randomization method which includes a 7 | projector which ensures the incompressibility of the vector field. 8 | 9 | 10 | Without loss of generality we assume that the mean velocity :math:`\bar{U}` is oriented 11 | towards the direction of the first basis vector :math:`\mathbf{e}_1`. Our goal is now to 12 | generate random fluctuations with a given covariance model around this mean velocity. 13 | And at the same time, making sure that the velocity field remains incompressible or 14 | in other words, ensure :math:`\nabla \cdot \mathbf U = 0`. 15 | This can be done by using the randomization method we already know, but adding a 16 | projector to every mode being summed: 17 | 18 | 19 | .. math:: 20 | 21 | \mathbf{U}(\mathbf{x}) = \bar{U} \mathbf{e}_1 - \sqrt{\frac{\sigma^{2}}{N}} 22 | \sum_{i=1}^{N} \mathbf{p}(\mathbf{k}_i) \left[ Z_{1,i} 23 | \cos\left( \langle \mathbf{k}_{i}, \mathbf{x} \rangle \right) 24 | + \sin\left( \langle \mathbf{k}_{i}, \mathbf{x} \rangle \right) \right] 25 | 26 | with the projector 27 | 28 | .. math:: 29 | 30 | \mathbf{p}(\mathbf{k}_i) = \mathbf{e}_1 - \frac{\mathbf{k}_i k_1}{k^2} \; . 31 | 32 | By calculating :math:`\nabla \cdot \mathbf U = 0`, it can be verified, that 33 | the resulting field is indeed incompressible. 
34 | 35 | 36 | Examples 37 | -------- 38 | -------------------------------------------------------------------------------- /examples/05_kriging/00_simple_kriging.py: -------------------------------------------------------------------------------- 1 | r""" 2 | Simple Kriging 3 | -------------- 4 | 5 | Simple kriging assumes a known mean of the data. 6 | For simplicity we assume a mean of 0, 7 | which can be achieved by subtracting the mean from the observed values and 8 | subsequently adding it to the resulting data. 9 | 10 | The resulting equation system for :math:`W` is given by: 11 | 12 | .. math:: 13 | 14 | W = \begin{pmatrix}c(x_1,x_1) & \cdots & c(x_1,x_n) \\ 15 | \vdots & \ddots & \vdots \\ 16 | c(x_n,x_1) & \cdots & c(x_n,x_n) 17 | \end{pmatrix}^{-1} 18 | \begin{pmatrix}c(x_1,x_0) \\ \vdots \\ c(x_n,x_0) \end{pmatrix} 19 | 20 | Thereby :math:`c(x_i,x_j)` is the covariance of the given observations. 21 | 22 | 23 | Example 24 | ^^^^^^^ 25 | 26 | Here we use simple kriging in 1D (for plotting reasons) with 5 given observations/conditions. 27 | The mean of the field has to be given beforehand. 
28 | 29 | """ 30 | 31 | import numpy as np 32 | 33 | from gstools import Gaussian, krige 34 | 35 | # condtions 36 | cond_pos = [0.3, 1.9, 1.1, 3.3, 4.7] 37 | cond_val = [0.47, 0.56, 0.74, 1.47, 1.74] 38 | # resulting grid 39 | gridx = np.linspace(0.0, 15.0, 151) 40 | # spatial random field class 41 | model = Gaussian(dim=1, var=0.5, len_scale=2) 42 | 43 | ############################################################################### 44 | krig = krige.Simple(model, mean=1, cond_pos=cond_pos, cond_val=cond_val) 45 | krig(gridx) 46 | 47 | ############################################################################### 48 | ax = krig.plot() 49 | ax.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions") 50 | ax.legend() 51 | -------------------------------------------------------------------------------- /examples/05_kriging/01_ordinary_kriging.py: -------------------------------------------------------------------------------- 1 | r""" 2 | Ordinary Kriging 3 | ---------------- 4 | 5 | Ordinary kriging will estimate an appropriate mean of the field, 6 | based on the given observations/conditions and the covariance model used. 7 | 8 | The resulting system of equations for :math:`W` is given by: 9 | 10 | .. math:: 11 | 12 | \begin{pmatrix}W\\\mu\end{pmatrix} = \begin{pmatrix} 13 | c(x_1,x_1) & \cdots & c(x_1,x_n) &1 \\ 14 | \vdots & \ddots & \vdots & \vdots \\ 15 | c(x_n,x_1) & \cdots & c(x_n,x_n) & 1 \\ 16 | 1 &\cdots& 1 & 0 17 | \end{pmatrix}^{-1} 18 | \begin{pmatrix}c(x_1,x_0) \\ \vdots \\ c(x_n,x_0) \\ 1\end{pmatrix} 19 | 20 | Thereby :math:`c(x_i,x_j)` is the covariance of the given observations 21 | and :math:`\mu` is a Lagrange multiplier to minimize the kriging error and estimate the mean. 22 | 23 | 24 | Example 25 | ^^^^^^^ 26 | 27 | Here we use ordinary kriging in 1D (for plotting reasons) with 5 given observations/conditions. 28 | The estimated mean can be accessed by ``krig.mean``. 
29 | """ 30 | 31 | import numpy as np 32 | 33 | from gstools import Gaussian, krige 34 | 35 | # condtions 36 | cond_pos = [0.3, 1.9, 1.1, 3.3, 4.7] 37 | cond_val = [0.47, 0.56, 0.74, 1.47, 1.74] 38 | # resulting grid 39 | gridx = np.linspace(0.0, 15.0, 151) 40 | # spatial random field class 41 | model = Gaussian(dim=1, var=0.5, len_scale=2) 42 | 43 | ############################################################################### 44 | krig = krige.Ordinary(model, cond_pos=cond_pos, cond_val=cond_val) 45 | krig(gridx) 46 | 47 | ############################################################################### 48 | ax = krig.plot() 49 | ax.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions") 50 | ax.legend() 51 | -------------------------------------------------------------------------------- /examples/05_kriging/02_pykrige_interface.py: -------------------------------------------------------------------------------- 1 | """ 2 | Interface to PyKrige 3 | -------------------- 4 | 5 | To use fancier methods like 6 | `regression kriging `__, 7 | we provide an interface to 8 | `PyKrige `__ (>v1.5), which means 9 | you can pass a GSTools covariance model to the kriging routines of PyKrige. 
10 | 11 | To demonstrate the general workflow, we compare ordinary kriging of PyKrige 12 | with the corresponding GSTools routine in 2D: 13 | """ 14 | 15 | import numpy as np 16 | from matplotlib import pyplot as plt 17 | from pykrige.ok import OrdinaryKriging 18 | 19 | import gstools as gs 20 | 21 | # conditioning data 22 | cond_x = [0.3, 1.9, 1.1, 3.3, 4.7] 23 | cond_y = [1.2, 0.6, 3.2, 4.4, 3.8] 24 | cond_val = [0.47, 0.56, 0.74, 1.47, 1.74] 25 | 26 | # grid definition for output field 27 | gridx = np.arange(0.0, 5.5, 0.1) 28 | gridy = np.arange(0.0, 6.5, 0.1) 29 | 30 | ############################################################################### 31 | # A GSTools based :any:`Gaussian` covariance model: 32 | 33 | model = gs.Gaussian( 34 | dim=2, len_scale=1, anis=0.2, angles=-0.5, var=0.5, nugget=0.1 35 | ) 36 | 37 | ############################################################################### 38 | # Ordinary Kriging with PyKrige 39 | # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 40 | # 41 | # One can pass the defined GSTools model as 42 | # variogram model, which will `not` be fitted to the given data. 43 | # By providing the GSTools model, rotation and anisotropy are also 44 | # automatically defined: 45 | 46 | OK1 = OrdinaryKriging(cond_x, cond_y, cond_val, variogram_model=model) 47 | z1, ss1 = OK1.execute("grid", gridx, gridy) 48 | plt.imshow(z1, origin="lower") 49 | plt.show() 50 | 51 | ############################################################################### 52 | # Ordinary Kriging with GSTools 53 | # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 54 | # 55 | # The :any:`Ordinary` kriging class is provided by GSTools as a shortcut to 56 | # define ordinary kriging with the general :any:`Krige` class. 57 | # 58 | # PyKrige's routines are using exact kriging by default (when given a nugget). 59 | # To reproduce this behavior in GSTools, we have to set ``exact=True``. 
60 | 61 | OK2 = gs.krige.Ordinary(model, [cond_x, cond_y], cond_val, exact=True) 62 | OK2.structured([gridx, gridy]) 63 | ax = OK2.plot() 64 | ax.set_aspect("equal") 65 | -------------------------------------------------------------------------------- /examples/05_kriging/03_compare_kriging.py: -------------------------------------------------------------------------------- 1 | """ 2 | Compare Kriging 3 | --------------- 4 | """ 5 | 6 | import matplotlib.pyplot as plt 7 | import numpy as np 8 | 9 | from gstools import Gaussian, krige 10 | 11 | # condtions 12 | cond_pos = [0.3, 1.9, 1.1, 3.3, 4.7] 13 | cond_val = [0.47, 0.56, 0.74, 1.47, 1.74] 14 | # resulting grid 15 | gridx = np.linspace(0.0, 15.0, 151) 16 | 17 | ############################################################################### 18 | # A gaussian variogram model. 19 | 20 | model = Gaussian(dim=1, var=0.5, len_scale=2) 21 | 22 | ############################################################################### 23 | # Two kriged fields. One with simple and one with ordinary kriging. 
24 | 25 | kr1 = krige.Simple(model=model, mean=1, cond_pos=cond_pos, cond_val=cond_val) 26 | kr2 = krige.Ordinary(model=model, cond_pos=cond_pos, cond_val=cond_val) 27 | kr1(gridx) 28 | kr2(gridx) 29 | 30 | ############################################################################### 31 | 32 | plt.plot(gridx, kr1.field, label="simple kriged field") 33 | plt.plot(gridx, kr2.field, label="ordinary kriged field") 34 | plt.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions") 35 | plt.legend() 36 | plt.show() 37 | -------------------------------------------------------------------------------- /examples/05_kriging/04_extdrift_kriging.py: -------------------------------------------------------------------------------- 1 | """ 2 | External Drift Kriging 3 | ---------------------- 4 | """ 5 | 6 | import numpy as np 7 | 8 | from gstools import SRF, Gaussian, krige 9 | 10 | # synthetic condtions with a drift 11 | drift_model = Gaussian(dim=1, len_scale=4) 12 | drift = SRF(drift_model, seed=1010) 13 | cond_pos = [0.3, 1.9, 1.1, 3.3, 4.7] 14 | ext_drift = drift(cond_pos) 15 | cond_val = ext_drift * 2 + 1 16 | # resulting grid 17 | gridx = np.linspace(0.0, 15.0, 151) 18 | grid_drift = drift(gridx) 19 | # kriging 20 | model = Gaussian(dim=1, var=2, len_scale=4) 21 | krig = krige.ExtDrift(model, cond_pos, cond_val, ext_drift) 22 | krig(gridx, ext_drift=grid_drift) 23 | ax = krig.plot() 24 | ax.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions") 25 | ax.plot(gridx, grid_drift, label="drift") 26 | ax.legend() 27 | -------------------------------------------------------------------------------- /examples/05_kriging/05_universal_kriging.py: -------------------------------------------------------------------------------- 1 | """ 2 | Universal Kriging 3 | ----------------- 4 | 5 | You can give a polynomial order or a list of self defined 6 | functions representing the internal drift of the given values. 
7 | This drift will be fitted internally during the kriging interpolation. 8 | 9 | In the following we are creating artificial data, where a linear drift 10 | was added. The resulting samples are then used as input for Universal kriging. 11 | 12 | The "linear" drift is then estimated during the interpolation. 13 | To access only the estimated mean/drift, we provide a switch `only_mean` 14 | in the call routine. 15 | """ 16 | 17 | import numpy as np 18 | 19 | from gstools import SRF, Gaussian, krige 20 | 21 | # synthetic condtions with a drift 22 | drift_model = Gaussian(dim=1, var=0.1, len_scale=2) 23 | drift = SRF(drift_model, seed=101) 24 | cond_pos = np.linspace(0.1, 8, 10) 25 | cond_val = drift(cond_pos) + cond_pos * 0.1 + 1 26 | # resulting grid 27 | gridx = np.linspace(0.0, 15.0, 151) 28 | drift_field = drift(gridx) + gridx * 0.1 + 1 29 | # kriging 30 | model = Gaussian(dim=1, var=0.1, len_scale=2) 31 | krig = krige.Universal(model, cond_pos, cond_val, "linear") 32 | krig(gridx) 33 | ax = krig.plot() 34 | ax.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions") 35 | ax.plot(gridx, gridx * 0.1 + 1, ":", label="linear drift") 36 | ax.plot(gridx, drift_field, "--", label="original field") 37 | 38 | mean = krig(gridx, only_mean=True) 39 | ax.plot(gridx, mean, label="estimated drift") 40 | 41 | ax.legend() 42 | -------------------------------------------------------------------------------- /examples/05_kriging/06_detrended_kriging.py: -------------------------------------------------------------------------------- 1 | """ 2 | Detrended Kriging 3 | ----------------- 4 | """ 5 | 6 | import numpy as np 7 | 8 | from gstools import SRF, Gaussian, krige 9 | 10 | 11 | def trend(x): 12 | """Example for a simple linear trend.""" 13 | return x * 0.1 + 1 14 | 15 | 16 | # synthetic condtions with trend/drift 17 | drift_model = Gaussian(dim=1, var=0.1, len_scale=2) 18 | drift = SRF(drift_model, seed=101) 19 | cond_pos = np.linspace(0.1, 8, 10) 20 | cond_val = 
drift(cond_pos) + trend(cond_pos) 21 | # resulting grid 22 | gridx = np.linspace(0.0, 15.0, 151) 23 | drift_field = drift(gridx) + trend(gridx) 24 | # kriging 25 | model = Gaussian(dim=1, var=0.1, len_scale=2) 26 | krig_trend = krige.Detrended(model, cond_pos, cond_val, trend) 27 | krig_trend(gridx) 28 | ax = krig_trend.plot() 29 | ax.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions") 30 | ax.plot(gridx, trend(gridx), ":", label="linear trend") 31 | ax.plot(gridx, drift_field, "--", label="original field") 32 | ax.legend() 33 | -------------------------------------------------------------------------------- /examples/05_kriging/07_detrended_ordinary_kriging.py: -------------------------------------------------------------------------------- 1 | """ 2 | Detrended Ordinary Kriging 3 | -------------------------- 4 | """ 5 | 6 | import numpy as np 7 | 8 | from gstools import SRF, Gaussian, krige 9 | 10 | 11 | def trend(x): 12 | """Example for a simple linear trend.""" 13 | return x * 0.1 + 1 14 | 15 | 16 | # synthetic condtions with trend/drift 17 | drift_model = Gaussian(dim=1, var=0.1, len_scale=2) 18 | drift = SRF(drift_model, seed=101) 19 | cond_pos = np.linspace(0.1, 8, 10) 20 | cond_val = drift(cond_pos) + trend(cond_pos) 21 | # resulting grid 22 | gridx = np.linspace(0.0, 15.0, 151) 23 | drift_field = drift(gridx) + trend(gridx) 24 | # kriging 25 | model = Gaussian(dim=1, var=0.1, len_scale=2) 26 | krig_trend = krige.Ordinary(model, cond_pos, cond_val, trend=trend) 27 | krig_trend(gridx) 28 | ax = krig_trend.plot() 29 | ax.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions") 30 | ax.plot(gridx, trend(gridx), ":", label="linear trend") 31 | ax.plot(gridx, drift_field, "--", label="original field") 32 | ax.legend() 33 | -------------------------------------------------------------------------------- /examples/05_kriging/08_measurement_errors.py: -------------------------------------------------------------------------------- 1 | 
r""" 2 | Incorporating measurement errors 3 | -------------------------------- 4 | 5 | To incorporate the nugget effect and/or given measurement errors, 6 | one can set `exact` to `False` and provide either individual measurement errors 7 | for each point or set the nugget as a constant measurement error everywhere. 8 | 9 | In the following we will show the influence of the nugget and 10 | measurement errors. 11 | """ 12 | 13 | import numpy as np 14 | 15 | import gstools as gs 16 | 17 | # condtions 18 | cond_pos = [0.3, 1.1, 1.9, 3.3, 4.7] 19 | cond_val = [0.47, 0.74, 0.56, 1.47, 1.74] 20 | cond_err = [0.01, 0.0, 0.1, 0.05, 0] 21 | # resulting grid 22 | gridx = np.linspace(0.0, 15.0, 151) 23 | # spatial random field class 24 | model = gs.Gaussian(dim=1, var=0.9, len_scale=1, nugget=0.1) 25 | 26 | ############################################################################### 27 | # Here we will use Simple kriging (`unbiased=False`) to interpolate the given 28 | # conditions. 29 | 30 | krig = gs.Krige( 31 | model=model, 32 | cond_pos=cond_pos, 33 | cond_val=cond_val, 34 | mean=1, 35 | unbiased=False, 36 | exact=False, 37 | cond_err=cond_err, 38 | ) 39 | krig(gridx) 40 | 41 | ############################################################################### 42 | # Let's plot the data. You can see, that the estimated values differ more from 43 | # the input, when the given measurement errors get bigger. 44 | # In addition we plot the standard deviation. 
45 | 46 | ax = krig.plot() 47 | ax.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions") 48 | ax.fill_between( 49 | gridx, 50 | # plus/minus standard deviation (70 percent confidence interval) 51 | krig.field - np.sqrt(krig.krige_var), 52 | krig.field + np.sqrt(krig.krige_var), 53 | alpha=0.3, 54 | label="Standard deviation", 55 | ) 56 | ax.legend() 57 | -------------------------------------------------------------------------------- /examples/05_kriging/09_pseudo_inverse.py: -------------------------------------------------------------------------------- 1 | r""" 2 | Redundant data and pseudo-inverse 3 | --------------------------------- 4 | 5 | It can happen, that the kriging system gets numerically unstable. 6 | One reason could be, that the input data contains redundant conditioning points 7 | that hold different values. 8 | 9 | To smoothly deal with such situations, you can use the pseudo 10 | inverse for the kriging matrix, which is enabled by default. 11 | 12 | This will result in the average value for the redundant data. 13 | 14 | Example 15 | ^^^^^^^ 16 | 17 | In the following we have two different values at the same location. 18 | The resulting kriging field will hold the average at this point. 
19 | """ 20 | 21 | import numpy as np 22 | 23 | from gstools import Gaussian, krige 24 | 25 | # condtions 26 | cond_pos = [0.3, 1.9, 1.1, 3.3, 1.1] 27 | cond_val = [0.47, 0.56, 0.74, 1.47, 1.14] 28 | # resulting grid 29 | gridx = np.linspace(0.0, 8.0, 81) 30 | # spatial random field class 31 | model = Gaussian(dim=1, var=0.5, len_scale=1) 32 | 33 | ############################################################################### 34 | krig = krige.Ordinary(model, cond_pos=cond_pos, cond_val=cond_val) 35 | krig(gridx) 36 | 37 | ############################################################################### 38 | ax = krig.plot() 39 | ax.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions") 40 | ax.legend() 41 | -------------------------------------------------------------------------------- /examples/05_kriging/README.rst: -------------------------------------------------------------------------------- 1 | .. _tutorial_05_kriging: 2 | 3 | Kriging 4 | ======= 5 | 6 | The subpackage :py:mod:`gstools.krige` provides routines for Gaussian process regression, 7 | also known as kriging. 8 | Kriging is a method of data interpolation based on predefined covariance models. 9 | 10 | The aim of kriging is to derive the value of a field at some point :math:`x_0`, 11 | when there are fixed observed values :math:`z(x_1)\ldots z(x_n)` at given points :math:`x_i`. 12 | 13 | The resluting value :math:`z_0` at :math:`x_0` is calculated as a weighted mean: 14 | 15 | .. math:: 16 | 17 | z_0 = \sum_{i=1}^n w_i \cdot z_i 18 | 19 | The weights :math:`W = (w_1,\ldots,w_n)` depent on the given covariance model and the location of the target point. 20 | 21 | The different kriging approaches provide different ways of calculating :math:`W`. 22 | 23 | The :any:`Krige` class provides everything in one place and you can switch on/off 24 | the features you want: 25 | 26 | * `unbiased`: the weights have to sum up to `1`. 
If true, this results in 27 | :any:`Ordinary` kriging, where the mean is estimated, otherwise it will result in 28 | :any:`Simple` kriging, where the mean has to be given. 29 | * `drift_functions`: you can give a polynomial order or a list of self defined 30 | functions representing the internal drift of the given values. This drift will 31 | be fitted internally during the kriging interpolation. This results in :any:`Universal` kriging. 32 | * `ext_drift`: You can also give an external drift per point to the routine. 33 | In contrast to the internal drift, that is evaluated at the desired points with 34 | the given functions, the external drift has to be given for each point from an "external" 35 | source. This results in :any:`ExtDrift` kriging. 36 | * `trend`, `mean`, `normalizer`: These are used to pre- and post-process data. 37 | If you already have fitted a trend model that is provided as a callable function, 38 | you can give it to the kriging routine. Normalizers are power-transformations 39 | to gain normality. 40 | `mean` behaves similar to `trend` but is applied at another position: 41 | 42 | 1. conditioning data is de-trended (subtracting trend) 43 | 2. detrended conditioning data is then normalized (in order to follow a normal distribution) 44 | 3. normalized conditioning data is set to zero mean (subtracting mean) 45 | 46 | Consequently, when there is no normalizer given, trend and mean are the same thing 47 | and only one should be used. 48 | :any:`Detrended` kriging is a shortcut to provide only a trend and simple kriging 49 | with normal data. 50 | * `exact` and `cond_err`: To incorporate the nugget effect and/or measurement errors, 51 | one can set `exact` to `False` and provide either individual measurement errors 52 | for each point or set the nugget as a constant measurement error everywhere. 53 | * `pseudo_inv`: Sometimes the inversion of the kriging matrix can be numerically unstable. 
54 | This occurs for examples in cases of redundant input values. In this case we provide a switch to 55 | use the pseudo-inverse of the matrix. Then redundant conditional values will automatically 56 | be averaged. 57 | 58 | .. note:: 59 | 60 | All mentioned features can be combined within the :any:`Krige` class. 61 | All other kriging classes are just shortcuts to this class with a limited list 62 | of input parameters. 63 | 64 | The routines for kriging are almost identical to the routines for spatial random fields, 65 | with regard to their handling. 66 | First you define a covariance model, as described in :ref:`tutorial_02_cov`, 67 | then you initialize the kriging class with this model: 68 | 69 | .. code-block:: python 70 | 71 | import gstools as gs 72 | # condtions 73 | cond_pos = [...] 74 | cond_val = [...] 75 | model = gs.Gaussian(dim=1, var=0.5, len_scale=2) 76 | krig = gs.krige.Simple(model, cond_pos=cond_pos, cond_val=cond_val, mean=1) 77 | 78 | The resulting field instance ``krig`` has the same methods as the 79 | :any:`SRF` class. 80 | You can call it to evaluate the kriged field at different points, 81 | you can plot the latest field or you can export the field and so on. 82 | 83 | Provided Kriging Methods 84 | ------------------------ 85 | 86 | .. currentmodule:: gstools.krige 87 | 88 | The following kriging methods are provided within the 89 | submodule :any:`gstools.krige`. 90 | 91 | .. 
autosummary:: 92 | Krige 93 | Simple 94 | Ordinary 95 | Universal 96 | ExtDrift 97 | Detrended 98 | 99 | Examples 100 | -------- 101 | -------------------------------------------------------------------------------- /examples/06_conditioned_fields/00_condition_ensemble.py: -------------------------------------------------------------------------------- 1 | """ 2 | Conditioning with Ordinary Kriging 3 | ---------------------------------- 4 | 5 | Here we use ordinary kriging in 1D (for plotting reasons) 6 | with 5 given observations/conditions, 7 | to generate an ensemble of conditioned random fields. 8 | """ 9 | 10 | import matplotlib.pyplot as plt 11 | import numpy as np 12 | 13 | import gstools as gs 14 | 15 | # condtions 16 | cond_pos = [0.3, 1.9, 1.1, 3.3, 4.7] 17 | cond_val = [0.47, 0.56, 0.74, 1.47, 1.74] 18 | gridx = np.linspace(0.0, 15.0, 151) 19 | 20 | ############################################################################### 21 | # The conditioned spatial random field class depends on a Krige class in order 22 | # to handle the conditions. 23 | # This is created as described in the kriging tutorial. 24 | # 25 | # Here we use a Gaussian covariance model and ordinary kriging for conditioning 26 | # the spatial random field. 27 | 28 | model = gs.Gaussian(dim=1, var=0.5, len_scale=1.5) 29 | krige = gs.krige.Ordinary(model, cond_pos, cond_val) 30 | cond_srf = gs.CondSRF(krige) 31 | cond_srf.set_pos(gridx) 32 | 33 | ############################################################################### 34 | # To generate the ensemble we will use a seed-generator. 
35 | # We can specify individual names for each field by the keyword `store`: 36 | 37 | seed = gs.random.MasterRNG(20170519) 38 | for i in range(100): 39 | cond_srf(seed=seed(), store=f"f{i}") 40 | label = "Conditioned ensemble" if i == 0 else None 41 | plt.plot(gridx, cond_srf[f"f{i}"], color="k", alpha=0.1, label=label) 42 | 43 | fields = [cond_srf[f"f{i}"] for i in range(100)] 44 | plt.plot(gridx, cond_srf.krige(only_mean=True), label="estimated mean") 45 | plt.plot(gridx, np.mean(fields, axis=0), linestyle=":", label="Ensemble mean") 46 | plt.plot(gridx, cond_srf.krige.field, linestyle="dashed", label="kriged field") 47 | plt.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions") 48 | # 99 percent confidence interval 49 | conf = gs.tools.confidence_scaling(0.99) 50 | plt.fill_between( 51 | gridx, 52 | cond_srf.krige.field - conf * np.sqrt(cond_srf.krige.krige_var), 53 | cond_srf.krige.field + conf * np.sqrt(cond_srf.krige.krige_var), 54 | alpha=0.3, 55 | label="99% confidence interval", 56 | ) 57 | plt.legend() 58 | plt.show() 59 | 60 | ############################################################################### 61 | # As you can see, the kriging field coincides with the ensemble mean of the 62 | # conditioned random fields and the estimated mean 63 | # is the mean of the far-field. 64 | -------------------------------------------------------------------------------- /examples/06_conditioned_fields/01_2D_condition_ensemble.py: -------------------------------------------------------------------------------- 1 | """ 2 | Creating an Ensemble of conditioned 2D Fields 3 | --------------------------------------------- 4 | 5 | Let's create an ensemble of conditioned random fields in 2D. 
6 | """ 7 | 8 | import matplotlib.pyplot as plt 9 | import numpy as np 10 | 11 | import gstools as gs 12 | 13 | # conditioning data (x, y, value) 14 | cond_pos = [[0.3, 1.9, 1.1, 3.3, 4.7], [1.2, 0.6, 3.2, 4.4, 3.8]] 15 | cond_val = [0.47, 0.56, 0.74, 1.47, 1.74] 16 | 17 | # grid definition for output field 18 | x = np.arange(0, 5, 0.1) 19 | y = np.arange(0, 5, 0.1) 20 | 21 | model = gs.Gaussian(dim=2, var=0.5, len_scale=5, anis=0.5, angles=-0.5) 22 | krige = gs.Krige(model, cond_pos=cond_pos, cond_val=cond_val) 23 | cond_srf = gs.CondSRF(krige) 24 | cond_srf.set_pos([x, y], "structured") 25 | 26 | ############################################################################### 27 | # To generate the ensemble we will use a seed-generator. 28 | # By specifying ``store=[f"fld{i}", False, False]``, only the conditioned field 29 | # is stored with the specified name. The raw random field and the raw kriging 30 | # field is not stored. This way, we can access each conditioned field by index 31 | # ``cond_srf[i]``: 32 | 33 | seed = gs.random.MasterRNG(20170519) 34 | ens_no = 4 35 | for i in range(ens_no): 36 | cond_srf(seed=seed(), store=[f"fld{i}", False, False]) 37 | 38 | ############################################################################### 39 | # Now let's have a look at the pairwise differences between the generated 40 | # fields. We will see, that they coincide at the given conditions. 
41 | 42 | fig, ax = plt.subplots(ens_no + 1, ens_no + 1, figsize=(8, 8)) 43 | # plotting kwargs for scatter and image 44 | vmax = np.max(cond_srf.all_fields) 45 | sc_kw = dict(c=cond_val, edgecolors="k", vmin=0, vmax=vmax) 46 | im_kw = dict(extent=2 * [0, 5], origin="lower", vmin=0, vmax=vmax) 47 | for i in range(ens_no): 48 | # conditioned fields and conditions 49 | ax[i + 1, 0].imshow(cond_srf[i].T, **im_kw) 50 | ax[i + 1, 0].scatter(*cond_pos, **sc_kw) 51 | ax[i + 1, 0].set_ylabel(f"Field {i}", fontsize=10) 52 | ax[0, i + 1].imshow(cond_srf[i].T, **im_kw) 53 | ax[0, i + 1].scatter(*cond_pos, **sc_kw) 54 | ax[0, i + 1].set_title(f"Field {i}", fontsize=10) 55 | # absolute differences 56 | for j in range(ens_no): 57 | ax[i + 1, j + 1].imshow(np.abs(cond_srf[i] - cond_srf[j]).T, **im_kw) 58 | 59 | # beautify plots 60 | ax[0, 0].axis("off") 61 | for a in ax.flatten(): 62 | a.set_xticklabels([]), a.set_yticklabels([]) 63 | a.set_xticks([]), a.set_yticks([]) 64 | fig.subplots_adjust(wspace=0, hspace=0) 65 | fig.show() 66 | 67 | ############################################################################### 68 | # To check if the generated fields are correct, we can have a look at their 69 | # names: 70 | 71 | print(cond_srf.field_names) 72 | -------------------------------------------------------------------------------- /examples/06_conditioned_fields/README.rst: -------------------------------------------------------------------------------- 1 | Conditioned Fields 2 | ================== 3 | 4 | Kriged fields tend to approach the field mean outside the area of observations. 5 | To generate random fields, that coincide with given observations, but are still 6 | random according to a given covariance model away from the observations proximity, 7 | we provide the generation of conditioned random fields. 8 | 9 | The idea behind conditioned random fields builds up on kriging. 
10 | First we generate a field with a kriging method, then we generate a random field, 11 | with 0 as mean and 1 as variance that will be multiplied with the kriging 12 | standard deviation. 13 | 14 | To do so, you can instantiate a :any:`CondSRF` class with a configured 15 | :any:`Krige` class. 16 | 17 | The setup of the a conditioned random field should be as follows: 18 | 19 | .. code-block:: python 20 | 21 | krige = gs.Krige(model, cond_pos, cond_val) 22 | cond_srf = gs.CondSRF(krige) 23 | field = cond_srf(grid) 24 | 25 | Examples 26 | -------- 27 | -------------------------------------------------------------------------------- /examples/07_transformations/00_log_normal.py: -------------------------------------------------------------------------------- 1 | """ 2 | log-normal fields 3 | ----------------- 4 | 5 | Here we transform a field to a log-normal distribution: 6 | 7 | See :any:`transform.normal_to_lognormal` 8 | """ 9 | 10 | import gstools as gs 11 | 12 | # structured field with a size of 100x100 and a grid-size of 1x1 13 | x = y = range(100) 14 | model = gs.Gaussian(dim=2, var=1, len_scale=10) 15 | srf = gs.SRF(model, seed=20170519) 16 | srf.structured([x, y]) 17 | srf.transform("normal_to_lognormal") # also "lognormal" works 18 | srf.plot() 19 | -------------------------------------------------------------------------------- /examples/07_transformations/01_binary.py: -------------------------------------------------------------------------------- 1 | """ 2 | binary fields 3 | ------------- 4 | 5 | Here we transform a field to a binary field with only two values. 6 | The dividing value is the mean by default and the upper and lower values 7 | are derived to preserve the variance. 
8 | 9 | See :any:`transform.binary` 10 | """ 11 | 12 | import gstools as gs 13 | 14 | # structured field with a size of 100x100 and a grid-size of 1x1 15 | x = y = range(100) 16 | model = gs.Gaussian(dim=2, var=1, len_scale=10) 17 | srf = gs.SRF(model, seed=20170519) 18 | srf.structured([x, y]) 19 | srf.transform("binary") 20 | srf.plot() 21 | -------------------------------------------------------------------------------- /examples/07_transformations/02_discrete.py: -------------------------------------------------------------------------------- 1 | """ 2 | Discrete fields 3 | --------------- 4 | 5 | Here we transform a field to a discrete field with values. 6 | If we do not give thresholds, the pairwise means of the given 7 | values are taken as thresholds. 8 | If thresholds are given, arbitrary values can be applied to the field. 9 | 10 | See :any:`transform.discrete` 11 | """ 12 | 13 | import numpy as np 14 | 15 | import gstools as gs 16 | 17 | # Structured field with a size of 100x100 and a grid-size of 0.5x0.5 18 | x = y = np.arange(200) * 0.5 19 | model = gs.Gaussian(dim=2, var=1, len_scale=5) 20 | srf = gs.SRF(model, seed=20170519) 21 | srf.structured([x, y]) 22 | 23 | ############################################################################### 24 | # Create 5 equidistanly spaced values, thresholds are the arithmetic means 25 | 26 | values1 = np.linspace(np.min(srf.field), np.max(srf.field), 5) 27 | srf.transform("discrete", store="f1", values=values1) 28 | srf.plot("f1") 29 | 30 | ############################################################################### 31 | # Calculate thresholds for equal shares 32 | # but apply different values to the separated classes 33 | 34 | values2 = [0, -1, 2, -3, 4] 35 | srf.transform("discrete", store="f2", values=values2, thresholds="equal") 36 | srf.plot("f2") 37 | 38 | ############################################################################### 39 | # Create user defined thresholds 40 | # and apply different 
values to the separated classes 41 | 42 | values3 = [0, 1, 10] 43 | thresholds = [-1, 1] 44 | srf.transform("discrete", store="f3", values=values3, thresholds=thresholds) 45 | srf.plot("f3") 46 | -------------------------------------------------------------------------------- /examples/07_transformations/03_zinn_harvey.py: -------------------------------------------------------------------------------- 1 | """ 2 | Zinn & Harvey transformation 3 | ---------------------------- 4 | 5 | Here, we transform a field with the so called "Zinn & Harvey" transformation presented in 6 | `Zinn & Harvey (2003) `__. 7 | With this transformation, one could overcome the restriction that in ordinary 8 | Gaussian random fields the mean values are the ones being the most connected. 9 | 10 | See :any:`transform.zinnharvey` 11 | """ 12 | 13 | import gstools as gs 14 | 15 | # structured field with a size of 100x100 and a grid-size of 1x1 16 | x = y = range(100) 17 | model = gs.Gaussian(dim=2, var=1, len_scale=10) 18 | srf = gs.SRF(model, seed=20170519) 19 | srf.structured([x, y]) 20 | srf.transform("zinnharvey", conn="high") 21 | srf.plot() 22 | -------------------------------------------------------------------------------- /examples/07_transformations/04_bimodal.py: -------------------------------------------------------------------------------- 1 | """ 2 | Bimodal fields 3 | -------------- 4 | 5 | We provide two transformations to obtain bimodal distributions: 6 | 7 | * `arcsin `__. 8 | * `uquad `__. 9 | 10 | Both transformations will preserve the mean and variance of the given field by default. 
11 | 12 | See: :any:`transform.normal_to_arcsin` and :any:`transform.normal_to_uquad` 13 | """ 14 | 15 | import gstools as gs 16 | 17 | # structured field with a size of 100x100 and a grid-size of 1x1 18 | x = y = range(100) 19 | model = gs.Gaussian(dim=2, var=1, len_scale=10) 20 | srf = gs.SRF(model, seed=20170519) 21 | field = srf.structured([x, y]) 22 | srf.transform("normal_to_arcsin") # also "arcsin" works 23 | srf.plot() 24 | -------------------------------------------------------------------------------- /examples/07_transformations/05_combinations.py: -------------------------------------------------------------------------------- 1 | """ 2 | Combinations 3 | ------------ 4 | 5 | You can combine different transformations simply by successively applying them. 6 | 7 | Here, we first force the single field realization to hold the given moments, 8 | namely mean and variance. 9 | Then we apply the Zinn & Harvey transformation to connect the low values. 10 | Afterwards the field is transformed to a binary field and last but not least, 11 | we transform it to log-values. 12 | 13 | We can select the desired field by its name and we can define an output name 14 | to store the field. 15 | 16 | If you don't specify `field` and `store` everything happens inplace. 
17 | """ 18 | 19 | # sphinx_gallery_thumbnail_number = 1 20 | import gstools as gs 21 | 22 | # structured field with a size of 100x100 and a grid-size of 1x1 23 | x = y = range(100) 24 | model = gs.Gaussian(dim=2, var=1, len_scale=10) 25 | srf = gs.SRF(model, mean=-9, seed=20170519) 26 | srf.structured([x, y]) 27 | srf.transform("force_moments", field="field", store="f_forced") 28 | srf.transform("zinnharvey", field="f_forced", store="f_zinnharvey", conn="low") 29 | srf.transform("binary", field="f_zinnharvey", store="f_binary") 30 | srf.transform("lognormal", field="f_binary", store="f_result") 31 | srf.plot(field="f_result") 32 | 33 | ############################################################################### 34 | # The resulting field could be interpreted as a transmissivity field, where 35 | # the values of low permeability are the ones being the most connected 36 | # and only two kinds of soil exist. 37 | # 38 | # All stored fields can be accessed and plotted by name: 39 | 40 | print("Max binary value:", srf.f_binary.max()) 41 | srf.plot(field="f_zinnharvey") 42 | -------------------------------------------------------------------------------- /examples/07_transformations/README.rst: -------------------------------------------------------------------------------- 1 | Field transformations 2 | ===================== 3 | 4 | The generated fields of gstools are ordinary Gaussian random fields. 5 | In application there are several transformations to describe real world 6 | problems in an appropriate manner. 7 | 8 | GStools provides a submodule :py:mod:`gstools.transform` with a range of 9 | common transformations: 10 | 11 | .. currentmodule:: gstools.transform 12 | 13 | .. 
autosummary:: 14 | binary 15 | discrete 16 | boxcox 17 | zinnharvey 18 | normal_force_moments 19 | normal_to_lognormal 20 | normal_to_uniform 21 | normal_to_arcsin 22 | normal_to_uquad 23 | apply_function 24 | 25 | 26 | All the transformations take a field class, that holds a generated field, 27 | as input and will manipulate this field inplace or store it with a given name. 28 | 29 | Simply apply a transformation to a field class: 30 | 31 | .. code-block:: python 32 | 33 | import gstools as gs 34 | ... 35 | srf = gs.SRF(model) 36 | srf(...) 37 | gs.transform.normal_to_lognormal(srf) 38 | 39 | Or use the provided wrapper: 40 | 41 | .. code-block:: python 42 | 43 | import gstools as gs 44 | ... 45 | srf = gs.SRF(model) 46 | srf(...) 47 | srf.transform("lognormal") 48 | 49 | Examples 50 | -------- 51 | -------------------------------------------------------------------------------- /examples/08_geo_coordinates/00_field_generation.py: -------------------------------------------------------------------------------- 1 | """ 2 | Working with lat-lon random fields 3 | ---------------------------------- 4 | 5 | In this example, we demonstrate how to generate a random field on 6 | geographical coordinates. 7 | 8 | First we setup a model, with ``latlon=True``, to get the associated 9 | Yadrenko model. 10 | 11 | In addition, we will use a kilometer scale provided by :any:`KM_SCALE` 12 | as ``geo_scale`` to have a meaningful length scale in km. 13 | By default the length scale would be given in radians (:any:`RADIAN_SCALE`). 14 | A third option is a length scale in degrees (:any:`DEGREE_SCALE`). 15 | 16 | To generate the field, we simply pass ``(lat, lon)`` as the position tuple 17 | to the :any:`SRF` class. 
18 | """ 19 | 20 | import numpy as np 21 | 22 | import gstools as gs 23 | 24 | model = gs.Gaussian(latlon=True, len_scale=777, geo_scale=gs.KM_SCALE) 25 | 26 | lat = lon = range(-80, 81) 27 | srf = gs.SRF(model, seed=1234) 28 | field = srf.structured((lat, lon)) 29 | srf.plot() 30 | 31 | ############################################################################### 32 | # This was easy as always! Now we can use this field to estimate the empirical 33 | # variogram in order to prove, that the generated field has the correct 34 | # geo-statistical properties. 35 | # The :any:`vario_estimate` routine also provides a ``latlon`` switch to 36 | # indicate, that the given field is defined on geographical variables. 37 | # 38 | # As we will see, everthing went well... phew! 39 | 40 | bin_edges = np.linspace(0, 777 * 3, 30) 41 | bin_center, emp_vario = gs.vario_estimate( 42 | (lat, lon), 43 | field, 44 | bin_edges, 45 | latlon=True, 46 | mesh_type="structured", 47 | sampling_size=2000, 48 | sampling_seed=12345, 49 | geo_scale=gs.KM_SCALE, 50 | ) 51 | 52 | ax = model.plot("vario_yadrenko", x_max=max(bin_center)) 53 | model.fit_variogram(bin_center, emp_vario, nugget=False) 54 | model.plot("vario_yadrenko", ax=ax, label="fitted", x_max=max(bin_center)) 55 | ax.scatter(bin_center, emp_vario, color="k") 56 | print(model) 57 | 58 | ############################################################################### 59 | # .. note:: 60 | # 61 | # Note, that the estimated variogram coincides with the yadrenko variogram, 62 | # which means it depends on the great-circle distance given in radians. 63 | # 64 | # Keep that in mind when defining bins: The range is at most 65 | # :math:`\pi\approx 3.14`, which corresponds to the half globe. 
66 | -------------------------------------------------------------------------------- /examples/08_geo_coordinates/README.rst: -------------------------------------------------------------------------------- 1 | Geographic Coordinates 2 | ====================== 3 | 4 | GSTools provides support for 5 | `geographic coordinates `_ 6 | given by: 7 | 8 | - latitude ``lat``: specifies the north–south position of a point on the Earth's surface 9 | - longitude ``lon``: specifies the east–west position of a point on the Earth's surface 10 | 11 | If you want to use this feature for field generation or Kriging, you 12 | have to set up a geographical covariance Model by setting ``latlon=True`` 13 | in your desired model (see :any:`CovModel`): 14 | 15 | .. code-block:: python 16 | 17 | import numpy as np 18 | import gstools as gs 19 | 20 | model = gs.Gaussian(latlon=True, var=2, len_scale=np.pi / 16) 21 | 22 | By doing so, the model will use the associated `Yadrenko` model on a sphere 23 | (see `here `_). 24 | The `len_scale` is given in radians to scale the arc-length. 25 | In order to have a more meaningful length scale, one can use the ``geo_scale`` 26 | argument: 27 | 28 | .. code-block:: python 29 | 30 | import gstools as gs 31 | 32 | model = gs.Gaussian(latlon=True, var=2, len_scale=500, geo_scale=gs.KM_SCALE) 33 | 34 | Then ``len_scale`` can be interpreted as given in km. 35 | 36 | A `Yadrenko` model :math:`C` is derived from a valid 37 | isotropic covariance model in 3D :math:`C_{3D}` by the following relation: 38 | 39 | .. math:: 40 | C(\zeta)=C_{3D}\left(2r \cdot \sin\left(\frac{\zeta}{2r}\right)\right) 41 | 42 | Where :math:`\zeta` is the 43 | `great-circle distance `_ 44 | and :math:`r` is the ``geo_scale``. 45 | 46 | .. note:: 47 | 48 | ``lat`` and ``lon`` are given in degree, whereas the great-circle distance 49 | :math:`zeta` is given in units of the ``geo_scale``. 
50 | 51 | Note, that :math:`2r \cdot \sin(\frac{\zeta}{2r})` is the 52 | `chordal distance `_ 53 | of two points on a sphere with radius :math:`r`, which means we simply think of the 54 | earth surface as a sphere, that is cut out of the surrounding three dimensional space, 55 | when using the `Yadrenko` model. 56 | 57 | .. note:: 58 | 59 | Anisotropy is not available with the geographical models, since their 60 | geometry is not euclidean. When passing values for :any:`CovModel.anis` 61 | or :any:`CovModel.angles`, they will be ignored. 62 | 63 | Since the Yadrenko model comes from a 3D model, the model dimension will 64 | be 3 (see :any:`CovModel.dim`) but the `field_dim` will be 2 in this case 65 | (see :any:`CovModel.field_dim`). 66 | 67 | Examples 68 | -------- 69 | -------------------------------------------------------------------------------- /examples/09_spatio_temporal/02_precip_2d.py: -------------------------------------------------------------------------------- 1 | """ 2 | Creating a 2D Synthetic Precipitation Field 3 | ------------------------------------------- 4 | 5 | In this example we'll create a time series of a 2D synthetic precipitation 6 | field. 7 | 8 | Very similar to the previous tutorial, we'll start off by creating a Gaussian 9 | random field with an exponential variogram, which seems to reproduce the 10 | spatial correlations of precipitation fields quite well. We'll create a daily 11 | timeseries over a two dimensional domain of 50km x 40km. This workflow is 12 | suited for sub daily precipitation time series. 
13 | """ 14 | 15 | import matplotlib.animation as animation 16 | import matplotlib.pyplot as plt 17 | import numpy as np 18 | 19 | import gstools as gs 20 | 21 | # fix the seed for reproducibility 22 | seed = 20170521 23 | # 1st spatial axis of 50km with a resolution of 1km 24 | x = np.arange(0, 50, 1.0) 25 | # 2nd spatial axis of 40km with a resolution of 1km 26 | y = np.arange(0, 40, 1.0) 27 | # half daily timesteps over three months 28 | t = np.arange(0.0, 90.0, 0.5) 29 | 30 | # space-time anisotropy ratio given in units d / km 31 | st_anis = 0.4 32 | 33 | # an exponential variogram with a corr. lengths of 5km, 5km, and 2d 34 | model = gs.Exponential( 35 | temporal=True, spatial_dim=2, var=1, len_scale=5, anis=st_anis 36 | ) 37 | # create a spatial random field instance 38 | srf = gs.SRF(model, seed=seed) 39 | 40 | pos, time = [x, y], [t] 41 | 42 | # the Gaussian random field 43 | srf.structured(pos + time) 44 | 45 | # account for the skewness and the dry periods 46 | cutoff = 0.55 47 | gs.transform.boxcox(srf, lmbda=0.5, shift=-1.0 / cutoff) 48 | 49 | # adjust the amount of precipitation 50 | amount = 4.0 51 | srf.field *= amount 52 | 53 | ############################################################################### 54 | # plot the 2d precipitation field over time as an animation. 
55 | 56 | 57 | def _update_ani(time_step): 58 | im.set_array(srf.field[:, :, time_step].T) 59 | return (im,) 60 | 61 | 62 | fig, ax = plt.subplots() 63 | im = ax.imshow( 64 | srf.field[:, :, 0].T, 65 | cmap="Blues", 66 | interpolation="bicubic", 67 | origin="lower", 68 | ) 69 | cbar = fig.colorbar(im) 70 | cbar.ax.set_ylabel(r"Precipitation $P$ / mm") 71 | ax.set_xlabel(r"$x$ / km") 72 | ax.set_ylabel(r"$y$ / km") 73 | 74 | ani = animation.FuncAnimation( 75 | fig, _update_ani, len(t), interval=100, blit=True 76 | ) 77 | -------------------------------------------------------------------------------- /examples/09_spatio_temporal/03_geographic_coordinates.py: -------------------------------------------------------------------------------- 1 | """ 2 | Working with spatio-temporal lat-lon fields 3 | ------------------------------------------- 4 | 5 | In this example, we demonstrate how to generate a spatio-temporal 6 | random field on geographical coordinates. 7 | 8 | First we setup a model, with ``latlon=True`` and ``temporal=True``, 9 | to get the associated spatio-temporal Yadrenko model. 10 | 11 | In addition, we will use a kilometer scale provided by :any:`KM_SCALE` 12 | as ``geo_scale`` to have a meaningful length scale in km. 13 | By default the length scale would be given in radians (:any:`RADIAN_SCALE`). 14 | A third option is a length scale in degrees (:any:`DEGREE_SCALE`). 15 | 16 | To generate the field, we simply pass ``(lat, lon, time)`` as the position tuple 17 | to the :any:`SRF` class. 18 | 19 | We will set a spatial length-scale of `1000` and a time length-scale of `100` days. 
20 | """ 21 | 22 | import numpy as np 23 | 24 | import gstools as gs 25 | 26 | model = gs.Matern( 27 | latlon=True, 28 | temporal=True, 29 | var=1, 30 | len_scale=[1000, 100], 31 | geo_scale=gs.KM_SCALE, 32 | ) 33 | 34 | lat = lon = np.linspace(-80, 81, 50) 35 | time = np.linspace(0, 777, 50) 36 | srf = gs.SRF(model, seed=1234) 37 | field = srf.structured((lat, lon, time)) 38 | srf.plot() 39 | -------------------------------------------------------------------------------- /examples/09_spatio_temporal/README.rst: -------------------------------------------------------------------------------- 1 | Spatio-Temporal Modeling 2 | ======================== 3 | 4 | Spatio-Temporal modelling can provide insights into time dependent processes 5 | like rainfall, air temperature or crop yield. 6 | 7 | GSTools provides the metric spatio-temporal model for all covariance models 8 | by setting ``temporal=True``, which enhances the spatial model dimension with 9 | a time dimension to result in the spatio-temporal dimension. 10 | Since the model dimension is then higher than the spatial dimension, you can use 11 | the ``spatial_dim`` argument to explicitly set the spatial dimension. 12 | Doing that and setting a spatio-temporal anisotropy ratio looks like this: 13 | 14 | .. code-block:: python 15 | 16 | import gstools as gs 17 | dim = 3 # spatial dimension 18 | st_anis = 0.4 19 | st_model = gs.Exponential(temporal=True, spatial_dim=dim, anis=st_anis) 20 | 21 | Since it is given in the name "spatio-temporal", time is always treated as last dimension. 22 | You could also use ``dim`` to specify the dimension but note that it needs to include 23 | the temporal dimension. 24 | 25 | There are now three different dimension attributes giving information about (i) the 26 | model dimension (``dim``), (ii) the field dimension (``field_dim``, including time) and 27 | (iii) the spatial dimension (``spatial_dim`` always 1 less than ``field_dim`` for temporal models). 
28 | Model and field dimension can differ in case of geographic coordinates where the model dimension is 3, 29 | but the field or parametric dimension is 2. 30 | If the model is spatio-temporal with geographic coordinates, the model dimension is 4, 31 | the field dimension is 3 and the spatial dimension is 2. 32 | 33 | In the case above we get: 34 | 35 | .. code-block:: python 36 | 37 | st_model.dim == 4 38 | st_model.field_dim == 4 39 | st_model.spatial_dim == 3 40 | 41 | This formulation enables us to have spatial anisotropy and rotation defined as in 42 | non-temporal models, without altering the behavior in the time dimension: 43 | 44 | .. code-block:: python 45 | 46 | anis = [0.4, 0.2] # spatial anisotropy in 3D 47 | angles = [0.5, 0.4, 0.3] # spatial rotation in 3D 48 | st_model = gs.Exponential(temporal=True, spatial_dim=dim, anis=anis+[st_anis], angles=angles) 49 | 50 | In order to generate spatio-temporal position tuples, GSTools provides a 51 | convenient function :any:`generate_st_grid`. The output can be used for 52 | spatio-temporal random field generation (or kriging resp. conditioned fields): 53 | 54 | .. code-block:: python 55 | 56 | pos = dim * [1, 2, 3] # 3 points in space (1,1,1), (2,2,2) and (3,3,3) 57 | time = range(10) # 10 time steps 58 | st_grid = gs.generate_st_grid(pos, time) 59 | st_rf = gs.SRF(st_model) 60 | st_field = st_rf(st_grid).reshape(-1, len(time)) 61 | 62 | Then we can access the different time-steps by the last array index. 63 | 64 | Examples 65 | -------- 66 | -------------------------------------------------------------------------------- /examples/10_normalizer/00_lognormal_kriging.py: -------------------------------------------------------------------------------- 1 | r""" 2 | Log-Normal Kriging 3 | ------------------ 4 | 5 | Log Normal kriging is a term to describe a special workflow for kriging to 6 | deal with log-normal data, like conductivity or transmissivity in hydrogeology. 
7 | 8 | It simply means to first convert the input data to a normal distribution, i.e. 9 | applying a logarithic function, then interpolating these values with kriging 10 | and transforming the result back with the exponential function. 11 | 12 | The resulting kriging variance describes the error variance of the log-values 13 | of the target variable. 14 | 15 | In this example we will use ordinary kriging. 16 | """ 17 | 18 | import numpy as np 19 | 20 | import gstools as gs 21 | 22 | # condtions 23 | cond_pos = [0.3, 1.9, 1.1, 3.3, 4.7] 24 | cond_val = [0.47, 0.56, 0.74, 1.47, 1.74] 25 | # resulting grid 26 | gridx = np.linspace(0.0, 15.0, 151) 27 | # stable covariance model 28 | model = gs.Stable(dim=1, var=0.5, len_scale=2.56, alpha=1.9) 29 | 30 | ############################################################################### 31 | # In order to result in log-normal kriging, we will use the :any:`LogNormal` 32 | # Normalizer. This is a parameter-less normalizer, so we don't have to fit it. 33 | normalizer = gs.normalizer.LogNormal 34 | 35 | ############################################################################### 36 | # Now we generate the interpolated field as well as the mean field. 37 | # This can be done by setting `only_mean=True` in :any:`Krige.__call__`. 38 | # The result is then stored as `mean_field`. 39 | # 40 | # In terms of log-normal kriging, this mean represents the geometric mean of 41 | # the field. 42 | krige = gs.krige.Ordinary(model, cond_pos, cond_val, normalizer=normalizer) 43 | # interpolate the field 44 | krige(gridx) 45 | # also generate the mean field 46 | krige(gridx, only_mean=True) 47 | 48 | ############################################################################### 49 | # And that's it. Let's have a look at the results. 
50 | ax = krige.plot() 51 | # plotting the geometric mean 52 | krige.plot("mean_field", ax=ax) 53 | # plotting the conditioning data 54 | ax.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions") 55 | ax.legend() 56 | -------------------------------------------------------------------------------- /examples/10_normalizer/01_auto_fit.py: -------------------------------------------------------------------------------- 1 | """ 2 | Automatic fitting 3 | ----------------- 4 | 5 | In order to demonstrate how to automatically fit normalizer and variograms, 6 | we generate synthetic log-normal data, that should be interpolated with 7 | ordinary kriging. 8 | 9 | Normalizers are fitted by minimizing the likelihood function and variograms 10 | are fitted by estimating the empirical variogram with automatic binning and 11 | fitting the theoretical model to it. Thereby the sill is constrained to match 12 | the field variance. 13 | 14 | Artificial data 15 | ^^^^^^^^^^^^^^^ 16 | 17 | Here we generate log-normal data following a Gaussian covariance model. 18 | We will generate the "original" field on a 60x60 mesh, from which we will take 19 | samples in order to pretend a situation of data-scarcity. 20 | """ 21 | 22 | import matplotlib.pyplot as plt 23 | import numpy as np 24 | 25 | import gstools as gs 26 | 27 | # structured field with edge length of 50 28 | x = y = range(51) 29 | pos = gs.generate_grid([x, y]) 30 | model = gs.Gaussian(dim=2, var=1, len_scale=10) 31 | srf = gs.SRF(model, seed=20170519, normalizer=gs.normalizer.LogNormal()) 32 | # generate the original field 33 | srf(pos) 34 | 35 | ############################################################################### 36 | # Here, we sample 60 points and set the conditioning points and values. 
37 | 38 | ids = np.arange(srf.field.size) 39 | samples = np.random.RandomState(20210201).choice(ids, size=60, replace=False) 40 | 41 | # sample conditioning points from generated field 42 | cond_pos = pos[:, samples] 43 | cond_val = srf.field[samples] 44 | 45 | ############################################################################### 46 | # Fitting and Interpolation 47 | # ^^^^^^^^^^^^^^^^^^^^^^^^^ 48 | # 49 | # Now we want to interpolate the "measured" samples 50 | # and we want to normalize the given data with the BoxCox transformation. 51 | # 52 | # Here we set up the kriging routine and use a :any:`Stable` model, that should 53 | # be fitted automatically to the given data 54 | # and we pass the :any:`BoxCox` normalizer in order to gain normality. 55 | # 56 | # The normalizer will be fitted automatically to the data, 57 | # by setting ``fit_normalizer=True``. 58 | # 59 | # The covariance/variogram model will be fitted by an automatic workflow 60 | # by setting ``fit_variogram=True``. 61 | 62 | krige = gs.krige.Ordinary( 63 | model=gs.Stable(dim=2), 64 | cond_pos=cond_pos, 65 | cond_val=cond_val, 66 | normalizer=gs.normalizer.BoxCox(), 67 | fit_normalizer=True, 68 | fit_variogram=True, 69 | ) 70 | 71 | ############################################################################### 72 | # First, let's have a look at the fitting results: 73 | 74 | print(krige.model) 75 | print(krige.normalizer) 76 | 77 | ############################################################################### 78 | # As we see, it went quite well. Variance is a bit underestimated, but 79 | # length scale and nugget are good. The shape parameter of the stable model 80 | # is correctly estimated to be close to `2`, 81 | # so we result in a Gaussian like model. 82 | # 83 | # The BoxCox parameter `lmbda` was estimated to be almost 0, which means, 84 | # the log-normal distribution was correctly fitted. 85 | # 86 | # Now let's run the kriging interpolation. 
87 | 88 | krige(pos) 89 | 90 | ############################################################################### 91 | # Plotting 92 | # ^^^^^^^^ 93 | # 94 | # Finally let's compare the original, sampled and interpolated fields. 95 | # As we'll see, there is a lot of information in the covariance structure 96 | # of the measurement samples and the field is reconstructed quite accurately. 97 | 98 | fig, ax = plt.subplots(1, 3, figsize=[8, 3]) 99 | ax[0].imshow(srf.field.reshape(len(x), len(y)).T, origin="lower") 100 | ax[1].scatter(*cond_pos, c=cond_val) 101 | ax[2].imshow(krige.field.reshape(len(x), len(y)).T, origin="lower") 102 | # titles 103 | ax[0].set_title("original field") 104 | ax[1].set_title("sampled field") 105 | ax[2].set_title("interpolated field") 106 | # set aspect ratio to equal in all plots 107 | [ax[i].set_aspect("equal") for i in range(3)] 108 | -------------------------------------------------------------------------------- /examples/10_normalizer/02_compare.py: -------------------------------------------------------------------------------- 1 | """ 2 | Normalizer Comparison 3 | --------------------- 4 | 5 | Let's compare the transformation behavior of the provided normalizers. 6 | 7 | But first, we define a convenience routine and make some imports as always. 8 | """ 9 | 10 | import matplotlib.pyplot as plt 11 | import numpy as np 12 | 13 | import gstools as gs 14 | 15 | 16 | def dashes(i=1, max_n=12, width=1): 17 | """Return line dashes.""" 18 | return i * [width, width] + [max_n * 2 * width - 2 * i * width, width] 19 | 20 | 21 | ############################################################################### 22 | # We select 4 normalizers depending on a single parameter lambda and 23 | # plot their transformation behavior within the interval [-5, 5]. 24 | # 25 | # For the shape parameter lambda, we create a list of 8 values ranging from 26 | # -1 to 2.5. 
27 | 28 | lmbdas = [i * 0.5 for i in range(-2, 6)] 29 | normalizers = [ 30 | gs.normalizer.BoxCox, 31 | gs.normalizer.YeoJohnson, 32 | gs.normalizer.Modulus, 33 | gs.normalizer.Manly, 34 | ] 35 | 36 | ############################################################################### 37 | # Let's plot them! 38 | 39 | fig, ax = plt.subplots(2, 2, figsize=[8, 8]) 40 | for i, norm in enumerate(normalizers): 41 | # correctly setting the data range 42 | x_rng = norm().normalize_range 43 | x = np.linspace(max(-5, x_rng[0] + 0.01), min(5, x_rng[1] - 0.01)) 44 | for j, lmbda in enumerate(lmbdas): 45 | ax.flat[i].plot( 46 | x, 47 | norm(lmbda=lmbda).normalize(x), 48 | label=r"$\lambda=" + str(lmbda) + "$", 49 | color="k", 50 | alpha=0.2 + j * 0.1, 51 | dashes=dashes(j), 52 | ) 53 | # axis formatting 54 | ax.flat[i].grid(which="both", color="grey", linestyle="-", alpha=0.2) 55 | ax.flat[i].set_ylim((-5, 5)) 56 | ax.flat[i].set_xlim((-5, 5)) 57 | ax.flat[i].set_title(norm().name) 58 | # figure formatting 59 | handles, labels = ax.flat[-1].get_legend_handles_labels() 60 | fig.legend(handles, labels, loc="lower center", ncol=4, handlelength=3.0) 61 | fig.suptitle("Normalizer Comparison", fontsize=20) 62 | fig.show() 63 | 64 | ############################################################################### 65 | # The missing :any:`LogNormal` transformation is covered by the :any:`BoxCox` 66 | # transformation for lambda=0. The :any:`BoxCoxShift` transformation is 67 | # simply the :any:`BoxCox` transformation shifted on the X-axis. 68 | -------------------------------------------------------------------------------- /examples/10_normalizer/README.rst: -------------------------------------------------------------------------------- 1 | Normalizing Data 2 | ================ 3 | 4 | When dealing with real-world data, one can't assume it to be normal distributed. 
5 | In fact, many properties are modeled by applying different transformations, 6 | for example conductivity is often assumed to be log-normal or precipitation 7 | is transformed using the famous box-cox power transformation. 8 | 9 | These "normalizers" are often represented as parameteric power transforms and 10 | one is interested in finding the best parameter to gain normality in the input 11 | data. 12 | 13 | This is of special interest when kriging should be applied, since the target 14 | variable of the kriging interpolation is assumed to be normal distributed. 15 | 16 | GSTools provides a set of Normalizers and routines to automatically fit these 17 | to input data by minimizing the likelihood function. 18 | 19 | Mean, Trend and Normalizers 20 | --------------------------- 21 | 22 | All Field classes (:any:`SRF`, :any:`Krige` or :any:`CondSRF`) provide the input 23 | of `mean`, `normalizer` and `trend`: 24 | 25 | * A `trend` can be a callable function, that represents a trend in input data. 26 | For example a linear decrease of temperature with height. 27 | 28 | * The `normalizer` will be applied after the data was detrended, i.e. the trend 29 | was substracted from the data, in order to gain normality. 30 | 31 | * The `mean` is now interpreted as the mean of the normalized data. The user 32 | could also provide a callable mean, but it is mostly meant to be constant. 33 | 34 | When no normalizer is given, `trend` and `mean` basically behave the same. 35 | We just decided that a trend is associated with raw data and a mean is used 36 | in the context of normally distributed data. 37 | 38 | Provided Normalizers 39 | -------------------- 40 | 41 | The following normalizers can be passed to all Field-classes and variogram 42 | estimation routines or can be used as standalone tools to analyse data. 43 | 44 | .. currentmodule:: gstools.normalizer 45 | 46 | .. 
autosummary:: 47 | LogNormal 48 | BoxCox 49 | BoxCoxShift 50 | YeoJohnson 51 | Modulus 52 | Manly 53 | 54 | Examples 55 | -------- 56 | -------------------------------------------------------------------------------- /examples/11_plurigaussian/00_simple.py: -------------------------------------------------------------------------------- 1 | """ 2 | A First and Simple Example 3 | -------------------------- 4 | 5 | As a first example, we will create a two dimensional plurigaussian field 6 | (PGS). Thus, we need two spatial random fields(SRF) and on top of that, we 7 | need a field describing the categorical data and its spatial relation. 8 | We will start off by creating the two SRFs with a Gaussian variogram, which 9 | makes the fields nice and smooth. But before that, we will import all 10 | necessary libraries and define a few variables, like the number of grid 11 | cells in each dimension. 12 | """ 13 | 14 | import matplotlib.pyplot as plt 15 | import numpy as np 16 | 17 | import gstools as gs 18 | 19 | dim = 2 20 | # no. of cells in both dimensions 21 | N = [180, 140] 22 | 23 | x = np.arange(N[0]) 24 | y = np.arange(N[1]) 25 | 26 | ############################################################################### 27 | # In this first example we will use the same geostatistical parameters for 28 | # both fields for simplicity. Thus, we can use the same SRF instance for the 29 | # two fields. 30 | 31 | model = gs.Gaussian(dim=dim, var=1, len_scale=10) 32 | srf = gs.SRF(model) 33 | field1 = srf.structured([x, y], seed=20170519) 34 | field2 = srf.structured([x, y], seed=19970221) 35 | 36 | ############################################################################### 37 | # Now, we will create the lithotypes field describing the categorical data. For 38 | # now, we will only have two categories and we will address them by the 39 | # integers 0 and 1. We start off by creating a matrix of 0s from which we will 40 | # select a rectangle and fill that with 1s. 
This field does not have to match 41 | # the shape of the SRFs. 42 | 43 | centroid = [200, 160] 44 | 45 | # size of the rectangle 46 | rect = [40, 32] 47 | 48 | lithotypes = np.zeros(centroid) 49 | lithotypes[ 50 | centroid[0] // 2 - rect[0] // 2 : centroid[0] // 2 + rect[0] // 2, 51 | centroid[1] // 2 - rect[1] // 2 : centroid[1] // 2 + rect[1] // 2, 52 | ] = 1 53 | 54 | ############################################################################### 55 | # With the two SRFs and the L-field ready, we can create our first PGS. First, we 56 | # set up an instance of the PGS class and then we are ready to calculate the 57 | # field by calling the instance and handing over the L-field. 58 | 59 | pgs = gs.PGS(dim, [field1, field2]) 60 | P = pgs(lithotypes) 61 | 62 | ############################################################################### 63 | # Finally, we can plot the PGS, but we will also show the L-field and the two 64 | # original Gaussian fields. 65 | 66 | fig, axs = plt.subplots(2, 2) 67 | 68 | axs[0, 0].imshow(field1, cmap="copper", origin="lower") 69 | axs[0, 1].imshow(field2, cmap="copper", origin="lower") 70 | axs[1, 0].imshow(lithotypes, cmap="copper", origin="lower") 71 | axs[1, 1].imshow(P, cmap="copper", origin="lower") 72 | 73 | # For more information on Plurigaussian fields and how they naturally extend 74 | # truncated Gaussian fields, we can recommend the book 75 | # [Plurigaussian Simulations in Geosciences](https://doi.org/10.1007/978-3-642-19607-2) 76 | -------------------------------------------------------------------------------- /examples/11_plurigaussian/01_pgs.py: -------------------------------------------------------------------------------- 1 | """ 2 | Understanding PGS 3 | ----------------- 4 | 5 | In this example we want to try to understand how exactly PGS are generated 6 | and how to influence them with the categorical field. 7 | First of all, we will set everything up very similar to the first example. 
8 | """ 9 | 10 | import matplotlib.pyplot as plt 11 | import numpy as np 12 | 13 | import gstools as gs 14 | 15 | dim = 2 16 | # no. of cells in both dimensions 17 | N = [100, 80] 18 | 19 | x = np.arange(N[0]) 20 | y = np.arange(N[1]) 21 | 22 | ############################################################################### 23 | # In this example we will use different geostatistical parameters for the 24 | # SRFs. We will create fields with a strong anisotropy, and on top of that they 25 | # will both be rotated. 26 | 27 | model1 = gs.Gaussian(dim=dim, var=1, len_scale=[20, 1], angles=np.pi / 8) 28 | srf1 = gs.SRF(model1, seed=20170519) 29 | field1 = srf1.structured([x, y]) 30 | model2 = gs.Gaussian(dim=dim, var=1, len_scale=[1, 20], angles=np.pi / 4) 31 | srf2 = gs.SRF(model2, seed=19970221) 32 | field2 = srf2.structured([x, y]) 33 | field1 += 5.0 34 | 35 | ############################################################################### 36 | # Internally, each field's values are mapped along an axis, which can be nicely 37 | # visualized with a scatter plot. We can easily do that by flattening the 2d 38 | # field values and simply use matplotlib's scatter plotting functionality. 39 | # The x-axis shows field1's values and the y-axis shows field2's values. 40 | 41 | plt.scatter(field1.flatten(), field2.flatten(), s=0.1) 42 | 43 | ############################################################################### 44 | # This mapping always has a multivariate Gaussian distribution and this is also 45 | # the field on which we define our categorical data `lithotypes` and their 46 | # relations to each other. Before providing further explanations, we will 47 | # create the lithotypes field, which again will have only two categories, but 48 | # this time we will not prescribe a rectangle, but a circle. 49 | 50 | # no. 
of grid cells of L-field 51 | M = [51, 41] 52 | # we need the indices of `lithotypes` later 53 | x_lith = np.arange(M[0]) 54 | y_lith = np.arange(M[1]) 55 | 56 | # radius of circle 57 | radius = 7 58 | 59 | lithotypes = np.zeros(M) 60 | mask = (x_lith[:, np.newaxis] - M[0] // 2) ** 2 + ( 61 | y_lith[np.newaxis, :] - M[1] // 2 62 | ) ** 2 < radius**2 63 | lithotypes[mask] = 1 64 | 65 | ############################################################################### 66 | # We can compute the actual PGS now. As a second step, we use a helper function 67 | # to recalculate the axes on which the lithotypes are defined. Normally, this 68 | # is handled internally. But in order to show the scatter plot together with 69 | # the lithotypes, we need the axes here. 70 | 71 | pgs = gs.PGS(dim, [field1, field2]) 72 | P = pgs(lithotypes) 73 | 74 | x_lith, y_lith = pgs.calc_lithotype_axes(lithotypes.shape) 75 | 76 | ############################################################################### 77 | # And now to some plotting. Unfortunately, matplotlib likes to mess around with 78 | # the aspect ratios of the plots, so the left panel is a bit stretched. 79 | 80 | fig, axs = plt.subplots(2, 2) 81 | axs[0, 0].imshow(field1, cmap="copper", origin="lower") 82 | axs[0, 1].imshow(field2, cmap="copper", origin="lower") 83 | axs[1, 0].scatter(field1.flatten(), field2.flatten(), s=0.1, color="C0") 84 | axs[1, 0].pcolormesh(x_lith, y_lith, lithotypes.T, alpha=0.3, cmap="copper") 85 | 86 | axs[1, 1].imshow(P, cmap="copper", origin="lower") 87 | 88 | ############################################################################### 89 | # The black areas show the category 0 and the orange areas show category 1. We 90 | # see that the majority of all points in the scatter plot are within the 91 | # yellowish circle, thus the orange areas are larger than the black ones. 
The 92 | # strong anisotropy and the rotation of the fields create these interesting 93 | # patterns which remind us of fractures in the subsurface. 94 | -------------------------------------------------------------------------------- /examples/11_plurigaussian/02_spatial_relations.py: -------------------------------------------------------------------------------- 1 | """ 2 | Controlling Spatial Relations 3 | ----------------------------- 4 | 5 | In this example we will try to understand how we can influence the spatial 6 | relationships of the different categories with the lithotypes field. For 7 | simplicity, we will start very similarly to the very first example. 8 | """ 9 | 10 | import matplotlib.pyplot as plt 11 | import numpy as np 12 | 13 | import gstools as gs 14 | 15 | dim = 2 16 | # no. of cells in both dimensions 17 | N = [100, 80] 18 | 19 | x = np.arange(N[0]) 20 | y = np.arange(N[1]) 21 | 22 | ############################################################################### 23 | # Again, we will use the same parameters for both fields. 24 | 25 | model = gs.Gaussian(dim=dim, var=1, len_scale=10) 26 | srf = gs.SRF(model) 27 | field1 = srf.structured([x, y], seed=20170519) 28 | field2 = srf.structured([x, y], seed=19970221) 29 | 30 | ############################################################################### 31 | # Now, we will prepare the lithotypes field, which will be a bit more 32 | # complicated this # time. First, we will create a triangle. Next, we will 33 | # create two rectangles touching each other along one of their edges and both 34 | # being directly above the triangle, but without touching it directly. 35 | # Finally, we will create a few very narrow rectangles, which will not touch 36 | # any other category shapes. The implementation details are not very 37 | # interesting, and can be skipped. 38 | 39 | # no. 
of grid cells of lithotypes field 40 | M = [60, 50] 41 | 42 | # size of the rectangles 43 | rect = [10, 8] 44 | 45 | # positions of some of the shapes for concise indexing 46 | S1 = [1, -9] 47 | S2 = [-5, 3] 48 | S3 = [-5, -5] 49 | 50 | lithotypes = np.zeros(M) 51 | # a small upper triangular helper matrix to create the triangle 52 | triu = np.triu(np.ones((rect[0], rect[0]))) 53 | # the triangle 54 | lithotypes[ 55 | M[0] // 2 + S1[0] : M[0] // 2 + S1[0] + rect[0], 56 | M[1] // 2 + S1[1] : M[1] // 2 + S1[1] + rect[0], 57 | ] = triu 58 | # the first rectangle 59 | lithotypes[ 60 | M[0] // 2 + S2[0] - rect[0] // 2 : M[0] // 2 + S2[0] + rect[0] // 2, 61 | M[1] // 2 + S2[1] - rect[1] // 2 : M[1] // 2 + S2[1] + rect[1] // 2, 62 | ] = 2 63 | # the second rectangle 64 | lithotypes[ 65 | M[0] // 2 + S3[0] - rect[0] // 2 : M[0] // 2 + S3[0] + rect[0] // 2, 66 | M[1] // 2 + S3[1] - rect[1] // 2 : M[1] // 2 + S3[1] + rect[1] // 2, 67 | ] = 3 68 | # some very narrow rectangles 69 | for i in range(4): 70 | lithotypes[ 71 | M[0] // 2 + S1[0] : M[0] // 2 + S1[0] + rect[0], 72 | M[1] // 2 73 | + S1[1] 74 | + rect[1] 75 | + 3 76 | + 2 * i : M[1] // 2 77 | + S1[1] 78 | + rect[1] 79 | + 4 80 | + 2 * i, 81 | ] = ( 82 | 4 + i 83 | ) 84 | 85 | ############################################################################### 86 | # With the two SRFs and the L-field ready, we can create the PGS. 87 | pgs = gs.PGS(dim, [field1, field2]) 88 | P = pgs(lithotypes) 89 | 90 | ############################################################################### 91 | # And now the plotting of the two Gaussian fields, the L-field, and the PGS. 
92 | 93 | fig, axs = plt.subplots(2, 2) 94 | 95 | axs[0, 0].imshow(field1, cmap="copper", origin="lower") 96 | axs[0, 1].imshow(field2, cmap="copper", origin="lower") 97 | axs[1, 0].imshow(lithotypes, cmap="copper", origin="lower") 98 | axs[1, 1].imshow(P, cmap="copper", origin="lower") 99 | plt.show() 100 | 101 | ############################################################################### 102 | # We can see that the two lower light and medium brown rectangles both fill up 103 | # large and rather smooth areas of the PGS. And they share very long common 104 | # borders due to the fact that these categories touch each other along one of 105 | # their edges. The next large area is the dark brown of the upper triangle. 106 | # This category is always very close to the light brown areas, but only 107 | # sometimes close to the medium brown areas, as they only share small parts in 108 | # close proximity to each other. Finally, we have the four stripes. They create 109 | # distorted stripes in the PGS. The lighter they get, the less area they fill. 110 | # This is due to the fact that their area is not only relatively small, but 111 | # also because they are increasingly further away from the center of the 112 | # lithotypes. 113 | -------------------------------------------------------------------------------- /examples/11_plurigaussian/03_correlations.py: -------------------------------------------------------------------------------- 1 | """ 2 | Understanding the Influence of Variograms 3 | ----------------------------------------- 4 | 5 | Up until now, we have only used very smooth Gaussian variograms for the 6 | underlying spatial random fields. Now, we will combine a smooth Gaussian 7 | field with a much rougher exponential field. This example should feel 8 | familiar, if you had a look at the previous examples. 9 | """ 10 | 11 | import matplotlib.pyplot as plt 12 | import numpy as np 13 | 14 | import gstools as gs 15 | 16 | dim = 2 17 | # no. 
of cells in both dimensions 18 | N = [200, 200] 19 | 20 | x = np.arange(N[0]) 21 | y = np.arange(N[1]) 22 | 23 | ############################################################################### 24 | # Now, we generate fields with a Gaussian and an Exponential variogram. 25 | 26 | model1 = gs.Gaussian(dim=dim, var=1, len_scale=[50, 25]) 27 | srf1 = gs.SRF(model1) 28 | field1 = srf1.structured([x, y], seed=20170519) 29 | model2 = gs.Exponential(dim=dim, var=1, len_scale=[40, 40]) 30 | srf2 = gs.SRF(model2) 31 | field2 = srf2.structured([x, y], seed=19970221) 32 | 33 | ############################################################################### 34 | # The lithotypes will consist of a circle which contains one category and the 35 | # surrounding is the second category. 36 | 37 | # no. of grid cells of the lithotypes 38 | M = [200, 200] 39 | 40 | # radius of circle 41 | radius = 25 42 | 43 | x_lith = np.arange(M[0]) 44 | y_lith = np.arange(M[1]) 45 | lithotypes = np.zeros(M) 46 | mask = (x_lith[:, np.newaxis] - M[0] // 2) ** 2 + ( 47 | y_lith[np.newaxis, :] - M[1] // 2 48 | ) ** 2 < radius**2 49 | lithotypes[mask] = 1 50 | 51 | ############################################################################### 52 | # With the two SRFs and the lithotypes ready, we can create the PGS. 53 | pgs = gs.PGS(dim, [field1, field2]) 54 | P = pgs(lithotypes) 55 | 56 | ############################################################################### 57 | # And now the plotting of the two Gaussian fields, the lithotypes, and the PGS. 58 | 59 | fig, axs = plt.subplots(2, 2) 60 | 61 | axs[0, 0].imshow(field1, cmap="copper", origin="lower") 62 | axs[0, 1].imshow(field2, cmap="copper", origin="lower") 63 | axs[1, 0].imshow(lithotypes, cmap="copper", origin="lower") 64 | axs[1, 1].imshow(P, cmap="copper", origin="lower") 65 | 66 | ############################################################################### 67 | # In this PGS, we can see two different spatial structures combined. 
We see large 68 | # and rather smooth structures and shapes, which are surrounded by very rough and 69 | # unconnected patches. 70 | -------------------------------------------------------------------------------- /examples/11_plurigaussian/04_3d_pgs.py: -------------------------------------------------------------------------------- 1 | """ 2 | Creating a Three Dimensional PGS 3 | -------------------------------- 4 | 5 | Let's create a 3d PGS! This will mostly feel very familiar, but the plotting 6 | will be a bit more involved. 7 | """ 8 | 9 | # sphinx_gallery_thumbnail_path = 'pics/3d_pgs.png' 10 | import matplotlib.pyplot as plt 11 | import numpy as np 12 | 13 | import gstools as gs 14 | 15 | dim = 3 16 | # no. of cells in all dimensions 17 | N = [40] * dim 18 | 19 | x = np.arange(N[0]) 20 | y = np.arange(N[1]) 21 | z = np.arange(N[2]) 22 | 23 | ############################################################################### 24 | # Because we want to create a 3d PGS, we have to generate 3 SRFs. If we are 25 | # interested in even higher dimensions, we could solve this code repetition 26 | # by using a loop... 27 | 28 | model1 = gs.Gaussian(dim=dim, var=1, len_scale=[20, 10, 15]) 29 | srf1 = gs.SRF(model1) 30 | field1 = srf1.structured([x, y, z], seed=20170519) 31 | model2 = gs.Exponential(dim=dim, var=1, len_scale=[5, 5, 5]) 32 | srf2 = gs.SRF(model2) 33 | field2 = srf2.structured([x, y, z], seed=19970221) 34 | model3 = gs.Gaussian(dim=dim, var=1, len_scale=[7, 12, 18]) 35 | srf3 = gs.SRF(model3) 36 | field3 = srf3.structured([x, y, z], seed=20011012) 37 | 38 | ############################################################################### 39 | # The 3d lithotypes field will consist of a cube which contains one category 40 | # and the surrounding is the second category. 
41 | 42 | # size of cube 43 | cube = [18] * dim 44 | 45 | lithotypes = np.zeros(N) 46 | lithotypes[ 47 | N[0] // 2 - cube[0] // 2 : N[0] // 2 + cube[0] // 2, 48 | N[1] // 2 - cube[1] // 2 : N[1] // 2 + cube[1] // 2, 49 | N[2] // 2 - cube[2] // 2 : N[2] // 2 + cube[2] // 2, 50 | ] = 1 51 | 52 | ############################################################################### 53 | # With the three SRFs and the lithotypes ready, we can create the 3d PGS. 54 | pgs = gs.PGS(dim, [field1, field2, field3]) 55 | P = pgs(lithotypes) 56 | 57 | # ############################################################################### 58 | # For ploting the 3d PGS, we will use [PyVista](https://pyvista.org/) which works 59 | # nicely together with GSTools. 60 | 61 | import pyvista as pv 62 | 63 | grid = pv.ImageData(dimensions=N) 64 | 65 | # uncomment, if you want to see lithotypes field, which is just a cube... 66 | # grid.point_data['lithotypes'] = np.meshgrid(lithotypes, indexing="ij")[0] 67 | # grid.plot(show_edges=True) 68 | 69 | grid.point_data["PGS"] = P.reshape(-1) 70 | 71 | ############################################################################### 72 | # .. note:: 73 | # PyVista does not work on readthedocs, but you can try it out yourself by 74 | # running the example yourself. You will get an interactive version of this 75 | # screenshot. 76 | 77 | # grid.contour(isosurfaces=8).plot() 78 | 79 | ############################################################################### 80 | # 81 | # .. 
image:: ../../pics/3d_pgs.png 82 | # :width: 400px 83 | # :align: center 84 | -------------------------------------------------------------------------------- /examples/11_plurigaussian/06_periodic.py: -------------------------------------------------------------------------------- 1 | """ 2 | Creating PGS with periodic boundaries 3 | ------------------------------------- 4 | 5 | Plurigaussian fields with periodic boundaries (P-PGS) are used in various 6 | applications, including the simulation of interactions between the landsurface 7 | and the atmosphere, as well as the application of homogenisation theory to 8 | porous media, e.g. [Ricketts 2024](https://doi.org/10.1007/s11242-024-02074-z). 9 | 10 | In this example we will use GSTools's Fourier generator to create periodic 11 | random fields, which can in turn be used to generate P-PGS. 12 | """ 13 | 14 | import matplotlib.pyplot as plt 15 | import numpy as np 16 | 17 | import gstools as gs 18 | 19 | dim = 2 20 | # define the spatial grid, see the periodic random field [examples](https://geostat-framework.readthedocs.io/projects/gstools/en/latest/examples/01_random_field/08_fourier.html) 21 | # for details. 22 | 23 | # domain size and periodicity 24 | lithotypes = 200 25 | # no. of cells in both dimensions 26 | N = [170, 153] 27 | 28 | x = np.linspace(0, lithotypes, N[0], endpoint=False) 29 | y = np.linspace(0, lithotypes, N[1], endpoint=False) 30 | 31 | ############################################################################### 32 | # The parameters of the covariance model are very similar to previous examples. 33 | # The interesting part is the SRF class. We set the `generator` to `"Fourier"`, 34 | # which inherently generates periodic SRFs. The Fourier generator needs an 35 | # extra parameter `period` which defines the periodicity. 
36 | 37 | model = gs.Gaussian(dim=dim, var=0.8, len_scale=40) 38 | srf = gs.SRF(model, generator="Fourier", period=lithotypes) 39 | field1 = srf.structured([x, y], seed=19770319) 40 | field2 = srf.structured([x, y], seed=19860912) 41 | 42 | ############################################################################### 43 | # Very similar to previous examples, we create a simple lithotypes field. 44 | 45 | M = [200, 160] 46 | 47 | # size of the rectangle 48 | rect = [40, 32] 49 | 50 | lithotypes = np.zeros(M) 51 | lithotypes[ 52 | M[0] // 2 - rect[0] // 2 : M[0] // 2 + rect[0] // 2, 53 | M[1] // 2 - rect[1] // 2 : M[1] // 2 + rect[1] // 2, 54 | ] = 1 55 | 56 | ############################################################################### 57 | # With the two SRFs and the lithotypes ready, we can create our first P-PGS. 58 | 59 | pgs = gs.PGS(dim, [field1, field2]) 60 | P = pgs(lithotypes) 61 | 62 | ############################################################################### 63 | # Finally, we can plot the PGS, but we will also show the lithotypes and the 64 | # two original periodic Gaussian fields. Especially with `field1` you can 65 | # nicely see the periodic structures in the black structure in the upper right 66 | # corner. This transfers to the P-PGS, where you can see that the structures 67 | # seemlessly match the opposite boundaries. 
68 | 69 | fig, axs = plt.subplots(2, 2) 70 | 71 | axs[0, 0].imshow(field1, cmap="copper", origin="lower") 72 | axs[0, 1].imshow(field2, cmap="copper", origin="lower") 73 | axs[1, 0].imshow(lithotypes, cmap="copper", origin="lower") 74 | axs[1, 1].imshow(P, cmap="copper", origin="lower") 75 | 76 | plt.show() 77 | -------------------------------------------------------------------------------- /examples/11_plurigaussian/README.rst: -------------------------------------------------------------------------------- 1 | Plurigaussian Field Generation 2 | ============================== 3 | 4 | Plurigaussian field simulations (PGS) are used to simulate correlated fields 5 | of categorical data, e.g. lithofacies, hydrofacies, soil types, or 6 | cementitious materials. 7 | PGS uses one spatial random field (SRF) per dimension, e.g. two SRFs, when 8 | working with two dimensional data. Furthermore, PGS needs a field, which 9 | describes the categorical data and its spatial relations. 10 | This might sound more complicated then it is, as we will see in the following 11 | examples. 12 | 13 | Examples 14 | -------- 15 | -------------------------------------------------------------------------------- /examples/11_plurigaussian/conditional_values.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoStat-Framework/GSTools/1edd2b93bf429803d716a9cbab125362583575de/examples/11_plurigaussian/conditional_values.npz -------------------------------------------------------------------------------- /examples/12_sum_model/00_simple_sum_model.py: -------------------------------------------------------------------------------- 1 | r""" 2 | Creating a Sum Model 3 | -------------------- 4 | 5 | This tutorial demonstrates how to create and use sum models in GSTools. 6 | We'll combine a Spherical and a Gaussian covariance model to construct 7 | a sum model, visualize its variogram, and generate spatial random fields. 
8 | 9 | Let's start with importing GSTools and setting up the domain size. 10 | """ 11 | 12 | import gstools as gs 13 | 14 | x = y = range(100) 15 | 16 | ############################################################################### 17 | # First, we create two individual covariance models: a :any:`Spherical` model and a 18 | # :any:`Gaussian` model. The Spherical model with its short length scale 19 | # will emphasize small-scale variability, while the Gaussian model with a larger length scale 20 | # captures larger-scale patterns. 21 | 22 | m0 = gs.Spherical(dim=2, var=2.0, len_scale=5.0) 23 | m1 = gs.Gaussian(dim=2, var=1.0, len_scale=10.0) 24 | 25 | ############################################################################### 26 | # Next, we create a sum model by adding these two models together. 27 | # Let's visualize the resulting variogram alongside the individual models. 28 | 29 | model = m0 + m1 30 | ax = model.plot(x_max=20) 31 | m0.plot(x_max=20, ax=ax) 32 | m1.plot(x_max=20, ax=ax) 33 | 34 | ############################################################################### 35 | # As shown, the Spherical model controls the behavior at shorter distances, 36 | # while the Gaussian model dominates at longer distances. The ratio of influence 37 | # is thereby controlled by the provided variances of the individual models. 38 | # 39 | # Using the sum model, we can generate a spatial random field. Let's visualize 40 | # the field created by the sum model. 41 | 42 | srf = gs.SRF(model, seed=20250107) 43 | srf.structured((x, y)) 44 | srf.plot() 45 | 46 | ############################################################################### 47 | # For comparison, we generate random fields using the individual models 48 | # to observe their contributions more clearly. 
49 | 50 | srf0 = gs.SRF(m0, seed=20250107) 51 | srf0.structured((x, y)) 52 | srf0.plot() 53 | 54 | srf1 = gs.SRF(m1, seed=20250107) 55 | srf1.structured((x, y)) 56 | srf1.plot() 57 | 58 | ############################################################################### 59 | # As seen, the Gaussian model introduces large-scale structures, while the 60 | # Spherical model influences the field's roughness. The sum model combines 61 | # these effects, resulting in a field that reflects multi-scale variability. 62 | -------------------------------------------------------------------------------- /examples/12_sum_model/01_fitting_sum_model.py: -------------------------------------------------------------------------------- 1 | r""" 2 | Fitting a Sum Model 3 | -------------------- 4 | 5 | In this tutorial, we demonstrate how to fit a sum model consisting of two 6 | covariance models to an empirical variogram. 7 | 8 | We will generate synthetic data, compute an empirical variogram, and fit a 9 | sum model combining a Spherical and Gaussian model to it. 10 | """ 11 | 12 | import gstools as gs 13 | 14 | x = y = range(100) 15 | 16 | ############################################################################### 17 | # First, we create a synthetic random field based on a known sum model. 18 | # This will serve as the ground truth for fitting. 19 | 20 | # Define the true sum model 21 | m0 = gs.Spherical(dim=2, var=2.0, len_scale=5.0) 22 | m1 = gs.Gaussian(dim=2, var=1.0, len_scale=10.0) 23 | true_model = m0 + m1 24 | 25 | # Generate synthetic field 26 | srf = gs.SRF(true_model, seed=20250405) 27 | field = srf.structured((x, y)) 28 | 29 | ############################################################################### 30 | # Next, we calculate the empirical variogram from the synthetic data. 
31 | 32 | # Compute empirical variogram 33 | bin_center, gamma = gs.vario_estimate((x, y), field) 34 | 35 | ############################################################################### 36 | # Now we define a sum model to fit to the empirical variogram. 37 | # Initially, the parameters of the models are arbitrary. 38 | # 39 | # A sum model can also be created by a list of model classes together with 40 | # the common arguments (like dim in this case). 41 | 42 | fit_model = gs.SumModel(gs.Spherical, gs.Gaussian, dim=2) 43 | 44 | ############################################################################### 45 | # We fit the sum model to the empirical variogram using GSTools' built-in 46 | # fitting capabilities. We deactivate the nugget fitting to not overparameterize 47 | # our model. 48 | 49 | fit_model.fit_variogram(bin_center, gamma, nugget=False) 50 | print(f"{true_model=}") 51 | print(f" {fit_model=}") 52 | 53 | ############################################################################### 54 | # The variance of a sum model is the sum of the sub variances (:any:`SumModel.vars`) 55 | # from the contained models. The length scale is a weighted sum of the sub 56 | # length scales (:any:`SumModel.len_scales`) where the weights are the ratios 57 | # of the sub variances to the total variance of the sum model. 58 | 59 | print(f"{true_model.var=:.2}, {true_model.len_scale=:.2}") 60 | print(f" {fit_model.var=:.2}, {fit_model.len_scale=:.2}") 61 | 62 | ############################################################################### 63 | # After fitting, we can visualize the empirical variogram alongside the 64 | # fitted sum model and its components. A Sum Model is subscriptable to access 65 | # the individual models its contains. 
66 | 67 | ax = fit_model.plot(x_max=max(bin_center)) 68 | ax.scatter(bin_center, gamma) 69 | # Extract individual components 70 | fit_model[0].plot(x_max=max(bin_center), ax=ax) 71 | fit_model[1].plot(x_max=max(bin_center), ax=ax) 72 | # True models 73 | true_model.plot(x_max=max(bin_center), ax=ax, ls="--", c="C0", label="") 74 | true_model[0].plot(x_max=max(bin_center), ax=ax, ls="--", c="C1", label="") 75 | true_model[1].plot(x_max=max(bin_center), ax=ax, ls="--", c="C2", label="") 76 | 77 | ############################################################################### 78 | # As we can see, the fitted sum model closely matches the empirical variogram, 79 | # demonstrating its ability to capture multi-scale variability effectively. 80 | # The "true" variograms are shown with dashed lines for comparison. 81 | -------------------------------------------------------------------------------- /examples/12_sum_model/README.rst: -------------------------------------------------------------------------------- 1 | Summing Covariance Models 2 | ========================= 3 | 4 | In geostatistics, the spatial relations of natural phenomena is often represented using covariance models, 5 | which describe how values of a property correlate over distance. 6 | A single covariance model may capture specific features of the spatial correlation, such as smoothness or the range of influence. 7 | However, many real-world spatial processes are complex, involving multiple overlapping structures 8 | that cannot be adequately described by a single covariance model. 9 | 10 | This is where **sum models** come into play. 11 | A sum model combines multiple covariance models into a single representation, 12 | allowing for a more flexible and comprehensive description of spatial variability. 13 | By summing covariance models, we can: 14 | 15 | 1. **Capture Multi-Scale Variability:** Many spatial phenomena exhibit variability at different scales. 
16 | For example, soil properties may have small-scale variation due to local heterogeneities and large-scale variation due to regional trends. 17 | A sum model can combine short-range and long-range covariance models to reflect this behavior. 18 | 2. **Improve Model Fit and Prediction Accuracy:** By combining models, sum models can better match empirical variograms or other observed data, 19 | leading to more accurate predictions in kriging or simulation tasks. 20 | 3. **Enhance Interpretability:** Each component of a sum model can be associated with a specific spatial process or scale, 21 | providing insights into the underlying mechanisms driving spatial variability. 22 | 23 | The new :any:`SumModel` introduced in GSTools makes it straightforward to define and work with such composite covariance structures. 24 | It allows users to combine any number of base models, each with its own parameters, in a way that is both intuitive and computationally efficient. 25 | 26 | In the following tutorials, we'll explore how to use the :any:`SumModel` in GSTools, 27 | including practical examples that demonstrate its utility in different scenarios. 28 | 29 | Examples 30 | -------- 31 | -------------------------------------------------------------------------------- /src/gstools/config.py: -------------------------------------------------------------------------------- 1 | """ 2 | GStools subpackage providing global variables. 3 | 4 | .. 
currentmodule:: gstools.config 5 | 6 | """ 7 | 8 | NUM_THREADS = None 9 | 10 | # pylint: disable=W0611 11 | try: # pragma: no cover 12 | import gstools_core 13 | 14 | _GSTOOLS_CORE_AVAIL = True 15 | USE_GSTOOLS_CORE = True 16 | except ImportError: 17 | _GSTOOLS_CORE_AVAIL = False 18 | USE_GSTOOLS_CORE = False 19 | -------------------------------------------------------------------------------- /src/gstools/covmodel/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | GStools subpackage providing a set of handy covariance models. 3 | 4 | .. currentmodule:: gstools.covmodel 5 | 6 | Subpackages 7 | ^^^^^^^^^^^ 8 | 9 | .. autosummary:: 10 | :toctree: 11 | 12 | plot 13 | 14 | Covariance Base-Class 15 | ^^^^^^^^^^^^^^^^^^^^^ 16 | Class to construct user defined covariance models 17 | 18 | .. autosummary:: 19 | :toctree: 20 | 21 | CovModel 22 | SumModel 23 | 24 | Covariance Models 25 | ^^^^^^^^^^^^^^^^^ 26 | Standard Covariance Models 27 | 28 | .. autosummary:: 29 | :toctree: 30 | 31 | Nugget 32 | Gaussian 33 | Exponential 34 | Matern 35 | Integral 36 | Stable 37 | Rational 38 | Cubic 39 | Linear 40 | Circular 41 | Spherical 42 | HyperSpherical 43 | SuperSpherical 44 | JBessel 45 | 46 | Truncated Power Law Covariance Models 47 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 48 | 49 | .. 
autosummary:: 50 | :toctree: 51 | 52 | TPLGaussian 53 | TPLExponential 54 | TPLStable 55 | TPLSimple 56 | """ 57 | 58 | from gstools.covmodel.base import CovModel, SumModel 59 | from gstools.covmodel.models import ( 60 | Circular, 61 | Cubic, 62 | Exponential, 63 | Gaussian, 64 | HyperSpherical, 65 | Integral, 66 | JBessel, 67 | Linear, 68 | Matern, 69 | Nugget, 70 | Rational, 71 | Spherical, 72 | Stable, 73 | SuperSpherical, 74 | ) 75 | from gstools.covmodel.tpl_models import ( 76 | TPLExponential, 77 | TPLGaussian, 78 | TPLSimple, 79 | TPLStable, 80 | ) 81 | 82 | __all__ = [ 83 | "CovModel", 84 | "SumModel", 85 | "Nugget", 86 | "Gaussian", 87 | "Exponential", 88 | "Matern", 89 | "Integral", 90 | "Stable", 91 | "Rational", 92 | "Cubic", 93 | "Linear", 94 | "Circular", 95 | "Spherical", 96 | "HyperSpherical", 97 | "SuperSpherical", 98 | "JBessel", 99 | "TPLGaussian", 100 | "TPLExponential", 101 | "TPLStable", 102 | "TPLSimple", 103 | ] 104 | -------------------------------------------------------------------------------- /src/gstools/field/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | GStools subpackage providing tools for spatial random fields. 3 | 4 | .. currentmodule:: gstools.field 5 | 6 | Subpackages 7 | ^^^^^^^^^^^ 8 | 9 | .. autosummary:: 10 | :toctree: 11 | 12 | generator 13 | upscaling 14 | 15 | Spatial Random Field 16 | ^^^^^^^^^^^^^^^^^^^^ 17 | 18 | .. autosummary:: 19 | :toctree: 20 | 21 | SRF 22 | CondSRF 23 | 24 | Field Base Class 25 | ^^^^^^^^^^^^^^^^ 26 | 27 | .. autosummary:: 28 | :toctree: 29 | 30 | Field 31 | 32 | Plurigaussian Simulation 33 | ^^^^^^^^^^^^^^^^^^^^^^^^ 34 | 35 | .. 
def var_coarse_graining(model, point_volumes=0.0):
    r"""Coarse Graining procedure to upscale the variance for uniform flow.

    Applies the 'Coarse Graining' upscaling procedure of [Attinger03]_ to
    the groundwater flow equation under uniform flow on a lognormal
    conductivity field with a gaussian covariance function. The point
    volumes are interpreted as hypercubes and the resulting filter
    edge-length :math:`\lambda = V^{1/d}` rescales the variance:

    .. math::
        \sigma^2\left(\lambda\right) =
        \sigma^2\cdot\left(
        \frac{\ell^2}{\ell^2+\left(\frac{\lambda}{2}\right)^2}
        \right)^{\frac{d}{2}}

    The scale-dependent length scale of the upscaled field is ignored here.

    Parameters
    ----------
    model : :any:`CovModel`
        Covariance Model used for the field.
    point_volumes : :class:`float` or :class:`numpy.ndarray`
        Volumes of the elements at the given points. Default: ``0``

    Returns
    -------
    scaled_var : :class:`float` or :class:`numpy.ndarray`
        The upscaled variance

    References
    ----------
    .. [Attinger03] Attinger, S. 2003,
       ''Generalized coarse graining procedures for flow in porous media'',
       Computational Geosciences, 7(4), 253-273.
    """
    # the derivation assumes a pure gaussian variogram without nugget
    if not np.isclose(model.nugget, 0):
        warnings.warn(
            "var_coarse_graining: non-zero nugget will violate upscaling!"
        )
    # edge length of a hypercube with the given volume(s)
    filter_size = point_volumes ** (1.0 / model.dim)
    len_scale_sq = model.len_scale**2
    # scale-dependent variance reduction factor (see formula above)
    reduction = (
        len_scale_sq / (len_scale_sq + filter_size**2 / 4)
    ) ** (model.dim / 2.0)
    return model.sill * reduction


def var_no_scaling(model, *args, **kwargs):
    r"""Dummy function to bypass scaling.

    Returns the model sill unchanged, ignoring any point volumes.

    Parameters
    ----------
    model : :any:`CovModel`
        Covariance Model used for the field.

    Returns
    -------
    var : :class:`float`
        The model variance.
    """
    return model.sill
autosummary:: 10 | :toctree: 11 | 12 | Krige 13 | Simple 14 | Ordinary 15 | Universal 16 | ExtDrift 17 | Detrended 18 | """ 19 | 20 | from gstools.krige.base import Krige 21 | from gstools.krige.methods import ( 22 | Detrended, 23 | ExtDrift, 24 | Ordinary, 25 | Simple, 26 | Universal, 27 | ) 28 | 29 | __all__ = ["Krige", "Simple", "Ordinary", "Universal", "ExtDrift", "Detrended"] 30 | -------------------------------------------------------------------------------- /src/gstools/krige/tools.py: -------------------------------------------------------------------------------- 1 | """ 2 | GStools subpackage providing tools for Kriging. 3 | 4 | .. currentmodule:: gstools.krige.tools 5 | 6 | The following classes and functions are provided 7 | 8 | .. autosummary:: 9 | set_condition 10 | get_drift_functions 11 | """ 12 | 13 | # pylint: disable=C0103 14 | from itertools import combinations_with_replacement 15 | 16 | import numpy as np 17 | 18 | __all__ = ["set_condition", "get_drift_functions"] 19 | 20 | 21 | def set_condition(cond_pos, cond_val, dim): 22 | """ 23 | Set the conditions for kriging. 24 | 25 | Parameters 26 | ---------- 27 | cond_pos : :class:`list` 28 | the position tuple of the conditions (x, [y, z]) 29 | cond_val : :class:`numpy.ndarray` 30 | the values of the conditions (nan values will be ignored) 31 | dim : :class:`int`, optional 32 | Spatial dimension 33 | 34 | Raises 35 | ------ 36 | ValueError 37 | If the given data does not match the given dimension. 
def get_drift_functions(dim, drift_type):
    """
    Get functions for a given drift type in universal kriging.

    Parameters
    ----------
    dim : :class:`int`
        Given dimension.
    drift_type : :class:`str` or :class:`int`
        Drift type: 'linear' or 'quadratic' or an integer for the polynomial
        order of the drift type. (linear equals 1, quadratic equals 2 ...)

    Returns
    -------
    :class:`list` of :any:`callable`
        List of drift functions, one monomial per multi-index up to the
        requested polynomial order (ordered by increasing degree).
    """
    # translate the named drift types to their polynomial order
    if drift_type in ("lin", "linear"):
        order = 1
    elif drift_type in ("quad", "quadratic"):
        order = 2
    else:
        order = int(drift_type)
    # one monomial per (multiset) combination of axes for each degree
    return [
        _f_factory(select)
        for degree in range(order)
        for select in combinations_with_replacement(range(dim), degree + 1)
    ]


def _f_factory(select):
    """Create a monomial drift function multiplying the selected axes."""

    def monomial(*pos):
        prod = 1.0
        for axis in select:
            prod = prod * np.asarray(pos[axis])
        return prod

    return monomial
autosummary:: 31 | :toctree: 32 | 33 | apply_mean_norm_trend 34 | remove_trend_norm_mean 35 | """ 36 | 37 | from gstools.normalizer.base import Normalizer 38 | from gstools.normalizer.methods import ( 39 | BoxCox, 40 | BoxCoxShift, 41 | LogNormal, 42 | Manly, 43 | Modulus, 44 | YeoJohnson, 45 | ) 46 | from gstools.normalizer.tools import ( 47 | apply_mean_norm_trend, 48 | remove_trend_norm_mean, 49 | ) 50 | 51 | __all__ = [ 52 | "Normalizer", 53 | "LogNormal", 54 | "BoxCox", 55 | "BoxCoxShift", 56 | "YeoJohnson", 57 | "Modulus", 58 | "Manly", 59 | "apply_mean_norm_trend", 60 | "remove_trend_norm_mean", 61 | ] 62 | -------------------------------------------------------------------------------- /src/gstools/random/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | GStools subpackage for random number generation. 3 | 4 | .. currentmodule:: gstools.random 5 | 6 | Random Number Generator 7 | ^^^^^^^^^^^^^^^^^^^^^^^ 8 | 9 | .. autosummary:: 10 | :toctree: 11 | 12 | RNG 13 | 14 | Seed Generator 15 | ^^^^^^^^^^^^^^ 16 | 17 | .. autosummary:: 18 | :toctree: 19 | 20 | MasterRNG 21 | 22 | Distribution factory 23 | ^^^^^^^^^^^^^^^^^^^^ 24 | 25 | .. autosummary:: 26 | :toctree: 27 | 28 | dist_gen 29 | 30 | ---- 31 | """ 32 | 33 | from gstools.random.rng import RNG 34 | from gstools.random.tools import MasterRNG, dist_gen 35 | 36 | __all__ = ["RNG", "MasterRNG", "dist_gen"] 37 | -------------------------------------------------------------------------------- /src/gstools/tools/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | GStools subpackage providing miscellaneous tools. 3 | 4 | .. currentmodule:: gstools.tools 5 | 6 | Export 7 | ^^^^^^ 8 | 9 | .. 
autosummary:: 10 | :toctree: 11 | 12 | vtk_export 13 | vtk_export_structured 14 | vtk_export_unstructured 15 | to_vtk 16 | to_vtk_structured 17 | to_vtk_unstructured 18 | 19 | Special functions 20 | ^^^^^^^^^^^^^^^^^ 21 | 22 | .. autosummary:: 23 | :toctree: 24 | 25 | confidence_scaling 26 | inc_gamma 27 | inc_gamma_low 28 | exp_int 29 | inc_beta 30 | tplstable_cor 31 | tpl_exp_spec_dens 32 | tpl_gau_spec_dens 33 | 34 | Geometric 35 | ^^^^^^^^^ 36 | 37 | .. autosummary:: 38 | :toctree: 39 | 40 | rotated_main_axes 41 | set_angles 42 | set_anis 43 | no_of_angles 44 | rotation_planes 45 | givens_rotation 46 | matrix_rotate 47 | matrix_derotate 48 | matrix_isotropify 49 | matrix_anisotropify 50 | matrix_isometrize 51 | matrix_anisometrize 52 | ang2dir 53 | generate_grid 54 | generate_st_grid 55 | 56 | Misc 57 | ^^^^ 58 | 59 | .. autosummary:: 60 | EARTH_RADIUS 61 | KM_SCALE 62 | DEGREE_SCALE 63 | RADIAN_SCALE 64 | 65 | ---- 66 | 67 | .. autodata:: EARTH_RADIUS 68 | 69 | .. autodata:: KM_SCALE 70 | 71 | .. autodata:: DEGREE_SCALE 72 | 73 | .. 
autodata:: RADIAN_SCALE 74 | """ 75 | 76 | from gstools.tools.export import ( 77 | to_vtk, 78 | to_vtk_structured, 79 | to_vtk_unstructured, 80 | vtk_export, 81 | vtk_export_structured, 82 | vtk_export_unstructured, 83 | ) 84 | from gstools.tools.geometric import ( 85 | ang2dir, 86 | generate_grid, 87 | generate_st_grid, 88 | givens_rotation, 89 | matrix_anisometrize, 90 | matrix_anisotropify, 91 | matrix_derotate, 92 | matrix_isometrize, 93 | matrix_isotropify, 94 | matrix_rotate, 95 | no_of_angles, 96 | rotated_main_axes, 97 | rotation_planes, 98 | set_angles, 99 | set_anis, 100 | ) 101 | from gstools.tools.special import ( 102 | confidence_scaling, 103 | exp_int, 104 | inc_beta, 105 | inc_gamma, 106 | inc_gamma_low, 107 | tpl_exp_spec_dens, 108 | tpl_gau_spec_dens, 109 | tplstable_cor, 110 | ) 111 | 112 | EARTH_RADIUS = 6371.0 113 | """float: earth radius for WGS84 ellipsoid in km""" 114 | 115 | KM_SCALE = 6371.0 116 | """float: earth radius for WGS84 ellipsoid in km""" 117 | 118 | DEGREE_SCALE = 57.29577951308232 119 | """float: radius for unit sphere in degree""" 120 | 121 | RADIAN_SCALE = 1.0 122 | """float: radius for unit sphere""" 123 | 124 | 125 | __all__ = [ 126 | "vtk_export", 127 | "vtk_export_structured", 128 | "vtk_export_unstructured", 129 | "to_vtk", 130 | "to_vtk_structured", 131 | "to_vtk_unstructured", 132 | "confidence_scaling", 133 | "inc_gamma", 134 | "inc_gamma_low", 135 | "exp_int", 136 | "inc_beta", 137 | "tplstable_cor", 138 | "tpl_exp_spec_dens", 139 | "tpl_gau_spec_dens", 140 | "set_angles", 141 | "set_anis", 142 | "no_of_angles", 143 | "rotation_planes", 144 | "givens_rotation", 145 | "matrix_rotate", 146 | "matrix_derotate", 147 | "matrix_isotropify", 148 | "matrix_anisotropify", 149 | "matrix_isometrize", 150 | "matrix_anisometrize", 151 | "rotated_main_axes", 152 | "ang2dir", 153 | "generate_grid", 154 | "generate_st_grid", 155 | "EARTH_RADIUS", 156 | "KM_SCALE", 157 | "DEGREE_SCALE", 158 | "RADIAN_SCALE", 159 | ] 160 | 
-------------------------------------------------------------------------------- /src/gstools/transform/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | GStools subpackage providing transformations to post-process normal fields. 3 | 4 | .. currentmodule:: gstools.transform 5 | 6 | Wrapper 7 | ^^^^^^^ 8 | 9 | .. autosummary:: 10 | :toctree: 11 | 12 | apply 13 | 14 | Field Transformations 15 | ^^^^^^^^^^^^^^^^^^^^^ 16 | 17 | .. autosummary:: 18 | :toctree: 19 | 20 | binary 21 | discrete 22 | boxcox 23 | zinnharvey 24 | normal_force_moments 25 | normal_to_lognormal 26 | normal_to_uniform 27 | normal_to_arcsin 28 | normal_to_uquad 29 | apply_function 30 | 31 | Array Transformations 32 | ^^^^^^^^^^^^^^^^^^^^^ 33 | 34 | .. autosummary:: 35 | :toctree: 36 | 37 | array_discrete 38 | array_boxcox 39 | array_zinnharvey 40 | array_force_moments 41 | array_to_lognormal 42 | array_to_uniform 43 | array_to_arcsin 44 | array_to_uquad 45 | 46 | ---- 47 | """ 48 | 49 | from gstools.transform.array import ( 50 | array_boxcox, 51 | array_discrete, 52 | array_force_moments, 53 | array_to_arcsin, 54 | array_to_lognormal, 55 | array_to_uniform, 56 | array_to_uquad, 57 | array_zinnharvey, 58 | ) 59 | from gstools.transform.field import ( 60 | apply, 61 | apply_function, 62 | binary, 63 | boxcox, 64 | discrete, 65 | normal_force_moments, 66 | normal_to_arcsin, 67 | normal_to_lognormal, 68 | normal_to_uniform, 69 | normal_to_uquad, 70 | zinnharvey, 71 | ) 72 | 73 | __all__ = [ 74 | "apply", 75 | "apply_function", 76 | "binary", 77 | "discrete", 78 | "boxcox", 79 | "zinnharvey", 80 | "normal_force_moments", 81 | "normal_to_lognormal", 82 | "normal_to_uniform", 83 | "normal_to_arcsin", 84 | "normal_to_uquad", 85 | "array_discrete", 86 | "array_boxcox", 87 | "array_zinnharvey", 88 | "array_force_moments", 89 | "array_to_lognormal", 90 | "array_to_uniform", 91 | "array_to_arcsin", 92 | "array_to_uquad", 93 | ] 94 | 
def _sturges(pnt_cnt):
    """Bin count after Sturges' rule for the given number of points."""
    return int(np.ceil(2 * np.log2(pnt_cnt) + 1))


def standard_bins(
    pos=None,
    dim=2,
    latlon=False,
    mesh_type="unstructured",
    bin_no=None,
    max_dist=None,
    geo_scale=RADIAN_SCALE,
):
    r"""
    Get standard binning.

    Parameters
    ----------
    pos : :class:`list`, optional
        the position tuple, containing either the point coordinates (x, y, ...)
        or the axes descriptions (for mesh_type='structured')
    dim : :class:`int`, optional
        Field dimension.
    latlon : :class:`bool`, optional
        Whether the data is representing 2D fields on earths surface described
        by latitude and longitude. When using this, the estimator will
        use great-circle distance for variogram estimation.
        Note, that only an isotropic variogram can be estimated and a
        ValueError will be raised, if a direction was specified.
        Bin edges need to be given in radians in this case.
        Default: False
    mesh_type : :class:`str`, optional
        'structured' / 'unstructured', indicates whether the pos tuple
        describes the axis or the point coordinates.
        Default: `'unstructured'`
    bin_no : :class:`int`, optional
        number of bins to create. If None is given, will be determined by
        Sturges' rule from the number of points.
        Default: None
    max_dist : :class:`float`, optional
        Cut of length for the bins. If None is given, it will be set to one
        third of the box-diameter from the given points.
        Default: None
    geo_scale : :class:`float`, optional
        Geographic unit scaling in case of latlon coordinates to get a
        meaningful bins unit.
        By default, bins are assumed to be in radians with latlon=True.
        Can be set to :any:`KM_SCALE` to have bins in km or
        :any:`DEGREE_SCALE` to have bins in degrees.
        Default: :any:`RADIAN_SCALE`

    Returns
    -------
    :class:`numpy.ndarray`
        The generated bin edges.

    Notes
    -----
    Internally uses double precision and also returns doubles.
    """
    # latlon data always lives on a 2D sphere surface
    dim = 2 if latlon else int(dim)
    # derive defaults from the point cloud only when something is missing
    if bin_no is None or max_dist is None:
        if pos is None:
            raise ValueError("standard_bins: no pos tuple given.")
        if mesh_type != "unstructured":
            pos = generate_grid(format_struct_pos_dim(pos, dim)[0])
        else:
            pos = np.asarray(pos, dtype=np.double).reshape(dim, -1)
        if latlon:
            pos = latlon2pos(pos, radius=geo_scale)
        pnt_cnt = len(pos[0])
        # diameter of the axis-aligned bounding box of all points
        bounds = np.asarray([[np.min(axis), np.max(axis)] for axis in pos])
        diam = np.linalg.norm(bounds[:, 0] - bounds[:, 1])
        # convert diameter to great-circle distance if using latlon
        if latlon:
            diam = chordal_to_great_circle(diam, geo_scale)
        bin_no = _sturges(pnt_cnt) if bin_no is None else int(bin_no)
        max_dist = diam / 3 if max_dist is None else float(max_dist)
    return np.linspace(0, max_dist, num=bin_no + 1, dtype=np.double)
size=1000) 34 | y = rng.randint(0, 100, size=1000) 35 | model = Exponential( 36 | dim=2, var=1, len_scale=[12.0, 3.0], angles=np.pi / 8.0 37 | ) 38 | self.srf_unstructured = SRF(model, seed=20170519) 39 | self.srf_unstructured([x, y]) 40 | 41 | def tearDown(self): 42 | # Remove the test data directory after the test 43 | shutil.rmtree(self.test_dir) 44 | 45 | @unittest.skipIf(not HAS_PYVISTA, "PyVista is not installed.") 46 | def test_pyvista(self): 47 | mesh = self.srf_structured.to_pyvista() 48 | self.assertIsInstance(mesh, pv.RectilinearGrid) 49 | mesh = self.srf_unstructured.to_pyvista() 50 | self.assertIsInstance(mesh, pv.UnstructuredGrid) 51 | 52 | def test_pyevtk_export(self): 53 | # Structured 54 | sfilename = os.path.join(self.test_dir, "structured") 55 | self.srf_structured.vtk_export(sfilename) 56 | self.assertTrue(os.path.isfile(sfilename + ".vtr")) 57 | # Unstructured 58 | ufilename = os.path.join(self.test_dir, "unstructured") 59 | self.srf_unstructured.vtk_export(ufilename) 60 | self.assertTrue(os.path.isfile(ufilename + ".vtu")) 61 | 62 | 63 | if __name__ == "__main__": 64 | unittest.main() 65 | -------------------------------------------------------------------------------- /tests/test_field.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | This is the unittest of SRF class. 
4 | """ 5 | 6 | import unittest 7 | 8 | import numpy as np 9 | 10 | import gstools as gs 11 | 12 | 13 | class TestField(unittest.TestCase): 14 | def setUp(self): 15 | self.cov_model = gs.Gaussian(dim=2, var=1.5, len_scale=4.0) 16 | rng = np.random.RandomState(123018) 17 | x = rng.uniform(0.0, 10, 100) 18 | y = rng.uniform(0.0, 10, 100) 19 | self.field = rng.uniform(0.0, 10, 100) 20 | self.pos = np.array([x, y]) 21 | 22 | def test_standalone(self): 23 | fld = gs.field.Field(dim=2) 24 | fld_cov = gs.field.Field(model=self.cov_model) 25 | field1 = fld(self.pos, self.field) 26 | field2 = fld_cov(self.pos, self.field) 27 | self.assertTrue(np.all(np.isclose(field1, field2))) 28 | self.assertTrue(np.all(np.isclose(field1, self.field))) 29 | 30 | def test_raise(self): 31 | # vector field on latlon 32 | fld = gs.field.Field(gs.Gaussian(latlon=True), value_type="vector") 33 | self.assertRaises(ValueError, fld, [1, 2], [1, 2]) 34 | # no pos tuple present 35 | fld = gs.field.Field(dim=2) 36 | self.assertRaises(ValueError, fld.post_field, [1, 2]) 37 | # wrong model type 38 | with self.assertRaises(ValueError): 39 | gs.field.Field(model=3.1415) 40 | # no model and no dim given 41 | with self.assertRaises(ValueError): 42 | gs.field.Field() 43 | # wrong value type 44 | with self.assertRaises(ValueError): 45 | gs.field.Field(dim=2, value_type="complex") 46 | # wrong mean shape 47 | with self.assertRaises(ValueError): 48 | gs.field.Field(dim=3, mean=[1, 2]) 49 | 50 | def test_pos_compare(self): 51 | fld = gs.field.Field(dim=1) 52 | fld.set_pos([1, 2]) 53 | fld._dim = 2 54 | info = fld.set_pos([[1], [2]], info=True) 55 | self.assertTrue(info["deleted"]) 56 | info = fld.set_pos([[2], [3]], info=True) 57 | self.assertTrue(info["deleted"]) 58 | 59 | def test_magic(self): 60 | fld = gs.field.Field(dim=1) 61 | f1 = np.array([0, 0], dtype=np.double) 62 | f2 = np.array([2, 3], dtype=np.double) 63 | fld([1, 2], store="f1") # default field with zeros 64 | fld([1, 2], f2, store="f2") 65 | 
fields1 = fld[:] 66 | fields2 = fld[[0, 1]] 67 | fields3 = fld[["f1", "f2"]] 68 | fields4 = fld.all_fields 69 | self.assertTrue(np.allclose([f1, f2], fields1)) 70 | self.assertTrue(np.allclose([f1, f2], fields2)) 71 | self.assertTrue(np.allclose([f1, f2], fields3)) 72 | self.assertTrue(np.allclose([f1, f2], fields4)) 73 | self.assertEqual(len(fld), 2) 74 | self.assertTrue("f1" in fld) 75 | self.assertTrue("f2" in fld) 76 | self.assertFalse("f3" in fld) 77 | # subscription 78 | with self.assertRaises(KeyError): 79 | fld["f3"] 80 | with self.assertRaises(KeyError): 81 | del fld["f3"] 82 | with self.assertRaises(KeyError): 83 | del fld[["f3"]] 84 | del fld["f1"] 85 | self.assertFalse("f1" in fld) 86 | fld([1, 2], f1, store="f1") 87 | del fld[-1] 88 | self.assertFalse("f1" in fld) 89 | fld([1, 2], f1, store="f1") 90 | del fld[:] 91 | self.assertEqual(len(fld), 0) 92 | fld([1, 2], f1, store="f1") 93 | del fld.field_names 94 | self.assertEqual(len(fld), 0) 95 | # store config (missing check) 96 | name, save = fld.get_store_config(store="fld", fld_cnt=1) 97 | self.assertEqual(name, ["fld"]) 98 | self.assertTrue(save[0]) 99 | 100 | def test_reuse(self): 101 | fld = gs.field.Field(dim=1) 102 | # no pos tuple 103 | with self.assertRaises(ValueError): 104 | fld() 105 | # no field shape 106 | with self.assertRaises(ValueError): 107 | fld.post_field([1, 2]) 108 | # bad name 109 | fld.set_pos([1, 2]) 110 | with self.assertRaises(ValueError): 111 | fld.post_field([1, 2], process=False, name=0) 112 | # incompatible reuse 113 | with self.assertRaises(ValueError): 114 | fld.structured() 115 | fld.set_pos([1, 2], "structured") 116 | with self.assertRaises(ValueError): 117 | fld.unstructured() 118 | 119 | 120 | if __name__ == "__main__": 121 | unittest.main() 122 | -------------------------------------------------------------------------------- /tests/test_fouriergen.py: -------------------------------------------------------------------------------- 1 | """ 2 | This is the unittest 
of the Fourier class. 3 | """ 4 | 5 | import unittest 6 | 7 | import numpy as np 8 | 9 | import gstools as gs 10 | 11 | 12 | class TestFourier(unittest.TestCase): 13 | def setUp(self): 14 | self.seed = 19900408 15 | self.cov_model_1d = gs.Gaussian(dim=1, var=0.5, len_scale=10.0) 16 | self.cov_model_2d = gs.Gaussian(dim=2, var=2.0, len_scale=30.0) 17 | self.cov_model_3d = gs.Gaussian(dim=3, var=2.1, len_scale=21.0) 18 | self.L = [80, 30, 91] 19 | self.x = np.linspace(0, self.L[0], 11) 20 | self.y = np.linspace(0, self.L[1], 31) 21 | self.z = np.linspace(0, self.L[2], 13) 22 | 23 | self.mode_no = [12, 6, 14] 24 | 25 | self.srf_1d = gs.SRF( 26 | self.cov_model_1d, 27 | generator="Fourier", 28 | mode_no=[self.mode_no[0]], 29 | period=[self.L[0]], 30 | seed=self.seed, 31 | ) 32 | self.srf_2d = gs.SRF( 33 | self.cov_model_2d, 34 | generator="Fourier", 35 | mode_no=self.mode_no[:2], 36 | period=self.L[:2], 37 | seed=self.seed, 38 | ) 39 | self.srf_3d = gs.SRF( 40 | self.cov_model_3d, 41 | generator="Fourier", 42 | mode_no=self.mode_no, 43 | period=self.L, 44 | seed=self.seed, 45 | ) 46 | 47 | def test_1d(self): 48 | field = self.srf_1d((self.x,), mesh_type="structured") 49 | self.assertAlmostEqual(field[0], 0.6236929351309081) 50 | 51 | def test_2d(self): 52 | field = self.srf_2d((self.x, self.y), mesh_type="structured") 53 | self.assertAlmostEqual(field[0, 0], -0.1431996611581266) 54 | 55 | def test_3d(self): 56 | field = self.srf_3d((self.x, self.y, self.z), mesh_type="structured") 57 | self.assertAlmostEqual(field[0, 0, 0], -1.0433325279452803) 58 | 59 | def test_periodicity_1d(self): 60 | field = self.srf_1d((self.x,), mesh_type="structured") 61 | self.assertAlmostEqual(field[0], field[-1]) 62 | 63 | def test_periodicity_2d(self): 64 | field = self.srf_2d((self.x, self.y), mesh_type="structured") 65 | self.assertAlmostEqual( 66 | field[0, len(self.y) // 2], field[-1, len(self.y) // 2] 67 | ) 68 | self.assertAlmostEqual( 69 | field[len(self.x) // 2, 0], 
field[len(self.x) // 2, -1] 70 | ) 71 | 72 | def test_periodicity_3d(self): 73 | field = self.srf_3d((self.x, self.y, self.z), mesh_type="structured") 74 | self.assertAlmostEqual( 75 | field[0, len(self.y) // 2, 0], field[-1, len(self.y) // 2, 0] 76 | ) 77 | self.assertAlmostEqual(field[0, 0, 0], field[0, -1, 0]) 78 | self.assertAlmostEqual( 79 | field[len(self.x) // 2, len(self.y) // 2, 0], 80 | field[len(self.x) // 2, len(self.y) // 2, -1], 81 | ) 82 | 83 | def test_setters(self): 84 | new_period = [5, 10] 85 | self.srf_2d.generator.period = new_period 86 | np.testing.assert_almost_equal( 87 | self.srf_2d.generator.period, 88 | np.array(new_period), 89 | ) 90 | new_mode_no = [6, 6] 91 | self.srf_2d.generator.mode_no = new_mode_no 92 | np.testing.assert_almost_equal( 93 | self.srf_2d.generator.mode_no, 94 | np.array(new_mode_no), 95 | ) 96 | 97 | def test_assertions(self): 98 | # unstructured grids not supported 99 | self.assertRaises(ValueError, self.srf_2d, (self.x, self.y)) 100 | self.assertRaises( 101 | ValueError, self.srf_2d, (self.x, self.y), mesh_type="unstructured" 102 | ) 103 | self.assertRaises( 104 | ValueError, 105 | gs.SRF, 106 | self.cov_model_2d, 107 | generator="Fourier", 108 | mode_no=[13, 50], 109 | period=self.L[:2], 110 | seed=self.seed, 111 | ) 112 | -------------------------------------------------------------------------------- /tests/test_incomprrandmeth.py: -------------------------------------------------------------------------------- 1 | """ 2 | This is the unittest of the RandMeth class. 
3 | """ 4 | 5 | import copy 6 | import unittest 7 | 8 | import numpy as np 9 | 10 | import gstools as gs 11 | from gstools.field.generator import IncomprRandMeth 12 | 13 | 14 | class TestIncomprRandMeth(unittest.TestCase): 15 | def setUp(self): 16 | self.cov_model_2d = gs.Gaussian(dim=2, var=1.5, len_scale=2.5) 17 | self.cov_model_3d = copy.deepcopy(self.cov_model_2d) 18 | self.cov_model_3d.dim = 3 19 | self.seed = 19031977 20 | self.x_grid = np.linspace(0.0, 10.0, 9) 21 | self.y_grid = np.linspace(-5.0, 5.0, 16) 22 | self.z_grid = np.linspace(-6.0, 7.0, 8) 23 | self.x_tuple = np.linspace(0.0, 10.0, 10) 24 | self.y_tuple = np.linspace(-5.0, 5.0, 10) 25 | self.z_tuple = np.linspace(-6.0, 8.0, 10) 26 | 27 | self.rm_2d = IncomprRandMeth( 28 | self.cov_model_2d, mode_no=100, seed=self.seed 29 | ) 30 | self.rm_3d = IncomprRandMeth( 31 | self.cov_model_3d, mode_no=100, seed=self.seed 32 | ) 33 | 34 | def test_unstruct_2d(self): 35 | modes = self.rm_2d((self.x_tuple, self.y_tuple)) 36 | self.assertAlmostEqual(modes[0, 0], 0.50751115) 37 | self.assertAlmostEqual(modes[0, 1], 1.03291018) 38 | self.assertAlmostEqual(modes[1, 1], -0.22003005) 39 | 40 | def test_unstruct_3d(self): 41 | modes = self.rm_3d((self.x_tuple, self.y_tuple, self.z_tuple)) 42 | self.assertAlmostEqual(modes[0, 0], 0.7924546333550331) 43 | self.assertAlmostEqual(modes[0, 1], 1.660747056686244) 44 | self.assertAlmostEqual(modes[1, 0], -0.28049855754819514) 45 | 46 | def test_assertions(self): 47 | cov_model_1d = gs.Gaussian(dim=1, var=1.5, len_scale=2.5) 48 | self.assertRaises(ValueError, IncomprRandMeth, cov_model_1d) 49 | 50 | def test_vector_mean(self): 51 | srf = gs.SRF( 52 | self.cov_model_2d, 53 | mean=(0.5, 0), 54 | generator="VectorField", 55 | seed=198412031, 56 | ) 57 | srf.structured((self.x_grid, self.y_grid)) 58 | self.assertAlmostEqual(np.mean(srf.field[0]), 1.3025621393180298) 59 | self.assertAlmostEqual(np.mean(srf.field[1]), -0.04729596839446052) 60 | 61 | 62 | if __name__ == "__main__": 
# tail of test_incomprrandmeth.py's __main__ guard
    unittest.main()
-------------------------------------------------------------------------------- /tests/test_randmeth.py: --------------------------------------------------------------------------------
"""
This is the unittest of the RandMeth class.
"""

import copy
import unittest

import numpy as np

from gstools import Gaussian
from gstools.field.generator import RandMeth


class TestRandMeth(unittest.TestCase):
    # Regression tests for the RandMeth generator in 1d/2d/3d,
    # all pinned to the same seed.
    def setUp(self):
        # one Gaussian model per dimension; 2d/3d are deep copies of the
        # 1d model with only ``dim`` changed
        self.cov_model_1d = Gaussian(dim=1, var=1.5, len_scale=3.5)
        self.cov_model_2d = copy.deepcopy(self.cov_model_1d)
        self.cov_model_2d.dim = 2
        self.cov_model_3d = copy.deepcopy(self.cov_model_1d)
        self.cov_model_3d.dim = 3
        self.seed = 19031977
        self.x_grid = np.linspace(0.0, 10.0, 9)
        self.y_grid = np.linspace(-5.0, 5.0, 16)
        self.z_grid = np.linspace(-6.0, 7.0, 8)
        self.x_tuple = np.linspace(0.0, 10.0, 10)
        self.y_tuple = np.linspace(-5.0, 5.0, 10)
        self.z_tuple = np.linspace(-6.0, 8.0, 10)

        # generators share mode count and seed, differ only in model dim
        self.rm_1d = RandMeth(self.cov_model_1d, mode_no=100, seed=self.seed)
        self.rm_2d = RandMeth(self.cov_model_2d, mode_no=100, seed=self.seed)
        self.rm_3d = RandMeth(self.cov_model_3d, mode_no=100, seed=self.seed)

    def test_unstruct_1d(self):
        # pinned regression values for seed 19031977
        modes = self.rm_1d((self.x_tuple,))
        self.assertAlmostEqual(modes[0], 3.19799030)
        self.assertAlmostEqual(modes[1], 2.44848295)

    def test_unstruct_2d(self):
        # pinned regression values for seed 19031977
        modes = self.rm_2d((self.x_tuple, self.y_tuple))
        self.assertAlmostEqual(modes[0], 1.67318010)
        self.assertAlmostEqual(modes[1], 2.12310269)

    def test_unstruct_3d(self):
        # pinned regression values for seed 19031977
        modes = self.rm_3d((self.x_tuple, self.y_tuple, self.z_tuple))
        self.assertAlmostEqual(modes[0], 1.3240234883187239)
        self.assertAlmostEqual(modes[1], 1.6367244277732766)

    def test_reset(self):
        # baseline evaluation before any reset (continues on next chunk line)
        modes = self.rm_2d((self.x_tuple, self.y_tuple))
# tail of TestRandMeth.test_reset (its first lines precede this chunk line)
        self.assertAlmostEqual(modes[0], 1.67318010)
        self.assertAlmostEqual(modes[1], 2.12310269)

        # re-assigning the same seed must reproduce the same field
        self.rm_2d.seed = self.rm_2d.seed
        modes = self.rm_2d((self.x_tuple, self.y_tuple))
        self.assertAlmostEqual(modes[0], 1.67318010)
        self.assertAlmostEqual(modes[1], 2.12310269)

        # a new seed must change the field (pinned values for 74893621)
        self.rm_2d.seed = 74893621
        modes = self.rm_2d((self.x_tuple, self.y_tuple))
        self.assertAlmostEqual(modes[0], -1.94278053)
        self.assertAlmostEqual(modes[1], -1.12401651)

        # swapping the model on rm_1d makes it match the rm_3d results
        self.rm_1d.model = self.cov_model_3d
        modes = self.rm_1d((self.x_tuple, self.y_tuple, self.z_tuple))
        self.assertAlmostEqual(modes[0], 1.3240234883187239)
        self.assertAlmostEqual(modes[1], 1.6367244277732766)

        # changing the mode count also resets the generator (pinned values)
        self.rm_2d.mode_no = 800
        modes = self.rm_2d((self.x_tuple, self.y_tuple))
        self.assertAlmostEqual(modes[0], -3.20809251)
        self.assertAlmostEqual(modes[1], -2.62032778)


if __name__ == "__main__":
    unittest.main()
-------------------------------------------------------------------------------- /tests/test_temporal.py: --------------------------------------------------------------------------------
"""
This is the unittest for temporal related routines.
3 | """ 4 | 5 | import unittest 6 | 7 | import numpy as np 8 | 9 | import gstools as gs 10 | 11 | 12 | class TestTemporal(unittest.TestCase): 13 | def setUp(self): 14 | self.mod = gs.Gaussian( 15 | latlon=True, 16 | temporal=True, 17 | len_scale=1000, 18 | anis=0.5, 19 | geo_scale=gs.KM_SCALE, 20 | ) 21 | 22 | def test_latlon(self): 23 | mod = gs.Gaussian( 24 | latlon=True, temporal=True, angles=[1, 2, 3, 4, 5, 6] 25 | ) 26 | self.assertEqual(mod.dim, 4) 27 | self.assertEqual(mod.field_dim, 3) 28 | self.assertEqual(mod.spatial_dim, 2) 29 | self.assertTrue(np.allclose(mod.angles, 0)) 30 | 31 | mod1 = gs.Gaussian(latlon=True, temporal=True, len_scale=[10, 5]) 32 | mod2 = gs.Gaussian(latlon=True, temporal=True, len_scale=10, anis=0.5) 33 | 34 | self.assertTrue(np.allclose(mod1.anis, mod2.anis)) 35 | self.assertAlmostEqual(mod1.len_scale, mod2.len_scale) 36 | 37 | def test_latlon2pos(self): 38 | self.assertAlmostEqual( 39 | 8, self.mod.anisometrize(self.mod.isometrize((8, 6, 9)))[0, 0] 40 | ) 41 | self.assertAlmostEqual( 42 | 6, self.mod.anisometrize(self.mod.isometrize((8, 6, 9)))[1, 0] 43 | ) 44 | self.assertAlmostEqual( 45 | 9, self.mod.anisometrize(self.mod.isometrize((8, 6, 9)))[2, 0] 46 | ) 47 | self.assertAlmostEqual( 48 | gs.EARTH_RADIUS, 49 | self.mod.isometrize( 50 | self.mod.anisometrize((gs.EARTH_RADIUS, 0, 0, 10)) 51 | )[0, 0], 52 | ) 53 | self.assertAlmostEqual( 54 | 10, 55 | self.mod.isometrize( 56 | self.mod.anisometrize((gs.EARTH_RADIUS, 0, 0, 10)) 57 | )[3, 0], 58 | ) 59 | 60 | def test_rotation(self): 61 | mod = gs.Gaussian( 62 | spatial_dim=3, temporal=True, angles=[1, 2, 3, 4, 5, 6] 63 | ) 64 | self.assertTrue(np.allclose(mod.angles, [1, 2, 3, 0, 0, 0])) 65 | self.assertEqual(mod.dim, 4) 66 | 67 | def test_krige(self): 68 | # auto-fitting latlon-temporal model in kriging not possible 69 | with self.assertRaises(ValueError): 70 | kri = gs.Krige(self.mod, 3 * [[1, 2]], [1, 2], fit_variogram=True) 71 | 72 | def test_field(self): 73 | srf = 
gs.SRF(self.mod) 74 | self.assertTrue(srf.temporal) 75 | 76 | 77 | if __name__ == "__main__": 78 | unittest.main() 79 | --------------------------------------------------------------------------------