├── .gitignore ├── LICENSE ├── README.md ├── build └── lib │ └── pykasso │ ├── __init__.py │ ├── _misc │ ├── log_logo.txt │ ├── parameters.yaml │ ├── statistics.xlsx │ └── statistics_old.xlsx │ ├── _typing │ ├── __init__.py │ └── types.py │ ├── _utils │ ├── __init__.py │ ├── array.py │ ├── datareader.py │ └── validation.py │ ├── _version.py │ ├── analysis │ ├── __init__.py │ └── analysis.py │ ├── core │ ├── __init__.py │ ├── _namespaces.py │ ├── application.py │ ├── grid.py │ ├── main.py │ └── project.py │ ├── model │ ├── __init__.py │ ├── _validations.py │ ├── _wrappers.py │ ├── domain_features │ │ ├── __init__.py │ │ ├── bedrock.py │ │ ├── delimitation.py │ │ ├── domain.py │ │ ├── topography.py │ │ └── watertable.py │ ├── fracturation.py │ ├── geologic_features │ │ ├── __init__.py │ │ ├── faults.py │ │ ├── fractures.py │ │ ├── geologicfeature.py │ │ ├── geology.py │ │ └── surface.py │ └── sks.py │ └── visualization │ ├── __init__.py │ └── visualizer.py ├── environment.yml ├── img ├── animation_01.gif ├── animation_02.gif └── pykasso_banner_logo.png ├── notebooks ├── colab │ ├── colab_01.ipynb │ └── readme.md ├── geometry │ ├── geometry_01.ipynb │ ├── geometry_02.ipynb │ ├── geometry_03.ipynb │ ├── geometry_04.ipynb │ ├── geometry_05.ipynb │ ├── geometry_06.ipynb │ ├── geometry_07.ipynb │ ├── geometry_08.ipynb │ └── readme.md ├── misc │ ├── PDF.ipynb │ ├── conceptual_model.ipynb │ ├── figures.ipynb │ ├── img │ │ ├── PDF_poisson.png │ │ ├── PDF_power.png │ │ ├── PDF_uniform.png │ │ ├── PDF_vonmises.png │ │ ├── cp_conceptual_model.png │ │ ├── cp_faults.png │ │ ├── cp_fractures.png │ │ ├── cp_geology.png │ │ ├── example_01_merge.png │ │ ├── example_03_merge.png │ │ ├── if_o-1-1-1_i-1-1-1.png │ │ ├── if_o-1-1-1_i-1-1.png │ │ ├── if_o-1-1-1_i-1-2-3.png │ │ ├── if_o-1-1-1_i-1-2.png │ │ ├── if_o-1-1-1_i-1.png │ │ ├── if_o-1-1_i-1-1-1.png │ │ ├── if_o-1-1_i-1-1.png │ │ ├── if_o-1-1_i-1-2-3.png │ │ ├── if_o-1-1_i-1-2.png │ │ ├── if_o-1-1_i-1.png │ │ ├── if_o-1_i-1-1-1.png │ │ ├── if_o-1_i-1-1.png │ │ ├── if_o-1_i-1-2-3.png │ │ ├── if_o-1_i-1-2.png │ │ ├── if_o-1_i-1.png │ │ ├── merge_PDF.png │ │ ├── merge_conceptual_model.png │ │ └── merge_importance_factor.png │ ├── importance_factor.ipynb │ └── subdomains.ipynb ├── paper │ ├── example_01.ipynb │ ├── example_02.html │ ├── example_02.ipynb │ ├── example_03.html │ ├── example_03.ipynb │ ├── fig │ │ ├── example_01_cost.png │ │ ├── example_01_fracturation.png │ │ ├── example_01_karst.png │ │ ├── example_01_mean_karstic_network.png │ │ ├── example_01_time.png │ │ ├── example_02_iso_vs_ani.png │ │ ├── example_03_faults_model.png │ │ ├── example_03_fracturation_model.png │ │ ├── example_03_geologic_model.png │ │ └── example_03_karst_model.png │ ├── readme.md │ └── tsanfleuron_data.zip └── readme.md ├── pykasso ├── __init__.py ├── _misc │ ├── cases │ │ └── betteraz │ │ │ ├── inputs │ │ │ ├── betteraz_2D_z430.gslib │ │ │ ├── betteraz_fault_20_2D.gslib │ │ │ ├── betteraz_polygon.txt │ │ │ ├── inlets_01.txt │ │ │ ├── inlets_02.txt │ │ │ └── outlets.txt │ │ │ └── settings │ │ │ ├── betteraz_01.yaml │ │ │ ├── betteraz_case_1.yaml │ │ │ ├── betteraz_case_2.yaml │ │ │ └── betteraz_case_3.yaml │ ├── log_logo.txt │ ├── parameters.yaml │ ├── statistics.xlsx │ └── statistics_old.xlsx ├── _typing │ ├── __init__.py │ └── types.py ├── _utils │ ├── __init__.py │ ├── array.py │ ├── datareader.py │ └── validation.py ├── _version.py ├── analysis │ ├── __init__.py │ └── analysis.py ├── core │ ├── __init__.py │ ├── _namespaces.py │ ├── application.py │ ├── grid.py │ ├── 
main.py │ └── project.py ├── model │ ├── __init__.py │ ├── _validations.py │ ├── _wrappers.py │ ├── domain_features │ │ ├── __init__.py │ │ ├── bedrock.py │ │ ├── delimitation.py │ │ ├── domain.py │ │ ├── topography.py │ │ └── watertable.py │ ├── fracturation.py │ ├── geologic_features │ │ ├── __init__.py │ │ ├── faults.py │ │ ├── fractures.py │ │ ├── geologicfeature.py │ │ ├── geology.py │ │ └── surface.py │ └── sks.py └── visualization │ ├── __init__.py │ └── visualizer.py └── pyproject.toml /.gitignore: -------------------------------------------------------------------------------- 1 | # Build 2 | pykasso.egg-info/ 3 | __pycache__/ 4 | 5 | # Jupyter Notebook 6 | .ipynb_checkpoints 7 | 8 | # IPython 9 | profile_default/ 10 | ipython_config.py 11 | 12 | # Notebooks outputs 13 | notebooks/geometry/geometry_??/** 14 | notebooks/geometry/geometry_???/** 15 | notebooks/paper/example_??/** -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![pyKasso's banner](/img/pykasso_banner_logo.png) 2 | 3 | 4 | 5 | 6 | 7 | 8 | ![license](https://img.shields.io/github/license/randlab/pyKasso) 9 | ![last-commit](https://img.shields.io/github/last-commit/randlab/pyKasso/master) 10 | 11 | 12 | 13 | ## pyKasso: a stochastic karst network simulation tool 14 | 15 | pyKasso is a python3 open-source package intended to simulate easily and quickly discrete karst networks (DKNs) using a geological model, hydrogeological, and structural data. It relies on a pseudo-genetic methodology where stochastic data and fast-marching methods are combined to perform multiple simulations rapidly. The method is based on the stochastic karst simulator developed by Borghi et al (2012). It has been extended to account for anisotropy allowing to simplify the algorithm while accounting better for the geological structure, following the method presented in Fandel et al. (2022). Statistical, geometrical, and topological metrics are computed on the simulated networks and compared with the same statistics computed on real karst network to evaluate the plausibility of the simulations. 16 | 17 | To understand the principles of pyKasso and its syntax, please have a look at this [paper](https://doi.org/10.1016/j.envsoft.2025.106362). 18 | 19 | ![gif_01](/img/animation_01.gif) 20 | ![gif_02](/img/animation_02.gif) 21 | 22 | ## Installation 23 | 24 | > [!IMPORTANT] 25 | > Currently, pyKasso is working with: 26 | > - Python 3.11 27 | > - Python 3.10 28 | > - Python 3.9 29 | 30 | The easiest way to install pyKasso is to use anaconda or miniconda. 31 | 32 | 1. Create a python 3.11 environnement: 33 | ``` 34 | conda create --name pyKasso -c conda-forge python=3.11 35 | conda activate pyKasso 36 | ``` 37 | 38 | 2. Clone the project then install it locally: 39 | ``` 40 | pip install -e git+https://github.com/randlab/pyKasso.git#egg=pykasso 41 | ``` 42 | Or download it then install it locally: 43 | ``` 44 | python -m pip install . 45 | ``` 46 | 47 | 3. Install the hfm package: 48 | ``` 49 | conda config --add channels agd-lbr 50 | conda install hfm 51 | ``` 52 | 53 | An alternative and faster way to do the same is to use miniforge: https://github.com/conda-forge/miniforge 54 | ``` 55 | conda create --name pyKasso -c conda-forge python=3.11 56 | conda activate pyKasso 57 | python -m pip install . 
58 | conda config --add channels agd-lbr 59 | conda install hfm 60 | ``` 61 | 62 | ## Examples 63 | 64 | - Examples developped for the paper: [notebooks/paper/](https://github.com/randlab/pyKasso/tree/master/notebooks/paper) 65 | - Some basic examples illustrating pyKasso's functionalities: [notebooks/geometry/](https://github.com/randlab/pyKasso/tree/master/notebooks/geometry) 66 | - An example to use pyKasso with Google Colab: [notebooks/colab/](https://github.com/randlab/pyKasso/tree/master/notebooks/colab) 67 | 68 | ## Citing pyKasso 69 | 70 | > Miville, F., Renard, P., Fandel, C., Filipponi, M. 2025: pyKasso: An open-source three-dimensional discrete karst network generator. Environmental Modelling & Software, Volume 186, https://doi.org/10.1016/j.envsoft.2025.106362 - [direct link](https://doi.org/10.1016/j.envsoft.2025.106362) 71 | 72 | BibTex: 73 | ``` 74 | @article{MIVILLE2025106362, 75 | title = {pyKasso: An open-source three-dimensional discrete karst network generator}, 76 | journal = {Environmental Modelling & Software}, 77 | volume = {186}, 78 | pages = {106362}, 79 | year = {2025}, 80 | issn = {1364-8152}, 81 | doi = {https://doi.org/10.1016/j.envsoft.2025.106362}, 82 | url = {https://www.sciencedirect.com/science/article/pii/S1364815225000465}, 83 | author = {François Miville and Philippe Renard and Chloé Fandel and Marco Filipponi}, 84 | } 85 | ``` 86 | 87 | ## Other related publications 88 | 89 | - Fandel, C., Miville, F., Ferré, T. et al. 2022: The stochastic simulation of karst conduit network structure using anisotropic fast marching, and its application to a geologically complex alpine karst system. Hydrogeol J 30, 927–946, https://doi.org/10.1007/s10040-022-02464-x 90 | - Borghi, A., Renard, P., Jenni, S. 2012: A pseudo-genetic stochastic model to generate karstic networks, Journal of Hydrology, 414–415, https://doi.org/10.1016/j.jhydrol.2011.11.032. 91 | -------------------------------------------------------------------------------- /build/lib/pykasso/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | pyKasso 3 | ======= 4 | 5 | pyKasso is a python3 open-source package intended to simulate easily and 6 | quickly karst networks using a geological model, hydrogeological, and 7 | structural data. 8 | 9 | License 10 | ------- 11 | Released under the GPL-3.0 license. 12 | Copyright (C) 2025 University of Neuchâtel - CHYN. 13 | - François Miville 14 | - Philippe Renard 15 | - Chloé Fandel 16 | 17 | Available subpackages 18 | --------------------- 19 | core 20 | Karstic conduit network generator 21 | analysis 22 | Karstic conduit network analysis tool 23 | visualization 24 | Karstic conduit network visualization tool 25 | 26 | Utilities 27 | --------- 28 | __version__ 29 | pyKasso version string 30 | """ 31 | 32 | __all__ = [] 33 | 34 | # Import pyKasso's core 35 | from . 
import core 36 | from .core import * 37 | __all__.extend(core.__all__) 38 | 39 | # Import pyKasso version string 40 | from ._version import __version__ 41 | __all__.extend(['__version__']) 42 | -------------------------------------------------------------------------------- /build/lib/pykasso/_misc/log_logo.txt: -------------------------------------------------------------------------------- 1 | _ 2 | | | 3 | _ __ _ _| | ____ _ ___ ___ ___ 4 | | `_ \| | | | |/ / _` / __/ __|/ _ \ 5 | | |_) | |_| | < (_| \__ \__ \ (_) | 6 | | .__/ \__, |_|\_\__,_|___/___/\___/ 7 | | | __/ | 8 | |_| |___/ 9 | -------------------------------------------------------------------------------- /build/lib/pykasso/_misc/parameters.yaml: -------------------------------------------------------------------------------- 1 | ###################### 2 | ### PYKASSO SETTINGS ### 3 | ###################### 4 | 5 | --- 6 | sks: 7 | seed : 0 8 | algorithm : 'Isotropic3' 9 | 10 | ############## 11 | ### Domain ### 12 | ############## 13 | 14 | domain: 15 | delimitation : '' 16 | topography : '' 17 | bedrock : '' 18 | water_level : '' 19 | 20 | ############### 21 | ### Geology ### 22 | ############### 23 | 24 | geology: 25 | data : '' 26 | costs : {} 27 | 28 | faults: 29 | data : '' 30 | costs : {} 31 | 32 | ############## 33 | ### Points ### 34 | ############## 35 | 36 | ### Outlets 37 | outlets: 38 | number : 1 39 | data : '' 40 | shuffle : False 41 | importance : [1] 42 | seed : 1 43 | subdomain : 'domain_bottom' 44 | 45 | ### Inlets 46 | inlets: 47 | number : 30 48 | data : '' 49 | shuffle : False 50 | per_outlet : [1] 51 | importance : [1,1,1] 52 | seed : 2 53 | subdomain : 'domain_surface' 54 | 55 | #################### 56 | ### Fracturation ### 57 | #################### 58 | 59 | fractures: 60 | data : '' 61 | seed : 12 62 | settings : 63 | family_01: 64 | alpha : 2 65 | density : 0.00001 66 | orientation : [340, 20] 67 | dip : [80, 90] 68 | length : [1000, 2000] 69 | family_02: 70 | alpha : 2 71 | density : 0.00001 72 | orientation : [340, 20] 73 | dip : [0, 10] 74 | length : [1000, 2000] 75 | 76 | ############## 77 | ### OTHERS ### 78 | ############## 79 | 80 | verbosity: 81 | logging : 0 82 | agd : 0 83 | karstnet : 0 84 | 85 | ... 
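The parameters.yaml file above is the default settings template shipped with pyKasso; its top-level sections are sks, domain, geology, faults, outlets, inlets, fractures, and verbosity. As a minimal sketch (not part of the package itself, and assuming only that PyYAML is available and that the in-repository copy under pykasso/_misc/ shown in the directory tree is used), the template can be loaded and inspected as follows:

```
import yaml

# Load the default settings template. The path assumes the repository layout
# shown in the tree above; in an installed package the file sits in the
# _misc/ directory next to the code.
with open("pykasso/_misc/parameters.yaml", "r", encoding="utf-8") as f:
    parameters = yaml.safe_load(f)

# Top-level sections defined by the template.
print(sorted(parameters))
# ['domain', 'faults', 'fractures', 'geology', 'inlets', 'outlets', 'sks', 'verbosity']

# A few default values taken directly from the file.
print(parameters["sks"]["algorithm"])             # 'Isotropic3'
print(parameters["inlets"]["number"])             # 30
print(list(parameters["fractures"]["settings"]))  # ['family_01', 'family_02']
```

Empty strings in the template (for example data: '') mark inputs that stay unset until the user provides them, and the final '...' line is simply the YAML end-of-document marker.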
-------------------------------------------------------------------------------- /build/lib/pykasso/_misc/statistics.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/build/lib/pykasso/_misc/statistics.xlsx -------------------------------------------------------------------------------- /build/lib/pykasso/_misc/statistics_old.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/build/lib/pykasso/_misc/statistics_old.xlsx -------------------------------------------------------------------------------- /build/lib/pykasso/_typing/__init__.py: -------------------------------------------------------------------------------- 1 | from .types import ( 2 | Project, 3 | Grid, 4 | Domain, 5 | Delimitation, 6 | Topography, 7 | Bedrock, 8 | WaterLevel, 9 | Geology, 10 | RandomNumberGenerator, 11 | Series, 12 | DataFrame, 13 | Styler, 14 | ) 15 | -------------------------------------------------------------------------------- /build/lib/pykasso/_typing/types.py: -------------------------------------------------------------------------------- 1 | """ 2 | Custom pyKasso's typing. 3 | """ 4 | 5 | # Typing 6 | from typing import TypeVar, TYPE_CHECKING 7 | 8 | from numpy import random 9 | from pandas import core, io 10 | 11 | if TYPE_CHECKING: 12 | from ..core import project as pcp 13 | from ..core import grid as pcg 14 | from ..model.domain_features import domain as pcd 15 | from ..model.geologic_features import geologicfeature as pcgf 16 | 17 | ### Custom internal types 18 | 19 | # Core types 20 | Project = TypeVar('Project', bound='pcp.Project') 21 | Grid = TypeVar('Grid', bound='pcg.Grid') 22 | 23 | # Model types 24 | Domain = TypeVar('Domain', bound='pcd.Domain') 25 | Delimitation = TypeVar('Delimitation', bound='pcd.Delimitation') 26 | Topography = TypeVar('Topography', bound='pcd.Topography') 27 | Bedrock = TypeVar('Bedrock', bound='pcd.Bedrock') 28 | WaterLevel = TypeVar('WaterLevel', bound='pcd.WaterLevel') 29 | Geology = TypeVar('Geology', bound='pcgf.Geology') 30 | 31 | ### Custom external types 32 | 33 | # Numpy 34 | RandomNumberGenerator = TypeVar( 35 | 'RandomNumberGenerator', 36 | bound='random._generator.Generator', 37 | ) 38 | # Pandas 39 | Series = TypeVar('Series', bound='core.series.Series') 40 | DataFrame = TypeVar('DataFrame', bound='core.frame.DataFrame') 41 | Styler = TypeVar('Styler', bound='io.formats.style.Styler') 42 | -------------------------------------------------------------------------------- /build/lib/pykasso/_utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/build/lib/pykasso/_utils/__init__.py -------------------------------------------------------------------------------- /build/lib/pykasso/_utils/array.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module defining array manipulation functions. 3 | """ 4 | 5 | ### External dependencies 6 | import numpy as np 7 | 8 | 9 | def normalize_array(array: np.ndarray) -> np.ndarray: 10 | """ 11 | The function takes a numpy array and returns a new numpy array where each 12 | element is scaled to have values between 0 and 1. 
13 | 14 | Parameters 15 | ---------- 16 | array : np.ndarray 17 | Numpy array to normalize. 18 | 19 | Returns 20 | ------- 21 | np.ndarray 22 | Normalized numpy array, with values ranging from 0 to 1. 23 | """ 24 | min_value = array.min() 25 | max_value = array.max() 26 | normalized = (array - min_value) / (max_value - min_value) 27 | return normalized 28 | -------------------------------------------------------------------------------- /build/lib/pykasso/_utils/validation.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module containing functions used for data flow validation. 3 | """ 4 | 5 | ### Internal dependencies 6 | import os 7 | import logging 8 | 9 | ### External dependencies 10 | import numpy as np 11 | 12 | from ..core._namespaces import ( 13 | SKS_VALID_ALGORITHM_VALUES, 14 | SKS_VALID_MODE_VALUES, 15 | ) 16 | 17 | ### Typing 18 | from typing import Union 19 | 20 | 21 | def is_filepath_valid( 22 | filepath: str, 23 | ) -> Union[FileNotFoundError, bool]: 24 | """ 25 | Test if the filepath is valid. 26 | 27 | Parameters 28 | ---------- 29 | filepath : str. 30 | String defining the filepath. 31 | 32 | Returns 33 | ------- 34 | Union[FileNotFoundError, bool] 35 | Return ``True`` if test pass. 36 | Otherwise raise a ``FileNotFoundError`` exception. 37 | 38 | Raises 39 | ------ 40 | FileNotFoundError 41 | """ 42 | if not os.path.exists(filepath): 43 | msg = ("Filepath '{}' does not exist.".format(filepath)) 44 | raise FileNotFoundError(msg) 45 | else: 46 | return True 47 | 48 | 49 | def is_variable_type_valid( 50 | variable_name: str, 51 | variable_value, 52 | valid_types: tuple, 53 | ) -> Union[TypeError, bool]: 54 | """ 55 | Test if the type of the variable is valid. 56 | 57 | Parameters 58 | ---------- 59 | variable_name : str 60 | Name of the parameter. 61 | variable_value : any 62 | Value of the parameter. 63 | valid_types : tuple 64 | Accepted types. 65 | 66 | Returns 67 | ------- 68 | Union[TypeError, bool] 69 | Return ``True`` if test pass. 70 | Otherwise raise a ``TypeError`` exception. 71 | 72 | Raises 73 | ------ 74 | TypeError 75 | """ 76 | if isinstance(variable_value, valid_types): 77 | return True 78 | else: 79 | msg = ("Parameter '{}' type is invalid. Valid type(s) : {}" 80 | .format(variable_name, valid_types)) 81 | raise TypeError(msg) 82 | 83 | 84 | def is_key_in_dict( 85 | dictionary: dict, 86 | dictionary_name: str, 87 | key_to_test: str, 88 | ) -> Union[KeyError, bool]: 89 | """ 90 | Test key presence in the dictionary. 91 | 92 | Parameters 93 | ---------- 94 | dictionary : dict 95 | Dictionary to test. 96 | dictionary_name : str 97 | Name of the dictionary. 98 | key_to_test : str 99 | Key to verify presence in dictionary. 100 | 101 | Returns 102 | ------- 103 | Union[KeyError, bool] 104 | Return ``True`` if test pass. 105 | Otherwise raise a ``KeyError`` exception. 106 | 107 | Raises 108 | ------ 109 | KeyError 110 | """ 111 | if key_to_test in dictionary: 112 | return True 113 | else: 114 | msg = ("Key '{}' is missing in '{}' dictionary." 115 | .format(key_to_test, dictionary_name)) 116 | raise KeyError(msg) 117 | 118 | 119 | def is_variable_in_list( 120 | variable_name: str, 121 | variable_value, 122 | accepted_values: list, 123 | ) -> Union[ValueError, bool]: 124 | """ 125 | TODO 126 | """ 127 | if variable_value not in accepted_values: 128 | msg = ("Parameter '{}' value is invalid. 
Accepted values : {}" 129 | .format(variable_name, accepted_values)) 130 | raise ValueError(msg) 131 | else: 132 | return True 133 | 134 | 135 | def is_parameter_comparison_valid( 136 | parameter_name: str, 137 | parameter_value, 138 | logical_test: str, 139 | compared_to, 140 | ) -> Union[ValueError, bool]: 141 | """ 142 | Test if the comparision returns true. 143 | 144 | Parameters 145 | ---------- 146 | parameter_name : str 147 | Name of the parameter. 148 | parameter_value : any 149 | Value of the parameter. 150 | logical_test : str 151 | Logical test to use ('>', '>=', '<', '<=', '==', '!='). 152 | compared_to : _type_ 153 | Value to compare. 154 | 155 | Returns 156 | ------- 157 | Union[ValueError, bool] 158 | Return ``True`` if test pass. 159 | Otherwise raise a ``ValueError`` exception. 160 | 161 | Raises 162 | ------ 163 | ValueError 164 | """ 165 | logical_test_text = { 166 | '>': 'greater than', 167 | '>=': 'greater than or equal to', 168 | '<': 'less than', 169 | '<=': 'less than or equal to', 170 | '==': 'equal to', 171 | '!=': 'not equal to' 172 | } 173 | test = str(parameter_value) + logical_test + str(compared_to) 174 | if not eval(test): 175 | msg = ("The value of the '{}' parameter must be {} {}." 176 | .format(parameter_name, 177 | logical_test_text[logical_test], 178 | compared_to)) 179 | raise ValueError(msg) 180 | else: 181 | return True 182 | 183 | ########################## 184 | ### Dictionary testing ### 185 | ########################## 186 | 187 | 188 | def test_sks_settings(settings: dict) -> None: 189 | """ 190 | """ 191 | logger = logging.getLogger("sks.validation") 192 | 193 | ### Test 'seed' value 194 | try: 195 | is_variable_type_valid( 196 | variable_name='seed', 197 | variable_value=settings['seed'], 198 | valid_types=(int), 199 | ) 200 | except TypeError as error: 201 | logger.error(error) 202 | raise 203 | 204 | ### Test 'algorithm' value 205 | try: 206 | is_variable_in_list( 207 | variable_name='algorithm', 208 | variable_value=settings['algorithm'], 209 | accepted_values=SKS_VALID_ALGORITHM_VALUES 210 | ) 211 | except ValueError as error: 212 | logger.error(error) 213 | raise 214 | 215 | ### Test 'costs' value 216 | try: 217 | is_variable_type_valid( 218 | variable_name='costs', 219 | variable_value=settings['costs'], 220 | valid_types=(dict), 221 | ) 222 | except TypeError as error: 223 | logger.error(error) 224 | raise 225 | 226 | ### Test 'factors' value 227 | try: 228 | is_variable_type_valid( 229 | variable_name='factors', 230 | variable_value=settings['factors'], 231 | valid_types=(dict), 232 | ) 233 | except TypeError as error: 234 | logger.error(error) 235 | raise 236 | 237 | ### Test 'mode' value 238 | try: 239 | is_variable_in_list( 240 | variable_name='mode', 241 | variable_value=settings['mode'], 242 | accepted_values=SKS_VALID_MODE_VALUES 243 | ) 244 | except ValueError as error: 245 | logger.error(error) 246 | raise 247 | 248 | return None 249 | 250 | 251 | # def test_geologic_feature_settings(settings: dict) -> None: 252 | # """ 253 | # """ 254 | # return None 255 | 256 | # def test_point_settings(kind: str, settings: dict) -> None: 257 | # """ 258 | # """ 259 | # logger = logging.getLogger("{}.validation".format(kind)) 260 | 261 | # ### Test 'number' value 262 | # try: 263 | # is_variable_type_valid( 264 | # variable_name='number', 265 | # variable_value=settings['number'], 266 | # valid_types=(int), 267 | # ) 268 | # except TypeError as error: 269 | # logger.error(error) 270 | # raise 271 | 272 | # ### Test 'data' value 273 | 274 | 
275 | # ### Test 'shuffle' value
276 | 
277 | 
278 | # ### Test 'importance' value
279 | 
280 | 
281 | # ### Test 'subdomain' value
282 | 
283 | 
284 | # ### Test 'geology' value
285 | 
286 | 
287 | # ### Test 'seed' value
288 | 
289 | 
290 | # return None
291 | 
-------------------------------------------------------------------------------- /build/lib/pykasso/_version.py: --------------------------------------------------------------------------------
1 | import importlib.metadata
2 | 
3 | __version__ = importlib.metadata.version("pykasso")
4 | 
-------------------------------------------------------------------------------- /build/lib/pykasso/analysis/__init__.py: --------------------------------------------------------------------------------
1 | """
2 | ========
3 | analysis
4 | ========
5 | 
6 | A subpackage for discrete karst conduit network analysis.
7 | """
8 | 
9 | __all__ = []
10 | 
-------------------------------------------------------------------------------- /build/lib/pykasso/analysis/analysis.py: --------------------------------------------------------------------------------
1 | """
2 | This module contains a class able to manage project results in order to
3 | perform statistical analysis.
4 | """
5 | 
6 | ### Internal dependencies
7 | import copy
8 | 
9 | ### External dependencies
10 | import numpy as np
11 | import pandas as pd
12 | 
13 | ### Optional dependencies
14 | try:
15 |     import karstnet as kn
16 | except ImportError:
17 |     _has_karstnet = False
18 | else:
19 |     _has_karstnet = True
20 | 
21 | ### Typing
22 | from typing import Union
23 | from ..core.project import Project
24 | from pandas import (DataFrame, Series)
25 | from pandas.io.formats.style import Styler
26 | 
27 | 
28 | def requires_karstnet():
29 |     """
30 |     If the ``karstnet`` package is not installed, raise an ``ImportError``
31 |     exception when a method requiring ``karstnet`` is called.
32 |     """
33 |     def _(function):
34 |         def _wrapper(*args, **kwargs):
35 |             if not _has_karstnet:
36 |                 msg = ("karstnet package is required to do this."
37 |                        " 'pip install -e pykasso[analysis]' to install it.")
38 |                 raise ImportError(msg)
39 |             result = function(*args, **kwargs)
40 |             return result
41 |         return _wrapper
42 |     return _
43 | 
44 | 
45 | class Analyzer():
46 |     """
47 |     This class manages a pyKasso project and provides methods to perform
48 |     statistical analyses.
49 |     """
50 |     def __init__(self,
51 |                  project: Project
52 |                  ) -> None:
53 |         """
54 |         Initialize the class.
55 |         """
56 | 
57 |         # Initialization
58 |         self.project = project
59 |         self.stats = None
60 | 
61 |         # Load reference metrics for statistical karstic network analysis
62 |         self._load_statistics()
63 | 
64 |     def _load_statistics(self) -> None:
65 |         """
66 |         Set the reference metrics for statistical karstic network analysis.
67 |         More details here: https://github.com/karstnet/karstnet
68 |         """
69 |         package_location = self.project._pckg_paths['package_location']
70 |         statistics_file_path = "/../_misc/statistics.xlsx"
71 |         statistics_file_location = package_location + statistics_file_path
72 |         self.stats = pd.read_excel(statistics_file_location).describe()
73 |         return None
74 | 
75 |     @requires_karstnet()
76 |     def compute_metrics(
77 |         self,
78 |         verbose: bool = False,
79 |     ) -> DataFrame:
80 |         """
81 |         Compute the statistical metrics for each simulated discrete karst
82 |         conduit network using the karstnet package.
83 | 
84 |         Parameters
85 |         ----------
86 |         verbose : bool, default: False
87 |             Verbosity of karstnet results.
88 | 89 | Returns 90 | ------- 91 | df_metrics : pandas.DataFrame 92 | Dataframe of karstnet metrics. 93 | 94 | Notes 95 | ----- 96 | Karstnet is a python3 project providing tools for the statistical 97 | analysis of karstic networks. More details here: 98 | https://github.com/karstnet/karstnet 99 | 100 | References 101 | ---------- 102 | .. [1] Collon, P., Bernasconi D., Vuilleumier C., and Renard P., 2017, 103 | Statistical metrics for the characterization of karst network 104 | geometry and topology. Geomorphology. 283: 122-142 doi:10.1016/ 105 | j.geomorph.2017.01.034 106 | http://dx.doi.org/doi:10.1016/j.geomorph.2017.01.034 107 | 108 | .. warning:: 109 | A corrigendum has been published in Geomorphology journal: 110 | Geomorphology 389, 107848, 111 | http://dx.doi.org/doi:10.1016/j.geomorph.2021.107848. 112 | 113 | Examples 114 | -------- 115 | >>> app = pk.pykasso() 116 | >>> ... 117 | >>> df_metrics = app.analyzer.compute_metrics() 118 | """ 119 | df_metrics = pd.DataFrame() 120 | 121 | # For each simulation, retrieve data and compute metrics 122 | for i, data in enumerate(self.project): 123 | 124 | # Retrieve data 125 | karstnet_edges = data["vectors"]["edges_"].to_numpy().tolist() 126 | karstnet_nodes = copy.deepcopy(data["vectors"]["nodes_"]) 127 | 128 | # Drop last item in list (the node type) for each dictionary entry 129 | karstnet_nodes = karstnet_nodes.drop(columns=['type', 'vadose']) 130 | index = karstnet_nodes.index 131 | values = karstnet_nodes.iloc[:, [0, 1, 2]].to_numpy().tolist() 132 | karstnet_nodes = {i: value for i, value in zip(index, values)} 133 | 134 | # Compute karstnet metrics 135 | # Make graph - edges must be a list, and nodes must be a dic of 136 | # format {nodeindex: [x,y]} 137 | k = kn.KGraph(karstnet_edges, karstnet_nodes, verbose=False) 138 | metrics = k.characterize_graph(verbose) 139 | 140 | # Concatenate dataframes 141 | df_ = pd.DataFrame(metrics, index=[i]) 142 | df_metrics = pd.concat([df_metrics, df_]) 143 | 144 | return df_metrics 145 | 146 | @requires_karstnet() 147 | def compare_metrics( 148 | self, 149 | dataframe: Union[DataFrame, Series, Styler], 150 | ) -> Styler: 151 | """ 152 | Compare the calculated statistical metrics with the reference. 153 | 154 | Parameters 155 | ---------- 156 | dataframe : Union[DataFrame, Series, Styler] 157 | Data to compare with karstnet metrics. 158 | 159 | Returns 160 | ------- 161 | df_metrics : Styler 162 | 163 | References 164 | ---------- 165 | .. [1] Collon, P., Bernasconi D., Vuilleumier C., and Renard P., 2017, 166 | Statistical metrics for the characterization of karst network 167 | geometry and topology. Geomorphology. 283: 122-142 doi:10.1016/ 168 | j.geomorph.2017.01.034 169 | http://dx.doi.org/doi:10.1016/j.geomorph.2017.01.034 170 | 171 | .. warning:: 172 | A corrigendum has been published in Geomorphology journal: 173 | Geomorphology 389, 107848, 174 | http://dx.doi.org/doi:10.1016/j.geomorph.2021.107848. 175 | 176 | Examples 177 | -------- 178 | >>> app = pk.pykasso() 179 | >>> ... 
180 | >>> df_metrics = app.analyzer.compute_metrics() 181 | >>> app.analyzer.compare_metrics(df_metrics) 182 | """ 183 | ### Convert pandas Series in DataFrame 184 | if isinstance(dataframe, Series): 185 | dataframe = dataframe.to_frame().T 186 | 187 | ### Define the text coloring function 188 | # Green if inside [V_min, V_max] 189 | # Orange if outside 190 | def _bg_color(x, min_val, max_val): 191 | if pd.isnull(x): 192 | return 'color: grey' 193 | elif (x < min_val) or (x > max_val): 194 | return 'color: #FF8C00' 195 | else: 196 | return 'color: #00FF00' 197 | 198 | # Iterate in the dataframe columns 199 | df_metrics = dataframe.style 200 | for column_name in dataframe: 201 | kwargs = { 202 | 'min_val': self.stats[column_name]['min'], 203 | 'max_val': self.stats[column_name]['max'], 204 | 'subset': [column_name] 205 | } 206 | df_metrics = df_metrics.applymap(_bg_color, **kwargs) 207 | 208 | return df_metrics 209 | 210 | def compute_stats_on_networks( 211 | self, 212 | numpy_algorithm: str = 'mean', 213 | numpy_parameters: dict = {}, 214 | ) -> np.ndarray: 215 | """ 216 | Compute selected algorithm on the whole set of computed discrete karst 217 | conduit networks. 218 | 219 | Parameters 220 | ---------- 221 | numpy_algorithm : str, default: 'mean' 222 | Numpy algorithm to use. More details here: 223 | https://numpy.org/doc/stable/reference/routines.statistics.html 224 | numpy_parameters : dict, default: {} 225 | Parameters of the selected algorithm. 226 | 227 | Returns 228 | ------- 229 | out : np.ndarray 230 | 231 | Examples 232 | -------- 233 | >>> app = pk.pykasso() 234 | >>> ... 235 | >>> df_metrics = app.analyzer.compute_metrics() 236 | >>> karst_std = app.analyzer.compute_stats_on_networks('std') 237 | """ 238 | 239 | # For each simulation, retrieve data and store it 240 | karst_map = [] 241 | for data in self.project: 242 | karst_map.append(data['maps']['karst'][-1].copy()) 243 | 244 | # Retrieve algorithm 245 | try: 246 | numpy_func = getattr(np, numpy_algorithm) 247 | except ValueError: 248 | msg = "Asked algorithm is not valid." 249 | raise ValueError(msg) 250 | 251 | # Compute 252 | numpy_parameters.pop('axis', None) 253 | out = numpy_func(karst_map, axis=0, **numpy_parameters) 254 | 255 | return out 256 | -------------------------------------------------------------------------------- /build/lib/pykasso/core/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Contains the core of pyKasso: application, project, grid, etc. 3 | 4 | This subpackage contains the core of pyKasso. It constructs an 5 | application class able to manage a project and to communicate between 6 | the different other subpackages. 7 | 8 | Please note that this module is private. All functions and objects 9 | are available in the main ``pykasso`` namespace - use that instead. 10 | """ 11 | 12 | __all__ = [] 13 | 14 | # Import pykasso function 15 | from .main import pykasso 16 | from .main import create_datareader 17 | __all__.extend(['pykasso', 'create_datareader']) 18 | -------------------------------------------------------------------------------- /build/lib/pykasso/core/_namespaces.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module defining some constants in pyKasso. 
3 | """ 4 | 5 | ### Internal dependencies 6 | from PIL import Image 7 | 8 | 9 | MISC_DIR_PATH = '/../_misc/' 10 | DEFAULT_PARAMETERS_FILENAME = 'parameters.yaml' 11 | DEFAULT_PROJECT_FILENAME = 'project.yaml' 12 | DEFAULT_LOG_FILENAME = 'project.log' 13 | 14 | GRID_PARAMETERS = [ 15 | 'x0', 16 | 'y0', 17 | 'z0', 18 | 'nx', 19 | 'ny', 20 | 'nz', 21 | 'dx', 22 | 'dy', 23 | 'dz' 24 | ] 25 | 26 | GEOLOGICAL_FEATURES = [ 27 | 'domain', 28 | 'geology', 29 | 'faults', 30 | 'fractures', 31 | ] 32 | 33 | SURFACE_FEATURES = [ 34 | 'topography', 35 | 'bedrock', 36 | 'water_table', 37 | ] 38 | 39 | DOMAIN_FEATURES = [ 40 | 'delimitation', 41 | 'topography', 42 | 'bedrock', 43 | 'water_table', 44 | ] 45 | 46 | ISOTROPIC_FEATURES = [ 47 | 'cost', 48 | 'time', 49 | 'karst', 50 | ] 51 | 52 | ANISOTROPIC_FEATURES = [ 53 | 'cost', 54 | 'alpha', 55 | 'beta', 56 | 'time', 57 | 'karst', 58 | 'gradient', 59 | ] 60 | 61 | features = [ 62 | GEOLOGICAL_FEATURES, 63 | DOMAIN_FEATURES, 64 | ANISOTROPIC_FEATURES 65 | ] 66 | 67 | AUTHORIZED_FEATURES = [f for list_ in features for f in list_] 68 | 69 | VALID_EXTENSIONS_DATAFRAME = [ 70 | 'gslib', 71 | 'vox', 72 | ] 73 | 74 | VALID_EXTENSIONS_IMAGE = [key.strip('.') for key in Image.EXTENSION.keys()] 75 | 76 | VALID_EXTENSIONS_DATA = [ 77 | 'gslib', 78 | 'vox', 79 | 'csv', 80 | 'txt', 81 | 'npy', 82 | 'tif', 83 | 'tiff', 84 | 'asc', 85 | ] 86 | VALID_EXTENSIONS_DATA.extend(VALID_EXTENSIONS_IMAGE) 87 | 88 | DEFAULT_FMM_COSTS = { 89 | 'out': 10, 90 | 'geology': 0.4, 91 | 'beddings': 0.35, 92 | 'faults': 0.2, 93 | 'fractures': 0.2, 94 | 'karst': 0.1, 95 | 'conduits': 0.1, 96 | 'ratio': 0.5, 97 | } 98 | 99 | DEFAULT_FEATURE_PARAMETERS = { 100 | 'geology': { 101 | 'nodata': 1, 102 | 'name': 'unit {}', 103 | 'model': True, 104 | }, 105 | 'faults': { 106 | 'nodata': 0, 107 | 'name': 'fault {}', 108 | 'model': True, 109 | }, 110 | 'fractures': { 111 | 'nodata': 0, 112 | 'name': 'family {}', 113 | 'model': True, 114 | } 115 | } 116 | 117 | SKS_VALID_ALGORITHM_VALUES = ['Isotropic3', 'Riemann3'] 118 | SKS_VALID_MODE_VALUES = ['A', 'B', 'C', 'D'] 119 | -------------------------------------------------------------------------------- /build/lib/pykasso/core/application.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module containing the application class. 3 | """ 4 | 5 | ### Local dependencies 6 | from .project import Project 7 | from ..model.sks import SKS 8 | from ..analysis.analysis import Analyzer 9 | from ..visualization.visualizer import Visualizer 10 | 11 | ### Validation 12 | from .._utils.validation import ( 13 | is_variable_type_valid, 14 | is_key_in_dict, 15 | is_parameter_comparison_valid, 16 | ) 17 | 18 | ### Variables 19 | from ._namespaces import ( 20 | GRID_PARAMETERS, 21 | ) 22 | 23 | 24 | class Application(): 25 | """ 26 | Class modeling an application and embedding the pyKasso subpackages. 27 | 28 | This class manages a pyKasso project and provides access to the different 29 | subpackages by storing them as class attributes. 30 | 31 | Attributes 32 | ---------- 33 | project 34 | Project class. 35 | model 36 | Model class. 37 | analyzer 38 | Analyzer class. 39 | visualizer 40 | Visualizer class 41 | 42 | Notes 43 | ----- 44 | The attributes are set to ``None`` until a project is created or loaded. 45 | 46 | Examples 47 | -------- 48 | This class can be instancied by using the public function ``pykasso()``. 
49 | >>> import pykasso as pk 50 | >>> app = pk.pykasso() 51 | """ 52 | 53 | def __init__(self) -> None: 54 | self.__project = None 55 | self.__model = None 56 | self.__analyzer = None 57 | self.__visualizer = None 58 | 59 | ###################### 60 | ### MANAGE PROJECT ### 61 | ###################### 62 | 63 | def new_project( 64 | self, 65 | name: str, 66 | grid_parameters: dict, 67 | force: bool = True, 68 | ) -> None: 69 | """ 70 | Create a new project. 71 | 72 | Instance a ``Project`` class within the ``project`` attribute and 73 | initialize the subpackages. 74 | 75 | Parameters 76 | ---------- 77 | name : str 78 | The name of the project. A new directory is created if the 79 | argument points to a non-existant folder. 80 | grid_parameters : dict 81 | The dictionary containing the grid parameters. 82 | force : bool, default: True 83 | If True, overwrite files in case of conflict when ``name`` 84 | points to an already existing directory. 85 | 86 | Examples 87 | -------- 88 | >>> import pykasso as pk 89 | >>> app = pk.pykasso() 90 | >>> name = "new_project" 91 | >>> grid_parameters = { 92 | >>> 'nx': 10, 'ny': 10, 'nz': 10, 93 | >>> 'x0': 0, 'y0': 0, 'z0': 0, 94 | >>> 'dx': 10, 'dy': 10, 'dz': 10, 95 | >>> } 96 | >>> app.new_project(name, grid_parameters) 97 | """ 98 | 99 | ### Input validation 100 | 101 | # Test 'name' type 102 | try: 103 | is_variable_type_valid(variable_name='name', 104 | variable_value=name, 105 | valid_types=(str)) 106 | except TypeError: 107 | raise 108 | 109 | # Test 'grid_parameters' type 110 | try: 111 | is_variable_type_valid(variable_name='grid_parameters', 112 | variable_value=grid_parameters, 113 | valid_types=(dict)) 114 | except TypeError: 115 | raise 116 | 117 | # Test 'Grid' parameters presence 118 | for parameter in GRID_PARAMETERS: 119 | try: 120 | is_key_in_dict(dictionary=grid_parameters, 121 | dictionary_name='grid_parameters', 122 | key_to_test=parameter) 123 | except KeyError: 124 | raise 125 | 126 | # Test if the values of attributes are of type int or float 127 | for parameter_name in ['x0', 'y0', 'z0', 'dx', 'dy', 'dz']: 128 | try: 129 | parameter_value = grid_parameters[parameter_name] 130 | is_variable_type_valid(variable_name=parameter_name, 131 | variable_value=parameter_value, 132 | valid_types=(int, float)) 133 | except TypeError: 134 | raise 135 | 136 | # Test if the values of attributes are of type int 137 | for parameter_name in ['nx', 'ny', 'nz']: 138 | try: 139 | parameter_value = grid_parameters[parameter_name] 140 | is_variable_type_valid(variable_name=parameter_name, 141 | variable_value=parameter_value, 142 | valid_types=(int)) 143 | except TypeError: 144 | raise 145 | 146 | # Test if the values of attributes are well upper 0 147 | for parameter_name in ['nx', 'ny', 'nz']: 148 | try: 149 | parameter_value = grid_parameters[parameter_name] 150 | is_parameter_comparison_valid(parameter_name=parameter_name, 151 | parameter_value=parameter_value, 152 | logical_test='>', 153 | compared_to=0) 154 | except ValueError: 155 | raise 156 | 157 | ### Initialization of the application 158 | 159 | # Set a project instance 160 | self.__project = Project( 161 | grid_parameters=grid_parameters, 162 | project_location=name, 163 | force=force, 164 | ) 165 | 166 | # Initialize the 'model' module 167 | self.__model = SKS(self.project) 168 | 169 | # Initialize the 'analysis' module 170 | self.__analyzer = Analyzer(self.project) 171 | 172 | # Initialize the 'visualisation' module 173 | self.__visualizer = Visualizer(self.project) 174 | 175 | return 
None 176 | 177 | def open_project(self) -> NotImplementedError: 178 | """ 179 | Not implemented yet. 180 | """ 181 | msg = "Not implemented yet." 182 | raise NotImplementedError(msg) 183 | 184 | def save_project(self) -> NotImplementedError: 185 | """ 186 | Not implemented yet. 187 | """ 188 | msg = "Not implemented yet." 189 | raise NotImplementedError(msg) 190 | 191 | def export_project(self) -> NotImplementedError: 192 | """ 193 | Not implemented yet. 194 | """ 195 | msg = "Not implemented yet." 196 | raise NotImplementedError(msg) 197 | 198 | ############### 199 | ### GETTERS ### 200 | ############### 201 | 202 | @property 203 | def project(self) -> Project: 204 | """ 205 | Return the project class. 206 | """ 207 | if self.__project is None: 208 | msg = "No project available yet. Please create or load a project." 209 | print(msg) 210 | return None 211 | else: 212 | return self.__project 213 | 214 | @property 215 | def model(self) -> SKS: 216 | """ 217 | Return the SKS model class. 218 | """ 219 | if self.__model is None: 220 | msg = ("This feature is not available yet. Please create or load a" 221 | " project first.") 222 | print(msg) 223 | return None 224 | else: 225 | return self.__model 226 | 227 | @property 228 | def analyzer(self) -> Analyzer: 229 | """ 230 | Return the analyzer class. 231 | """ 232 | if self.__analyzer is None: 233 | msg = ("This feature is not available yet. Please create or load a" 234 | " project first.") 235 | print(msg) 236 | return None 237 | else: 238 | return self.__analyzer 239 | 240 | @property 241 | def visualizer(self) -> Visualizer: 242 | """ 243 | Return the visualizer class. 244 | """ 245 | if self.__visualizer is None: 246 | msg = ("This feature is not available yet. Please create or load a" 247 | " project first.") 248 | print(msg) 249 | return None 250 | else: 251 | return self.__visualizer 252 | -------------------------------------------------------------------------------- /build/lib/pykasso/core/main.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module containing functions for accessing the public content of pyKasso. 3 | """ 4 | 5 | ### Local dependencies 6 | from .application import Application 7 | from .grid import Grid 8 | from .._utils.datareader import DataReader 9 | 10 | 11 | def pykasso() -> Application: 12 | """ 13 | Create and return an ``Application``. 14 | 15 | Returns 16 | ------- 17 | Application 18 | 19 | See Also 20 | -------- 21 | Application, Project, Grid 22 | 23 | Examples 24 | -------- 25 | >>> import pykasso as pk 26 | >>> app = pk.pykasso() 27 | """ 28 | out = Application() 29 | return out 30 | 31 | 32 | def create_datareader(grid: Grid = None) -> DataReader: 33 | """ 34 | Create and return a ``DataReader``. 35 | 36 | Returns 37 | ------- 38 | DataReader 39 | 40 | Parameters 41 | ---------- 42 | Grid 43 | 44 | See Also 45 | -------- 46 | DataReader 47 | 48 | Examples 49 | -------- 50 | >>> import pykasso as pk 51 | >>> data_reader = pk.create_datareader() 52 | """ 53 | out = DataReader(grid=grid) 54 | return out 55 | -------------------------------------------------------------------------------- /build/lib/pykasso/model/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Contains the model of pyKasso. 
3 | """ 4 | 5 | __all__ = [] 6 | -------------------------------------------------------------------------------- /build/lib/pykasso/model/_validations.py: -------------------------------------------------------------------------------- 1 | """ 2 | Input validation functions. 3 | """ 4 | 5 | import PIL 6 | import sys 7 | import logging 8 | import rasterio 9 | import numpy as np 10 | 11 | from .._utils import datareader 12 | from .._utils import validation as val 13 | 14 | this = sys.modules[__name__] 15 | 16 | 17 | ################# 18 | ### FUNCTIONS ### 19 | ################# 20 | 21 | def read_file(path: str, attribute: str) -> np.ndarray: 22 | extension = path.split('.')[-1] 23 | try: 24 | ### GSLIB 25 | if extension == 'gslib': 26 | data = np.genfromtxt(path, skip_header=3, dtype=np.int8) 27 | 28 | ### Numpy_pickle 29 | elif extension == 'npy': 30 | data = np.load(path) 31 | 32 | ### Images 33 | elif extension in ['jpg', 'png']: 34 | data = np.asarray(PIL.Image.open(path).convert('L')).T 35 | 36 | ### CSV 37 | elif extension == 'csv': 38 | data = np.genfromtxt(path, delimiter=',').T 39 | 40 | ### TIF, TIFF 41 | elif extension in ['tif', 'tiff']: 42 | data = rasterio.open(path).read(1).T 43 | 44 | ### Others 45 | else: 46 | data = np.genfromtxt(path) 47 | 48 | except Exception as err: 49 | msg = ("Impossible to read the file designated by the '{}' attribute." 50 | " Location : {}".format(attribute, path)) 51 | this.logger.error(msg) 52 | raise err 53 | else: 54 | return data 55 | 56 | 57 | def is_list_length_valid(data: list, value: int, attribute: str) -> bool: 58 | if len(data) < value: 59 | msg = ("'{}' data length is too short ({} elements minimum)." 60 | .format(attribute, value)) 61 | this.logger.critical(msg) 62 | raise ValueError(msg) 63 | else: 64 | return True 65 | 66 | 67 | def is_coordinate_type_valid(coordinate: tuple, types: tuple, 68 | attribute: str) -> bool: 69 | if not isinstance(coordinate, types): 70 | msg = ("The values of the '{}' attribute contains at least one invalid" 71 | " vertex. Coordinates must be of type : {}." 72 | .format(attribute, types)) 73 | this.logger.critical(msg) 74 | raise TypeError(msg) 75 | else: 76 | return True 77 | 78 | 79 | def is_surface_dimensions_valid(attribute: str, array: np.ndarray, 80 | grid) -> bool: 81 | nx, ny, nz = grid.shape 82 | if not (array.shape == (nx, ny)): 83 | msg = ("The '{}' array shape does not match with grid surface." 84 | " Array shape: {}, Grid surface shape: {}" 85 | .format(attribute, array.shape, (nx, ny))) 86 | this.logger.critical(msg) 87 | raise ValueError(msg) 88 | else: 89 | return True 90 | 91 | 92 | def is_costs_dictionnary_valid(costs_dictionnary: dict, ids_data: list): 93 | """ """ 94 | for i in ids_data: 95 | if i not in costs_dictionnary: 96 | msg = ("The data id ({}) is not within 'costs' dictionnary keys" 97 | " ({})".format(i, list(costs_dictionnary.keys()))) 98 | this.logger.error(msg) 99 | raise KeyError(msg) 100 | return True 101 | 102 | 103 | ############################################################################### 104 | 105 | ######################## 106 | ### OUTLETS - INLETS ### 107 | ######################## 108 | 109 | def validate_settings_points(settings: dict, 110 | attribute: str, 111 | ) -> dict: 112 | """ 113 | Validate the parameters of ``outlets`` and ``inlets`` keys. 
114 | 115 | TODO 116 | 117 | Tested parameters: 118 | - ``seed``: must be of type int; 119 | - ``number``: must be of type int, value must be greater than zero; 120 | - ``data``: TODO 121 | """ 122 | # Set logger 123 | logger = logging.getLogger("{}.validation".format(attribute)) 124 | 125 | ### 'seed' ### 126 | 127 | # Test if 'seed' is of type int or None 128 | try: 129 | if settings['seed'] is not None: 130 | val.is_variable_type_valid(variable_name='seed', 131 | variable_value=settings['seed'], 132 | valid_types=(int)) 133 | except TypeError as error: 134 | logger.error(error) 135 | raise 136 | 137 | ### 'subdomain' ### 138 | 139 | # Test if 'subdomain' is of type str 140 | try: 141 | val.is_variable_type_valid(variable_name='subdomain', 142 | variable_value=settings['subdomain'], 143 | valid_types=(str)) 144 | except TypeError as error: 145 | logger.error(error) 146 | raise 147 | 148 | ### 'shuffle' ### 149 | 150 | # Test if 'shuffle' is of type bool 151 | try: 152 | val.is_variable_type_valid(variable_name='shuffle', 153 | variable_value=settings['shuffle'], 154 | valid_types=(bool)) 155 | except TypeError as error: 156 | logger.error(error) 157 | raise 158 | 159 | ### 'number' ### 160 | 161 | # Test if 'number' is of type int 162 | try: 163 | val.is_variable_type_valid(variable_name='number', 164 | variable_value=settings['number'], 165 | valid_types=(int)) 166 | except TypeError as error: 167 | logger.error(error) 168 | raise 169 | 170 | # Test if 'number' is greater than zero 171 | try: 172 | val.is_parameter_comparison_valid(parameter_name='number', 173 | parameter_value=settings['number'], 174 | logical_test='>', 175 | compared_to=0) 176 | except ValueError as error: 177 | logger.error(error) 178 | raise 179 | 180 | ### 'importance' ### 181 | 182 | # Test if 'importance' is of type list 183 | try: 184 | val.is_variable_type_valid(variable_name='importance', 185 | variable_value=settings['importance'], 186 | valid_types=(list)) 187 | except TypeError as error: 188 | logger.error(error) 189 | raise 190 | 191 | # Test if length of the list is adequate with declared number of points 192 | try: 193 | if len(settings['importance']) > settings['number']: 194 | # TODO - write error msg 195 | msg = "" 196 | raise Exception(msg) 197 | # TODO - custom exception ? 
198 | except Exception as error: 199 | logger.error(error) 200 | raise 201 | 202 | ### 'data' 203 | 204 | # Test if data is empty 205 | if isinstance(settings['data'], str) and (settings['data'] == ''): 206 | settings['data'] = [] 207 | 208 | # If 'data' type is str, try to read the file 209 | if isinstance(settings['data'], str): 210 | filepath = settings['data'] 211 | 212 | # Test if filepath is valid 213 | val.is_filepath_valid(filepath) 214 | 215 | # Try to read filepath 216 | dr = datareader.DataReader() 217 | settings['data'] = dr.get_data_from_file(filepath) 218 | 219 | # If 'data' type is np.ndarray, transform it to list 220 | if isinstance(settings['data'], np.ndarray): 221 | 222 | # If the list of points contains only one element 223 | if len(settings['data'].shape) == 1: 224 | settings['data'] = np.array([settings['data']]) 225 | 226 | settings['data'] = settings['data'].tolist() 227 | 228 | return settings 229 | -------------------------------------------------------------------------------- /build/lib/pykasso/model/_wrappers.py: -------------------------------------------------------------------------------- 1 | """pyKasso's wrappers functions.""" 2 | 3 | ### Internal dependencies 4 | import logging 5 | 6 | ### Local dependencies 7 | from .._utils.validation import test_sks_settings 8 | from ..model import _validations as val 9 | from ..core._namespaces import DEFAULT_FMM_COSTS 10 | 11 | 12 | DEFAULT_VALUES = { 13 | 'sks': { 14 | 'seed': 0, 15 | 'algorithm': 'Isotropic3', 16 | 'costs': DEFAULT_FMM_COSTS, 17 | 'factors': {'F': 100, 'F1': 100, 'F2': 50} 18 | }, 19 | 'geology': { 20 | 'data': None, 21 | 'axis': 'z', 22 | 'names': {}, 23 | 'costs': {}, 24 | 'model': {} 25 | }, 26 | 'faults': { 27 | 'data': None, 28 | 'axis': 'z', 29 | 'names': {}, 30 | 'costs': {}, 31 | 'model': {} 32 | }, 33 | 'fractures': { 34 | 'data': None, 35 | 'axis': 'z', 36 | 'names': {}, 37 | # 'costs': {}, 38 | 'model': {}, 39 | 'seed': None, 40 | }, 41 | 'domain': { 42 | 'delimitation': None, 43 | 'topography': None, 44 | 'bedrock': None, 45 | 'water_table': None, 46 | }, 47 | 'outlets': { 48 | # 'number': ['required', ''], 49 | 'data': [], 50 | 'shuffle': False, 51 | 'importance': [1], 52 | 'subdomain': 'domain_surface', 53 | 'geology': None, 54 | 'seed': None, 55 | }, 56 | 'inlets': { 57 | # 'number': ['required', ''], 58 | 'data': [], 59 | 'shuffle': False, 60 | 'importance': [1], 61 | # 'per_outlet': [1], 62 | 'subdomain': 'domain_surface', 63 | 'geology': None, 64 | 'seed': None, 65 | }, 66 | } 67 | 68 | 69 | def _parameters_validation(feature, kind): 70 | """ 71 | This decorator validates input parameters before creatings modeling classes. 72 | """ 73 | def _(function): 74 | def _wrapper(*args, **kwargs): 75 | logger = logging.getLogger("validation.{}".format(feature)) 76 | model = args[0] 77 | 78 | # Add feature dictionary if value is missing 79 | if feature not in model.model_parameters: 80 | if kind == 'required': 81 | msg = "The '{}' key is missing.".format(feature) 82 | logger.error(msg) 83 | raise KeyError(msg) 84 | else: 85 | model.model_parameters[feature] = {} 86 | 87 | # Add default feature values 88 | user_params = model.model_parameters[feature].copy() 89 | default_params = DEFAULT_VALUES[feature].copy() 90 | for (key, value) in default_params.items(): 91 | if key not in user_params: 92 | msg = ("The '{}' attribute is missing. 
Set to default" 93 | " value.").format(key) 94 | logger.warning(msg) 95 | default_params.update(user_params) 96 | 97 | # Test special key presences 98 | if feature == 'sks': 99 | # Travel cost 100 | costs = default_params['costs'].copy() 101 | default_costs = DEFAULT_FMM_COSTS.copy() 102 | default_costs.update(costs) 103 | default_params['costs'] = default_costs 104 | # Mode 105 | if default_params['algorithm'] == 'Isotropic3': 106 | default_params['mode'] = 'A' 107 | else: 108 | default_params.setdefault('mode', 'D') 109 | if feature in ['outlets', 'inlets']: 110 | for key in ['number']: 111 | if key not in default_params: 112 | msg = ("The mandatory '{}' attribute is missing." 113 | ).format(key) 114 | logger.error(msg) 115 | raise KeyError(msg) 116 | 117 | # Control values 118 | if feature == 'sks': 119 | test_sks_settings(default_params) 120 | elif feature in ['geology', 'faults', 'fractures']: 121 | pass 122 | elif feature == 'domain': 123 | pass 124 | elif feature in ['inlets', 'outlets']: 125 | val.validate_settings_points(default_params, feature) 126 | # if isinstance(default_params['data'], str) 127 | 128 | pass 129 | 130 | # Update dictionary 131 | model.model_parameters[feature] = default_params 132 | msg = "'{}' parameters have been validated.".format(feature) 133 | logger.info(msg) 134 | result = function(*args, **kwargs) 135 | return model 136 | return _wrapper 137 | return _ 138 | 139 | 140 | def _memoize(feature): 141 | """ 142 | This decorator caches the results of function calls, preventing the need 143 | to recompute results for the same inputs. 144 | """ 145 | def _(function): 146 | def _wrapper(*args, **kwargs): 147 | logger = logging.getLogger("construction.{}".format(feature)) 148 | model = args[0] 149 | memoization = model.project._memoization 150 | if model.model_parameters[feature] is not memoization['settings'][feature]: 151 | # print('is not') # TODO 152 | result = function(*args, **kwargs) 153 | memoization['settings'][feature] = model.model_parameters[feature] 154 | memoization['model'][feature] = getattr(model, feature) 155 | logger.info("'{}' has been constructed".format(feature)) 156 | else: 157 | setattr(model, feature, memoization['model'][feature]) 158 | msg = "'{}' has been reused from previous simulation".format(feature) 159 | logger.info(msg) 160 | return model 161 | return _wrapper 162 | return _ 163 | 164 | 165 | def _logging(feature=None, step=None): 166 | """ 167 | This decorator records messages to a log fileand tracks events, errors, 168 | and informational messages. 
169 | """ 170 | def _(function): 171 | def _wrapper(*args, **kwargs): 172 | if feature is not None: 173 | logger = logging.getLogger("{}.{}".format(feature, step)) 174 | else: 175 | logger = logging.getLogger(".") 176 | try: 177 | result = function(*args, **kwargs) 178 | except Exception as err: 179 | msg = "Critical error during '{}'".format(function.__name__) 180 | logger.critical(msg) 181 | raise err 182 | else: 183 | logger.debug("'{}' went well".format(function.__name__)) 184 | return result 185 | return _wrapper 186 | return _ 187 | -------------------------------------------------------------------------------- /build/lib/pykasso/model/domain_features/__init__.py: -------------------------------------------------------------------------------- 1 | from .bedrock import Bedrock 2 | from .delimitation import Delimitation 3 | from .domain import Domain 4 | from .topography import Topography 5 | from .watertable import WaterTable 6 | -------------------------------------------------------------------------------- /build/lib/pykasso/model/domain_features/bedrock.py: -------------------------------------------------------------------------------- 1 | from ...core.grid import Grid 2 | from ..geologic_features.surface import Surface 3 | 4 | 5 | class Bedrock(Surface): 6 | """ 7 | Class modeling the lower horizontal limit of the study site. 8 | """ 9 | def __init__( 10 | self, 11 | grid: Grid, 12 | *args, 13 | **kwargs, 14 | ) -> None: 15 | feature = 'bedrock' 16 | super().__init__(grid, feature, *args, **kwargs) 17 | -------------------------------------------------------------------------------- /build/lib/pykasso/model/domain_features/delimitation.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from shapely.geometry import Point, Polygon 3 | from ...core.grid import Grid 4 | 5 | 6 | class Delimitation(): 7 | """ 8 | Class modeling the vertical limits of the study site. 9 | """ 10 | 11 | def __init__( 12 | self, 13 | vertices: list, 14 | grid: Grid, 15 | ) -> None: 16 | """ 17 | Construct the delimitation, the vertical limits of the study site. 18 | 19 | Parameters 20 | ---------- 21 | vertices : list 22 | List of coordinates representing the vertices of the boundary 23 | polygon : [[x0,y0], ..., [xn, yn]]. The list must contain at least 24 | 3 vertices. 25 | grid : Grid 26 | Grid of the model. 27 | """ 28 | self.label = 'delimitation' 29 | self.vertices = vertices 30 | 31 | ### Set the polygon with shapely 32 | path_vertices = self.vertices.copy() 33 | self.polygon = Polygon(path_vertices) 34 | 35 | ### Sets the mask array with a numpy-array 36 | row, col = np.indices((grid.nx, grid.ny)) 37 | X, Y, Z = grid.get_meshgrids() 38 | pts = np.column_stack((X[row, col, 0].flatten(), 39 | Y[row, col, 0].flatten())) 40 | msk = [self.polygon.contains(Point(x, y)) for (x, y) in pts] 41 | msk = np.array(msk).reshape((grid.nx, grid.ny)).astype(int) 42 | self.data_volume = np.repeat(msk[:, :, np.newaxis], grid.nz, axis=2) 43 | -------------------------------------------------------------------------------- /build/lib/pykasso/model/domain_features/topography.py: -------------------------------------------------------------------------------- 1 | from ...core.grid import Grid 2 | from ..geologic_features.surface import Surface 3 | 4 | 5 | class Topography(Surface): 6 | """ 7 | Class modeling the upper horizontal limit of the study site. 
8 | """ 9 | def __init__( 10 | self, 11 | grid: Grid, 12 | *args, 13 | **kwargs, 14 | ) -> None: 15 | feature = 'topography' 16 | super().__init__(grid, feature, *args, **kwargs) 17 | -------------------------------------------------------------------------------- /build/lib/pykasso/model/domain_features/watertable.py: -------------------------------------------------------------------------------- 1 | from ...core.grid import Grid 2 | from ..geologic_features.surface import Surface 3 | 4 | 5 | class WaterTable(Surface): 6 | """ 7 | Class modeling the water level elevation, the phreatic/vadose limit of the 8 | study site. 9 | """ 10 | def __init__( 11 | self, 12 | grid: Grid, 13 | *args, 14 | **kwargs, 15 | ) -> None: 16 | feature = 'water_table' 17 | super().__init__(grid, feature, *args, **kwargs) 18 | -------------------------------------------------------------------------------- /build/lib/pykasso/model/geologic_features/__init__.py: -------------------------------------------------------------------------------- 1 | from .faults import Faults 2 | from .fractures import Fractures 3 | from .geologicfeature import GeologicFeature 4 | from .geology import Geology 5 | from .surface import Surface 6 | -------------------------------------------------------------------------------- /build/lib/pykasso/model/geologic_features/faults.py: -------------------------------------------------------------------------------- 1 | from ...core._namespaces import DEFAULT_FMM_COSTS 2 | from ...core.grid import Grid 3 | from .geologicfeature import GeologicFeature 4 | 5 | 6 | class Faults(GeologicFeature): 7 | """ 8 | Class modeling the faults model. 9 | """ 10 | 11 | def __init__( 12 | self, 13 | grid: Grid, 14 | default_fmm_cost: float = DEFAULT_FMM_COSTS['faults'], 15 | *args, 16 | **kwargs, 17 | ) -> None: 18 | feature = 'faults' 19 | dim = 3 20 | super().__init__(grid, feature, dim, default_fmm_cost, *args, **kwargs) 21 | 22 | def set_names( 23 | self, 24 | names: dict[int, str], 25 | default_name: str = 'fault {}', 26 | ) -> None: 27 | """ 28 | Assign names to fault items based on the provided ``names`` dictionary, 29 | with an optional default naming pattern. 30 | 31 | Parameters 32 | ---------- 33 | names : dict[int, str] 34 | A dictionary where the keys are fault item indices (integers) and 35 | the values are the corresponding names (strings) to be assigned. 36 | This dictionary specifies which fault items should receive 37 | custom names. 38 | default_name : str, default: 'fault {}' 39 | A format string used to generate default fault item names for 40 | items not explicitly named in the ``names`` dictionary. The format 41 | string should include a placeholder (e.g., '{}') that will be 42 | replaced by the item's index. 43 | 44 | Notes 45 | ----- 46 | This function does not return a value. It rewrites the ``self.names`` 47 | attribute with the new specified dictionary. 48 | """ 49 | return super().set_names(names, default_name) 50 | 51 | def set_model( 52 | self, 53 | model: dict[int, bool], 54 | default_model: bool = True, 55 | ) -> None: 56 | """ 57 | Indicate if a fault item should be considered in the modelisation 58 | based on the provided dictionary, with an optional default setting. 59 | 60 | Parameters 61 | ---------- 62 | model : dict[int, bool] 63 | A dictionary where the keys are fault item indices (integers) and 64 | the values are booleans indicating if the item is considered or 65 | not.
66 | default_model : bool, default: True 67 | The default value to be applied to fault items not explicitly 68 | listed in the ``model`` dictionary. 69 | 70 | Notes 71 | ----- 72 | This function does not return a value. It rewrites the ``self.model`` 73 | attribute with the new specified dictionary. 74 | """ 75 | model.setdefault(0, False) 76 | return super().set_model(model, default_model) 77 | -------------------------------------------------------------------------------- /build/lib/pykasso/model/geologic_features/geologicfeature.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module contains classes modeling the geological features. 3 | """ 4 | 5 | ### External dependencies 6 | import numpy as np 7 | import pandas as pd 8 | 9 | ### Local dependencies 10 | from ..._utils.datareader import DataReader 11 | from ...core._namespaces import DEFAULT_FEATURE_PARAMETERS 12 | 13 | ### Typing 14 | from typing import Union 15 | from ...core.grid import Grid 16 | 17 | 18 | class GeologicFeature(DataReader): 19 | """ 20 | Class modeling a geological feature. 21 | """ 22 | 23 | def __init__( 24 | self, 25 | grid: Grid, 26 | feature: str, 27 | dim: int, 28 | default_fmm_cost: float, 29 | *args, 30 | **kwargs, 31 | ) -> None: 32 | """ 33 | Construct a geological feature. 34 | 35 | Parameters 36 | ---------- 37 | grid : Grid 38 | pyKasso's ``Grid`` of the model. 39 | feature : str 40 | Define the type of geological feature. 41 | 42 | Available 2D geological features: 43 | - ``'topography'`` 44 | - ``'water_table'`` 45 | - ``'bedrock'`` 46 | 47 | Available 3D geological features: 48 | - ``'geology'`` 49 | - ``'faults'`` 50 | - ``'fractures'`` 51 | dim : int 52 | Define whether the geological feature corresponds to a 2D or 3D dataset. 53 | 54 | default_fmm_cost : float 55 | Define the default fast-marching method cost value. 56 | """ 57 | 58 | # Initialization 59 | super().__init__(grid, *args, **kwargs) 60 | self.feature = feature 61 | self.dim = dim 62 | self.default_fmm_cost = default_fmm_cost 63 | self.data_surface = None 64 | self.data_volume = None 65 | self.stats = None 66 | self.names = None 67 | self.costs = None 68 | self.model = None 69 | 70 | # Retrieve arguments from kwargs 71 | data = kwargs.get('data', None) 72 | axis = kwargs.get('axis', 'z') 73 | names = kwargs.get('names', {}) 74 | costs = kwargs.get('costs', {}) 75 | model = kwargs.get('model', {}) 76 | 77 | # Set the data 78 | self.set_data(data, axis) 79 | self.compute_statistics() 80 | if self.feature not in ['topography', 'water_table', 'bedrock']: 81 | self.set_names(names) 82 | self.set_costs(costs, self.default_fmm_cost) 83 | self.set_model(model) 84 | 85 | def overview(self) -> pd.DataFrame: 86 | """ 87 | Return a pandas DataFrame describing each contained unit with its name, 88 | its cost, and if it will be considered during the simulation. Basic 89 | statistics are also described. 
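        Illustrative layout of the returned frame (indices and values below
        are hypothetical; 3D features report a ``volume`` column, 2D surfaces
        a ``surface`` column):

                names  costs  model  counts  freq   volume
        1      unit 1    0.4   True   50000   0.5  5.0e+07
        2      unit 2    0.8   True   50000   0.5  5.0e+07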
90 | """ 91 | index = self.stats.index 92 | data = { 93 | 'names': self.names.values(), 94 | 'costs': self.costs.values(), 95 | 'model': self.model.values(), 96 | } 97 | df = pd.DataFrame(data, index=index) 98 | df = pd.merge(df, self.stats, left_index=True, right_index=True) 99 | return df 100 | 101 | ############### 102 | ### SETTERS ### 103 | ############### 104 | 105 | def set_data( 106 | self, 107 | data: Union[None, str, np.ndarray], 108 | axis: str = 'z', 109 | ) -> None: 110 | """ 111 | """ 112 | # If no data is provided 113 | if data is None: 114 | 115 | value = DEFAULT_FEATURE_PARAMETERS[self.feature]['nodata'] 116 | 117 | if self.dim == 2: 118 | self.data_surface = self._get_data_full_2D(value) 119 | elif self.dim == 3: 120 | self.data_volume = self._get_data_full_3D(value) 121 | 122 | # Else 123 | elif isinstance(data, np.ndarray): 124 | if self.dim == 2: 125 | self.data_surface = data 126 | elif self.dim == 3: 127 | self.data_volume = data 128 | else: 129 | if self.dim == 2: 130 | self.data_surface = self.get_data_from_file(data, 131 | False) 132 | elif self.dim == 3: 133 | self.data_volume = self.get_data_from_file(data, 134 | True, 135 | axis) 136 | return None 137 | 138 | def set_names( 139 | self, 140 | names: dict[int, str], 141 | default_name: str = 'item {}', 142 | ) -> None: 143 | """ 144 | Assign names to items based on the provided ``names`` dictionary, with 145 | an optional default naming pattern. 146 | 147 | Parameters 148 | ---------- 149 | names : dict[int, str] 150 | A dictionary where the keys are item indices (integers) and the 151 | values are the corresponding names (strings) to be assigned. This 152 | dictionary specifies which items should receive custom names. 153 | default_name : str, default: 'item {}' 154 | A format string used to generate default names for items not 155 | explicitly named in the ``names`` dictionary. The format string 156 | should include a placeholder (e.g., '{}') that will be replaced by 157 | the item's index. 158 | 159 | Notes 160 | ----- 161 | This function does not return a value. It rewrites the ``self.names`` 162 | attribute with the new specified dictionary. 163 | """ 164 | ids = self.stats.index 165 | names_df = {} 166 | for id in ids: 167 | names_df[id] = names.get(id, default_name.format(id)) 168 | self.names = names_df 169 | return None 170 | 171 | def set_costs( 172 | self, 173 | costs: dict[int, float], 174 | default_cost: float = None, 175 | ) -> None: 176 | """ 177 | Assign costs to items based on the provided dictionary, with an 178 | optional default cost. 179 | 180 | Parameters 181 | ---------- 182 | costs : dict[int, float] 183 | A dictionary where the keys are item indices (integers) and the 184 | values are the corresponding costs (floats) to be assigned. This 185 | dictionary specifies which items should receive custom costs. 186 | default_cost : float, optional 187 | The default cost to be applied to items not explicitly listed in 188 | the ``costs`` dictionary. If None, ``self.default_fmm_cost`` is used. 189 | 190 | Notes 191 | ----- 192 | This function does not return a value. It rewrites the ``self.costs`` 193 | attribute with the new specified dictionary.
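        Examples
        --------
        Illustrative call (the ids and cost values are hypothetical):

        >>> feature.set_costs({1: 0.4, 2: 0.8})

        Items whose id is not listed receive ``default_cost`` or, when it is
        None, ``self.default_fmm_cost``.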
194 | """ 195 | # Retrieve default fmm cost 196 | if default_cost is None: 197 | default_cost = self.default_fmm_cost 198 | 199 | # Assign costs 200 | ids = self.stats.index 201 | costs_df = {} 202 | for id in ids: 203 | costs_df[id] = costs.get(id, default_cost) 204 | self.costs = costs_df 205 | 206 | return None 207 | 208 | def set_model( 209 | self, 210 | model: dict[int, bool], 211 | default_model: bool = True, 212 | ) -> None: 213 | """ 214 | Indicate if an item should be considered in the modelisation based on 215 | the provided dictionary, with an optional default setting. 216 | 217 | Parameters 218 | ---------- 219 | model : dict[int, bool] 220 | A dictionary where the keys are item indices (integers) and the 221 | values are booleans indicating if the item is considered or not. 222 | default_model : bool, default: True 223 | The default value to be applied to items not explicitly listed in 224 | the ``model`` dictionary. 225 | 226 | Notes 227 | ----- 228 | This function does not return a value. It rewrites the ``self.model`` 229 | attribute with the new specified dictionary. 230 | """ 231 | ids = self.stats.index 232 | model_df = {} 233 | for id in ids: 234 | model_df[id] = model.get(id, default_model) 235 | self.model = model_df 236 | return None 237 | 238 | ############### 239 | ### GETTERS ### 240 | ############### 241 | 242 | def get_data_units(self, units: list[int]) -> np.ndarray: 243 | """ 244 | Return a copy of the ``self.data_volume`` attribute only containing 245 | the specified units. 246 | 247 | Parameters 248 | ---------- 249 | units: list[int] 250 | List of units to retrieve. 251 | 252 | Returns 253 | ------- 254 | np.ndarray 255 | """ 256 | # data = np.empty(self.grid.shape) * np.nan # ISSUES with plotting 257 | data = np.zeros(self.grid.shape) 258 | test = np.isin(self.data_volume, units) 259 | data = np.where(test, self.data_volume, data) 260 | return data 261 | 262 | def get_data_model(self) -> np.ndarray: 263 | """ 264 | Return a copy of the ``self.data_volume`` attribute corresponding to 265 | the state of the ``self.model`` attribute. 266 | """ 267 | valid_ids = [id_ for (id_, boolean) in self.model.items() if boolean] 268 | geology = self.get_data_units(valid_ids) 269 | return geology 270 | 271 | ############# 272 | ### OTHER ### 273 | ############# 274 | 275 | def compute_statistics(self) -> None: 276 | """ 277 | Populate the ``self.stats`` attribute with a pandas DataFrame 278 | containing statistics (counts and frequency) on the data. 279 | 280 | Returns 281 | ------- 282 | None 283 | """ 284 | values, counts = np.unique(self.data_volume, return_counts=True) 285 | values = values.astype('int') 286 | stats = { 287 | 'counts': counts, 288 | 'freq': counts / self.grid.nodes, 289 | 'volume': counts * self.grid.node_volume, 290 | } 291 | self.stats = pd.DataFrame(data=stats, index=values) 292 | return None 293 | -------------------------------------------------------------------------------- /build/lib/pykasso/model/geologic_features/geology.py: -------------------------------------------------------------------------------- 1 | from ...core._namespaces import DEFAULT_FMM_COSTS 2 | from ...core.grid import Grid 3 | from .geologicfeature import GeologicFeature 4 | 5 | 6 | class Geology(GeologicFeature): 7 | """ 8 | Class modeling the geologic model.
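    Minimal construction sketch (illustrative only; the data path and costs
    mirror the 'betteraz_01.yaml' example, but such settings typically come
    from a YAML settings file handled by pyKasso's application layer, so the
    direct call below is an assumption):

    >>> geology = Geology(grid, data='betteraz/inputs/betteraz_2D_z430.gslib',
    ...                   costs={1: 0.5, 2: 0.7})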
9 | """ 10 | 11 | def __init__( 12 | self, 13 | grid: Grid, 14 | default_fmm_cost: float = DEFAULT_FMM_COSTS['geology'], 15 | *args, 16 | **kwargs, 17 | ) -> None: 18 | feature = 'geology' 19 | dim = 3 20 | super().__init__(grid, feature, dim, default_fmm_cost, *args, **kwargs) 21 | 22 | def set_names( 23 | self, 24 | names: dict[int, str], 25 | default_name: str = 'unit {}', 26 | ) -> None: 27 | """ 28 | Assign names to geologic units based on the provided ``names`` 29 | dictionary, with an optional default naming pattern. 30 | 31 | Parameters 32 | ---------- 33 | names : dict[int, str] 34 | A dictionary where the keys are geologic unit indices (integers) 35 | and the values are the corresponding names (strings) to be 36 | assigned. This dictionary specifies which geologic unit should 37 | receive custom names. 38 | default_name : str, default: 'unit {}' 39 | A format string used to generate default geologic unit names for 40 | items not explicitly named in the ``names`` dictionary. The format 41 | string should include a placeholder (e.g., '{}') that will be 42 | replaced by the item's index. 43 | 44 | Notes 45 | ----- 46 | This function does not return a value. It rewrites the ``self.names`` 47 | attribute with the new specified dictionary. 48 | """ 49 | return super().set_names(names, default_name) 50 | 51 | def set_model( 52 | self, 53 | model: dict[int, bool], 54 | default_model: bool = True, 55 | ) -> None: 56 | """ 57 | Indicate if a geologic unit should be considered in the modelisation 58 | based on the provided dictionary, with an optional default setting. 59 | 60 | Parameters 61 | ---------- 62 | model : dict[int, bool] 63 | A dictionary where the keys are geologic unit indices (integers) 64 | and the values are booleans indicating if the item is considered or 65 | not. 66 | default_model : bool, default: True 67 | The default value to be applied to geologic units not explicitly 68 | listed in the ``model`` dictionary. 69 | 70 | Notes 71 | ----- 72 | This function does not return a value. It rewrites the ``self.model`` 73 | attribute with the new specified dictionary. 74 | """ 75 | model.setdefault(0, True) 76 | return super().set_model(model, default_model) 77 | -------------------------------------------------------------------------------- /build/lib/pykasso/model/geologic_features/surface.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | 4 | from .geologicfeature import GeologicFeature 5 | from ...core.grid import Grid 6 | 7 | 8 | class Surface(GeologicFeature): 9 | """ 10 | Subclass modeling a two dimensional geological feature. 11 | """ 12 | 13 | def __init__( 14 | self, 15 | grid: Grid, 16 | feature: str, 17 | *args, 18 | **kwargs, 19 | ) -> None: 20 | dim = 2 21 | default_fmm_cost = None 22 | super().__init__(grid, feature, dim, default_fmm_cost, *args, **kwargs) 23 | 24 | def _surface_to_volume(self, condition: str, grid: Grid) -> np.ndarray: 25 | """ 26 | Convert a two dimensional array into a three dimensional array.
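        The thresholding can be sketched with plain NumPy, assuming
        ``grid.get_k`` returns the z-index of the surface in each column:

        >>> import numpy as np
        >>> k, nz = 2, 4
        >>> np.array([1 if z >= k else 0 for z in range(nz)])
        array([0, 0, 1, 1])

        With ``condition='>='`` cells at or above the surface are set to 1;
        ``'<='`` keeps those at or below it, and ``'='`` only the surface
        layer itself.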
27 | """ 28 | k = grid.get_k(self.data_surface) 29 | data_volume = np.zeros((grid.nx, grid.ny, grid.nz)) 30 | for z in range(grid.nz): 31 | data_volume[:, :, z] = z 32 | if condition == '>=': 33 | test = data_volume[:, :, z] >= k 34 | elif condition == '=': 35 | test = data_volume[:, :, z] == k 36 | elif condition == '<=': 37 | test = data_volume[:, :, z] <= k 38 | data_volume[:, :, z] = np.where(test, 1, 0) 39 | return data_volume 40 | 41 | def compute_statistics(self) -> None: 42 | """ 43 | Populate the ``self.stats`` attribute with a pandas DataFrame 44 | containing statistics (counts and frequency) on the data. 45 | 46 | Returns 47 | ------- 48 | None 49 | """ 50 | values, counts = np.unique(self.data_surface, return_counts=True) 51 | values = values.astype('int') 52 | stats = { 53 | 'counts': counts, 54 | 'freq': counts / self.grid.nodes, 55 | 'surface': counts * self.grid.node_area, 56 | } 57 | self.stats = pd.DataFrame(data=stats, index=values) 58 | return None 59 | -------------------------------------------------------------------------------- /build/lib/pykasso/visualization/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | ============= 3 | visualization 4 | ============= 5 | 6 | A sub-module for karst network visualization. 7 | """ 8 | 9 | __all__ = [] 10 | 11 | from .visualizer import * 12 | from .._version import __version__ 13 | 14 | __all__.extend(['__version__']) 15 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: pykasso 2 | channels: 3 | - conda-forge 4 | - agd-lbr 5 | dependencies: 6 | - python=3.9 7 | - numpy 8 | - pandas 9 | - matplotlib 10 | - pyqt 11 | - openpyxl 12 | - mpmath 13 | - pyyaml 14 | - scipy 15 | - plotly 16 | - shapely 17 | - rasterio 18 | - agd 19 | - hfm 20 | - networkx 21 | - mplstereonet 22 | - pyvista 23 | - imageio 24 | - trame 25 | - trame-vuetify 26 | - trame-vtk 27 | - ipykernel 28 | - jupyter 29 | - jupyterlab 30 | - ipyvtklink 31 | - snakeviz 32 | - sphinx 33 | - numpydoc 34 | - pip 35 | - pip: 36 | - karstnet 37 | - -e . 38 | -------------------------------------------------------------------------------- /img/animation_01.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/img/animation_01.gif -------------------------------------------------------------------------------- /img/animation_02.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/img/animation_02.gif -------------------------------------------------------------------------------- /img/pykasso_banner_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/img/pykasso_banner_logo.png -------------------------------------------------------------------------------- /notebooks/colab/readme.md: -------------------------------------------------------------------------------- 1 | # Colab 2 | 3 | ``Colab`` is a notebook collection of simple examples showcasing how to use pyKasso on [Google Colab](https://colab.google/). 
4 | 5 | - [Colab 01](colab_01.ipynb): Installation of pyKasso -------------------------------------------------------------------------------- /notebooks/geometry/readme.md: -------------------------------------------------------------------------------- 1 | # Geometry 2 | 3 | ``Geometry`` is a notebook collection of simple examples designed to illustrate how pyKasso works and behaves according to the defined parameters. 4 | 5 | - [Geometry 01](geometry_01.ipynb): The influence of geology, faults, and fractures 6 | - [Geometry 02](geometry_02.ipynb): The influence of topography 7 | - [Geometry 03](geometry_03.ipynb): The influence of water level elevation 8 | - [Geometry 04](geometry_04.ipynb): The influence of bedrock elevation 9 | - [Geometry 05](geometry_05.ipynb): The combined influence of topography, water elevation level, and bedrock elevation 10 | - [Geometry 06](geometry_06.ipynb): Two-dimensional examples 11 | - [Geometry 07](geometry_07.ipynb): The influence of inlets and outlets parameters 12 | - [Geometry 08](geometry_08.ipynb): The influence of the ratio parameter -------------------------------------------------------------------------------- /notebooks/misc/figures.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "attachments": {}, 5 | "cell_type": "markdown", 6 | "id": "4304ce3d", 7 | "metadata": {}, 8 | "source": [ 9 | "# Figures" 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": 1, 15 | "id": "fa45608e", 16 | "metadata": {}, 17 | "outputs": [], 18 | "source": [ 19 | "from PIL import Image\n", 20 | "from PIL import ImageFont\n", 21 | "from PIL import ImageDraw" 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": 2, 27 | "id": "06ad11aa", 28 | "metadata": {}, 29 | "outputs": [], 30 | "source": [ 31 | "def merge_figure(filenames: list,\n", 32 | " filepath: str,\n", 33 | " size: tuple = (2, 2), # (x, y)\n", 34 | " labels: list = None,\n", 35 | " text_color: tuple = (0, 0, 0),\n", 36 | " font_size: int = 24,\n", 37 | " background_color: tuple = (255, 255, 255)):\n", 38 | " #DOC\n", 39 | " \n", 40 | " # Read images\n", 41 | " images = [Image.open(x) for x in filenames]\n", 42 | " \n", 43 | " # Get image size\n", 44 | " nx = size[0]\n", 45 | " ny = size[1]\n", 46 | " \n", 47 | " # Get dimensions\n", 48 | " widths, heights = zip(*(i.size for i in images))\n", 49 | " width = max(widths)\n", 50 | " height = max(heights)\n", 51 | " blank_space_width = width\n", 52 | " blank_space_height = int(height / 10)\n", 53 | " \n", 54 | " # Resize the images\n", 55 | " images = [im.resize((width, height), Image.Resampling.LANCZOS)for im in images]\n", 56 | " total_width = width * nx\n", 57 | " if labels is not None:\n", 58 | " total_height = (height * ny) + (blank_space_height * ny)\n", 59 | " else:\n", 60 | " total_height = (height * ny)\n", 61 | " \n", 62 | " # Define the variables for font\n", 63 | " font = ImageFont.truetype(\"arial.ttf\", font_size)\n", 64 | " x_padding = 0\n", 65 | " y_padding = 10\n", 66 | " \n", 67 | " # Paste the images / Add blank spaces / Print text\n", 68 | " new_image = Image.new('RGB', (total_width, total_height), background_color)\n", 69 | " blank_position = 0\n", 70 | " # ys = [0, 1]\n", 71 | " # xs = [0, 1]\n", 72 | " ys = list(range(size[1]))\n", 73 | " xs = list(range(size[0]))\n", 74 | " for (j, y) in enumerate(ys):\n", 75 | " for (i, x) in enumerate(xs):\n", 76 | " n = x + y * len(xs)\n", 77 | " x_location = i*width\n", 78 | " if labels is not 
None:\n", 79 | " y_location = j*height + (y + 1 - blank_position) * blank_space_height\n", 80 | " else:\n", 81 | " y_location = j*height\n", 82 | " new_image.paste(images[n], (x_location, y_location))\n", 83 | " \n", 84 | " # Print text\n", 85 | " if labels is not None:\n", 86 | " draw = ImageDraw.Draw(new_image)\n", 87 | " draw_x_location = (width / 2) + i * width\n", 88 | " draw_x_location = draw_x_location - x_padding\n", 89 | " draw_y_location = (blank_space_height / 2) + ((j + blank_position) * (height + blank_space_height))\n", 90 | " draw_y_location = draw_y_location - y_padding\n", 91 | " draw.text((draw_x_location, draw_y_location), labels[n], text_color, font=font)\n", 92 | "\n", 93 | " # new_image.show()\n", 94 | " new_image.save(filepath)\n", 95 | " return None\n" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": 3, 101 | "id": "acbe2333", 102 | "metadata": {}, 103 | "outputs": [], 104 | "source": [ 105 | "# Merge figures from PDFs\n", 106 | "filenames = [\n", 107 | " 'img/PDF_poisson.png',\n", 108 | " 'img/PDF_uniform.png',\n", 109 | " 'img/PDF_vonmises.png',\n", 110 | " 'img/PDF_power.png',\n", 111 | "]\n", 112 | "labels = ['(a)', '(b)', '(c)', '(d)']\n", 113 | "merge_figure(filenames=filenames, labels=labels, filepath='img/merge_PDF.png')" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": 8, 119 | "id": "b4154606", 120 | "metadata": {}, 121 | "outputs": [], 122 | "source": [ 123 | "# Merge figures from example 1\n", 124 | "filenames = [\n", 125 | " '../paper/fig/example_01_karst.png',\n", 126 | " '../paper/fig/example_01_fracturation.png',\n", 127 | " '../paper/fig/example_01_cost.png',\n", 128 | " '../paper/fig/example_01_time.png',\n", 129 | "]\n", 130 | "labels = ['(a)', '(b)', '(c)', '(d)']\n", 131 | "merge_figure(filenames=filenames, labels=labels, filepath='img/example_01_merge.png')" 132 | ] 133 | }, 134 | { 135 | "cell_type": "code", 136 | "execution_count": 5, 137 | "id": "53356c5f", 138 | "metadata": {}, 139 | "outputs": [], 140 | "source": [ 141 | "# Merge figures from conceptual model\n", 142 | "filenames = ['cp_geology.png', 'cp_faults.png', 'cp_fractures.png', 'cp_conceptual_model.png']\n", 143 | "path = 'img/'\n", 144 | "filenames = [path + filename for filename in filenames]\n", 145 | "merge_figure(filenames=filenames, filepath='img/merge_conceptual_model.png')" 146 | ] 147 | }, 148 | { 149 | "cell_type": "code", 150 | "execution_count": 6, 151 | "id": "2453e0ef", 152 | "metadata": {}, 153 | "outputs": [], 154 | "source": [ 155 | "# Merge figures from importance factor\n", 156 | "filenames = [\n", 157 | " 'img/if_o-1_i-1.png',\n", 158 | " 'img/if_o-1_i-1-1.png',\n", 159 | " 'img/if_o-1_i-1-1-1.png',\n", 160 | " 'img/if_o-1-1_i-1.png',\n", 161 | " 'img/if_o-1-1_i-1-1.png',\n", 162 | " 'img/if_o-1-1_i-1-1-1.png',\n", 163 | " 'img/if_o-1-1-1_i-1.png',\n", 164 | " 'img/if_o-1-1-1_i-1-1.png',\n", 165 | " 'img/if_o-1-1-1_i-1-1-1.png'\n", 166 | "]\n", 167 | "filenames = [\n", 168 | " 'img/if_o-1_i-1.png',\n", 169 | " 'img/if_o-1_i-1-2.png',\n", 170 | " 'img/if_o-1_i-1-2-3.png',\n", 171 | " 'img/if_o-1-1_i-1.png',\n", 172 | " 'img/if_o-1-1_i-1-2.png',\n", 173 | " 'img/if_o-1-1_i-1-2-3.png',\n", 174 | " 'img/if_o-1-1-1_i-1.png',\n", 175 | " 'img/if_o-1-1-1_i-1-2.png',\n", 176 | " 'img/if_o-1-1-1_i-1-2-3.png',\n", 177 | "]\n", 178 | "labels = [\n", 179 | " '(a)', '(b)', '(c)',\n", 180 | " '(d)', '(e)', '(f)',\n", 181 | " '(g)', '(h)', '(i)',\n", 182 | "]\n", 183 | "merge_figure(filenames=filenames,\n", 184 | " size=(3, 
3),\n", 185 | " labels=labels,\n", 186 | " filepath='img/merge_importance_factor.png')" 187 | ] 188 | }, 189 | { 190 | "cell_type": "code", 191 | "execution_count": 7, 192 | "id": "7808e852", 193 | "metadata": {}, 194 | "outputs": [], 195 | "source": [ 196 | "# Merge figures of example three\n", 197 | "filenames = [\n", 198 | " '../paper/fig/example_03_geologic_model.png',\n", 199 | " '../paper/fig/example_03_faults_model.png',\n", 200 | " '../paper/fig/example_03_fracturation_model.png',\n", 201 | " '../paper/fig/example_03_karst_model.png',\n", 202 | "]\n", 203 | "merge_figure(\n", 204 | " filenames=filenames,\n", 205 | " size=(2,2),\n", 206 | " filepath='img/example_03_merge.png',\n", 207 | ")" 208 | ] 209 | } 210 | ], 211 | "metadata": { 212 | "kernelspec": { 213 | "display_name": "Python 3 (ipykernel)", 214 | "language": "python", 215 | "name": "python3" 216 | }, 217 | "language_info": { 218 | "codemirror_mode": { 219 | "name": "ipython", 220 | "version": 3 221 | }, 222 | "file_extension": ".py", 223 | "mimetype": "text/x-python", 224 | "name": "python", 225 | "nbconvert_exporter": "python", 226 | "pygments_lexer": "ipython3", 227 | "version": "3.9.0" 228 | }, 229 | "vscode": { 230 | "interpreter": { 231 | "hash": "4413b7a1516cd768ea3ac68cc197c3d7aee95b3034134a5d88d14bfdfd96b022" 232 | } 233 | } 234 | }, 235 | "nbformat": 4, 236 | "nbformat_minor": 5 237 | } 238 | -------------------------------------------------------------------------------- /notebooks/misc/img/PDF_poisson.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/PDF_poisson.png -------------------------------------------------------------------------------- /notebooks/misc/img/PDF_power.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/PDF_power.png -------------------------------------------------------------------------------- /notebooks/misc/img/PDF_uniform.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/PDF_uniform.png -------------------------------------------------------------------------------- /notebooks/misc/img/PDF_vonmises.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/PDF_vonmises.png -------------------------------------------------------------------------------- /notebooks/misc/img/cp_conceptual_model.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/cp_conceptual_model.png -------------------------------------------------------------------------------- /notebooks/misc/img/cp_faults.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/cp_faults.png -------------------------------------------------------------------------------- /notebooks/misc/img/cp_fractures.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/cp_fractures.png -------------------------------------------------------------------------------- /notebooks/misc/img/cp_geology.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/cp_geology.png -------------------------------------------------------------------------------- /notebooks/misc/img/example_01_merge.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/example_01_merge.png -------------------------------------------------------------------------------- /notebooks/misc/img/example_03_merge.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/example_03_merge.png -------------------------------------------------------------------------------- /notebooks/misc/img/if_o-1-1-1_i-1-1-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/if_o-1-1-1_i-1-1-1.png -------------------------------------------------------------------------------- /notebooks/misc/img/if_o-1-1-1_i-1-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/if_o-1-1-1_i-1-1.png -------------------------------------------------------------------------------- /notebooks/misc/img/if_o-1-1-1_i-1-2-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/if_o-1-1-1_i-1-2-3.png -------------------------------------------------------------------------------- /notebooks/misc/img/if_o-1-1-1_i-1-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/if_o-1-1-1_i-1-2.png -------------------------------------------------------------------------------- /notebooks/misc/img/if_o-1-1-1_i-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/if_o-1-1-1_i-1.png -------------------------------------------------------------------------------- /notebooks/misc/img/if_o-1-1_i-1-1-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/if_o-1-1_i-1-1-1.png -------------------------------------------------------------------------------- /notebooks/misc/img/if_o-1-1_i-1-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/if_o-1-1_i-1-1.png -------------------------------------------------------------------------------- 
/notebooks/misc/img/if_o-1-1_i-1-2-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/if_o-1-1_i-1-2-3.png -------------------------------------------------------------------------------- /notebooks/misc/img/if_o-1-1_i-1-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/if_o-1-1_i-1-2.png -------------------------------------------------------------------------------- /notebooks/misc/img/if_o-1-1_i-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/if_o-1-1_i-1.png -------------------------------------------------------------------------------- /notebooks/misc/img/if_o-1_i-1-1-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/if_o-1_i-1-1-1.png -------------------------------------------------------------------------------- /notebooks/misc/img/if_o-1_i-1-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/if_o-1_i-1-1.png -------------------------------------------------------------------------------- /notebooks/misc/img/if_o-1_i-1-2-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/if_o-1_i-1-2-3.png -------------------------------------------------------------------------------- /notebooks/misc/img/if_o-1_i-1-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/if_o-1_i-1-2.png -------------------------------------------------------------------------------- /notebooks/misc/img/if_o-1_i-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/if_o-1_i-1.png -------------------------------------------------------------------------------- /notebooks/misc/img/merge_PDF.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/merge_PDF.png -------------------------------------------------------------------------------- /notebooks/misc/img/merge_conceptual_model.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/merge_conceptual_model.png -------------------------------------------------------------------------------- /notebooks/misc/img/merge_importance_factor.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/misc/img/merge_importance_factor.png -------------------------------------------------------------------------------- /notebooks/misc/subdomains.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "df576a89", 6 | "metadata": {}, 7 | "source": [ 8 | "# Understand the subdomains" 9 | ] 10 | }, 11 | { 12 | "cell_type": "code", 13 | "execution_count": null, 14 | "id": "d3d8460a", 15 | "metadata": { 16 | "vscode": { 17 | "languageId": "plaintext" 18 | } 19 | }, 20 | "outputs": [], 21 | "source": [ 22 | "# subdomains = app.model.domain.subdomains\n", 23 | "# app.visualizer.notebook = False\n", 24 | "\n", 25 | "# for subdomain in ['bedrock_vadose', 'bedrock_phreatic']:\n", 26 | "# print(subdomain)\n", 27 | "# sbd = app.model.domain.get_subdomain(subdomain)\n", 28 | "# app.visualizer.pv_plot_array(sbd, mask_values=[0])" 29 | ] 30 | } 31 | ], 32 | "metadata": { 33 | "kernelspec": { 34 | "display_name": "Python 3 (ipykernel)", 35 | "language": "python", 36 | "name": "python3" 37 | }, 38 | "language_info": { 39 | "codemirror_mode": { 40 | "name": "ipython", 41 | "version": 3 42 | }, 43 | "file_extension": ".py", 44 | "mimetype": "text/x-python", 45 | "name": "python", 46 | "nbconvert_exporter": "python", 47 | "pygments_lexer": "ipython3", 48 | "version": "3.9.0" 49 | }, 50 | "vscode": { 51 | "interpreter": { 52 | "hash": "4413b7a1516cd768ea3ac68cc197c3d7aee95b3034134a5d88d14bfdfd96b022" 53 | } 54 | } 55 | }, 56 | "nbformat": 4, 57 | "nbformat_minor": 5 58 | } 59 | -------------------------------------------------------------------------------- /notebooks/paper/fig/example_01_cost.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/paper/fig/example_01_cost.png -------------------------------------------------------------------------------- /notebooks/paper/fig/example_01_fracturation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/paper/fig/example_01_fracturation.png -------------------------------------------------------------------------------- /notebooks/paper/fig/example_01_karst.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/paper/fig/example_01_karst.png -------------------------------------------------------------------------------- /notebooks/paper/fig/example_01_mean_karstic_network.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/paper/fig/example_01_mean_karstic_network.png -------------------------------------------------------------------------------- /notebooks/paper/fig/example_01_time.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/paper/fig/example_01_time.png -------------------------------------------------------------------------------- /notebooks/paper/fig/example_02_iso_vs_ani.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/paper/fig/example_02_iso_vs_ani.png -------------------------------------------------------------------------------- /notebooks/paper/fig/example_03_faults_model.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/paper/fig/example_03_faults_model.png -------------------------------------------------------------------------------- /notebooks/paper/fig/example_03_fracturation_model.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/paper/fig/example_03_fracturation_model.png -------------------------------------------------------------------------------- /notebooks/paper/fig/example_03_geologic_model.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/paper/fig/example_03_geologic_model.png -------------------------------------------------------------------------------- /notebooks/paper/fig/example_03_karst_model.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/paper/fig/example_03_karst_model.png -------------------------------------------------------------------------------- /notebooks/paper/readme.md: -------------------------------------------------------------------------------- 1 | # Paper 2 | 3 | ``Paper`` is a notebook collection containing the examples presented in [Miville2024](). 4 | 5 | - [Example 1](example_01.ipynb): Extremely simplified model in which a single conduit is simulated from an outlet and an inlet in a uniform geological unit. Two families of fractures are randomly generated to introduce uncertainty into the model. 6 | - [Example 2](example_02.ipynb): Simple example illustrating the differences in results between isotropic and anisotropic fast marching ([Visualize results](https://htmlpreview.github.io/?https://github.com/randlab/pyKasso/blob/dev/notebooks/paper/example_02.html)). 7 | - [Example 3](example_03.ipynb): Complex example showcasing pyKasso's functionalities using the Tsanfleuron Glacier as a case study ([Visualize result](https://htmlpreview.github.io/?https://github.com/randlab/pyKasso/blob/dev/notebooks/paper/example_03.html)). -------------------------------------------------------------------------------- /notebooks/paper/tsanfleuron_data.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/notebooks/paper/tsanfleuron_data.zip -------------------------------------------------------------------------------- /notebooks/readme.md: -------------------------------------------------------------------------------- 1 | # Notebooks 2 | 3 | ## Geometry 4 | 5 | A notebook collection illustrating the behavior of pyKasso in various oversimplified geological situations. 6 | 7 | ## Paper 8 | 9 | A notebook collection grouping the examples presented in ARTICLE. 
10 | 11 | ## Colab 12 | 13 | Notebooks showcasing how pyKasso can be used with [Google Colab](https://colab.google/). 14 | -------------------------------------------------------------------------------- /pykasso/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | pyKasso 3 | ======= 4 | 5 | pyKasso is a python3 open-source package intended to simulate easily and 6 | quickly karst networks using a geological model, hydrogeological, and 7 | structural data. 8 | 9 | License 10 | ------- 11 | Released under the GPL-3.0 license. 12 | Copyright (C) 2025 University of Neuchâtel - CHYN. 13 | - François Miville 14 | - Philippe Renard 15 | - Chloé Fandel 16 | 17 | Available subpackages 18 | --------------------- 19 | core 20 | Karstic conduit network generator 21 | analysis 22 | Karstic conduit network analysis tool 23 | visualization 24 | Karstic conduit network visualization tool 25 | 26 | Utilities 27 | --------- 28 | __version__ 29 | pyKasso version string 30 | """ 31 | 32 | __all__ = [] 33 | 34 | # Import pyKasso's core 35 | from . import core 36 | from .core import * 37 | __all__.extend(core.__all__) 38 | 39 | # Import pyKasso version string 40 | from ._version import __version__ 41 | __all__.extend(['__version__']) 42 | -------------------------------------------------------------------------------- /pykasso/_misc/cases/betteraz/inputs/betteraz_polygon.txt: -------------------------------------------------------------------------------- 1 | 573233.2159 254285.4075 2 | 573492.7457 254216.9601 3 | 574224.7527 254498.355 4 | 574448.1574 254530.2021 5 | 574839.8287 254546.3632 6 | 575243.8585 254558.4841 7 | 575823.5819 254594.015 8 | 575891.3999 254594.0688 9 | 576035.757 254774.5213 10 | 576820.0502 254989.8454 11 | 577300.1327 255001.2533 12 | 578109.9526 253756.6557 13 | 578196.6041 253751.2034 14 | 578201.0083 253717.9452 15 | 578210.6853 253700.7526 16 | 578212.0483 253684.8019 17 | 578207.6328 253671.9952 18 | 578163.6277 253647.8722 19 | 577845.9889 253568.1357 20 | 577697.2703 253379.6677 21 | 577154.4443 253114.6716 22 | 576504.5207 251837.67 23 | 576499.203 251710.668 24 | 576397.2449 251303.4297 25 | 576300.9907 251025.5997 26 | 576173.0974 250690.8491 27 | 576165.3288 250571.7198 28 | 576120.5475 250478.2502 29 | 575756.5605 250672.9055 30 | 575629.4619 250716.5986 31 | 575278.2224 250712.6279 32 | 574844.1067 250823.3472 33 | 574329.3251 251094.9978 34 | 573998.496 251110.2083 35 | 573904.7373 251081.5698 36 | 573724.944 251041.7609 37 | 573399.1058 251248.2915 38 | 573198.0416 252889.8409 39 | 573315.6856 253260.5977 40 | 573301.901 253427.2006 41 | 573037.8556 253788.9261 42 | 573014.3268 253847.6293 43 | 573233.2159 254285.4075 44 | -------------------------------------------------------------------------------- /pykasso/_misc/cases/betteraz/inputs/inlets_01.txt: -------------------------------------------------------------------------------- 1 | 574521.4022 253511.5752 2 | 574608.7148 253489.0855 3 | 575852.259 253979.8886 4 | 575607.5189 253010.1888 5 | 575292.6641 254470.6917 6 | 575189.4764 254424.3895 7 | 575309.8621 254257.7017 8 | 573562.365 252612.7309 9 | 573594.1151 252670.9393 10 | 575944.5547 254397.2167 11 | 574259.1555 253670.934 12 | 574589.8853 252918.1929 13 | 575331.9107 253247.2471 14 | 574354.2734 251662.3897 15 | 573680.4989 253532.3081 16 | 573421.2067 253832.6108 17 | 577145.2246 254231.8678 18 | 575599.7902 251620.4251 19 | 575443.1566 251169.5742 20 | 575726.7905 251131.4741 21 | 576249.3436 
251055.8032 -------------------------------------------------------------------------------- /pykasso/_misc/cases/betteraz/inputs/inlets_02.txt: -------------------------------------------------------------------------------- 1 | 574521.4022 253511.5752 2 | 574608.7148 253489.0855 3 | 575852.259 253979.8886 4 | 575607.5189 253010.1888 5 | 575292.6641 254470.6917 6 | 575189.4764 254424.3895 7 | 575309.8621 254257.7017 8 | 573562.365 252612.7309 9 | 573594.1151 252670.9393 10 | 575944.5547 254397.2167 11 | 574259.1555 253670.934 12 | 574589.8853 252918.1929 13 | 575331.9107 253247.2471 14 | 574354.2734 251662.3897 15 | 577145.2246 254231.8678 16 | 575599.7902 251620.4251 17 | 575443.1566 251169.5742 18 | 575726.7905 251131.4741 19 | 576249.3436 251055.8032 20 | 575427.0996 250985.4269 21 | 574288.6286 253239.8142 22 | 575795.1691 251547.2712 23 | 575701.2418 251343.5416 24 | 576039.9091 251035.3014 25 | 576130.3968 254230.1516 26 | 575363.3682 254346.5684 27 | 573684.5836 253553.6106 28 | 573419.9997 253851.2675 29 | 573423.1747 253809.9924 30 | 573678.7628 253522.6543 31 | 574460.7404 253860.9248 32 | 575144.4251 254075.2377 33 | 575272.4836 254225.5213 34 | 575164.5334 254305.9548 35 | 575231.2086 254522.9136 36 | 575342.3338 251518.2993 37 | 575878.9099 251056.3358 38 | 575915.4224 251405.5865 39 | 574057.5145 251482.845 40 | 573544.2219 254073.6502 41 | 573569.6431 251540.8525 42 | 573938.7541 251638.6171 43 | 574809.5701 252049.6701 44 | 575187.746 251941.8867 45 | 574501.399 252875.6378 46 | 575291.4961 253621.8406 47 | 575223.7868 254303.407 48 | 575246.2764 254410.5635 49 | 574529.2541 252959.321 50 | 575804.5483 251108.5568 -------------------------------------------------------------------------------- /pykasso/_misc/cases/betteraz/inputs/outlets.txt: -------------------------------------------------------------------------------- 1 | 573262.1261 252885.0377 -------------------------------------------------------------------------------- /pykasso/_misc/cases/betteraz/settings/betteraz_01.yaml: -------------------------------------------------------------------------------- 1 | ###################### 2 | ### PYKASSO SETTINGS ### 3 | ###################### 4 | 5 | --- 6 | sks: 7 | seed : 1 8 | algorithm : 'Isotropic3' 9 | 10 | ############ 11 | ### Grid ### 12 | ############ 13 | 14 | grid: 15 | x0 : 572510 16 | y0 : 250010 17 | z0 : 430 18 | nx : 300 19 | ny : 300 20 | nz : 20 21 | dx : 20 22 | dy : 20 23 | dz : 20 24 | 25 | ############## 26 | ### Domain ### 27 | ############## 28 | 29 | domain: 30 | delimitation : 'betteraz/inputs/betteraz_polygon.txt' 31 | topography : '' 32 | bedrock : '' 33 | water_level : '' 34 | 35 | ############### 36 | ### Geology ### 37 | ############### 38 | 39 | geology: 40 | data : 'betteraz/inputs/betteraz_2D_z430.gslib' 41 | costs : 42 | 1 : 0.5 43 | 2 : 0.7 44 | 45 | faults: 46 | data : 'betteraz/inputs/betteraz_fault_20_2D.gslib' 47 | 48 | ############## 49 | ### Points ### 50 | ############## 51 | 52 | ### Outlets 53 | outlets: 54 | number : 1 55 | data : 'betteraz/inputs/outlets.txt' 56 | shuffle : False 57 | importance : [1] 58 | seed : 1 59 | mode : 'domain_bottom' 60 | 61 | ### Inlets 62 | inlets: 63 | number : 15 64 | data : 'betteraz/inputs/inlets_01.txt' 65 | shuffle : False 66 | per_outlet : [1] 67 | importance : [1,1,1] 68 | seed : 2 69 | mode : 'domain_surface' 70 | 71 | #################### 72 | ### Fracturation ### 73 | #################### 74 | 75 | fractures: 76 | data : '' 77 | seed : 12 78 | settings : 79 | family_01: 80 | alpha : 2 
81 | density : 0.00001 82 | orientation : [340, 20] 83 | dip : [80, 90] 84 | length : [1000, 2000] 85 | family_02: 86 | alpha : 2 87 | density : 0.00001 88 | orientation : [340, 20] 89 | dip : [0, 10] 90 | length : [1000, 2000] 91 | 92 | # fractures_densities : [5.83e-6,1.38e-6,2.22e-6, 2.5e-6,4.16e-6,6.66e-6, 5e-6] 93 | # fractures_min_orientation : [ 0, 15, 30, 60, 105, 120, 150] 94 | # fractures_max_orientation : [ 15, 30, 45, 105, 120, 150, 180] 95 | # fractures_min_dip : [ 80, 80, 80, 80, 80, 80, 80] 96 | # fractures_max_dip : [ 90, 90, 90, 90, 90, 90, 90] 97 | 98 | # # For fracture size 99 | # fractures_alpha : [ 2, 2, 2, 2, 2, 2, 2] 100 | # fractures_min_length : [ 100, 100, 100, 100, 100, 100, 100] 101 | # fractures_max_length : [8500,8500,8500,8500,8500,8500,8500] 102 | 103 | ############## 104 | ### OTHERS ### 105 | ############## 106 | 107 | verbosity: 108 | logging : 0 109 | agd : 0 110 | karstnet : 0 111 | ... 112 | -------------------------------------------------------------------------------- /pykasso/_misc/cases/betteraz/settings/betteraz_case_1.yaml: -------------------------------------------------------------------------------- 1 | ###################### 2 | ### PYKASSO SETTINGS ### 3 | ###################### 4 | 5 | --- 6 | ############ 7 | ### Grid ### 8 | ############ 9 | 10 | x0 : 572510 11 | y0 : 250010 12 | z0 : 430 13 | nx : 300 14 | ny : 300 15 | nz : 1 16 | dx : 20 17 | dy : 20 18 | dz : 20 19 | 20 | ############### 21 | ### Polygon ### 22 | ############### 23 | 24 | data_has_mask : True 25 | mask_data : 'inputs/betteraz_polygon.txt' 26 | 27 | #################### 28 | ### PointManager ### 29 | #################### 30 | 31 | ###Outlets 32 | # 'random' - full random points 33 | # 'import' - import points 34 | # 'composite' - add n random points to imported points 35 | outlets_mode : 'import' 36 | outlets_data : 'inputs/betteraz_outlets.csv' 37 | outlets_number : 1 38 | outlets_shuffle : False 39 | outlets_importance : [1] 40 | 41 | ###Inlets 42 | # 'random' - full random points 43 | # 'import' - import points 44 | # 'composite' - add n random points to imported points 45 | inlets_mode : 'import' 46 | inlets_data : 'inputs/betteraz_inlets_case_1.csv' 47 | inlets_number : 50 48 | inlets_shuffle : False 49 | inlets_per_outlet : [1] 50 | inlets_importance : [1] 51 | 52 | ###################### 53 | ### GeologyManager ### 54 | ###################### 55 | 56 | ###Geology 57 | # 'null' - No geology 58 | # 'gslib' - Import geology via GSLIB 59 | # 'csv' - Import geology via csv 60 | # 'image' - Import geology via image 61 | geology_mode : 'gslib' 62 | geology_datafile : 'inputs/betteraz_2D_z430.gslib' 63 | 64 | ###Topography 65 | # 'null' - No topography 66 | # 'csv' - Import topography from csv file 67 | topography_mode : 'null' 68 | topography_datafile : '' 69 | 70 | ###Orientation 71 | # 'null' - No orientation 72 | # 'topo' - Calculate orientation from a topography array 73 | # 'surface' - Calculate from array of elevation of lower boundary of karst unit (as csv) 74 | orientation_mode : 'null' 75 | orientation_datafile : '' 76 | 77 | ###Faults 78 | # 'null' - No faults 79 | # 'gslib' - Import faults 80 | # 'csv' - Import faults via csv 81 | # 'image' - Import faults via image 82 | faults_mode : 'gslib' 83 | faults_datafile : 'inputs/betteraz_fault_20_2D.gslib' 84 | 85 | ###Fractures 86 | # 'null' - No fractures 87 | # 'gslib' - Import fractures 88 | # 'csv' - Import fractures via csv 89 | # 'image' - Import fractures via image 90 | # 'random' - Generate random 
fractures 91 | fractures_mode : 'random' 92 | fractures_datafile : 'inputs/betteraz_fault_20_2D.gslib' 93 | 94 | # If fractures mode is 'generate' 95 | # Each value in list is for a separate fracture family - length of list indicates number of families 96 | fractures_densities : [5.83e-6,1.38e-6,2.22e-6, 2.5e-6,4.16e-6,6.66e-6, 5e-6] 97 | fractures_min_orientation : [ 0, 15, 30, 60, 105, 120, 150] 98 | fractures_max_orientation : [ 15, 30, 45, 105, 120, 150, 180] 99 | fractures_min_dip : [ 80, 80, 80, 80, 80, 80, 80] 100 | fractures_max_dip : [ 90, 90, 90, 90, 90, 90, 90] 101 | 102 | # For fracture size 103 | fractures_alpha : [ 2, 2, 2, 2, 2, 2, 2] 104 | fractures_min_length : [ 100, 100, 100, 100, 100, 100, 100] 105 | fractures_max_length : [8500,8500,8500,8500,8500,8500,8500] 106 | 107 | ############################### 108 | ### Fast-Marching Algorithm ### 109 | ############################### 110 | 111 | #Choose which algorithm to use: 112 | #'Isotropic2': agd-hfm: 2D, equivalent of skfmm 113 | #'Isotropic3': adg-hfm: 3D, equivalent of skfmm 114 | #'Riemann2' : agd-hfm: 2D, anisotropic, requires an anisotropy vector field (topography or geologic orientation), performs well in high-relief catchments 115 | #'Riemann3' : agd-hfm: 3D version of anisotropic algorithm 116 | #Note: There are MANY more options for fast marching algorithms built into the agd-hfm library. 117 | ##See the agd-hfm library documentation for how to use these and integrate them into pyKasso: 118 | #https://github.com/Mirebeau/AdaptiveGridDiscretizations/blob/master/Notebooks_FMM/Summary.ipynb 119 | 120 | algorithm : 'Isotropic2' 121 | 122 | #Travel cost parallel to gradient (higher=slower/harder) 123 | #Must be between 0 and 1 124 | cost_out : 0.999 #default: 0.999 125 | cost_aquifer : 0.4 #default: 0.4 126 | cost_aquiclude : 0.8 #default: 0.8 127 | cost_faults : 0.2 #default: 0.2 128 | cost_fractures : 0.2 #default: 0.2 129 | cost_conduits : 0.1 #default: 0.1 130 | cost_ratio : 0.5 #ratio of travel cost parallel to gradient / travel cost prependicular to gradient (default 0.25) 131 | #if ratio = 1: same travel cost parallel and perpendicular to gradient 132 | #if ratio < 1: travel cost is lower parallel to gradient (paths will follow steepest gradient), decrease value to increase contrast 133 | #if ratio > 1: travel cost is lower perpendicular to gradient (paths will follow contours), increase value to increase contrast 134 | 135 | ### Formation to consider and FMM code associated 136 | geology_id : [1,2] # Only for 'import' geological mode 137 | geology_cost : [cost_aquifer,cost_aquiclude] 138 | 139 | ############# 140 | ### Other ### 141 | ############# 142 | 143 | # if 0 = random, if 1 = same results each run 144 | rand_seed : 1 145 | 146 | #Verbosity: how much output to display during run 147 | verbosity : 0 #0: minimal output, 1: some output; 2: more output; 3: maximum output 148 | ... 
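The legacy Betteraz settings above are plain YAML, and the per-family fracture lists line up positionally (one entry per family). A minimal inspection sketch, assuming the file is read with PyYAML (listed in environment.yml) and a path relative to the settings folder; pyKasso's own loader may treat these legacy files differently:

import yaml

with open('betteraz_case_1.yaml') as f:
    settings = yaml.safe_load(f)

print(settings['algorithm'])                 # 'Isotropic2'
print(len(settings['fractures_densities']))  # 7 fracture families defined above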
149 | -------------------------------------------------------------------------------- /pykasso/_misc/cases/betteraz/settings/betteraz_case_2.yaml: -------------------------------------------------------------------------------- 1 | ###################### 2 | ### PYKASSO SETTINGS ### 3 | ###################### 4 | 5 | --- 6 | ############ 7 | ### Grid ### 8 | ############ 9 | 10 | x0 : 572510 11 | y0 : 250010 12 | z0 : 430 13 | nx : 300 14 | ny : 300 15 | nz : 1 16 | dx : 20 17 | dy : 20 18 | dz : 20 19 | 20 | ############### 21 | ### Polygon ### 22 | ############### 23 | 24 | data_has_mask : True 25 | mask_data : 'inputs/betteraz_polygon.txt' 26 | 27 | #################### 28 | ### PointManager ### 29 | #################### 30 | 31 | ###Outlets 32 | # 'random' - full random points 33 | # 'import' - import points 34 | # 'composite' - add n random points to imported points 35 | outlets_mode : 'import' 36 | outlets_data : 'inputs/betteraz_outlets.csv' 37 | outlets_number : 1 38 | outlets_shuffle : False 39 | outlets_importance : [1] 40 | 41 | ###Inlets 42 | # 'random' - full random points 43 | # 'import' - import points 44 | # 'composite' - add n random points to imported points 45 | inlets_mode : 'import' 46 | inlets_data : 'inputs/betteraz_inlets_case_2.csv' 47 | inlets_number : 50 48 | inlets_shuffle : False 49 | inlets_per_outlet : [1] 50 | inlets_importance : [1] 51 | 52 | ###################### 53 | ### GeologyManager ### 54 | ###################### 55 | 56 | ###Geology 57 | # 'null' - No geology 58 | # 'gslib' - Import geology via GSLIB 59 | # 'csv' - Import geology via csv 60 | # 'image' - Import geology via image 61 | geology_mode : 'gslib' 62 | geology_datafile : 'inputs/betteraz_2D_z430.gslib' 63 | 64 | ###Topography 65 | # 'null' - No topography 66 | # 'csv' - Import topography from csv file 67 | topography_mode : 'null' 68 | topography_datafile : '' 69 | 70 | ###Orientation 71 | # 'null' - No orientation 72 | # 'topo' - Calculate orientation from a topography array 73 | # 'surface' - Calculate from array of elevation of lower boundary of karst unit (as csv) 74 | orientation_mode : 'null' 75 | orientation_datafile : '' 76 | 77 | ###Faults 78 | # 'null' - No faults 79 | # 'gslib' - Import faults 80 | # 'csv' - Import faults via csv 81 | # 'image' - Import faults via image 82 | faults_mode : 'gslib' 83 | faults_datafile : 'inputs/betteraz_fault_20_2D.gslib' 84 | 85 | ###Fractures 86 | # 'null' - No fractures 87 | # 'gslib' - Import fractures 88 | # 'csv' - Import fractures via csv 89 | # 'image' - Import fractures via image 90 | # 'random' - Generate random fractures 91 | fractures_mode : 'random' 92 | fractures_datafile : 'inputs/betteraz_fault_20_2D.gslib' 93 | 94 | # If fractures mode is 'generate' 95 | # Each value in list is for a separate fracture family - length of list indicates number of families 96 | fractures_densities : [5.83e-6,1.38e-6,2.22e-6, 2.5e-6,4.16e-6,6.66e-6, 5e-6] 97 | fractures_min_orientation : [ 0, 15, 30, 60, 105, 120, 150] 98 | fractures_max_orientation : [ 15, 30, 45, 105, 120, 150, 180] 99 | fractures_min_dip : [ 80, 80, 80, 80, 80, 80, 80] 100 | fractures_max_dip : [ 90, 90, 90, 90, 90, 90, 90] 101 | 102 | # For fracture size 103 | fractures_alpha : [ 2, 2, 2, 2, 2, 2, 2] 104 | fractures_min_length : [ 100, 100, 100, 100, 100, 100, 100] 105 | fractures_max_length : [8500,8500,8500,8500,8500,8500,8500] 106 | 107 | ############################### 108 | ### Fast-Marching Algorithm ### 109 | ############################### 110 | 111 | #Choose which 
algorithm to use: 112 | #'Isotropic2': agd-hfm: 2D, equivalent of skfmm 113 | #'Isotropic3': adg-hfm: 3D, equivalent of skfmm 114 | #'Riemann2' : agd-hfm: 2D, anisotropic, requires an anisotropy vector field (topography or geologic orientation), performs well in high-relief catchments 115 | #'Riemann3' : agd-hfm: 3D version of anisotropic algorithm 116 | #Note: There are MANY more options for fast marching algorithms built into the agd-hfm library. 117 | ##See the agd-hfm library documentation for how to use these and integrate them into pyKasso: 118 | #https://github.com/Mirebeau/AdaptiveGridDiscretizations/blob/master/Notebooks_FMM/Summary.ipynb 119 | 120 | algorithm : 'Isotropic2' 121 | 122 | #Travel cost parallel to gradient (higher=slower/harder) 123 | #Must be between 0 and 1 124 | cost_out : 0.999 #default: 0.999 125 | cost_aquifer : 0.4 #default: 0.4 126 | cost_aquiclude : 0.8 #default: 0.8 127 | cost_faults : 0.2 #default: 0.2 128 | cost_fractures : 0.2 #default: 0.2 129 | cost_conduits : 0.1 #default: 0.1 130 | cost_ratio : 0.5 #ratio of travel cost parallel to gradient / travel cost prependicular to gradient (default 0.25) 131 | #if ratio = 1: same travel cost parallel and perpendicular to gradient 132 | #if ratio < 1: travel cost is lower parallel to gradient (paths will follow steepest gradient), decrease value to increase contrast 133 | #if ratio > 1: travel cost is lower perpendicular to gradient (paths will follow contours), increase value to increase contrast 134 | 135 | ### Formation to consider and FMM code associated 136 | geology_id : [1,2] # Only for 'import' geological mode 137 | geology_cost : [cost_aquifer,cost_aquiclude] 138 | 139 | ############# 140 | ### Other ### 141 | ############# 142 | 143 | # if 0 = random, if 1 = same results each run 144 | rand_seed : 1 145 | 146 | #Verbosity: how much output to display during run 147 | verbosity : 0 #0: minimal output, 1: some output; 2: more output; 3: maximum output 148 | ... 
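Each fracture family in the settings above is described by a density, an orientation range, a dip range, a power-law exponent (`alpha`) and a length range. As a rough illustration of what those numbers mean, the sketch below samples one family with NumPy, treating the density as a Poisson intensity per square metre and drawing lengths from a truncated power law; this interpretation is an assumption made here for illustration, and pyKasso's own generator may differ in detail.

```python
# Hedged sketch: sample one fracture family from the parameters above.
# Assumptions (not taken from pyKasso's source): 'density' is a Poisson
# intensity per m2, orientations/dips are uniform within their bounds, and
# lengths follow a truncated power law with exponent 'alpha'.
import numpy as np

def sample_fracture_family(area_m2, density, orientation, dip, alpha,
                           length, seed=None):
    rng = np.random.default_rng(seed)
    n = rng.poisson(density * area_m2)                 # number of fractures
    orientations = rng.uniform(*orientation, size=n)   # degrees
    dips = rng.uniform(*dip, size=n)                   # degrees
    # Inverse-CDF sampling of p(l) ~ l**(-alpha) on [l_min, l_max]
    l_min, l_max = length
    u = rng.uniform(size=n)
    lengths = (l_min**(1 - alpha)
               + u * (l_max**(1 - alpha) - l_min**(1 - alpha)))**(1 / (1 - alpha))
    return orientations, dips, lengths

# First family above, on the 300 x 300 cell grid with 20 m cells (6 km x 6 km):
# o, d, l = sample_fracture_family(6000 * 6000, 5.83e-6, (0, 15), (80, 90),
#                                  2, (100, 8500), seed=1)
```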
149 | -------------------------------------------------------------------------------- /pykasso/_misc/cases/betteraz/settings/betteraz_case_3.yaml: -------------------------------------------------------------------------------- 1 | ###################### 2 | ### PYKASSO SETTINGS ### 3 | ###################### 4 | 5 | --- 6 | ############ 7 | ### Grid ### 8 | ############ 9 | 10 | x0 : 572510 11 | y0 : 250010 12 | z0 : 430 13 | nx : 300 14 | ny : 300 15 | nz : 1 16 | dx : 20 17 | dy : 20 18 | dz : 20 19 | 20 | ############### 21 | ### Polygon ### 22 | ############### 23 | 24 | data_has_mask : True 25 | mask_data : 'inputs/betteraz_polygon.txt' 26 | 27 | #################### 28 | ### PointManager ### 29 | #################### 30 | 31 | ###Outlets 32 | # 'random' - full random points 33 | # 'import' - import points 34 | # 'composite' - add n random points to imported points 35 | outlets_mode : 'import' 36 | outlets_data : 'inputs/betteraz_outlets.csv' 37 | outlets_number : 1 38 | outlets_shuffle : False 39 | outlets_importance : [1] 40 | 41 | ###Inlets 42 | # 'random' - full random points 43 | # 'import' - import points 44 | # 'composite' - add n random points to imported points 45 | inlets_mode : 'composite' 46 | inlets_data : 'inputs/betteraz_inlets_case_2.csv' 47 | inlets_number : 50 48 | inlets_shuffle : False 49 | inlets_per_outlet : [1] 50 | inlets_importance : [1] 51 | 52 | ###################### 53 | ### GeologyManager ### 54 | ###################### 55 | 56 | ###Geology 57 | # 'null' - No geology 58 | # 'gslib' - Import geology via GSLIB 59 | # 'csv' - Import geology via csv 60 | # 'image' - Import geology via image 61 | geology_mode : 'gslib' 62 | geology_datafile : 'inputs/betteraz_2D_z430.gslib' 63 | 64 | ###Topography 65 | # 'null' - No topography 66 | # 'csv' - Import topography from csv file 67 | topography_mode : 'null' 68 | topography_datafile : '' 69 | 70 | ###Orientation 71 | # 'null' - No orientation 72 | # 'topo' - Calculate orientation from a topography array 73 | # 'surface' - Calculate from array of elevation of lower boundary of karst unit (as csv) 74 | orientation_mode : 'null' 75 | orientation_datafile : '' 76 | 77 | ###Faults 78 | # 'null' - No faults 79 | # 'gslib' - Import faults 80 | # 'csv' - Import faults via csv 81 | # 'image' - Import faults via image 82 | faults_mode : 'gslib' 83 | faults_datafile : 'inputs/betteraz_fault_20_2D.gslib' 84 | 85 | ###Fractures 86 | # 'null' - No fractures 87 | # 'gslib' - Import fractures 88 | # 'csv' - Import fractures via csv 89 | # 'image' - Import fractures via image 90 | # 'random' - Generate random fractures 91 | fractures_mode : 'random' 92 | fractures_datafile : 'inputs/betteraz_fault_20_2D.gslib' 93 | 94 | # If fractures mode is 'generate' 95 | # Each value in list is for a separate fracture family - length of list indicates number of families 96 | fractures_densities : [5.83e-6,1.38e-6,2.22e-6, 2.5e-6,4.16e-6,6.66e-6, 5e-6] 97 | fractures_min_orientation : [ 0, 15, 30, 60, 105, 120, 150] 98 | fractures_max_orientation : [ 15, 30, 45, 105, 120, 150, 180] 99 | fractures_min_dip : [ 80, 80, 80, 80, 80, 80, 80] 100 | fractures_max_dip : [ 90, 90, 90, 90, 90, 90, 90] 101 | 102 | # For fracture size 103 | fractures_alpha : [ 2, 2, 2, 2, 2, 2, 2] 104 | fractures_min_length : [ 100, 100, 100, 100, 100, 100, 100] 105 | fractures_max_length : [8500,8500,8500,8500,8500,8500,8500] 106 | 107 | ############################### 108 | ### Fast-Marching Algorithm ### 109 | ############################### 110 | 111 | #Choose which 
algorithm to use: 112 | #'Isotropic2': agd-hfm: 2D, equivalent of skfmm 113 | #'Isotropic3': adg-hfm: 3D, equivalent of skfmm 114 | #'Riemann2' : agd-hfm: 2D, anisotropic, requires an anisotropy vector field (topography or geologic orientation), performs well in high-relief catchments 115 | #'Riemann3' : agd-hfm: 3D version of anisotropic algorithm 116 | #Note: There are MANY more options for fast marching algorithms built into the agd-hfm library. 117 | ##See the agd-hfm library documentation for how to use these and integrate them into pyKasso: 118 | #https://github.com/Mirebeau/AdaptiveGridDiscretizations/blob/master/Notebooks_FMM/Summary.ipynb 119 | 120 | algorithm : 'Isotropic2' 121 | 122 | #Travel cost parallel to gradient (higher=slower/harder) 123 | #Must be between 0 and 1 124 | cost_out : 0.999 #default: 0.999 125 | cost_aquifer : 0.4 #default: 0.4 126 | cost_aquiclude : 0.8 #default: 0.8 127 | cost_faults : 0.2 #default: 0.2 128 | cost_fractures : 0.2 #default: 0.2 129 | cost_conduits : 0.1 #default: 0.1 130 | cost_ratio : 0.5 #ratio of travel cost parallel to gradient / travel cost prependicular to gradient (default 0.25) 131 | #if ratio = 1: same travel cost parallel and perpendicular to gradient 132 | #if ratio < 1: travel cost is lower parallel to gradient (paths will follow steepest gradient), decrease value to increase contrast 133 | #if ratio > 1: travel cost is lower perpendicular to gradient (paths will follow contours), increase value to increase contrast 134 | 135 | ### Formation to consider and FMM code associated 136 | geology_id : [1,2] # Only for 'import' geological mode 137 | geology_cost : [cost_aquifer,cost_aquiclude] 138 | 139 | ############# 140 | ### Other ### 141 | ############# 142 | 143 | # if 0 = random, if 1 = same results each run 144 | rand_seed : 1 145 | 146 | #Verbosity: how much output to display during run 147 | verbosity : 0 #0: minimal output, 1: some output; 2: more output; 3: maximum output 148 | ... 
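The cost values defined above feed the fast-marching solver: a travel-time field is computed from the outlet through a speed field derived from the cost map, and conduits follow the fastest paths. The snippet below is a rough stand-in for the 'Isotropic2' option using scikit-fmm, which the comments above cite as its equivalent; mapping cost to speed as `speed = 1 / cost` is an assumption made here for illustration only, not pyKasso's actual internal conversion.

```python
# Rough isotropic stand-in for 'Isotropic2' using scikit-fmm.
# The cost -> speed mapping (speed = 1 / cost) is an assumption for
# illustration; pyKasso builds its own speed field internally.
import numpy as np
import skfmm  # requires scikit-fmm

nx, ny, dx = 300, 300, 20.0
cost = np.full((nx, ny), 0.4)      # cost_aquifer everywhere
cost[100:110, :] = 0.2             # a low-cost band (faults/fractures)

phi = np.ones((nx, ny))            # the zero contour marks the outlet
phi[150, 0] = -1.0                 # outlet cell on the western edge

travel_time = skfmm.travel_time(phi, speed=1.0 / cost, dx=dx)
# Conduits can then be traced by descending the travel-time gradient
# from each inlet towards the outlet.
```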
149 | -------------------------------------------------------------------------------- /pykasso/_misc/log_logo.txt: -------------------------------------------------------------------------------- 1 | _ 2 | | | 3 | _ __ _ _| | ____ _ ___ ___ ___ 4 | | `_ \| | | | |/ / _` / __/ __|/ _ \ 5 | | |_) | |_| | < (_| \__ \__ \ (_) | 6 | | .__/ \__, |_|\_\__,_|___/___/\___/ 7 | | | __/ | 8 | |_| |___/ 9 | -------------------------------------------------------------------------------- /pykasso/_misc/parameters.yaml: -------------------------------------------------------------------------------- 1 | ###################### 2 | ### PYKASSO SETTINGS ### 3 | ###################### 4 | 5 | --- 6 | sks: 7 | seed : 0 8 | algorithm : 'Isotropic3' 9 | 10 | ############## 11 | ### Domain ### 12 | ############## 13 | 14 | domain: 15 | delimitation : '' 16 | topography : '' 17 | bedrock : '' 18 | water_level : '' 19 | 20 | ############### 21 | ### Geology ### 22 | ############### 23 | 24 | geology: 25 | data : '' 26 | costs : {} 27 | 28 | faults: 29 | data : '' 30 | costs : {} 31 | 32 | ############## 33 | ### Points ### 34 | ############## 35 | 36 | ### Outlets 37 | outlets: 38 | number : 1 39 | data : '' 40 | shuffle : False 41 | importance : [1] 42 | seed : 1 43 | subdomain : 'domain_bottom' 44 | 45 | ### Inlets 46 | inlets: 47 | number : 30 48 | data : '' 49 | shuffle : False 50 | per_outlet : [1] 51 | importance : [1,1,1] 52 | seed : 2 53 | subdomain : 'domain_surface' 54 | 55 | #################### 56 | ### Fracturation ### 57 | #################### 58 | 59 | fractures: 60 | data : '' 61 | seed : 12 62 | settings : 63 | family_01: 64 | alpha : 2 65 | density : 0.00001 66 | orientation : [340, 20] 67 | dip : [80, 90] 68 | length : [1000, 2000] 69 | family_02: 70 | alpha : 2 71 | density : 0.00001 72 | orientation : [340, 20] 73 | dip : [0, 10] 74 | length : [1000, 2000] 75 | 76 | ############## 77 | ### OTHERS ### 78 | ############## 79 | 80 | verbosity: 81 | logging : 0 82 | agd : 0 83 | karstnet : 0 84 | 85 | ... -------------------------------------------------------------------------------- /pykasso/_misc/statistics.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/pykasso/_misc/statistics.xlsx -------------------------------------------------------------------------------- /pykasso/_misc/statistics_old.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/pykasso/_misc/statistics_old.xlsx -------------------------------------------------------------------------------- /pykasso/_typing/__init__.py: -------------------------------------------------------------------------------- 1 | from .types import ( 2 | Project, 3 | Grid, 4 | Domain, 5 | Delimitation, 6 | Topography, 7 | Bedrock, 8 | WaterLevel, 9 | Geology, 10 | RandomNumberGenerator, 11 | Series, 12 | DataFrame, 13 | Styler, 14 | ) 15 | -------------------------------------------------------------------------------- /pykasso/_typing/types.py: -------------------------------------------------------------------------------- 1 | """ 2 | Custom pyKasso's typing. 
3 | """ 4 | 5 | # Typing 6 | from typing import TypeVar, TYPE_CHECKING 7 | 8 | from numpy import random 9 | from pandas import core, io 10 | 11 | if TYPE_CHECKING: 12 | from ..core import project as pcp 13 | from ..core import grid as pcg 14 | from ..model.domain_features import domain as pcd 15 | from ..model.geologic_features import geologicfeature as pcgf 16 | 17 | ### Custom internal types 18 | 19 | # Core types 20 | Project = TypeVar('Project', bound='pcp.Project') 21 | Grid = TypeVar('Grid', bound='pcg.Grid') 22 | 23 | # Model types 24 | Domain = TypeVar('Domain', bound='pcd.Domain') 25 | Delimitation = TypeVar('Delimitation', bound='pcd.Delimitation') 26 | Topography = TypeVar('Topography', bound='pcd.Topography') 27 | Bedrock = TypeVar('Bedrock', bound='pcd.Bedrock') 28 | WaterLevel = TypeVar('WaterLevel', bound='pcd.WaterLevel') 29 | Geology = TypeVar('Geology', bound='pcgf.Geology') 30 | 31 | ### Custom external types 32 | 33 | # Numpy 34 | RandomNumberGenerator = TypeVar( 35 | 'RandomNumberGenerator', 36 | bound='random._generator.Generator', 37 | ) 38 | # Pandas 39 | Series = TypeVar('Series', bound='core.series.Series') 40 | DataFrame = TypeVar('DataFrame', bound='core.frame.DataFrame') 41 | Styler = TypeVar('Styler', bound='io.formats.style.Styler') 42 | -------------------------------------------------------------------------------- /pykasso/_utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/randlab/pyKasso/083155d2f2e634db132853d27ec7bb16ee2c7ad5/pykasso/_utils/__init__.py -------------------------------------------------------------------------------- /pykasso/_utils/array.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module defining array manipulation functions. 3 | """ 4 | 5 | ### External dependencies 6 | import numpy as np 7 | 8 | 9 | def normalize_array(array: np.ndarray) -> np.ndarray: 10 | """ 11 | The function takes a numpy array and returns a new numpy array where each 12 | element is scaled to have values between 0 and 1. 13 | 14 | Parameters 15 | ---------- 16 | array : np.ndarray 17 | Numpy array to normalize. 18 | 19 | Returns 20 | ------- 21 | np.ndarray 22 | Normalized numpy array, with values ranging from 0 to 1. 23 | """ 24 | min_value = array.min() 25 | max_value = array.max() 26 | normalized = (array - min_value) / (max_value - min_value) 27 | return normalized 28 | -------------------------------------------------------------------------------- /pykasso/_utils/datareader.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module defining a class able to read external data. 3 | """ 4 | 5 | ### Internal dependencies 6 | import linecache as lc 7 | 8 | ### External dependencies 9 | import numpy as np 10 | import pandas as pd 11 | import rasterio 12 | 13 | ### Local dependencies 14 | from ..core._namespaces import ( 15 | VALID_EXTENSIONS_DATA, 16 | VALID_EXTENSIONS_DATAFRAME, 17 | VALID_EXTENSIONS_IMAGE, 18 | ) 19 | from ..core.grid import Grid 20 | 21 | ### Typing 22 | from typing import Union 23 | 24 | 25 | class DataReader(): 26 | """ 27 | Multiple format data reader class. 
28 | 29 | Supported formats: 30 | - gslib, vox, 31 | - csv, 32 | - txt, 33 | - npy, 34 | - jpg, png, 35 | - grd : http://peterbird.name/guide/grd_format.htm 36 | - tif / asc 37 | - asc : https://desktop.arcgis.com/en/arcmap/latest/manage-data/ 38 | raster-and-images/esri-ascii-raster-format.htm 39 | """ 40 | 41 | def __init__(self, grid=None, *args, **kwargs): 42 | """ 43 | Create a data reader. 44 | """ 45 | if grid is None: 46 | self.has_grid = False 47 | self.grid = None 48 | else: 49 | self.has_grid = True 50 | self.grid = grid 51 | 52 | def _requires_grid(self): 53 | """ 54 | Check that a grid is declared in order to use the method correctly. 55 | """ 56 | if self.has_grid is not True: 57 | msg = 'To be used, this method requires a declared grid.' 58 | raise ValueError(msg) 59 | 60 | @staticmethod 61 | def _get_extension_file(filename: str, 62 | valid_extensions: list[str] 63 | ) -> str: 64 | """ 65 | Get the extension of a filename and check its validity. 66 | """ 67 | # Retrieve file extension 68 | extension = filename.split('.')[-1] 69 | 70 | # Remove potential capital letters 71 | extension = extension.lower() 72 | 73 | # Check extension validity 74 | if extension not in valid_extensions: 75 | msg = ("File with a '.{}' extension are not supported. Supported " 76 | "extensions: {}".format(extension, valid_extensions)) 77 | raise TypeError(msg) 78 | 79 | return extension 80 | 81 | ##################### 82 | ### Get DataFrame ### 83 | ##################### 84 | 85 | @staticmethod 86 | def get_dataframe_from_file(filename: str, 87 | **kwargs: dict, 88 | ) -> pd.DataFrame: 89 | """ 90 | Convert data from a file into a pandas dataframe. 91 | 92 | Parameters 93 | ---------- 94 | filename : str 95 | The path to the file that needs to be processed. This should 96 | include the file name and its extension. 97 | """ 98 | # Get extension file 99 | valid_extensions = VALID_EXTENSIONS_DATAFRAME 100 | extension = DataReader._get_extension_file(filename, valid_extensions) 101 | 102 | ### Get adequate reader parameters 103 | 104 | # GSLIB 105 | if extension == 'gslib': 106 | df = DataReader._get_dataframe_from_gslib(filename, **kwargs) 107 | 108 | # VOX 109 | elif extension == 'vox': 110 | df = DataReader._get_dataframe_from_vox(filename, **kwargs) 111 | 112 | return df 113 | 114 | @staticmethod 115 | def _get_dataframe_from_gslib(filename: str, 116 | **kwargs: dict, 117 | ) -> pd.DataFrame: 118 | """ 119 | Convert data from a gslib file into a pandas dataframe. 120 | """ 121 | # Retrieve the number of variables in the gslib file 122 | n_var = int(lc.getline(filename, 2).strip()) 123 | 124 | # Retrieve the names of the columns 125 | names = [lc.getline(filename, i).strip() for i in range(3, n_var + 3)] 126 | 127 | # Read the file 128 | kwargs.setdefault('sep', ' ') 129 | kwargs.setdefault('skiprows', n_var + 2) 130 | kwargs.setdefault('names', names) 131 | df = pd.read_csv(filename, **kwargs) 132 | 133 | return df 134 | 135 | @staticmethod 136 | def _get_dataframe_from_vox(filename: str, 137 | **kwargs: dict, 138 | ) -> pd.DataFrame: 139 | """ 140 | Convert data from a vox file into a pandas dataframe. 
141 | """ 142 | # Read the file 143 | kwargs.setdefault('sep', ' ') 144 | kwargs.setdefault('header', 1) 145 | df = pd.read_csv(filename, **kwargs) 146 | 147 | return df 148 | 149 | ###################### 150 | ### Get Data Array ### 151 | ###################### 152 | 153 | def get_data_from_file( 154 | self, 155 | filename: str, 156 | extend: bool = False, 157 | axis: str = 'z', 158 | usecol: Union[int, str] = None, 159 | **kwargs 160 | ) -> np.ndarray: 161 | """ 162 | Get data from a file. 163 | 164 | Parameters 165 | ---------- 166 | filename : str 167 | The path to the file that needs to be processed. This should 168 | include the file name and its extension. 169 | extend : bool, default: False 170 | If ``True``, a 2D dataset will be expanded in 3D in the selected ``axis``. 171 | axis : str, default: 'z' 172 | The axis in which data should be expanded if necessary. 173 | usecol : Union[int, str], default: None 174 | The rank of the column to consider. 175 | 176 | Returns 177 | ------- 178 | np.ndarray 179 | Numpy array containing the file data. 180 | """ 181 | # Get extension file 182 | valid_extensions = VALID_EXTENSIONS_DATA 183 | extension = DataReader._get_extension_file(filename, valid_extensions) 184 | 185 | ### Select the appropriate filling function 186 | 187 | # GSLIB 188 | if extension == 'gslib': 189 | # Get data in dataframe 190 | df = DataReader.get_dataframe_from_file(filename) 191 | # Select the right column 192 | if usecol is None: 193 | usecol = 0 194 | if isinstance(usecol, int): 195 | df = df.iloc[:, usecol] 196 | elif isinstance(usecol, str): 197 | df = df[usecol] 198 | # Transform dataframe into array 199 | data = self._get_data_from_gslib_df(df) 200 | 201 | # VOX 202 | elif extension == 'vox': 203 | # Get data in dataframe 204 | df = DataReader.get_dataframe_from_file(filename) 205 | # Select the right column 206 | if usecol is None: 207 | usecol = 3 208 | if isinstance(usecol, int): 209 | df = df.iloc[:, [0, 1, 2, usecol]] 210 | elif isinstance(usecol, str): 211 | df = df[['X', 'Y', 'Z', usecol]] 212 | 213 | # Transform dataframe into array 214 | data = self._get_data_from_vox_df(df) 215 | 216 | # CSV 217 | elif extension == 'csv': 218 | kwargs.setdefault('delimiter', ',') 219 | data = np.genfromtxt(filename, **kwargs).T 220 | 221 | # TXT 222 | elif extension == 'txt': 223 | data = np.genfromtxt(filename, **kwargs) 224 | 225 | # NPY 226 | elif extension == 'npy': 227 | data = np.load(filename) 228 | 229 | # RASTER 230 | elif extension in ['tif', 'tiff', 'asc']: 231 | data = rasterio.open(filename).read(1) 232 | data = np.rot90(data, k=3) 233 | 234 | # IMAGES 235 | elif extension in VALID_EXTENSIONS_IMAGE: 236 | data = DataReader._get_data_from_image(filename) 237 | 238 | ### Control the data dimension 239 | if self.has_grid: 240 | # TODO 241 | pass 242 | 243 | ### According to axis, repeat data along if necessary 244 | if self.has_grid and extend: 245 | if (axis.lower() == 'x'): 246 | data = np.repeat(data[np.newaxis, :, :], self.grid.nx, axis=0) 247 | elif (axis.lower() == 'y'): 248 | data = np.repeat(data[:, np.newaxis, :], self.grid.ny, axis=1) 249 | elif (axis.lower() == 'z'): 250 | data = np.repeat(data[:, :, np.newaxis], self.grid.nz, axis=2) 251 | 252 | return data 253 | 254 | ####################### 255 | ### REQUIRES A GRID ### 256 | ####################### 257 | 258 | def _get_data_from_gslib_df(self, 259 | df: pd.DataFrame, 260 | ) -> np.ndarray: 261 | """ 262 | Transform a pandas dataframe to a numpy array. 
263 | """ 264 | self._requires_grid() 265 | 266 | # Transform dataframe into array 267 | data = df.to_numpy() 268 | 269 | # Reshape the array with the grid shape 270 | if len(data) == self.grid.nodes: 271 | new_shape = (self.grid.nx, self.grid.ny, self.grid.nz) 272 | data = np.reshape(data, new_shape, order='F') 273 | else: 274 | new_shape = (self.grid.nx, self.grid.ny) 275 | data = np.reshape(data, new_shape, order='F') 276 | return data 277 | 278 | def _get_data_from_vox_df(self, 279 | df: pd.DataFrame, 280 | ) -> np.ndarray: 281 | """ 282 | Transform a pandas dataframe to a numpy array. 283 | """ 284 | self._requires_grid() 285 | 286 | # Filter values out of grid 287 | xyz = df.iloc[:, [0, 1, 2]].to_numpy() 288 | df = df.assign(is_inbox=self.grid.is_inbox(xyz)) 289 | df = df[df['is_inbox']] 290 | 291 | # Retrieve valid coordinates and data 292 | xyz = df.iloc[:, [0, 1, 2]] 293 | d = df.iloc[:, 3] 294 | 295 | # Retrieve corresponding grid indices 296 | i, j, k = self.grid.get_indices(xyz) 297 | 298 | # Create the data array 299 | data = np.zeros_like(self.grid.data_volume) 300 | data[i, j, k] = d 301 | 302 | return data 303 | 304 | def _get_data_full_2D(self, value: float) -> np.ndarray: 305 | """ 306 | Set data to a 2D-matrice full of the provided value. 307 | """ 308 | self._requires_grid() 309 | 310 | dtype = np.int8 311 | out = np.full((self.grid.nx, self.grid.ny), value, dtype=dtype) 312 | return out 313 | 314 | def _get_data_full_3D(self, value: float) -> np.ndarray: 315 | """ 316 | Set data to a 3D-matrice full of the provided value. 317 | """ 318 | self._requires_grid() 319 | 320 | dtype = np.int8 321 | shape = (self.grid.nx, self.grid.ny, self.grid.nz) 322 | out = np.full(shape, value, dtype=dtype) 323 | return out 324 | 325 | ############################### 326 | ### DOES NOT REQUIRE A GRID ### 327 | ############################### 328 | 329 | @staticmethod 330 | def _get_data_from_image(filename: str) -> np.ndarray: 331 | """ 332 | Set data from an image file. The size of the image must be the 333 | same as the size of the grid. If nz > 1, the layer is horizontally 334 | repeated. 335 | 336 | .. info:: 337 | This method usage is not recommended, it should be used only for 338 | quick testing. 339 | """ 340 | import PIL 341 | from PIL import Image 342 | 343 | # Reads image 344 | pil_image = PIL.Image.open(filename) 345 | pil_image = pil_image.convert('L') 346 | pil_image = pil_image.transpose(Image.FLIP_TOP_BOTTOM) 347 | data = np.asarray(pil_image).T 348 | return data 349 | 350 | ######################### 351 | ### Only Read Methods ### 352 | ######################### 353 | 354 | @staticmethod 355 | def read_vox( 356 | filename: str, 357 | usecol: Union[int, str] = None, 358 | ) -> np.ndarray: 359 | """ 360 | Read a vox file. 361 | 362 | Parameters 363 | ---------- 364 | filename : str 365 | The path to the file that needs to be processed. This should 366 | include the file name and its extension. 367 | usecol : Union[int, str], default: None 368 | The rank of the column to consider. 369 | 370 | Returns 371 | ------- 372 | np.ndarray 373 | Numpy array of the collected data. 
374 | """ 375 | 376 | # Read file 377 | df = DataReader._get_dataframe_from_vox(filename) 378 | 379 | # Retrieve axis 380 | x = df['X'].unique() 381 | y = df['Y'].unique() 382 | z = df['Z'].unique() 383 | 384 | # Retrieve grid parameters 385 | nx, ny, nz = len(x), len(y), len(z) 386 | x0, y0, z0 = x.min(), y.min(), z.min() 387 | dx, dy, dz = (x[1] - x[0]), (y[1] - y[0]), (z[1] - z[0]) 388 | 389 | # Create a grid 390 | grid = Grid(x0, y0, z0, nx, ny, nz, dx, dy, dz) 391 | 392 | # Retrieve corresponding grid indices 393 | xyz = df.iloc[:, [0, 1, 2]] 394 | if usecol is None: 395 | usecol = 3 396 | if isinstance(usecol, int): 397 | d = df.iloc[:, usecol] 398 | elif isinstance(usecol, str): 399 | d = df[usecol] 400 | i, j, k = grid.get_indices(xyz) 401 | 402 | # Construct the array 403 | data = np.zeros_like(grid.data_volume) 404 | data[i, j, k] = d 405 | 406 | return data 407 | -------------------------------------------------------------------------------- /pykasso/_utils/validation.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module containing functions used for data flow validation. 3 | """ 4 | 5 | ### Internal dependencies 6 | import os 7 | import logging 8 | 9 | ### External dependencies 10 | import numpy as np 11 | 12 | from ..core._namespaces import ( 13 | SKS_VALID_ALGORITHM_VALUES, 14 | SKS_VALID_MODE_VALUES, 15 | ) 16 | 17 | ### Typing 18 | from typing import Union 19 | 20 | 21 | def is_filepath_valid( 22 | filepath: str, 23 | ) -> Union[FileNotFoundError, bool]: 24 | """ 25 | Test if the filepath is valid. 26 | 27 | Parameters 28 | ---------- 29 | filepath : str. 30 | String defining the filepath. 31 | 32 | Returns 33 | ------- 34 | Union[FileNotFoundError, bool] 35 | Return ``True`` if test pass. 36 | Otherwise raise a ``FileNotFoundError`` exception. 37 | 38 | Raises 39 | ------ 40 | FileNotFoundError 41 | """ 42 | if not os.path.exists(filepath): 43 | msg = ("Filepath '{}' does not exist.".format(filepath)) 44 | raise FileNotFoundError(msg) 45 | else: 46 | return True 47 | 48 | 49 | def is_variable_type_valid( 50 | variable_name: str, 51 | variable_value, 52 | valid_types: tuple, 53 | ) -> Union[TypeError, bool]: 54 | """ 55 | Test if the type of the variable is valid. 56 | 57 | Parameters 58 | ---------- 59 | variable_name : str 60 | Name of the parameter. 61 | variable_value : any 62 | Value of the parameter. 63 | valid_types : tuple 64 | Accepted types. 65 | 66 | Returns 67 | ------- 68 | Union[TypeError, bool] 69 | Return ``True`` if test pass. 70 | Otherwise raise a ``TypeError`` exception. 71 | 72 | Raises 73 | ------ 74 | TypeError 75 | """ 76 | if isinstance(variable_value, valid_types): 77 | return True 78 | else: 79 | msg = ("Parameter '{}' type is invalid. Valid type(s) : {}" 80 | .format(variable_name, valid_types)) 81 | raise TypeError(msg) 82 | 83 | 84 | def is_key_in_dict( 85 | dictionary: dict, 86 | dictionary_name: str, 87 | key_to_test: str, 88 | ) -> Union[KeyError, bool]: 89 | """ 90 | Test key presence in the dictionary. 91 | 92 | Parameters 93 | ---------- 94 | dictionary : dict 95 | Dictionary to test. 96 | dictionary_name : str 97 | Name of the dictionary. 98 | key_to_test : str 99 | Key to verify presence in dictionary. 100 | 101 | Returns 102 | ------- 103 | Union[KeyError, bool] 104 | Return ``True`` if test pass. 105 | Otherwise raise a ``KeyError`` exception. 
106 | 107 | Raises 108 | ------ 109 | KeyError 110 | """ 111 | if key_to_test in dictionary: 112 | return True 113 | else: 114 | msg = ("Key '{}' is missing in '{}' dictionary." 115 | .format(key_to_test, dictionary_name)) 116 | raise KeyError(msg) 117 | 118 | 119 | def is_variable_in_list( 120 | variable_name: str, 121 | variable_value, 122 | accepted_values: list, 123 | ) -> Union[ValueError, bool]: 124 | """ 125 | TODO 126 | """ 127 | if variable_value not in accepted_values: 128 | msg = ("Parameter '{}' value is invalid. Accepted values : {}" 129 | .format(variable_name, accepted_values)) 130 | raise ValueError(msg) 131 | else: 132 | return True 133 | 134 | 135 | def is_parameter_comparison_valid( 136 | parameter_name: str, 137 | parameter_value, 138 | logical_test: str, 139 | compared_to, 140 | ) -> Union[ValueError, bool]: 141 | """ 142 | Test if the comparision returns true. 143 | 144 | Parameters 145 | ---------- 146 | parameter_name : str 147 | Name of the parameter. 148 | parameter_value : any 149 | Value of the parameter. 150 | logical_test : str 151 | Logical test to use ('>', '>=', '<', '<=', '==', '!='). 152 | compared_to : _type_ 153 | Value to compare. 154 | 155 | Returns 156 | ------- 157 | Union[ValueError, bool] 158 | Return ``True`` if test pass. 159 | Otherwise raise a ``ValueError`` exception. 160 | 161 | Raises 162 | ------ 163 | ValueError 164 | """ 165 | logical_test_text = { 166 | '>': 'greater than', 167 | '>=': 'greater than or equal to', 168 | '<': 'less than', 169 | '<=': 'less than or equal to', 170 | '==': 'equal to', 171 | '!=': 'not equal to' 172 | } 173 | test = str(parameter_value) + logical_test + str(compared_to) 174 | if not eval(test): 175 | msg = ("The value of the '{}' parameter must be {} {}." 176 | .format(parameter_name, 177 | logical_test_text[logical_test], 178 | compared_to)) 179 | raise ValueError(msg) 180 | else: 181 | return True 182 | 183 | ########################## 184 | ### Dictionary testing ### 185 | ########################## 186 | 187 | 188 | def test_sks_settings(settings: dict) -> None: 189 | """ 190 | """ 191 | logger = logging.getLogger("sks.validation") 192 | 193 | ### Test 'seed' value 194 | try: 195 | is_variable_type_valid( 196 | variable_name='seed', 197 | variable_value=settings['seed'], 198 | valid_types=(int), 199 | ) 200 | except TypeError as error: 201 | logger.error(error) 202 | raise 203 | 204 | ### Test 'algorithm' value 205 | try: 206 | is_variable_in_list( 207 | variable_name='algorithm', 208 | variable_value=settings['algorithm'], 209 | accepted_values=SKS_VALID_ALGORITHM_VALUES 210 | ) 211 | except ValueError as error: 212 | logger.error(error) 213 | raise 214 | 215 | ### Test 'costs' value 216 | try: 217 | is_variable_type_valid( 218 | variable_name='costs', 219 | variable_value=settings['costs'], 220 | valid_types=(dict), 221 | ) 222 | except TypeError as error: 223 | logger.error(error) 224 | raise 225 | 226 | ### Test 'factors' value 227 | try: 228 | is_variable_type_valid( 229 | variable_name='factors', 230 | variable_value=settings['factors'], 231 | valid_types=(dict), 232 | ) 233 | except TypeError as error: 234 | logger.error(error) 235 | raise 236 | 237 | ### Test 'mode' value 238 | try: 239 | is_variable_in_list( 240 | variable_name='mode', 241 | variable_value=settings['mode'], 242 | accepted_values=SKS_VALID_MODE_VALUES 243 | ) 244 | except ValueError as error: 245 | logger.error(error) 246 | raise 247 | 248 | return None 249 | 250 | 251 | # def test_geologic_feature_settings(settings: dict) -> 
None: 252 | # """ 253 | # """ 254 | # return None 255 | 256 | # def test_point_settings(kind: str, settings: dict) -> None: 257 | # """ 258 | # """ 259 | # logger = logging.getLogger("{}.validation".format(kind)) 260 | 261 | # ### Test 'number' value 262 | # try: 263 | # is_variable_type_valid( 264 | # variable_name='number', 265 | # variable_value=settings['number'], 266 | # valid_types=(int), 267 | # ) 268 | # except TypeError as error: 269 | # logger.error(error) 270 | # raise 271 | 272 | # ### Test 'data' value 273 | 274 | 275 | # ### Test 'shuffle' value 276 | 277 | 278 | # ### Test 'importance' value 279 | 280 | 281 | # ### Test 'subdomain' value 282 | 283 | 284 | # ### Test 'geology' value 285 | 286 | 287 | # ### Test 'seed' value 288 | 289 | 290 | # return None 291 | -------------------------------------------------------------------------------- /pykasso/_version.py: -------------------------------------------------------------------------------- 1 | import importlib.metadata 2 | 3 | __version__ = importlib.metadata.version("pykasso") 4 | -------------------------------------------------------------------------------- /pykasso/analysis/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | ======== 3 | analysis 4 | ======== 5 | 6 | A subpackage for discrete karst conduit network analysis. 7 | """ 8 | 9 | __all__ = [] 10 | -------------------------------------------------------------------------------- /pykasso/analysis/analysis.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module contains a class able to mangage project results in order to 3 | perform statistical analysis. 4 | """ 5 | 6 | ### Internal dependencies 7 | import copy 8 | 9 | ### External dependencies 10 | import numpy as np 11 | import pandas as pd 12 | 13 | ### Optional dependencies 14 | try: 15 | import karstnet as kn 16 | except ImportError: 17 | _has_karstnet = False 18 | else: 19 | _has_karstnet = True 20 | 21 | ### Typing 22 | from typing import Union 23 | from ..core.project import Project 24 | from pandas import (DataFrame, Series) 25 | from pandas.io.formats.style import Styler 26 | 27 | 28 | def requires_karstnet(): 29 | """ 30 | If ``karstnet`` package is not installed, return ``ImportError`` exception 31 | when a method requiring ``karstnet`` is called. 32 | """ 33 | def _(function): 34 | def _wrapper(*args, **kwargs): 35 | if not _has_karstnet: 36 | msg = ("karstnet package is required to do this." 37 | " 'pip install -e pykasso[analysis]' to install it.") 38 | raise ImportError(msg) 39 | result = function(*args, **kwargs) 40 | return result 41 | return _wrapper 42 | return _ 43 | 44 | 45 | class Analyzer(): 46 | """ 47 | This class manages pyKasso's project and provides methods to compute 48 | statistical analysis. 49 | """ 50 | def __init__(self, 51 | project: Project 52 | ) -> None: 53 | """ 54 | Initialize the class. 
55 | """ 56 | 57 | # Intialization 58 | self.project = project 59 | self.stats = None 60 | 61 | # Load reference metrics for statistical karstic network analysis 62 | self._load_statistics() 63 | 64 | def _load_statistics(self) -> None: 65 | """ 66 | Set the reference metrics for statistical karstic network analysis 67 | More details here : https://github.com/karstnet/karstnet 68 | """ 69 | package_location = self.project._pckg_paths['package_location'] 70 | statistics_file_path = "/../_misc/statistics.xlsx" 71 | statistics_file_location = package_location + statistics_file_path 72 | self.stats = pd.read_excel(statistics_file_location).describe() 73 | return None 74 | 75 | @requires_karstnet() 76 | def compute_metrics( 77 | self, 78 | verbose: bool = False, 79 | ) -> DataFrame: 80 | """ 81 | Compute the statistical metrics for each simulated discret karst 82 | conduit network using the karstnet package. 83 | 84 | Parameters 85 | ---------- 86 | verbosity : int, default: 0 87 | Verbosity of karstnet results. 88 | 89 | Returns 90 | ------- 91 | df_metrics : pandas.DataFrame 92 | Dataframe of karstnet metrics. 93 | 94 | Notes 95 | ----- 96 | Karstnet is a python3 project providing tools for the statistical 97 | analysis of karstic networks. More details here: 98 | https://github.com/karstnet/karstnet 99 | 100 | References 101 | ---------- 102 | .. [1] Collon, P., Bernasconi D., Vuilleumier C., and Renard P., 2017, 103 | Statistical metrics for the characterization of karst network 104 | geometry and topology. Geomorphology. 283: 122-142 doi:10.1016/ 105 | j.geomorph.2017.01.034 106 | http://dx.doi.org/doi:10.1016/j.geomorph.2017.01.034 107 | 108 | .. warning:: 109 | A corrigendum has been published in Geomorphology journal: 110 | Geomorphology 389, 107848, 111 | http://dx.doi.org/doi:10.1016/j.geomorph.2021.107848. 112 | 113 | Examples 114 | -------- 115 | >>> app = pk.pykasso() 116 | >>> ... 117 | >>> df_metrics = app.analyzer.compute_metrics() 118 | """ 119 | df_metrics = pd.DataFrame() 120 | 121 | # For each simulation, retrieve data and compute metrics 122 | for i, data in enumerate(self.project): 123 | 124 | # Retrieve data 125 | karstnet_edges = data["vectors"]["edges_"].to_numpy().tolist() 126 | karstnet_nodes = copy.deepcopy(data["vectors"]["nodes_"]) 127 | 128 | # Drop last item in list (the node type) for each dictionary entry 129 | karstnet_nodes = karstnet_nodes.drop(columns=['type', 'vadose']) 130 | index = karstnet_nodes.index 131 | values = karstnet_nodes.iloc[:, [0, 1, 2]].to_numpy().tolist() 132 | karstnet_nodes = {i: value for i, value in zip(index, values)} 133 | 134 | # Compute karstnet metrics 135 | # Make graph - edges must be a list, and nodes must be a dic of 136 | # format {nodeindex: [x,y]} 137 | k = kn.KGraph(karstnet_edges, karstnet_nodes, verbose=False) 138 | metrics = k.characterize_graph(verbose) 139 | 140 | # Concatenate dataframes 141 | df_ = pd.DataFrame(metrics, index=[i]) 142 | df_metrics = pd.concat([df_metrics, df_]) 143 | 144 | return df_metrics 145 | 146 | @requires_karstnet() 147 | def compare_metrics( 148 | self, 149 | dataframe: Union[DataFrame, Series, Styler], 150 | ) -> Styler: 151 | """ 152 | Compare the calculated statistical metrics with the reference. 153 | 154 | Parameters 155 | ---------- 156 | dataframe : Union[DataFrame, Series, Styler] 157 | Data to compare with karstnet metrics. 158 | 159 | Returns 160 | ------- 161 | df_metrics : Styler 162 | 163 | References 164 | ---------- 165 | .. 
[1] Collon, P., Bernasconi D., Vuilleumier C., and Renard P., 2017, 166 | Statistical metrics for the characterization of karst network 167 | geometry and topology. Geomorphology. 283: 122-142 doi:10.1016/ 168 | j.geomorph.2017.01.034 169 | http://dx.doi.org/doi:10.1016/j.geomorph.2017.01.034 170 | 171 | .. warning:: 172 | A corrigendum has been published in Geomorphology journal: 173 | Geomorphology 389, 107848, 174 | http://dx.doi.org/doi:10.1016/j.geomorph.2021.107848. 175 | 176 | Examples 177 | -------- 178 | >>> app = pk.pykasso() 179 | >>> ... 180 | >>> df_metrics = app.analyzer.compute_metrics() 181 | >>> app.analyzer.compare_metrics(df_metrics) 182 | """ 183 | ### Convert pandas Series in DataFrame 184 | if isinstance(dataframe, Series): 185 | dataframe = dataframe.to_frame().T 186 | 187 | ### Define the text coloring function 188 | # Green if inside [V_min, V_max] 189 | # Orange if outside 190 | def _bg_color(x, min_val, max_val): 191 | if pd.isnull(x): 192 | return 'color: grey' 193 | elif (x < min_val) or (x > max_val): 194 | return 'color: #FF8C00' 195 | else: 196 | return 'color: #00FF00' 197 | 198 | # Iterate in the dataframe columns 199 | df_metrics = dataframe.style 200 | for column_name in dataframe: 201 | kwargs = { 202 | 'min_val': self.stats[column_name]['min'], 203 | 'max_val': self.stats[column_name]['max'], 204 | 'subset': [column_name] 205 | } 206 | df_metrics = df_metrics.applymap(_bg_color, **kwargs) 207 | 208 | return df_metrics 209 | 210 | def compute_stats_on_networks( 211 | self, 212 | numpy_algorithm: str = 'mean', 213 | numpy_parameters: dict = {}, 214 | ) -> np.ndarray: 215 | """ 216 | Compute selected algorithm on the whole set of computed discrete karst 217 | conduit networks. 218 | 219 | Parameters 220 | ---------- 221 | numpy_algorithm : str, default: 'mean' 222 | Numpy algorithm to use. More details here: 223 | https://numpy.org/doc/stable/reference/routines.statistics.html 224 | numpy_parameters : dict, default: {} 225 | Parameters of the selected algorithm. 226 | 227 | Returns 228 | ------- 229 | out : np.ndarray 230 | 231 | Examples 232 | -------- 233 | >>> app = pk.pykasso() 234 | >>> ... 235 | >>> df_metrics = app.analyzer.compute_metrics() 236 | >>> karst_std = app.analyzer.compute_stats_on_networks('std') 237 | """ 238 | 239 | # For each simulation, retrieve data and store it 240 | karst_map = [] 241 | for data in self.project: 242 | karst_map.append(data['maps']['karst'][-1].copy()) 243 | 244 | # Retrieve algorithm 245 | try: 246 | numpy_func = getattr(np, numpy_algorithm) 247 | except ValueError: 248 | msg = "Asked algorithm is not valid." 249 | raise ValueError(msg) 250 | 251 | # Compute 252 | numpy_parameters.pop('axis', None) 253 | out = numpy_func(karst_map, axis=0, **numpy_parameters) 254 | 255 | return out 256 | -------------------------------------------------------------------------------- /pykasso/core/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Contains the core of pyKasso: application, project, grid, etc. 3 | 4 | This subpackage contains the core of pyKasso. It constructs an 5 | application class able to manage a project and to communicate between 6 | the different other subpackages. 7 | 8 | Please note that this module is private. All functions and objects 9 | are available in the main ``pykasso`` namespace - use that instead. 
10 | """ 11 | 12 | __all__ = [] 13 | 14 | # Import pykasso function 15 | from .main import pykasso 16 | from .main import create_datareader 17 | __all__.extend(['pykasso', 'create_datareader']) 18 | -------------------------------------------------------------------------------- /pykasso/core/_namespaces.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module defining some constants in pyKasso. 3 | """ 4 | 5 | ### Internal dependencies 6 | from PIL import Image 7 | 8 | 9 | MISC_DIR_PATH = '/../_misc/' 10 | DEFAULT_PARAMETERS_FILENAME = 'parameters.yaml' 11 | DEFAULT_PROJECT_FILENAME = 'project.yaml' 12 | DEFAULT_LOG_FILENAME = 'project.log' 13 | 14 | GRID_PARAMETERS = [ 15 | 'x0', 16 | 'y0', 17 | 'z0', 18 | 'nx', 19 | 'ny', 20 | 'nz', 21 | 'dx', 22 | 'dy', 23 | 'dz' 24 | ] 25 | 26 | GEOLOGICAL_FEATURES = [ 27 | 'domain', 28 | 'geology', 29 | 'faults', 30 | 'fractures', 31 | ] 32 | 33 | SURFACE_FEATURES = [ 34 | 'topography', 35 | 'bedrock', 36 | 'water_table', 37 | ] 38 | 39 | DOMAIN_FEATURES = [ 40 | 'delimitation', 41 | 'topography', 42 | 'bedrock', 43 | 'water_table', 44 | ] 45 | 46 | ISOTROPIC_FEATURES = [ 47 | 'cost', 48 | 'time', 49 | 'karst', 50 | ] 51 | 52 | ANISOTROPIC_FEATURES = [ 53 | 'cost', 54 | 'alpha', 55 | 'beta', 56 | 'time', 57 | 'karst', 58 | 'gradient', 59 | ] 60 | 61 | features = [ 62 | GEOLOGICAL_FEATURES, 63 | DOMAIN_FEATURES, 64 | ANISOTROPIC_FEATURES 65 | ] 66 | 67 | AUTHORIZED_FEATURES = [f for list_ in features for f in list_] 68 | 69 | VALID_EXTENSIONS_DATAFRAME = [ 70 | 'gslib', 71 | 'vox', 72 | ] 73 | 74 | VALID_EXTENSIONS_IMAGE = [key.strip('.') for key in Image.EXTENSION.keys()] 75 | 76 | VALID_EXTENSIONS_DATA = [ 77 | 'gslib', 78 | 'vox', 79 | 'csv', 80 | 'txt', 81 | 'npy', 82 | 'tif', 83 | 'tiff', 84 | 'asc', 85 | ] 86 | VALID_EXTENSIONS_DATA.extend(VALID_EXTENSIONS_IMAGE) 87 | 88 | DEFAULT_FMM_COSTS = { 89 | 'out': 10, 90 | 'geology': 0.4, 91 | 'beddings': 0.35, 92 | 'faults': 0.2, 93 | 'fractures': 0.2, 94 | 'karst': 0.1, 95 | 'conduits': 0.1, 96 | 'ratio': 0.5, 97 | } 98 | 99 | DEFAULT_FEATURE_PARAMETERS = { 100 | 'geology': { 101 | 'nodata': 1, 102 | 'name': 'unit {}', 103 | 'model': True, 104 | }, 105 | 'faults': { 106 | 'nodata': 0, 107 | 'name': 'fault {}', 108 | 'model': True, 109 | }, 110 | 'fractures': { 111 | 'nodata': 0, 112 | 'name': 'family {}', 113 | 'model': True, 114 | } 115 | } 116 | 117 | SKS_VALID_ALGORITHM_VALUES = ['Isotropic3', 'Riemann3'] 118 | SKS_VALID_MODE_VALUES = ['A', 'B', 'C', 'D'] 119 | -------------------------------------------------------------------------------- /pykasso/core/application.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module containing the application class. 3 | """ 4 | 5 | ### Local dependencies 6 | from .project import Project 7 | from ..model.sks import SKS 8 | from ..analysis.analysis import Analyzer 9 | from ..visualization.visualizer import Visualizer 10 | 11 | ### Validation 12 | from .._utils.validation import ( 13 | is_variable_type_valid, 14 | is_key_in_dict, 15 | is_parameter_comparison_valid, 16 | ) 17 | 18 | ### Variables 19 | from ._namespaces import ( 20 | GRID_PARAMETERS, 21 | ) 22 | 23 | 24 | class Application(): 25 | """ 26 | Class modeling an application and embedding the pyKasso subpackages. 27 | 28 | This class manages a pyKasso project and provides access to the different 29 | subpackages by storing them as class attributes. 
30 | 31 | Attributes 32 | ---------- 33 | project 34 | Project class. 35 | model 36 | Model class. 37 | analyzer 38 | Analyzer class. 39 | visualizer 40 | Visualizer class 41 | 42 | Notes 43 | ----- 44 | The attributes are set to ``None`` until a project is created or loaded. 45 | 46 | Examples 47 | -------- 48 | This class can be instancied by using the public function ``pykasso()``. 49 | >>> import pykasso as pk 50 | >>> app = pk.pykasso() 51 | """ 52 | 53 | def __init__(self) -> None: 54 | self.__project = None 55 | self.__model = None 56 | self.__analyzer = None 57 | self.__visualizer = None 58 | 59 | ###################### 60 | ### MANAGE PROJECT ### 61 | ###################### 62 | 63 | def new_project( 64 | self, 65 | name: str, 66 | grid_parameters: dict, 67 | force: bool = True, 68 | ) -> None: 69 | """ 70 | Create a new project. 71 | 72 | Instance a ``Project`` class within the ``project`` attribute and 73 | initialize the subpackages. 74 | 75 | Parameters 76 | ---------- 77 | name : str 78 | The name of the project. A new directory is created if the 79 | argument points to a non-existant folder. 80 | grid_parameters : dict 81 | The dictionary containing the grid parameters. 82 | force : bool, default: True 83 | If True, overwrite files in case of conflict when ``name`` 84 | points to an already existing directory. 85 | 86 | Examples 87 | -------- 88 | >>> import pykasso as pk 89 | >>> app = pk.pykasso() 90 | >>> name = "new_project" 91 | >>> grid_parameters = { 92 | >>> 'nx': 10, 'ny': 10, 'nz': 10, 93 | >>> 'x0': 0, 'y0': 0, 'z0': 0, 94 | >>> 'dx': 10, 'dy': 10, 'dz': 10, 95 | >>> } 96 | >>> app.new_project(name, grid_parameters) 97 | """ 98 | 99 | ### Input validation 100 | 101 | # Test 'name' type 102 | try: 103 | is_variable_type_valid(variable_name='name', 104 | variable_value=name, 105 | valid_types=(str)) 106 | except TypeError: 107 | raise 108 | 109 | # Test 'grid_parameters' type 110 | try: 111 | is_variable_type_valid(variable_name='grid_parameters', 112 | variable_value=grid_parameters, 113 | valid_types=(dict)) 114 | except TypeError: 115 | raise 116 | 117 | # Test 'Grid' parameters presence 118 | for parameter in GRID_PARAMETERS: 119 | try: 120 | is_key_in_dict(dictionary=grid_parameters, 121 | dictionary_name='grid_parameters', 122 | key_to_test=parameter) 123 | except KeyError: 124 | raise 125 | 126 | # Test if the values of attributes are of type int or float 127 | for parameter_name in ['x0', 'y0', 'z0', 'dx', 'dy', 'dz']: 128 | try: 129 | parameter_value = grid_parameters[parameter_name] 130 | is_variable_type_valid(variable_name=parameter_name, 131 | variable_value=parameter_value, 132 | valid_types=(int, float)) 133 | except TypeError: 134 | raise 135 | 136 | # Test if the values of attributes are of type int 137 | for parameter_name in ['nx', 'ny', 'nz']: 138 | try: 139 | parameter_value = grid_parameters[parameter_name] 140 | is_variable_type_valid(variable_name=parameter_name, 141 | variable_value=parameter_value, 142 | valid_types=(int)) 143 | except TypeError: 144 | raise 145 | 146 | # Test if the values of attributes are well upper 0 147 | for parameter_name in ['nx', 'ny', 'nz']: 148 | try: 149 | parameter_value = grid_parameters[parameter_name] 150 | is_parameter_comparison_valid(parameter_name=parameter_name, 151 | parameter_value=parameter_value, 152 | logical_test='>', 153 | compared_to=0) 154 | except ValueError: 155 | raise 156 | 157 | ### Initialization of the application 158 | 159 | # Set a project instance 160 | self.__project = Project( 161 | 
grid_parameters=grid_parameters, 162 | project_location=name, 163 | force=force, 164 | ) 165 | 166 | # Initialize the 'model' module 167 | self.__model = SKS(self.project) 168 | 169 | # Initialize the 'analysis' module 170 | self.__analyzer = Analyzer(self.project) 171 | 172 | # Initialize the 'visualisation' module 173 | self.__visualizer = Visualizer(self.project) 174 | 175 | return None 176 | 177 | def open_project(self) -> NotImplementedError: 178 | """ 179 | Not implemented yet. 180 | """ 181 | msg = "Not implemented yet." 182 | raise NotImplementedError(msg) 183 | 184 | def save_project(self) -> NotImplementedError: 185 | """ 186 | Not implemented yet. 187 | """ 188 | msg = "Not implemented yet." 189 | raise NotImplementedError(msg) 190 | 191 | def export_project(self) -> NotImplementedError: 192 | """ 193 | Not implemented yet. 194 | """ 195 | msg = "Not implemented yet." 196 | raise NotImplementedError(msg) 197 | 198 | ############### 199 | ### GETTERS ### 200 | ############### 201 | 202 | @property 203 | def project(self) -> Project: 204 | """ 205 | Return the project class. 206 | """ 207 | if self.__project is None: 208 | msg = "No project available yet. Please create or load a project." 209 | print(msg) 210 | return None 211 | else: 212 | return self.__project 213 | 214 | @property 215 | def model(self) -> SKS: 216 | """ 217 | Return the SKS model class. 218 | """ 219 | if self.__model is None: 220 | msg = ("This feature is not available yet. Please create or load a" 221 | " project first.") 222 | print(msg) 223 | return None 224 | else: 225 | return self.__model 226 | 227 | @property 228 | def analyzer(self) -> Analyzer: 229 | """ 230 | Return the analyzer class. 231 | """ 232 | if self.__analyzer is None: 233 | msg = ("This feature is not available yet. Please create or load a" 234 | " project first.") 235 | print(msg) 236 | return None 237 | else: 238 | return self.__analyzer 239 | 240 | @property 241 | def visualizer(self) -> Visualizer: 242 | """ 243 | Return the visualizer class. 244 | """ 245 | if self.__visualizer is None: 246 | msg = ("This feature is not available yet. Please create or load a" 247 | " project first.") 248 | print(msg) 249 | return None 250 | else: 251 | return self.__visualizer 252 | -------------------------------------------------------------------------------- /pykasso/core/main.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module containing functions for accessing the public content of pyKasso. 3 | """ 4 | 5 | ### Local dependencies 6 | from .application import Application 7 | from .grid import Grid 8 | from .._utils.datareader import DataReader 9 | 10 | 11 | def pykasso() -> Application: 12 | """ 13 | Create and return an ``Application``. 14 | 15 | Returns 16 | ------- 17 | Application 18 | 19 | See Also 20 | -------- 21 | Application, Project, Grid 22 | 23 | Examples 24 | -------- 25 | >>> import pykasso as pk 26 | >>> app = pk.pykasso() 27 | """ 28 | out = Application() 29 | return out 30 | 31 | 32 | def create_datareader(grid: Grid = None) -> DataReader: 33 | """ 34 | Create and return a ``DataReader``. 
35 | 36 | Returns 37 | ------- 38 | DataReader 39 | 40 | Parameters 41 | ---------- 42 | Grid 43 | 44 | See Also 45 | -------- 46 | DataReader 47 | 48 | Examples 49 | -------- 50 | >>> import pykasso as pk 51 | >>> data_reader = pk.create_datareader() 52 | """ 53 | out = DataReader(grid=grid) 54 | return out 55 | -------------------------------------------------------------------------------- /pykasso/model/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Contains the model of pyKasso. 3 | """ 4 | 5 | __all__ = [] 6 | -------------------------------------------------------------------------------- /pykasso/model/_validations.py: -------------------------------------------------------------------------------- 1 | """ 2 | Input validation functions. 3 | """ 4 | 5 | import PIL 6 | import sys 7 | import logging 8 | import rasterio 9 | import numpy as np 10 | 11 | from .._utils import datareader 12 | from .._utils import validation as val 13 | 14 | this = sys.modules[__name__] 15 | 16 | 17 | ################# 18 | ### FUNCTIONS ### 19 | ################# 20 | 21 | def read_file(path: str, attribute: str) -> np.ndarray: 22 | extension = path.split('.')[-1] 23 | try: 24 | ### GSLIB 25 | if extension == 'gslib': 26 | data = np.genfromtxt(path, skip_header=3, dtype=np.int8) 27 | 28 | ### Numpy_pickle 29 | elif extension == 'npy': 30 | data = np.load(path) 31 | 32 | ### Images 33 | elif extension in ['jpg', 'png']: 34 | data = np.asarray(PIL.Image.open(path).convert('L')).T 35 | 36 | ### CSV 37 | elif extension == 'csv': 38 | data = np.genfromtxt(path, delimiter=',').T 39 | 40 | ### TIF, TIFF 41 | elif extension in ['tif', 'tiff']: 42 | data = rasterio.open(path).read(1).T 43 | 44 | ### Others 45 | else: 46 | data = np.genfromtxt(path) 47 | 48 | except Exception as err: 49 | msg = ("Impossible to read the file designated by the '{}' attribute." 50 | " Location : {}".format(attribute, path)) 51 | this.logger.error(msg) 52 | raise err 53 | else: 54 | return data 55 | 56 | 57 | def is_list_length_valid(data: list, value: int, attribute: str) -> bool: 58 | if len(data) < value: 59 | msg = ("'{}' data length is too short ({} elements minimum)." 60 | .format(attribute, value)) 61 | this.logger.critical(msg) 62 | raise ValueError(msg) 63 | else: 64 | return True 65 | 66 | 67 | def is_coordinate_type_valid(coordinate: tuple, types: tuple, 68 | attribute: str) -> bool: 69 | if not isinstance(coordinate, types): 70 | msg = ("The values of the '{}' attribute contains at least one invalid" 71 | " vertex. Coordinates must be of type : {}." 72 | .format(attribute, types)) 73 | this.logger.critical(msg) 74 | raise TypeError(msg) 75 | else: 76 | return True 77 | 78 | 79 | def is_surface_dimensions_valid(attribute: str, array: np.ndarray, 80 | grid) -> bool: 81 | nx, ny, nz = grid.shape 82 | if not (array.shape == (nx, ny)): 83 | msg = ("The '{}' array shape does not match with grid surface." 
84 |                " Array shape: {}, Grid surface shape: {}"
85 |                .format(attribute, array.shape, (nx, ny)))
86 |         this.logger.critical(msg)
87 |         raise ValueError(msg)
88 |     else:
89 |         return True
90 | 
91 | 
92 | def is_costs_dictionnary_valid(costs_dictionnary: dict, ids_data: list):
93 |     """Check that every data id has an entry in the 'costs' dictionary."""
94 |     for i in ids_data:
95 |         if i not in costs_dictionnary:
96 |             msg = ("The data id ({}) is not within 'costs' dictionary keys"
97 |                    " ({})".format(i, list(costs_dictionnary.keys())))
98 |             this.logger.error(msg)
99 |             raise KeyError(msg)
100 |     return True
101 | 
102 | 
103 | ###############################################################################
104 | 
105 | ########################
106 | ### OUTLETS - INLETS ###
107 | ########################
108 | 
109 | def validate_settings_points(settings: dict,
110 |                              attribute: str,
111 |                              ) -> dict:
112 |     """
113 |     Validate the parameters of ``outlets`` and ``inlets`` keys.
114 | 
115 |     TODO
116 | 
117 |     Tested parameters:
118 |     - ``seed``: must be of type int;
119 |     - ``number``: must be of type int, value must be greater than zero;
120 |     - ``data``: TODO
121 |     """
122 |     # Set logger
123 |     logger = logging.getLogger("{}.validation".format(attribute))
124 | 
125 |     ### 'seed' ###
126 | 
127 |     # Test if 'seed' is of type int or None
128 |     try:
129 |         if settings['seed'] is not None:
130 |             val.is_variable_type_valid(variable_name='seed',
131 |                                        variable_value=settings['seed'],
132 |                                        valid_types=(int))
133 |     except TypeError as error:
134 |         logger.error(error)
135 |         raise
136 | 
137 |     ### 'subdomain' ###
138 | 
139 |     # Test if 'subdomain' is of type str
140 |     try:
141 |         val.is_variable_type_valid(variable_name='subdomain',
142 |                                    variable_value=settings['subdomain'],
143 |                                    valid_types=(str))
144 |     except TypeError as error:
145 |         logger.error(error)
146 |         raise
147 | 
148 |     ### 'shuffle' ###
149 | 
150 |     # Test if 'shuffle' is of type bool
151 |     try:
152 |         val.is_variable_type_valid(variable_name='shuffle',
153 |                                    variable_value=settings['shuffle'],
154 |                                    valid_types=(bool))
155 |     except TypeError as error:
156 |         logger.error(error)
157 |         raise
158 | 
159 |     ### 'number' ###
160 | 
161 |     # Test if 'number' is of type int
162 |     try:
163 |         val.is_variable_type_valid(variable_name='number',
164 |                                    variable_value=settings['number'],
165 |                                    valid_types=(int))
166 |     except TypeError as error:
167 |         logger.error(error)
168 |         raise
169 | 
170 |     # Test if 'number' is greater than zero
171 |     try:
172 |         val.is_parameter_comparison_valid(parameter_name='number',
173 |                                           parameter_value=settings['number'],
174 |                                           logical_test='>',
175 |                                           compared_to=0)
176 |     except ValueError as error:
177 |         logger.error(error)
178 |         raise
179 | 
180 |     ### 'importance' ###
181 | 
182 |     # Test if 'importance' is of type list
183 |     try:
184 |         val.is_variable_type_valid(variable_name='importance',
185 |                                    variable_value=settings['importance'],
186 |                                    valid_types=(list))
187 |     except TypeError as error:
188 |         logger.error(error)
189 |         raise
190 | 
191 |     # Test if the list length is consistent with the declared number of points
192 |     try:
193 |         if len(settings['importance']) > settings['number']:
194 |             msg = ("The 'importance' list length cannot exceed the declared"
195 |                    " 'number' of points.")
196 |             raise ValueError(msg)
197 |             # TODO - custom exception ?
198 | except Exception as error: 199 | logger.error(error) 200 | raise 201 | 202 | ### 'data' 203 | 204 | # Test if data is empty 205 | if isinstance(settings['data'], str) and (settings['data'] == ''): 206 | settings['data'] = [] 207 | 208 | # If 'data' type is str, try to read the file 209 | if isinstance(settings['data'], str): 210 | filepath = settings['data'] 211 | 212 | # Test if filepath is valid 213 | val.is_filepath_valid(filepath) 214 | 215 | # Try to read filepath 216 | dr = datareader.DataReader() 217 | settings['data'] = dr.get_data_from_file(filepath) 218 | 219 | # If 'data' type is np.ndarray, transform it to list 220 | if isinstance(settings['data'], np.ndarray): 221 | 222 | # If the list of points contains only one element 223 | if len(settings['data'].shape) == 1: 224 | settings['data'] = np.array([settings['data']]) 225 | 226 | settings['data'] = settings['data'].tolist() 227 | 228 | return settings 229 | -------------------------------------------------------------------------------- /pykasso/model/_wrappers.py: -------------------------------------------------------------------------------- 1 | """pyKasso's wrappers functions.""" 2 | 3 | ### Internal dependencies 4 | import logging 5 | 6 | ### Local dependencies 7 | from .._utils.validation import test_sks_settings 8 | from ..model import _validations as val 9 | from ..core._namespaces import DEFAULT_FMM_COSTS 10 | 11 | 12 | DEFAULT_VALUES = { 13 | 'sks': { 14 | 'seed': 0, 15 | 'algorithm': 'Isotropic3', 16 | 'costs': DEFAULT_FMM_COSTS, 17 | 'factors': {'F': 100, 'F1': 100, 'F2': 50} 18 | }, 19 | 'geology': { 20 | 'data': None, 21 | 'axis': 'z', 22 | 'names': {}, 23 | 'costs': {}, 24 | 'model': {} 25 | }, 26 | 'faults': { 27 | 'data': None, 28 | 'axis': 'z', 29 | 'names': {}, 30 | 'costs': {}, 31 | 'model': {} 32 | }, 33 | 'fractures': { 34 | 'data': None, 35 | 'axis': 'z', 36 | 'names': {}, 37 | # 'costs': {}, 38 | 'model': {}, 39 | 'seed': None, 40 | }, 41 | 'domain': { 42 | 'delimitation': None, 43 | 'topography': None, 44 | 'bedrock': None, 45 | 'water_table': None, 46 | }, 47 | 'outlets': { 48 | # 'number': ['required', ''], 49 | 'data': [], 50 | 'shuffle': False, 51 | 'importance': [1], 52 | 'subdomain': 'domain_surface', 53 | 'geology': None, 54 | 'seed': None, 55 | }, 56 | 'inlets': { 57 | # 'number': ['required', ''], 58 | 'data': [], 59 | 'shuffle': False, 60 | 'importance': [1], 61 | # 'per_outlet': [1], 62 | 'subdomain': 'domain_surface', 63 | 'geology': None, 64 | 'seed': None, 65 | }, 66 | } 67 | 68 | 69 | def _parameters_validation(feature, kind): 70 | """ 71 | This decorator validates input parameters before creatings modeling classes. 72 | """ 73 | def _(function): 74 | def _wrapper(*args, **kwargs): 75 | logger = logging.getLogger("validation.{}".format(feature)) 76 | model = args[0] 77 | 78 | # Add feature dictionary if value is missing 79 | if feature not in model.model_parameters: 80 | if kind == 'required': 81 | msg = "The '{}' key is missing.".format(feature) 82 | logger.error(msg) 83 | raise KeyError(msg) 84 | else: 85 | model.model_parameters[feature] = {} 86 | 87 | # Add default feature values 88 | user_params = model.model_parameters[feature].copy() 89 | default_params = DEFAULT_VALUES[feature].copy() 90 | for (key, value) in default_params.items(): 91 | if key not in user_params: 92 | msg = ("The '{}' attribute is missing. 
Set to default" 93 | " value.").format(key) 94 | logger.warning(msg) 95 | default_params.update(user_params) 96 | 97 | # Test special key presences 98 | if feature == 'sks': 99 | # Travel cost 100 | costs = default_params['costs'].copy() 101 | default_costs = DEFAULT_FMM_COSTS.copy() 102 | default_costs.update(costs) 103 | default_params['costs'] = default_costs 104 | # Mode 105 | if default_params['algorithm'] == 'Isotropic3': 106 | default_params['mode'] = 'A' 107 | else: 108 | default_params.setdefault('mode', 'D') 109 | if feature in ['outlets', 'inlets']: 110 | for key in ['number']: 111 | if key not in default_params: 112 | msg = ("The mandatory '{}' attribute is missing." 113 | ).format(key) 114 | logger.error(msg) 115 | raise KeyError(msg) 116 | 117 | # Control values 118 | if feature == 'sks': 119 | test_sks_settings(default_params) 120 | elif feature in ['geology', 'faults', 'fractures']: 121 | pass 122 | elif feature == 'domain': 123 | pass 124 | elif feature in ['inlets', 'outlets']: 125 | val.validate_settings_points(default_params, feature) 126 | # if isinstance(default_params['data'], str) 127 | 128 | pass 129 | 130 | # Update dictionary 131 | model.model_parameters[feature] = default_params 132 | msg = "'{}' parameters have been validated.".format(feature) 133 | logger.info(msg) 134 | result = function(*args, **kwargs) 135 | return model 136 | return _wrapper 137 | return _ 138 | 139 | 140 | def _memoize(feature): 141 | """ 142 | This decorator caches the results of function calls, preventing the need 143 | to recompute results for the same inputs. 144 | """ 145 | def _(function): 146 | def _wrapper(*args, **kwargs): 147 | logger = logging.getLogger("construction.{}".format(feature)) 148 | model = args[0] 149 | memoization = model.project._memoization 150 | if model.model_parameters[feature] is not memoization['settings'][feature]: 151 | # print('is not') # TODO 152 | result = function(*args, **kwargs) 153 | memoization['settings'][feature] = model.model_parameters[feature] 154 | memoization['model'][feature] = getattr(model, feature) 155 | logger.info("'{}' has been constructed".format(feature)) 156 | else: 157 | setattr(model, feature, memoization['model'][feature]) 158 | msg = "'{}' has been reused from previous simulation".format(feature) 159 | logger.info(msg) 160 | return model 161 | return _wrapper 162 | return _ 163 | 164 | 165 | def _logging(feature=None, step=None): 166 | """ 167 | This decorator records messages to a log fileand tracks events, errors, 168 | and informational messages. 
169 | """ 170 | def _(function): 171 | def _wrapper(*args, **kwargs): 172 | if feature is not None: 173 | logger = logging.getLogger("{}.{}".format(feature, step)) 174 | else: 175 | logger = logging.getLogger(".") 176 | try: 177 | result = function(*args, **kwargs) 178 | except Exception as err: 179 | msg = "Critical error during '{}'".format(function.__name__) 180 | logger.critical(msg) 181 | raise err 182 | else: 183 | logger.debug("'{}' went well".format(function.__name__)) 184 | return result 185 | return _wrapper 186 | return _ 187 | -------------------------------------------------------------------------------- /pykasso/model/domain_features/__init__.py: -------------------------------------------------------------------------------- 1 | from .bedrock import Bedrock 2 | from .delimitation import Delimitation 3 | from .domain import Domain 4 | from .topography import Topography 5 | from .watertable import WaterTable 6 | -------------------------------------------------------------------------------- /pykasso/model/domain_features/bedrock.py: -------------------------------------------------------------------------------- 1 | from ...core.grid import Grid 2 | from ..geologic_features.surface import Surface 3 | 4 | 5 | class Bedrock(Surface): 6 | """ 7 | Class modeling the lower horizontal limit of the study site. 8 | """ 9 | def __init__( 10 | self, 11 | grid: Grid, 12 | *args, 13 | **kwargs, 14 | ) -> None: 15 | feature = 'bedrock' 16 | super().__init__(grid, feature, *args, **kwargs) 17 | -------------------------------------------------------------------------------- /pykasso/model/domain_features/delimitation.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from shapely.geometry import Point, Polygon 3 | from ...core.grid import Grid 4 | 5 | 6 | class Delimitation(): 7 | """ 8 | Class modeling the vertical limits of the study site. 9 | """ 10 | 11 | def __init__( 12 | self, 13 | vertices: list, 14 | grid: Grid, 15 | ) -> None: 16 | """ 17 | Construct the delimitation, the vertical limits of the study site. 18 | 19 | Parameters 20 | ---------- 21 | vertices : list 22 | List of coordinates representing the vertices of the boundary 23 | polygon : [[x0,y0], ..., [xn, yn]]. The list must contain at least 24 | 3 vertices. 25 | grid : Grid 26 | Grid of the model. 27 | """ 28 | self.label = 'delimitation' 29 | self.vertices = vertices 30 | 31 | ### Set the polygon with shapely 32 | path_vertices = self.vertices.copy() 33 | self.polygon = Polygon(path_vertices) 34 | 35 | ### Sets the mask array with a numpy-array 36 | row, col = np.indices((grid.nx, grid.ny)) 37 | X, Y, Z = grid.get_meshgrids() 38 | pts = np.column_stack((X[row, col, 0].flatten(), 39 | Y[row, col, 0].flatten())) 40 | msk = [self.polygon.contains(Point(x, y)) for (x, y) in pts] 41 | msk = np.array(msk).reshape((grid.nx, grid.ny)).astype(int) 42 | self.data_volume = np.repeat(msk[:, :, np.newaxis], grid.nz, axis=2) 43 | -------------------------------------------------------------------------------- /pykasso/model/domain_features/topography.py: -------------------------------------------------------------------------------- 1 | from ...core.grid import Grid 2 | from ..geologic_features.surface import Surface 3 | 4 | 5 | class Topography(Surface): 6 | """ 7 | Class modeling the upper horizontal limit of the study site. 
8 | """ 9 | def __init__( 10 | self, 11 | grid: Grid, 12 | *args, 13 | **kwargs, 14 | ) -> None: 15 | feature = 'topography' 16 | super().__init__(grid, feature, *args, **kwargs) 17 | -------------------------------------------------------------------------------- /pykasso/model/domain_features/watertable.py: -------------------------------------------------------------------------------- 1 | from ...core.grid import Grid 2 | from ..geologic_features.surface import Surface 3 | 4 | 5 | class WaterTable(Surface): 6 | """ 7 | Class modeling the water level elevation, the phreatic/vadose limit of the 8 | study site. 9 | """ 10 | def __init__( 11 | self, 12 | grid: Grid, 13 | *args, 14 | **kwargs, 15 | ) -> None: 16 | feature = 'water_table' 17 | super().__init__(grid, feature, *args, **kwargs) 18 | -------------------------------------------------------------------------------- /pykasso/model/geologic_features/__init__.py: -------------------------------------------------------------------------------- 1 | from .faults import Faults 2 | from .fractures import Fractures 3 | from .geologicfeature import GeologicFeature 4 | from .geology import Geology 5 | from .surface import Surface 6 | -------------------------------------------------------------------------------- /pykasso/model/geologic_features/faults.py: -------------------------------------------------------------------------------- 1 | from ...core._namespaces import DEFAULT_FMM_COSTS 2 | from ...core.grid import Grid 3 | from .geologicfeature import GeologicFeature 4 | 5 | 6 | class Faults(GeologicFeature): 7 | """ 8 | Class modeling the faults model. 9 | """ 10 | 11 | def __init__( 12 | self, 13 | grid: Grid, 14 | default_fmm_cost: float = DEFAULT_FMM_COSTS['faults'], 15 | *args, 16 | **kwargs, 17 | ) -> None: 18 | feature = 'faults' 19 | dim = 3 20 | super().__init__(grid, feature, dim, default_fmm_cost, *args, **kwargs) 21 | 22 | def set_names( 23 | self, 24 | names: dict[int, str], 25 | default_name: str = 'fault {}', 26 | ) -> None: 27 | """ 28 | Assign names to fault items based on the provided ``names`` dictionary 29 | , with an optional default naming pattern. 30 | 31 | Parameters 32 | ---------- 33 | names : dict[int, str] 34 | A dictionary where the keys are fault item indices (integers) and 35 | the values are the corresponding names (strings) to be assigned. 36 | This dictionary specifies which geologic unit should receive 37 | custom names. 38 | default_name : str, default: 'fault {}' 39 | A format string used to generate default fault item names for 40 | items not explicitly named in the ``names`` dictionary. The format 41 | string should include a placeholder (e.g., '{}') that will be 42 | replaced by the item's index. 43 | 44 | Notes 45 | ----- 46 | This function does not return a value. It rewrites the ``self.names`` 47 | attribute with the new specified dictionary. 48 | """ 49 | return super().set_names(names, default_name) 50 | 51 | def set_model( 52 | self, 53 | model: dict[int, str], 54 | default_model: bool = True, 55 | ) -> None: 56 | """ 57 | Indicate if a fault item should be considered in the modelisation 58 | based on the provided dictionary, with an optional default setting. 59 | 60 | Parameters 61 | ---------- 62 | model : dict[int, bool] 63 | A dictionary where the keys are fault item indices (integers) and 64 | the values are booleans indicating if the item is considered or 65 | not. 
66 | default_model : bool, default: True 67 | The default value to be applied to fault items not explicitly 68 | listed in the ``model`` dictionary. 69 | 70 | Notes 71 | ----- 72 | This function does not return a value. It rewrites the ``self.model`` 73 | attribute with the new specified dictionary. 74 | """ 75 | model.setdefault(0, False) 76 | return super().set_model(model, default_model) 77 | -------------------------------------------------------------------------------- /pykasso/model/geologic_features/geologicfeature.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module contains classes modeling the geological features. 3 | """ 4 | 5 | ### External dependencies 6 | import numpy as np 7 | import pandas as pd 8 | 9 | ### Local dependencies 10 | from ..._utils.datareader import DataReader 11 | from ...core._namespaces import DEFAULT_FEATURE_PARAMETERS 12 | 13 | ### Typing 14 | from typing import Union 15 | from ...core.grid import Grid 16 | 17 | 18 | class GeologicFeature(DataReader): 19 | """ 20 | Class modeling a geological feature. 21 | """ 22 | 23 | def __init__( 24 | self, 25 | grid: Grid, 26 | feature: str, 27 | dim: int, 28 | default_fmm_cost: float, 29 | *args, 30 | **kwargs, 31 | ) -> None: 32 | """ 33 | Construct a geological feature. 34 | 35 | Parameters 36 | ---------- 37 | grid : Grid 38 | pyKasso's ``Grid`` of the model. 39 | feature : str 40 | Define the type of geological feature. 41 | 42 | Available 2D geological features: 43 | - ``'topography'`` 44 | - ``'water_table'`` 45 | - ``'bedrock'`` 46 | 47 | Available 3D geological features: 48 | - ``'geology'`` 49 | - ``'faults'`` 50 | - ``'fractures'`` 51 | dim : int 52 | Define whether the geological feature corresponds to a 2D or 3D dataset. 53 | 54 | default_fmm_cost : float 55 | Define the default fast-marching method cost value. 56 | """ 57 | 58 | # Initialization 59 | super().__init__(grid, *args, **kwargs) 60 | self.feature = feature 61 | self.dim = dim 62 | self.default_fmm_cost = default_fmm_cost 63 | self.data_surface = None 64 | self.data_volume = None 65 | self.stats = None 66 | self.names = None 67 | self.costs = None 68 | self.model = None 69 | 70 | # Retrieve arguments from kwargs 71 | data = kwargs.get('data', None) 72 | axis = kwargs.get('axis', 'z') 73 | names = kwargs.get('names', {}) 74 | costs = kwargs.get('costs', {}) 75 | model = kwargs.get('model', {}) 76 | 77 | # Set the data 78 | self.set_data(data, axis) 79 | self.compute_statistics() 80 | if self.feature not in ['topography', 'water_table', 'bedrock']: 81 | self.set_names(names) 82 | self.set_costs(costs, self.default_fmm_cost) 83 | self.set_model(model) 84 | 85 | def overview(self) -> pd.DataFrame: 86 | """ 87 | Return a pandas DataFrame describing each contained unit with its name, 88 | its cost, and if it will be considered during the simulation. Basic 89 | statistics are also described. 
90 | """ 91 | index = self.stats.index 92 | data = { 93 | 'names': self.names.values(), 94 | 'costs': self.costs.values(), 95 | 'model': self.model.values(), 96 | } 97 | df = pd.DataFrame(data, index=index) 98 | df = pd.merge(df, self.stats, left_index=True, right_index=True) 99 | return df 100 | 101 | ############### 102 | ### SETTERS ### 103 | ############### 104 | 105 | def set_data( 106 | self, 107 | data: Union[None, str, np.ndarray], 108 | axis: str = 'z', 109 | ) -> None: 110 | """ 111 | """ 112 | # If no data is provdided 113 | if data is None: 114 | 115 | value = DEFAULT_FEATURE_PARAMETERS[self.feature]['nodata'] 116 | 117 | if self.dim == 2: 118 | self.data_surface = self._get_data_full_2D(value) 119 | elif self.dim == 3: 120 | self.data_volume = self._get_data_full_3D(value) 121 | 122 | # Else 123 | elif isinstance(data, np.ndarray): 124 | if self.dim == 2: 125 | self.data_surface = data 126 | elif self.dim == 3: 127 | self.data_volume = data 128 | else: 129 | if self.dim == 2: 130 | self.data_surface = self.get_data_from_file(data, 131 | False) 132 | elif self.dim == 3: 133 | self.data_volume = self.get_data_from_file(data, 134 | True, 135 | axis) 136 | return None 137 | 138 | def set_names( 139 | self, 140 | names: dict[int, str], 141 | default_name: str = 'item {}', 142 | ) -> None: 143 | """ 144 | Assign names to items based on the provided ``names`` dictionary, with 145 | an optional default naming pattern. 146 | 147 | Parameters 148 | ---------- 149 | names : dict[int, str] 150 | A dictionary where the keys are item indices (integers) and the 151 | values are the corresponding names (strings) to be assigned. This 152 | dictionary specifies which items should receive custom names. 153 | default_name : str, default: 'item {}' 154 | A format string used to generate default names for items not 155 | explicitly named in the ``names`` dictionary. The format string 156 | should include a placeholder (e.g., '{}') that will be replaced by 157 | the item's index. 158 | 159 | Notes 160 | ----- 161 | This function does not return a value. It rewrites the ``self.names`` 162 | attribute with the new specified dictionary. 163 | """ 164 | ids = self.stats.index 165 | names_df = {} 166 | for id in ids: 167 | names_df[id] = names.get(id, default_name.format(id)) 168 | self.names = names_df 169 | return None 170 | 171 | def set_costs( 172 | self, 173 | costs: dict[int, str], 174 | default_cost: float = None, 175 | ) -> None: 176 | """ 177 | Assign costs to items based on the provided dictionary, with an 178 | optional default cost. 179 | 180 | Parameters 181 | ---------- 182 | costs : dict[int, str] 183 | A dictionary where the keys are item indices (integers) and the 184 | values are the corresponding costs (floats) to be assigned. This 185 | dictionary specifies which items should receive custom costs. 186 | default_cost : float, default: 0.5 187 | The default cost to be applied to items not explicitly listed in 188 | the ``costs`` dictionary. 189 | 190 | Notes 191 | ----- 192 | This function does not return a value. It rewrites the ``self.costs`` 193 | attribute with the new specified dictionary. 
194 | """ 195 | # Retrieve default fmm cost 196 | if default_cost is None: 197 | default_cost = self.default_fmm_cost 198 | 199 | # Assign costs 200 | ids = self.stats.index 201 | costs_df = {} 202 | for id in ids: 203 | costs_df[id] = costs.get(id, default_cost) 204 | self.costs = costs_df 205 | 206 | return None 207 | 208 | def set_model( 209 | self, 210 | model: dict[int, str], 211 | default_model: bool = True, 212 | ) -> None: 213 | """ 214 | Indicate if an item should be considered in the modelisation based on 215 | the provided dictionary, with an optional default setting. 216 | 217 | Parameters 218 | ---------- 219 | model : dict[int, bool] 220 | A dictionary where the keys are item indices (integers) and the 221 | values are booleans indicating if the item is considered or not. 222 | default_model : bool, default: True 223 | The default value to be applied to items not explicitly listed in 224 | the ``model`` dictionary. 225 | 226 | Notes 227 | ----- 228 | This function does not return a value. It rewrites the ``self.model`` 229 | attribute with the new specified dictionary. 230 | """ 231 | ids = self.stats.index 232 | model_df = {} 233 | for id in ids: 234 | model_df[id] = model.get(id, default_model) 235 | self.model = model_df 236 | return None 237 | 238 | ############### 239 | ### GETTERS ### 240 | ############### 241 | 242 | def get_data_units(self, units: list[int]) -> np.ndarray: 243 | """ 244 | Return a copy of the ``self.data_volume`` attribute only containing 245 | the specified units. 246 | 247 | Parameters 248 | ---------- 249 | units: list[int] 250 | List of units to retrieve. 251 | 252 | Returns 253 | ------- 254 | np.ndarray 255 | """ 256 | # data = np.empty(self.grid.shape) * np.nan # ISSUES with plotting 257 | data = np.zeros(self.grid.shape) 258 | test = np.isin(self.data_volume, units) 259 | data = np.where(test, self.data_volume, data) 260 | return data 261 | 262 | def get_data_model(self) -> np.ndarray: 263 | """ 264 | Return a copy of the ``self.data_volume`` attribute corresponding of 265 | the state of the ``self.model`` attribute. 266 | """ 267 | valid_ids = [id_ for (id_, boolean) in self.model.items() if boolean] 268 | geology = self.get_data_units(valid_ids) 269 | return geology 270 | 271 | ############# 272 | ### OTHER ### 273 | ############# 274 | 275 | def compute_statistics(self) -> None: 276 | """ 277 | Populate the ``self.stats`` attribute with a pandas DataFrame 278 | containing statistics (counts and frequency) on the data. 279 | 280 | Returns 281 | ------- 282 | None 283 | """ 284 | values, counts = np.unique(self.data_volume, return_counts=True) 285 | values = values.astype('int') 286 | stats = { 287 | 'counts': counts, 288 | 'freq': counts / self.grid.nodes, 289 | 'volume': counts * self.grid.node_volume, 290 | } 291 | self.stats = pd.DataFrame(data=stats, index=values) 292 | return None 293 | -------------------------------------------------------------------------------- /pykasso/model/geologic_features/geology.py: -------------------------------------------------------------------------------- 1 | from ...core._namespaces import DEFAULT_FMM_COSTS 2 | from ...core.grid import Grid 3 | from .geologicfeature import GeologicFeature 4 | 5 | 6 | class Geology(GeologicFeature): 7 | """ 8 | Class modeling the geologic model. 
9 |     """
10 | 
11 |     def __init__(
12 |         self,
13 |         grid: Grid,
14 |         default_fmm_cost: float = DEFAULT_FMM_COSTS['geology'],
15 |         *args,
16 |         **kwargs,
17 |     ) -> None:
18 |         feature = 'geology'
19 |         dim = 3
20 |         super().__init__(grid, feature, dim, default_fmm_cost, *args, **kwargs)
21 | 
22 |     def set_names(
23 |         self,
24 |         names: dict[int, str],
25 |         default_name: str = 'unit {}',
26 |     ) -> None:
27 |         """
28 |         Assign names to geologic units based on the provided ``names``
29 |         dictionary, with an optional default naming pattern.
30 | 
31 |         Parameters
32 |         ----------
33 |         names : dict[int, str]
34 |             A dictionary where the keys are geologic unit indices (integers)
35 |             and the values are the corresponding names (strings) to be
36 |             assigned. This dictionary specifies which geologic unit should
37 |             receive custom names.
38 |         default_name : str, default: 'unit {}'
39 |             A format string used to generate default geologic unit names for
40 |             items not explicitly named in the ``names`` dictionary. The format
41 |             string should include a placeholder (e.g., '{}') that will be
42 |             replaced by the item's index.
43 | 
44 |         Notes
45 |         -----
46 |         This function does not return a value. It rewrites the ``self.names``
47 |         attribute with the new specified dictionary.
48 |         """
49 |         return super().set_names(names, default_name)
50 | 
51 |     def set_model(
52 |         self,
53 |         model: dict[int, bool],
54 |         default_model: bool = True,
55 |     ) -> None:
56 |         """
57 |         Indicate if a geologic unit should be considered in the modeling
58 |         based on the provided dictionary, with an optional default setting.
59 | 
60 |         Parameters
61 |         ----------
62 |         model : dict[int, bool]
63 |             A dictionary where the keys are geologic unit indices (integers)
64 |             and the values are booleans indicating if the item is considered or
65 |             not.
66 |         default_model : bool, default: True
67 |             The default value to be applied to geologic units not explicitly
68 |             listed in the ``model`` dictionary.
69 | 
70 |         Notes
71 |         -----
72 |         This function does not return a value. It rewrites the ``self.model``
73 |         attribute with the new specified dictionary.
74 |         """
75 |         model.setdefault(0, True)
76 |         return super().set_model(model, default_model)
77 | 
--------------------------------------------------------------------------------
/pykasso/model/geologic_features/surface.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pandas as pd
3 | 
4 | from .geologicfeature import GeologicFeature
5 | from ...core.grid import Grid
6 | 
7 | 
8 | class Surface(GeologicFeature):
9 |     """
10 |     Subclass modeling a two dimensional geological feature.
11 |     """
12 | 
13 |     def __init__(
14 |         self,
15 |         grid: Grid,
16 |         feature: str,
17 |         *args,
18 |         **kwargs,
19 |     ) -> None:
20 |         dim = 2
21 |         default_fmm_cost = None
22 |         super().__init__(grid, feature, dim, default_fmm_cost, *args, **kwargs)
23 | 
24 |     def _surface_to_volume(self, condition: str, grid: Grid) -> np.ndarray:
25 |         """
26 |         Convert a two dimensional array into a three dimensional array.
27 | """ 28 | k = grid.get_k(self.data_surface) 29 | data_volume = np.zeros((grid.nx, grid.ny, grid.nz)) 30 | for z in range(grid.nz): 31 | data_volume[:, :, z] = z 32 | if condition == '>=': 33 | test = data_volume[:, :, z] >= k 34 | elif condition == '=': 35 | test = data_volume[:, :, z] == k 36 | elif condition == '<=': 37 | test = data_volume[:, :, z] <= k 38 | data_volume[:, :, z] = np.where(test, 1, 0) 39 | return data_volume 40 | 41 | def compute_statistics(self) -> None: 42 | """ 43 | Populate the ``self.stats`` attribute with a pandas DataFrame 44 | containing statistics (counts and frequency) on the data. 45 | 46 | Returns 47 | ------- 48 | None 49 | """ 50 | values, counts = np.unique(self.data_surface, return_counts=True) 51 | values = values.astype('int') 52 | stats = { 53 | 'counts': counts, 54 | 'freq': counts / self.grid.nodes, 55 | 'surface': counts * self.grid.node_area, 56 | } 57 | self.stats = pd.DataFrame(data=stats, index=values) 58 | return None 59 | -------------------------------------------------------------------------------- /pykasso/visualization/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | ============= 3 | visualization 4 | ============= 5 | 6 | A sub-module for karst network visualization. 7 | """ 8 | 9 | __all__ = [] 10 | 11 | from .visualizer import * 12 | from .._version import __version__ 13 | 14 | __all__.extend(['__version__']) 15 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "pykasso" 3 | version = "1.0.1" 4 | requires-python = ">=3.10" 5 | description = "Python project intended to simulate stochastic karst network" 6 | authors = [ 7 | {name = "François Miville", email = "francois.miville@ikmail.com"}, 8 | {name = "Chloé Fandel"}, 9 | {name = "Philippe Renard"}, 10 | ] 11 | maintainers = [ 12 | {name = "François Miville", email = "francois.miville@ikmail.com"}, 13 | ] 14 | license = {file = "LICENSE"} 15 | readme = "README.md" 16 | keywords = ['hydrogeology', '3-D modeling', 'stochasticity'] 17 | classifiers = [ 18 | "Development Status :: 4 - Beta", 19 | "Programming Language :: Python :: 3.10", 20 | "Programming Language :: Python :: 3.11", 21 | "Operating System :: OS Independent", 22 | "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", 23 | "Natural Language :: English", 24 | "Topic :: Scientific/Engineering", 25 | ] 26 | dependencies = [ 27 | 'notebook', 28 | 'jupyterlab', 29 | 'ipykernel', 30 | 'ipython', 31 | 'ipywidgets', 32 | 'nbformat', 33 | 'nbclient', 34 | 'numpy', 35 | 'pandas', 36 | 'matplotlib', 37 | 'PyQt5', 38 | 'pillow', 39 | 'Jinja2', 40 | 'openpyxl', 41 | 'mpmath', 42 | 'PyYAML', 43 | 'scipy', 44 | 'plotly', 45 | 'shapely', 46 | 'rasterio', 47 | 'agd', 48 | 'networkx', 49 | 'mplstereonet', 50 | 'pyvista', 51 | 'imageio', 52 | 'trame', 53 | 'trame-vuetify', 54 | 'trame-vtk', 55 | 'karstnet', 56 | ] 57 | 58 | [tool.setuptools.packages.find] 59 | include = ['pykasso', 'pykasso.*'] 60 | 61 | [tool.setuptools.package-data] 62 | 'pykasso._misc' = ['*'] 63 | 64 | [project.urls] 65 | Homepage = "https://github.com/randlab/pyKasso" 66 | Repository = "https://github.com/randlab/pyKasso" 67 | 68 | [build-system] 69 | requires = ["setuptools"] 70 | build-backend = "setuptools.build_meta" --------------------------------------------------------------------------------