├── .github
└── workflows
│ └── pythonpackage.yml
├── .gitignore
├── .pylintrc
├── CONTRIBUTING.md
├── LICENSE.txt
├── MANIFEST.in
├── Makefile
├── README.md
├── cli
├── comp_ADCP_t2d.py
├── landxml_to_slf.py
├── plot_comp_ADCP_t2d.py
├── pyteltools_gui.py
├── slf_3d_to_2d.py
├── slf_base.py
├── slf_bottom_friction.py
├── slf_bottom_zones.py
├── slf_flux2d.py
├── slf_int2d.py
├── slf_last.py
├── slf_max_over_files.py
├── slf_sedi_chain.py
├── slf_to_raster.py
├── slf_volume.py
└── update_culverts_file.py
├── conventions.md
├── doxygen.config
├── notebook
├── Handle Serafin files.ipynb
└── Post-processing examples.ipynb
├── pyteltools
├── __init__.py
├── __main__.py
├── arcpy_scripts
│ ├── landxml_to_tin.py
│ └── mxd_to_png.py
├── conf
│ ├── __init__.py
│ └── default_settings.py
├── geom
│ ├── BlueKenue.py
│ ├── Shapefile.py
│ ├── __init__.py
│ ├── conversion.py
│ ├── geometry.py
│ ├── transformation.py
│ └── util.py
├── gui
│ ├── CalculatorGUI.py
│ ├── CompareResultsGUI.py
│ ├── ComputeFluxGUI.py
│ ├── ComputeVolumeGUI.py
│ ├── ConfigTransformation.py
│ ├── ExtractVariablesGUI.py
│ ├── GeometryConverterGUI.py
│ ├── LinesGUI.py
│ ├── MaxMinMeanGUI.py
│ ├── PointsGUI.py
│ ├── ProjectLinesGUI.py
│ ├── ProjectMeshGUI.py
│ ├── __init__.py
│ ├── classic_gui.py
│ └── util.py
├── main_interface.py
├── outil_carto.py
├── slf
│ ├── Serafin.py
│ ├── __init__.py
│ ├── comparison.py
│ ├── data
│ │ ├── Serafin_var2D.csv
│ │ └── Serafin_var3D.csv
│ ├── datatypes.py
│ ├── expression
│ │ ├── __init__.py
│ │ ├── condition.py
│ │ ├── expression.py
│ │ └── pool.py
│ ├── flux.py
│ ├── interpolation.py
│ ├── mesh2D.py
│ ├── misc.py
│ ├── util.py
│ ├── variable
│ │ ├── __init__.py
│ │ ├── variables_2d.py
│ │ ├── variables_3d.py
│ │ └── variables_utils.py
│ ├── variables.py
│ └── volume.py
├── tests
│ ├── __init__.py
│ ├── test_flux.py
│ ├── test_variables.py
│ └── test_volume.py
├── utils
│ ├── __init__.py
│ ├── cli_base.py
│ ├── log.py
│ └── write_cli_usage.py
└── workflow
│ ├── MultiNode.py
│ ├── Node.py
│ ├── __init__.py
│ ├── mono_gui.py
│ ├── multi_func.py
│ ├── multi_gui.py
│ ├── multi_nodes.py
│ ├── nodes_calc.py
│ ├── nodes_io.py
│ ├── nodes_op.py
│ ├── nodes_vis.py
│ ├── util.py
│ └── workflow_gui.py
├── requirements.txt
└── setup.py
/.github/workflows/pythonpackage.yml:
--------------------------------------------------------------------------------
name: Python package

on: [push, workflow_dispatch]

jobs:
  build:

    runs-on: ubuntu-latest
    strategy:
      max-parallel: 5
      matrix:
        python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]

    steps:
      # checkout@v2 / setup-python@v1 ran on deprecated Node runtimes and
      # setup-python@v1 does not know recent CPython releases (3.12, 3.13)
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          sudo apt-get update
          sudo apt-get install libgdal-dev libspatialindex-dev
          python -m pip install --upgrade pip
          pip install -r requirements.txt
      - name: Lint with flake8
        run: |
          pip install flake8
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
      - name: Unit tests
        run: |
          python -m unittest
36 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *~
2 | __pycache__/
3 | *.pyc
4 | .ipynb_checkpoints/
5 | \#*\#
6 | .\#*
7 | .cache
8 | .idea
9 | doc/
10 | venv/
11 |
--------------------------------------------------------------------------------
/.pylintrc:
--------------------------------------------------------------------------------
1 | [MASTER]
2 | # Add files or directories to the blacklist. They should be base names, not
3 | # paths.
4 | ignore=pyteltools/tests
5 |
6 | extension-pkg-whitelist=PyQt5
7 |
8 | [TYPECHECK]
9 | # List of classes names for which member attributes should not be checked
10 | # (useful for classes with attributes dynamically set).
11 | ignored-classes=BaseGeometry,Serafin
12 |
13 | [FORMAT]
14 | # Maximum number of characters on a single line.
15 | max-line-length=120
16 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | > First off, thank you for considering contributing to the opensource project PyTelTools.
4 | > It's people like you that make such great tools for Telemac post-processing tasks.
5 |
6 | Current needs include: development, documentation, tests, ...
7 |
8 |
9 | ## Report a bug
10 |
11 | Please [write an issue](https://github.com/CNR-Engineering/PyTelTools/issues/new) with: the error message (with traceback) and a minimal non-working example to reproduce it.
12 |
13 |
14 | ## Document a tool
15 |
16 | Ask for write access (e.g. per email at: l _(dot)_ duron _(at)_ cnr _(dot)_ tm _(dot)_ fr).
17 |
18 | Screenshots should be uploaded to https://github.com/CNR-Engineering/PyTelTools_media.
19 |
20 | ### Some notions
21 | * DAG: directed acyclic graph
22 | * DFS: depth first search
23 |
24 |
25 | ## Want to develop something?
26 |
27 | ### Implement your fix or feature
28 |
29 | Please do a pull request if you're ready to make your changes!
30 |
31 | Feel free to ask for help; everyone is a beginner at first.
32 |
33 | ### Add a new unitary tool
34 |
35 | TODO
36 |
37 | ### Add a new tool in workflow
38 |
39 | 1. Add it to Mono tab
40 | * `nodes_*`: add a new class which derives from Node (e.g. `TwoInOneOutNode`) in the corresponding file (depending on its category)
41 | * `mono_gui`: add a new entry in dict `NODES`
42 | 2. Add it to Multi tab
43 | * `multi_nodes`: define a new class which derives from Node (e.g. `MultiDoubleInputNode`) and define `load` method to capture options
44 | * `multi_gui`: add a new entry in dict `NODES`
45 | * `multi_func`: define a function and add it in dict `FUNCTIONS`
46 |
47 | #### Datatypes for ports
48 |
49 | Possible datatypes as input/output for workflow tools are currently:
50 | * Serafin
51 | * `slf`
52 | * `slf reference`
53 | * `slf 3d`
54 | * `slf geom`
55 | * `slf out`
56 | * CSV output results
57 | * `point csv`
58 | * `volume csv`
59 | * `flux csv`
60 | * Geometry data
61 | * `point 2d`
62 | * `polyline 2d`
63 | * `polygon 2d`
64 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include *.md
2 | include *.txt
3 | include pyteltools/slf/data/*.csv
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | # make clean
2 | # Remove all potential generated folder/files
3 | #
4 | # make doc
5 | # Generate doxygen documentation in doc
6 | # make update_doc
7 | # Update generated documentation (path to local git repository is specified in variable `DOC_PATH`)
8 | #
9 | # make test
10 | # Check test cases
11 | #
12 | # make venv
13 | # Create python virtual environment
14 | #
15 | # Required dependencies:
16 | # - doc: doxygen
17 | # - test: pytest (Python package, listed in requirements.txt)
18 | # - venv: virtualenv
19 | #
20 | DOC_PATH=../CNR-Engineering.github.io/PyTelTools
21 |
22 | doc: doxygen.config
23 | doxygen $<
24 | python pyteltools/utils/write_cli_usage.py cli doc/cli_usage.md
25 |
26 | update_doc: doc
27 | rm -r ${DOC_PATH} && cp -r doc/html/* ${DOC_PATH}
28 | cd ${DOC_PATH} && git add -A && git commit -m "Update doc" && git pull && git push && cd -
29 |
30 | venv:
31 | virtualenv venv --python=python3
32 | source venv/bin/activate && pip install -r requirements.txt --upgrade
33 |
34 | test:
35 | pytest pyteltools/tests -v
36 |
37 | # .cache (generated by pytest)
38 | clean:
39 | rm -rf doc venv .cache
40 | find . -name '__pycache__' | xargs rm -rf
41 |
42 | .PHONY: clean doc test update_doc venv
43 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # PyTelTools
2 |
3 |
4 | 
5 |
6 | Tested versions: 3.9, 3.10, 3.11, 3.12 and 3.13.
7 |
8 | * [Documentations](#documentations)
9 | * [Installation and requirements](#installation-and-requirements)
10 | * [Usage](#usage)
11 | * [Configure](#configure)
12 |
13 |
14 | ## Documentations
15 | * [User documentation](https://github.com/CNR-Engineering/PyTelTools/wiki)
16 | * [Developer documentation](https://cnr-engineering.github.io/PyTelTools) ([repository with static files](https://github.com/CNR-Engineering/CNR-Engineering.github.io))
17 |
18 |
19 | ## Installation and requirements
20 | PyTelTools relies on **Python3** and requires packages which are listed in [requirements.txt](https://github.com/CNR-Engineering/PyTelTools/blob/master/requirements.txt).
21 |
22 | > :warning: If you have multiple versions of Python installed, beware of using the right **python** or **pip** executable (or consider using a [virtual environment](https://virtualenv.pypa.io/en/stable/) if you are on Linux), which has to be a Python 3 version.
23 |
24 | > :information_source: For **Windows** users who face problems with the installation of these packages (especially PyQt5, scipy or numpy), consider using a Python version with a full set of **pre-installed scientific packages**, such as [WinPython](http://winpython.github.io) or [Conda](https://conda.io). All the major packages will be already installed, therefore it should facilitate the installation.
25 | >
26 | > It is even possible to download a WinPython portable installation for Python 3.6 (64 bits) with all the dependencies required by PyTelTools (and many more packages) already installed [here (~500 MB)](https://drive.google.com/file/d/1IihdjBCefjq8EoTOnY9WBjDwLK5-vLvc/view?usp=sharing).
27 |
28 | PyTelTools can be installed as a Python module (A) or an external program (B).
29 | The recommended installation is within Python (A) as it becomes fully integrated with Python and easier to install, upgrade and use.
30 |
31 | ### A) Installation as Python module
32 | If you want to use a [virtual environment](https://virtualenv.pypa.io/en/stable/) do the following:
33 | ```bash
34 | $ virtualenv venv --python=python3
35 | $ source venv/bin/activate
36 | ```
37 | This step to create and use virtualenv is optional and can also be done through `make venv`.
38 |
39 | PyTelTools can be installed directly from its repository with **pip**:
40 | ```bash
41 | # user install
42 | pip install -e git://github.com/CNR-Engineering/PyTelTools.git#egg=pyteltools --user
43 | # default install (eventually in a virtualenv or needs to be root)
44 | pip install -e git://github.com/CNR-Engineering/PyTelTools.git#egg=pyteltools
45 | ```
46 |
47 | > :information_source: If you do not have a `git` client (which might be the case if you are using Windows), you can try to install it with:
48 | > ```python
49 | > pip install https://github.com/CNR-Engineering/PyTelTools/zipball/master
50 | > ```
51 |
52 | #### Upgrade
53 | To upgrade PyTelTools, simply use **pip**:
54 | ```bash
55 | $ pip install PyTelTools --upgrade
56 | ```
57 |
58 | ### B) PyTelTools as an external program
59 |
60 | #### B.1) Get the source code
61 | Clone source code repository in a folder `PyTelTools`.
62 | ```bash
63 | $ git clone https://github.com/CNR-Engineering/PyTelTools.git
64 | ```
65 |
66 | > :information_source: If you do not have a `git` client, simply unzip the [source code repository](https://github.com/CNR-Engineering/PyTelTools/archive/master.zip).
67 |
68 | For the next steps, the source code is expected to be in a folder named `PyTelTools` (containing this `README.md` file).
69 |
70 | #### B.2) Install dependencies
71 | If you want to use a [virtual environment](https://virtualenv.pypa.io/en/stable/) do the following:
72 | ```bash
73 | $ virtualenv venv --python=python3
74 | $ source venv/bin/activate
75 | ```
76 | This step to create and use virtualenv is optional and can also be done through `make venv`.
77 |
78 | Packages installation can be done directly with **pip**:
79 | ```bash
80 | $ pip install -r requirements.txt
81 | ```
82 |
83 | ## Usage
84 |
85 | Depending on the installation procedure you followed, see the corresponding paragraph.
86 |
87 | ### A) Python module
88 |
89 | #### A.1) Inside a Python interpreter
90 | If PyTelTools is installed (the module is named `pyteltools`), it can be imported with:
91 | ```bash
92 | $ python
93 | >>> import pyteltools
94 | >>>
95 | ```
96 |
97 | Then all the methods and classes are accessible (such as `pyteltools.slf.Serafin`).
98 |
99 | It can be useful to define your own script adapted to your needs and still relying on PyTelTools core.
100 |
101 | #### A.2) Call scripts
102 |
103 | ```bash
104 | # Classic or Workflow interface (GUI)
105 | $ pyteltools_gui.py
106 | # Command line script (CLI) can be called directly from any directory
107 | $ slf_base.py -h
108 | ```
109 |
110 | Beware, that the Python executable is the one you configured (a Python3 which meets the requirements presented above).
111 | Otherwise you could try to specify complete path to the Python executable and the script.
112 |
113 | ### B) PyTelTools as an external program
114 |
115 | ### B.1) Configure PYTHONPATH
116 | Firstly, add the `PyTelTools` folder (which contains this `README.md` file) repository into the `PYTHONPATH`
117 | environment variable of your operating system.
118 |
119 | For Windows, you can find some help on the [official python documentation](https://docs.python.org/3.7/using/windows.html#excursus-setting-environment-variables).
120 |
121 | On Linux, you can easily do this through a command line like (or add directly this line in your `~/.bashrc`):
122 | ```bash
123 | $ export PYTHONPATH=$PYTHONPATH:/home/opentelemac/PyTelTools
124 | ```
125 |
126 | ### Open interface
127 | From the `PyTelTools` folder (containing this `README.md` file), simply run:
128 | ```bash
129 | $ python cli/pyteltools_gui.py
130 | # See help message to open a specific interface (classic or workflow)
131 | $ python cli/pyteltools_gui.py -h
132 | ```
133 |
134 | ### Use command line for workflow
135 | **Load** a workflow project file in the **GUI** (in mono tab):
136 | ```bash
137 | $ python pyteltools/workflow/workflow_gui.py -i path_to_workflow_project_file.txt
138 | ```
139 |
140 | **Load** and **run** a workflow project from the **command line**:
141 | ```bash
142 | $ python pyteltools/workflow/mono_gui.py -i path_to_workflow_project_file.txt
143 | $ python pyteltools/workflow/multi_gui.py -i path_to_workflow_project_file.txt
144 | ```
145 |
146 | The argument `-h` provides a **help** message for the corresponding script and specify its **usage**.
147 | Output **verbosity** can be increased (debug mode) with `-v` argument.
148 |
149 | ## Configure
150 | PyTelTools comes with a set of **default parameters** which determine its behavior and some assumptions and options.
151 |
152 | The recommended way to modify PyTelTools configuration is to write a **user configuration file** in **JSON** file format
153 | and refer to it in an environment variable named `PYTELTOOLS_SETTINGS`.
154 | The environment variable `PYTELTOOLS_SETTINGS` has to contain the **absolute path** to this file.
155 | For example, it could be something like:
156 | `/home/user/pyteltools/cfg.json` or `C:\Users\MyAccount\Documents\config_pyteltools.json`.
157 |
158 | The parameters defined in the user configuration file will be used instead of the default parameters.
159 |
160 | For example to change default Serafin language (for variable detection)
161 | and to increase verbosity (to debug mode), the JSON file should be:
162 | ```json
163 | {
164 | "LANG": "en",
165 | "LOGGING_LEVEL": 10
166 | }
167 | ```
168 |
169 | Here is a second example of a JSON configuration file with a more complex configuration:
170 | ```json
171 | {
172 | "DIGITS": 8,
173 | "NCSIZE": 6,
174 | "FIG_SIZE": [12, 8],
175 | "FMT_COORD": "{:.5f}",
176 | "SCENE_SIZE": [2000, 1200],
177 | "SERAFIN_EXT": [".slf", ".srf", ".res"],
178 | "WRITE_XYZ_HEADER": false,
179 | "X_AXIS_LABEL": "X coordinate (m)",
180 | "Y_AXIS_LABEL": "Y coordinate (m)",
181 | "DEFAULT_COLORS":
182 | {
183 | "Yellow": "#bcbd22",
184 | "Cyan": "#17becf",
185 | "Red": "#d62728"
186 | }
187 | }
188 | ```
189 |
190 | PyTelTools configuration relies on the Python package [simple-settings](https://pypi.python.org/pypi/simple-settings)
191 | and all the parameters are defined and described in [pyteltools/conf/default_settings.py](https://github.com/CNR-Engineering/PyTelTools/blob/master/pyteltools/conf/default_settings.py).
192 |
--------------------------------------------------------------------------------
/cli/comp_ADCP_t2d.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
3 | Read ADCP file and TELEMAC file to make a comparison
4 | """
5 |
6 | from pyproj import Proj, transform
7 | import csv
8 | import fiona
9 | from fiona.crs import from_epsg
10 | import os
11 | from shapely.geometry import mapping, LineString
12 |
13 | from adcploader import *
14 | import averaging
15 |
16 | from pyteltools.slf import Serafin
17 | from pyteltools.geom import Shapefile
18 | from pyteltools.slf.interpolation import MeshInterpolator
19 | from pyteltools.utils.cli_base import logger, PyTelToolsArgParse
20 | from pyteltools.geom.transformation import Transformation
21 |
22 |
23 | NODATA = '-32768'
24 |
25 |
def ADCP_comp(args):
    """
    Compare ADCP measurements with TELEMAC-2D results.

    Converts the ADCP GPS track to a shapefile, writes the averaged ADCP
    velocity profile to a CSV file and, if TELEMAC result files are given,
    interpolates the variable 'M' along the ADCP track and appends the
    values to a CSV file.

    @param args: parsed command-line arguments (see parser definition below)
    """
    # Read GPS coordinates (one "lat,lon" pair per line), skipping missing points
    x_mes = []
    y_mes = []
    cord_mes = open(args.inADCP_GPS).read().splitlines()
    for x_l in cord_mes:
        y, x = x_l.split(',')  # the input file stores latitude first
        if x == NODATA or y == NODATA:
            print("Warning: one point is missing")
        else:
            x_mes.append(x)
            y_mes.append(y)
    x_mes = [float(a) for a in x_mes]
    y_mes = [float(a) for a in y_mes]
    # Reproject from input CRS (default WGS-84) to output CRS (default Lambert 93)
    inProj = Proj("+init=EPSG:%i" % args.inEPSG)
    outProj = Proj("+init=EPSG:%i" % args.outEPSG)
    x_mes, y_mes = transform(inProj, outProj, x_mes, y_mes)

    # Write the reprojected ADCP track as a single LineString in an ESRI Shapefile
    SCHEMA = {'geometry': 'LineString',
              'properties': {'nom': 'str'}}
    with fiona.open(args.outADCP_GPS, 'w', 'ESRI Shapefile', SCHEMA, crs=from_epsg(args.outEPSG)) as out_shp:
        Ltest = LineString([(x_2, y_2) for x_2, y_2 in zip(x_mes, y_mes)])
        elem = {}
        elem['geometry'] = mapping(Ltest)
        elem['properties'] = {
            'nom': 'ADCP line'}
        out_shp.write(elem)

    # Average the raw ADCP profile and export it as a semicolon-separated CSV
    p_raw = RawProfileObj(args.inADCP)
    processing_settings = {'proj_method': 2}
    startingpoint = dict(start=Vector(0, 0))
    p0 = ProcessedProfileObj(p_raw, processing_settings, startingpoint)
    profile_averaged = averaging.get_averaged_profile(p0, cfg={'order': 15})
    header = 'X;Y;Uadcp;Vadcp;MagnitudeXY;Hadcp\n'
    writeAscii2D(profile_averaged, '{x};{y};{vx};{vy};{vmag};{depth}', args.outADCP, header=header)

    # Optionally interpolate variable M of each TELEMAC result along the ADCP track
    if args.inTELEMAC:
        with open(args.outT2DCSV, 'w', newline='') as csvfile:
            csvwriter = csv.writer(csvfile, delimiter=';')
            HEADER = ['folder', 'time_id', 'time', 'point_x', 'point_y', 'distance', 'value']
            csvwriter.writerow(HEADER)

            for slf_path in args.inTELEMAC:
                # The parent folder name identifies each result in the output CSV
                folder = os.path.basename(os.path.split(slf_path)[0])
                with Serafin.Read(slf_path, 'fr') as resin:
                    resin.read_header()
                    logger.info(resin.header.summary())
                    resin.get_time()
                    output_header = resin.header.copy()
                    # Shift mesh coordinates if requested (pure translation)
                    if args.shift:
                        output_header.transform_mesh([Transformation(0, 1, 1, args.shift[0], args.shift[1], 0)])
                    mesh = MeshInterpolator(output_header, True)
                    lines = []
                    for poly in Shapefile.get_lines(args.outADCP_GPS, shape_type=3):
                        lines.append(poly)
                    nb_nonempty, indices_nonempty, line_interpolators, line_interpolators_internal = \
                        mesh.get_line_interpolators(lines)
                    res = mesh.interpolate_along_lines(resin, 'M', list(range(len(resin.time))), indices_nonempty,
                                                      line_interpolators, '{:.6e}')
                    csvwriter.writerows([[folder] + x[2] for x in res])
85 |
86 |
# Command-line interface (the optional '--shift' argument is added by PyTelToolsArgParse)
parser = PyTelToolsArgParse(description=__doc__, add_args=['shift'])
parser.add_argument("inADCP_GPS", help="GPS ADCP (_gps_ASC.txt) input filename")
parser.add_argument("inADCP", help="ADCP (_ASC.txt) input filename")
parser.add_argument("--inTELEMAC", help="Telemac-2D result files with M (r2d_last.slf)", nargs='*')
parser.add_argument("outADCP_GPS", help="GPS ADCP (.shp) output filename")
parser.add_argument("outADCP", help="ADCP (.csv) output filename")
parser.add_argument("outT2DCSV", help="CSV output filename")
parser.add_argument("--inEPSG", help="input EPSG", type=int, default=4326)  # WGS-84
parser.add_argument("--outEPSG", help="output EPSG", type=int, default=2154)  # Lambert 93 (France)


if __name__ == "__main__":
    args = parser.parse_args()
    ADCP_comp(args)
101 |
--------------------------------------------------------------------------------
/cli/landxml_to_slf.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
3 | Converts LandXML surfaces to 2D Selafin file
4 | In case of multiple surfaces, they should have the same mesh (coordinates and connectivity table)
5 | A single variable is written and its name is taken from the first surface name attribute
6 | """
7 | import numpy as np
8 | import sys
9 | import xml.etree.ElementTree as ET
10 |
11 | from pyteltools.slf import Serafin
12 | from pyteltools.utils.cli_base import PyTelToolsArgParse
13 |
14 |
def landxml_to_slf(args):
    """
    Convert LandXML surfaces to a 2D Serafin file.

    All surfaces must share the same mesh (node coordinates and connectivity
    table); each surface becomes one temporal frame of a single variable whose
    name is taken from the first surface name attribute.

    @param args: parsed command-line arguments (in_xml, out_slf, lang, force)
    @raise RuntimeError: if surfaces do not have strictly identical meshes
    """
    root = ET.parse(args.in_xml).getroot()
    PREFIX = '{http://www.landxml.org/schema/LandXML-1.2}'  # LandXML-1.2 XML namespace

    nodes = []  # list of (x, y) coordinates
    ikle = []  # list of triangle triplet (1-indexed)
    output_header = None
    with Serafin.Write(args.out_slf, args.lang, overwrite=args.force) as resout:
        for i, surface in enumerate(root.find(PREFIX + 'Surfaces')):
            surface_name = surface.get('name')
            # Variable name is the first word of the surface name
            if ' ' in surface_name:
                varname = surface_name.split(' ')[0]
            else:
                varname = surface_name
            # time_duration = surface_name.split(' ')[-1]
            tin = surface.find(PREFIX + 'Definition')
            values = []
            for j, pnts in enumerate(tin.find(PREFIX + 'Pnts')):
                assert int(pnts.get('id')) == j + 1
                y, x, z = (float(n) for n in pnts.text.split())  # LandXML stores northing first
                values.append(z)
                if i == 0:
                    nodes.append((x, y))
                else:
                    if (x, y) != nodes[j]:
                        raise RuntimeError("Coordinates are not strictly identical")

            for j, face in enumerate(tin.find(PREFIX + 'Faces')):
                if 'id' in face.attrib:
                    assert int(face.get('id')) == j + 1
                n1, n2, n3 = (int(n) for n in face.text.split())
                if i == 0:
                    ikle.append((n1, n2, n3))
                else:
                    if (n1, n2, n3) != ikle[j]:
                        raise RuntimeError("Mesh is not strictly identical")

            if i == 0:
                output_header = Serafin.SerafinHeader(title='Converted from LandXML (written by PyTelTools)')
                # Node coordinates are floating-point values: casting them to
                # integers (previous dtype=np.int64) silently truncated the
                # decimal part of every coordinate
                output_header.from_triangulation(np.array(nodes, dtype=np.float64),
                                                 np.array(ikle, dtype=np.int64))
                output_header.add_variable_str(varname, varname, '')
                resout.write_header(output_header)

            time = i * 3600.0  # FIXME: should convert time_duration to float
            resout.write_entire_frame(output_header, time, np.expand_dims(np.array(values), axis=0))
61 |
62 |
# Command-line interface: out_slf and the general options (force, verbose)
# are standard arguments provided by PyTelToolsArgParse
parser = PyTelToolsArgParse(description=__doc__)
parser.add_argument('in_xml', help='input LandXML file (with .xml extension)')
parser.add_known_argument('out_slf')
parser.add_group_general(['force', 'verbose'])


if __name__ == '__main__':
    args = parser.parse_args()

    try:
        landxml_to_slf(args)
    except (Serafin.SerafinRequestError, Serafin.SerafinValidationError):
        # Message is already reported by slf logger
        sys.exit(1)
77 |
--------------------------------------------------------------------------------
/cli/plot_comp_ADCP_t2d.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | """
4 | Plot ADCP and TELEMAC velocity and compute RMSE
5 | """
6 |
7 | import pandas as pd
8 | import numpy as np
9 | import matplotlib.pyplot as plt
10 | import glob
11 | import os
12 |
13 | from pyteltools.utils.cli_base import PyTelToolsArgParse
14 |
15 |
def plot_comp_ADCP_t2d(args):
    """
    Plot averaged ADCP and TELEMAC velocity magnitudes along the ADCP track
    and compute the RMSE between ADCP and each TELEMAC result.

    @param args: parsed command-line arguments (inADCP, inT2DCSV, Numberdiv, outGraph)
    """
    ADCP = pd.read_csv(args.inADCP, sep=';', header=0)

    # Distance between consecutive ADCP points (the first one is set to 0)
    ADCP['Distance_pts'] =np.nan

    for y in range(len(ADCP["X"])):
        if(y == 0):
            ADCP["Distance_pts"][0] = 0
        else:
            ADCP["Distance_pts"][y] = np.sqrt( ((ADCP['X'][y]-ADCP['X'][y-1])**2) + ((ADCP['Y'][y]-ADCP['Y'][y-1])**2))

    # Curvilinear distance along the track, then a regular grid of Numberdiv segments
    ADCP['Distance'] = ADCP['Distance_pts'].cumsum(axis = 0)
    grid = np.arange(0,float(ADCP["Distance"][-1:]), int(float(ADCP["Distance"][-1:]))/args.Numberdiv)
    grid = np.append(grid,float(ADCP["Distance"][-1:]))
    file_df_mean = pd.DataFrame()
    file_df_mean["Distance"] = grid
    Mean_vel = []

    # Two stacked subplots: velocity profiles on top, RMSE bars below
    fig, axs = plt.subplots(2)

    # Distance-weighted average of the ADCP velocity magnitude on each grid segment
    for i in range(len(grid)):
        if(i == 0):
            Mean_vel.append(ADCP['MagnitudeXY'][0])
        else:
            Magnitude = []
            Distance = []
            Distance_pts = []
            Distance_pts_cum = []
            mean = []
            # Segment endpoints are interpolated; interior ADCP samples are kept as-is
            Distance.append(grid[i -1])
            Magnitude.append(np.interp(grid[i - 1], ADCP['Distance'], ADCP['MagnitudeXY']))

            Distance.extend(ADCP['Distance'][((np.where(ADCP['Distance'] >= grid[i - 1]))[0][0]): (
                (np.where(ADCP['Distance'] >= grid[i]))[0][0] - 1)])
            Magnitude.extend(ADCP['MagnitudeXY'][((np.where(ADCP['Distance'] >= grid[i - 1]))[0][0]): (
                (np.where(ADCP['Distance'] >= grid[i]))[0][0] - 1)])

            Distance.append(grid[i])
            Magnitude.append(np.interp(grid[i], ADCP['Distance'], ADCP['MagnitudeXY']))
            Distance_pts.append(0)
            Distance_pts.extend( [Distance[x+1] - Distance[x] for x in range(0, len(Distance)-1)] )
            Distance_pts_cum.extend(np.cumsum(Distance_pts))
            # Weighted mean: sum(weight * magnitude) / total segment length
            mean.extend([a*b for a,b in zip(Distance_pts,Magnitude)])
            Mean_vel.append(sum(mean) / Distance_pts_cum[-1])

    file_df_mean["MagnitudeXY"] = Mean_vel
    axs[0].plot(file_df_mean['Distance'], file_df_mean['MagnitudeXY'], label=("Mean_ADCP"))
    axs[0].scatter(ADCP['Distance'], ADCP['MagnitudeXY'], label="ADCP")

    # TELEMAC results: one curve and one RMSE value per source folder
    data_t2d = pd.read_csv(args.inT2DCSV, sep=';')
    col_labels = list(np.unique(data_t2d['folder']))
    RMSE = []
    for col in col_labels:
        file_df = data_t2d[data_t2d['folder'] == col]
        #axs[0].scatter(file_df['Distance'],file_df['value'],label = os.path.basename(os.path.dirname(dirfor)))
        file_df_mean_TEL = pd.DataFrame()
        file_df_mean_TEL["distance"] = grid
        Mean_vel = []
        # Same averaging on the same grid (endpoints only, interpolated values)
        for i in range(len(grid)):
            Magnitude = []
            Distance = []
            Distance_pts = []
            Distance_pts_cum = []
            mean = []
            Distance.append(grid[i-1])
            Magnitude.append(np.interp(grid[i-1], file_df['distance'], file_df['value']))

            #Distance.extend(file_df['Distance'][((np.where(file_df['Distance'] >= grid[i-1]))[0][0]): (
                #(np.where(file_df['Distance'] >= grid[i]))[0][0] - 1)])
            #Magnitude.extend(file_df['value'][((np.where(file_df['Distance'] >= grid[i-1]))[0][0]): (
            #    (np.where(file_df['Distance'] >= grid[i]))[0][0] - 1)])
            Distance.append(grid[i])
            Magnitude.append(np.interp(grid[i], file_df['distance'], file_df['value']))
            Distance_pts.append(0)
            Distance_pts.extend([Distance[x + 1] - Distance[x] for x in range(0, len(Distance) - 1)])
            Distance_pts_cum.extend(np.cumsum(Distance_pts))
            mean.extend([a * b for a, b in zip(Distance_pts, Magnitude)])
            Mean_vel.append(sum(mean) / Distance_pts_cum[-1])
        file_df_mean_TEL["value"] = Mean_vel
        axs[0].plot(file_df_mean_TEL['distance'], file_df_mean_TEL['value'], label=("Mean"+col))
        # RMSE between averaged ADCP and averaged TELEMAC values on the common grid
        RMSE.append(np.sqrt(((file_df_mean['MagnitudeXY'] - file_df_mean_TEL['value'])**2).mean()))

    axs[1].bar(col_labels,RMSE)
    axs[1].set_ylabel('Root Mean Square error')
    axs[0].set_xlabel('Distance [m]')
    axs[0].set_ylabel('Vitesse [m/s]')
    # Shrink the first subplot to make room for the legend on its right
    box = axs[0].get_position()
    axs[0].set_position([box.x0, box.y0, box.width * 0.8, box.height])
    axs[0].legend(loc='center left', bbox_to_anchor=(1, 0.5))
    fig.set_size_inches(18.5, 10.5)
    plt.savefig(args.outGraph)
    plt.show()
110 |
111 |
# Command-line interface definition
parser = PyTelToolsArgParse(description=__doc__)
parser.add_argument("inADCP", help="ADCP (.csv) input filename")
parser.add_argument("inT2DCSV", help="List of folder containing (.csv) files")
# type=int is required: without it a value given on the command line stays a
# string and breaks the arithmetic building the averaging grid
parser.add_argument("--Numberdiv", help="Segments number of the line to compute average velocity on a normal grid",
                    type=int, default=10)
parser.add_argument("outGraph", help="Filename of plot (.png)")


if __name__ == "__main__":
    args = parser.parse_args()
    plot_comp_ADCP_t2d(args)
123 |
--------------------------------------------------------------------------------
/cli/pyteltools_gui.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
3 | Run PyTelTools GUI: classic or workflow interface
4 | """
5 |
6 | from PyQt5.QtWidgets import QApplication
7 | import sys
8 |
9 | from pyteltools.gui.classic_gui import exception_hook, ClassicMainWindow
10 | from pyteltools.main_interface import run_gui_app
11 | from pyteltools.utils.cli_base import PyTelToolsArgParse
12 | from pyteltools.workflow.workflow_gui import WorkflowWelcomeWindow
13 |
14 |
def exec_gui(window):
    """
    Instantiate and run a simple Qt GUI application.

    @param window: window class to instantiate and display
    """
    app = QApplication(sys.argv)
    main_window = window()  # avoid shadowing the class with the instance
    main_window.show()
    app.exec_()
24 |
25 |
# Command-line interface definition
parser = PyTelToolsArgParse(description=__doc__)
parser.add_argument('-c', '--interface', help='select and open corresponding GUI', choices=('classic', 'workflow'))


if __name__ == '__main__':
    args = parser.parse_args()

    # suppress explicitly traceback silencing
    sys._excepthook = sys.excepthook
    sys.excepthook = exception_hook

    # Without -c/--interface, open the launcher window letting the user choose
    if args.interface is None:
        run_gui_app()
    else:
        if args.interface == 'classic':
            exec_gui(ClassicMainWindow)
        elif args.interface == 'workflow':
            exec_gui(WorkflowWelcomeWindow)
44 |
--------------------------------------------------------------------------------
/cli/slf_3d_to_2d.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
3 | Perform a vertical operation on a 3D results file to get 2D
4 | """
5 | import numpy as np
6 | import sys
7 | from tqdm import tqdm
8 |
9 | from pyteltools.geom.transformation import Transformation
10 | import pyteltools.slf.misc as operations
11 | from pyteltools.slf import Serafin
12 | from pyteltools.utils.cli_base import logger, PyTelToolsArgParse
13 |
14 |
def slf_3d_to_2d(args):
    """
    Perform a vertical operation on a 3D Serafin file to get a 2D Serafin file.

    Either a single horizontal plane is extracted (``args.layer``, 1-indexed) or
    a vertical aggregation (max/min/mean, ``args.aggregation``) is computed over
    all planes, frame by frame.

    @param args: parsed command-line arguments
    """
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        # Input sanity checks: must be a 3D file containing the elevation Z
        if resin.header.is_2d:
            logger.critical('The input file is not 3D.')
            sys.exit(1)
        if 'Z' not in resin.header.var_IDs:
            logger.critical('The elevation variable Z is not found in the Serafin file.')
            sys.exit(1)
        if args.layer is not None:
            upper_plane = resin.header.nb_planes
            if args.layer < 1 or args.layer > upper_plane:
                logger.critical('Layer has to be in [1, %i]' % upper_plane)
                sys.exit(1)

        output_header = resin.header.copy_as_2d()
        # Shift mesh coordinates if necessary
        if args.shift:
            output_header.transform_mesh([Transformation(0, 1, 1, args.shift[0], args.shift[1], 0)])

        # Toggle output file endianness if necessary
        if args.toggle_endianness:
            output_header.toggle_endianness()

        # Convert to single precision
        if args.to_single_precision:
            if resin.header.is_double_precision():
                output_header.to_single_precision()
            else:
                # Fix: `Logger.warn` is a deprecated alias of `Logger.warning`
                logger.warning('Input file is already single precision! Argument `--to_single_precision` is ignored')

        if args.aggregation is not None:
            if args.aggregation == 'max':
                operation_type = operations.MAX
            elif args.aggregation == 'min':
                operation_type = operations.MIN
            else:  # args.aggregation == 'mean'
                operation_type = operations.MEAN
            selected_vars = [var for var in output_header.iter_on_all_variables()]
            vertical_calculator = operations.VerticalMaxMinMeanCalculator(operation_type, resin, output_header,
                                                                          selected_vars, args.vars)
            output_header.set_variables(vertical_calculator.get_variables())  # sort variables

        # Add some elevation variables
        for var_ID in args.vars:
            output_header.add_variable_from_ID(var_ID)

        with Serafin.Write(args.out_slf, args.lang, overwrite=args.force) as resout:
            resout.write_header(output_header)

            vars_2d = np.empty((output_header.nb_var, output_header.nb_nodes_2d), dtype=output_header.np_float_type)
            for time_index, time in enumerate(tqdm(resin.time, unit='frame')):
                if args.aggregation is not None:
                    vars_2d = vertical_calculator.max_min_mean_in_frame(time_index)
                else:
                    # Extract the requested plane (args.layer is 1-indexed)
                    for i, var in enumerate(output_header.var_IDs):
                        vars_2d[i, :] = resin.read_var_in_frame_as_3d(time_index, var)[args.layer - 1, :]
                resout.write_entire_frame(output_header, time, vars_2d)
77 |
# Command-line definition: `--layer` and `--aggregation` are two mutually
# exclusive ways of reducing the vertical dimension
parser = PyTelToolsArgParse(description=__doc__, add_args=['in_slf', 'out_slf', 'shift'])
group = parser.add_mutually_exclusive_group(required=True)
# NOTE(review): `metavar=1` is an int; a string placeholder (e.g. 'LAYER') is conventional
group.add_argument('--layer', help='layer number (1=lower, nb_planes=upper)', type=int, metavar=1)
group.add_argument('--aggregation', help='operation over the vertical', choices=('max', 'min', 'mean'))
parser.add_argument('--vars', nargs='+', help='variable(s) deduced from Z', default=[], choices=('B', 'S', 'H'))
parser.add_group_general(['force', 'verbose'])


if __name__ == '__main__':
    args = parser.parse_args()

    try:
        slf_3d_to_2d(args)
    except (Serafin.SerafinRequestError, Serafin.SerafinValidationError):
        # Message is already reported by slf logger
        sys.exit(1)
94 |
--------------------------------------------------------------------------------
/cli/slf_base.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
3 | Performs multiple operations on a Serafin file:
4 | - compute and/or remove variables
5 | - coordinates mesh transformations
6 | - frames selection
7 | - shift time series
8 | """
9 |
10 | from pyproj import Transformer
11 | import sys
12 | from tqdm import tqdm
13 |
14 | from pyteltools.geom.transformation import Transformation
15 | from pyteltools.slf import Serafin
16 | from pyteltools.slf.variables import do_calculations_in_frame, get_necessary_equations
17 | from pyteltools.slf.variable.variables_2d import FRICTION_LAWS, get_US_equation, STRICKLER_ID
18 | from pyteltools.utils.cli_base import logger, PyTelToolsArgParse
19 |
20 |
def slf_base(args):
    """
    Read the input Serafin file, apply the requested mesh transformations,
    variable additions/deletions and temporal operations, then write the
    result in the output Serafin file.
    @param args <argparse.Namespace>: CLI arguments (see parser below)
    """
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        output_header = resin.header.copy()
        # Shift and transform mesh coordinates (change EPSG) if necessary
        if args.shift:
            output_header.transform_mesh([Transformation(0, 1, 1, args.shift[0], args.shift[1], 0)])
        if args.epsg_mesh_transformation:
            old_x, old_y = output_header.x, output_header.y
            # NOTE(review): `always_xy` is not set, so the axis order follows each
            # CRS definition (pyproj default) — confirm this is intended
            new_x, new_y = Transformer.from_crs("EPSG:%i" % args.epsg_mesh_transformation[0],
                                                "EPSG:%i" % args.epsg_mesh_transformation[1]).transform(old_x, old_y)
            output_header.x_stored = new_x
            output_header.y_stored = new_y

        # Set mesh origin coordinates
        if args.set_mesh_origin:
            output_header.set_mesh_origin(args.set_mesh_origin[0], args.set_mesh_origin[1])
        else:
            output_header.set_mesh_origin(0, 0)

        # Toggle output file endianness if necessary
        if args.toggle_endianness:
            output_header.toggle_endianness()

        # Convert to single precision
        if args.to_single_precision:
            if resin.header.is_double_precision():
                output_header.to_single_precision()
            else:
                # `Logger.warn` is a deprecated alias, use `warning`
                logger.warning('Input file is already single precision! Argument `--to_single_precision` is ignored')

        # Remove variables if necessary
        if args.var2del:
            output_header.empty_variables()
            for var_ID, var_name, var_unit in zip(resin.header.var_IDs, resin.header.var_names, resin.header.var_units):
                if var_ID not in args.var2del:
                    output_header.add_variable(var_ID, var_name, var_unit)

        # Add new derived variables
        if args.var2add is not None:
            for var_ID in args.var2add:
                if var_ID in output_header.var_IDs:
                    logger.warning('Variable %s is already present (or asked)' % var_ID)
                else:
                    output_header.add_variable_from_ID(var_ID)

        us_equation = get_US_equation(args.friction_law)
        necessary_equations = get_necessary_equations(resin.header.var_IDs, output_header.var_IDs,
                                                      is_2d=resin.header.is_2d, us_equation=us_equation)

        with Serafin.Write(args.out_slf, args.lang, overwrite=args.force) as resout:
            resout.write_header(output_header)

            for time_index, time in tqdm(resin.subset_time(args.start, args.end, args.ech), unit='frame'):
                values = do_calculations_in_frame(necessary_equations, resin, time_index, output_header.var_IDs,
                                                  output_header.np_float_type, is_2d=output_header.is_2d,
                                                  us_equation=us_equation, ori_values={})
                # Frame times may be shifted by a constant offset
                resout.write_entire_frame(output_header, time + args.shift_time, values)
82 |
83 |
# Command-line definition
parser = PyTelToolsArgParse(description=__doc__, add_args=['in_slf', 'out_slf', 'shift'])

parser.add_argument('--set_mesh_origin', type=int, nargs=2, help='Mesh origin coordinates (x, y)', metavar=('X', 'Y'))
parser.add_argument('--epsg_mesh_transformation', type=int, nargs=2,
                    help='Mesh transformation from IN_EPSG to OUT_EPSG (with pyproj)', metavar=('IN_EPSG', 'OUT_EPSG'))

group_var = parser.add_argument_group('Serafin variables (optional)',
    'See variables abbrevations on https://github.com/CNR-Engineering/PyTelTools/wiki/Notations-of-variables')
group_var.add_argument('--var2del', nargs='+', help='variable(s) to delete', default=[], metavar=('VA', 'VB'))
group_var.add_argument('--var2add', nargs='+', help='variable(s) to add', default=[], metavar=('VA', 'VB'))
help_friction_laws = ', '.join(['%i=%s' % (i, law) for i, law in enumerate(FRICTION_LAWS)])
group_var.add_argument('--friction_law', type=int, help='friction law identifier: %s' % help_friction_laws,
                       choices=range(len(FRICTION_LAWS)), default=STRICKLER_ID)

group_temp = parser.add_argument_group('Temporal operations (optional)')
group_temp.add_argument('--ech', type=int, help='frequency sampling of input', default=1)
group_temp.add_argument('--start', type=float, help='minimum time (in seconds)', default=-float('inf'))
group_temp.add_argument('--end', type=float, help='maximum time (in seconds)', default=float('inf'))
group_temp.add_argument('--shift_time', type=float, help='shift in time (in seconds)', default=0)

parser.add_group_general(['force', 'verbose'])


if __name__ == '__main__':
    args = parser.parse_args()

    try:
        slf_base(args)
    except (Serafin.SerafinRequestError, Serafin.SerafinValidationError):
        # Message is already reported by slf logger
        sys.exit(1)
115 |
--------------------------------------------------------------------------------
/cli/slf_bottom_friction.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
3 | Compute bottom friction force on multiple zones
4 |
5 | Outputs:
6 | * write a SerafinOutput variables : 'W', 'US', 'TAU'
7 | * write the values in stdout
8 | """
9 | import csv
10 | import numpy as np
11 | from shapefile import ShapefileException
12 | from shapely.geometry import Point
13 | import sys
14 | from tqdm import tqdm
15 |
16 | from pyteltools.geom import Shapefile
17 | from pyteltools.slf import Serafin
18 | from pyteltools.slf.variables import do_calculations_in_frame, get_necessary_equations
19 | from pyteltools.slf.variable.variables_2d import FRICTION_LAWS, get_US_equation, STRICKLER_ID
20 | from pyteltools.slf.volume import VolumeCalculator
21 | from pyteltools.utils.cli_base import logger, PyTelToolsArgParse
22 |
23 |
24 | strickler_equation = get_US_equation(STRICKLER_ID)
25 |
26 |
def slf_bottom_friction(args):
    """
    Compute the bottom friction force within each polygon for every frame:
    write a Serafin file with variables W, US and TAU, and a CSV file with
    the TAU values integrated over each polygon.
    @param args <argparse.Namespace>: CLI arguments (see parser below)
    """
    # Check argument consistency
    if args.in_strickler_zones is not None or args.in_strickler_attr is not None:
        if args.in_strickler_zones is None or args.in_strickler_attr is None:
            logger.critical('Both arguments `--in_strickler_zones` and `--in_strickler_attr` have to be defined.')
            sys.exit(2)

    # Read polygons to compute volume
    if not args.in_polygons.endswith('.shp'):
        logger.critical('File "%s" is not a shp file.' % args.in_polygons)
        sys.exit(3)
    polygons = []
    try:
        for polygon in Shapefile.get_polygons(args.in_polygons):
            polygons.append(polygon)
    except ShapefileException as e:
        logger.error(e)
        sys.exit(3)

    if not polygons:
        logger.error('The file does not contain any polygon.')
        sys.exit(1)
    logger.debug('The file contains {} polygon{}.'.format(len(polygons), 's' if len(polygons) > 1 else ''))

    names = ['Polygon %d' % (i + 1) for i in range(len(polygons))]

    varIDs = ['US', 'TAU']
    out_varIDs = ['W'] + varIDs
    pos_TAU = out_varIDs.index('TAU')
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        if not resin.header.is_2d:
            logger.critical('The file has to be a 2D Serafin!')
            sys.exit(3)

        # Copy the list: 'W' may be appended below and the header's own
        # variable list must not be mutated
        in_varIDs = list(resin.header.var_IDs)

        # Compute Strickler values if necessary
        ori_values = {}
        if args.in_strickler_zones is not None:
            if not args.in_strickler_zones.endswith('.shp'):
                logger.critical('File "%s" is not a shp file.' % args.in_strickler_zones)
                sys.exit(3)

            attributes = Shapefile.get_numeric_attribute_names(args.in_strickler_zones)
            try:
                index_attr = [attr for _, attr in attributes].index(args.in_strickler_attr)
            except ValueError:
                logger.critical('Attribute "%s" is not found.' % args.in_strickler_attr)
                sys.exit(1)

            strickler_zones = []
            try:
                for zone in Shapefile.get_polygons(args.in_strickler_zones):
                    strickler_zones.append(zone)
            except ShapefileException as e:
                logger.error(e)
                sys.exit(3)

            if not strickler_zones:
                logger.error('The file does not contain any friction zone.')
                sys.exit(1)

            logger.debug('Recomputing friction coefficient values from zones')
            friction_coeff = np.full(resin.header.nb_nodes_2d, 0.0)  # default value for nodes not included in any zone
            # Single progress bar over the node coordinates (was two nested tqdm wrappers)
            for i, (x, y) in enumerate(tqdm(zip(resin.header.x, resin.header.y), total=resin.header.nb_nodes_2d)):
                point = Point(x, y)
                for zone in strickler_zones:
                    if zone.contains(point):
                        friction_coeff[i] = zone.attributes()[index_attr]
                        # Was a bare `exit` (no-op expression): stop at the first matching zone
                        break
            in_varIDs.append('W')
            ori_values['W'] = friction_coeff
        else:
            # Was `resin.header.varIDs` (AttributeError): the attribute is `var_IDs`
            if 'W' not in resin.header.var_IDs:
                logger.critical('The variable W is missing.')
                sys.exit(1)

        us_equation = None
        if args.friction_law:
            us_equation = get_US_equation(args.friction_law)

        resin.get_time()
        necessary_equations = get_necessary_equations(in_varIDs, out_varIDs, is_2d=True, us_equation=us_equation)

        calculator = VolumeCalculator(VolumeCalculator.NET, 'TAU', None, resin, names, polygons, 1)
        calculator.construct_triangles(tqdm)
        calculator.construct_weights(tqdm)

        output_header = resin.header.copy()
        output_header.empty_variables()
        for var_ID in out_varIDs:
            output_header.add_variable_from_ID(var_ID)

        with Serafin.Write(args.out_slf, args.lang, args.force) as resout:
            resout.write_header(output_header)

            mode = 'w' if args.force else 'x'
            with open(args.out_csv, mode, newline='') as csvfile:
                csvwriter = csv.writer(csvfile, delimiter=args.sep)
                csvwriter.writerow(['time'] + names)

                for time_index, time in enumerate(tqdm(resin.time)):
                    # NOTE(review): `us_equation` (from `--friction_law`) is passed to
                    # get_necessary_equations above, but `strickler_equation` is used
                    # here — confirm this asymmetry is intended
                    values = do_calculations_in_frame(necessary_equations, resin, time_index, out_varIDs,
                                                      resin.header.np_float_type, is_2d=True,
                                                      us_equation=strickler_equation, ori_values=ori_values)
                    resout.write_entire_frame(output_header, time, values)

                    row = [time]
                    for j in range(len(calculator.polygons)):
                        weight = calculator.weights[j]
                        volume = calculator.volume_in_frame_in_polygon(weight, values[pos_TAU], calculator.polygons[j])
                        row.append(volume)
                    csvwriter.writerow(row)
141 |
142 |
# Command-line definition
parser = PyTelToolsArgParse(description=__doc__, add_args=['in_slf', 'out_slf', 'out_csv'])
parser.add_argument('in_polygons', help='polygons file (*.shp)')

parser.add_argument('--in_strickler_zones', help='strickler zones file (*.shp)')
# NOTE(review): help text says `--in_stricker_zone` (typo for `--in_strickler_zones`)
parser.add_argument('--in_strickler_attr', help='attribute to read strickler values `--in_stricker_zone`')
help_friction_laws = ', '.join(['%i=%s' % (i, law) for i, law in enumerate(FRICTION_LAWS)])
parser.add_argument('--friction_law', type=int, help='friction law identifier: %s' % help_friction_laws,
                    choices=range(len(FRICTION_LAWS)), default=STRICKLER_ID)
parser.add_group_general(['force', 'verbose'])


if __name__ == '__main__':
    args = parser.parse_args()

    try:
        slf_bottom_friction(args)
    except (Serafin.SerafinRequestError, Serafin.SerafinValidationError):
        # Message is already reported by slf logger
        sys.exit(1)
    except FileNotFoundError as e:
        logger.critical('Input file %s not found.' % e.filename)
        sys.exit(3)
    except FileExistsError as e:
        logger.critical('Output file %s already exists. Remove it or add `--force` argument' % e.filename)
        sys.exit(3)
168 |
--------------------------------------------------------------------------------
/cli/slf_bottom_zones.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | """
3 | @brief:
4 | Modify BOTTOM elevation from zones defined by a set of polylines.
5 | Order of files is relevant and determines the priority in case of overlapping: first has the highest priority.
6 |
7 | @features:
8 | * interpolate intermediate point if bottom is below a threshold
9 |
10 | @prerequisites:
11 | * file is a mesh 2D
12 | * variable 'B' (BOTTOM) is required
13 | """
14 | import numpy as np
15 | import sys
16 | import shapely.geometry as geo
17 |
18 | import pyteltools.geom.BlueKenue as bk
19 | import pyteltools.geom.Shapefile as shp
20 | from pyteltools.geom.geometry import Polyline
21 | from pyteltools.geom.transformation import Transformation
22 | from pyteltools.slf import Serafin
23 | from pyteltools.utils.cli_base import logger, PyTelToolsArgParse
24 |
25 |
def set(value, _):
    """Return `value`, ignoring the previous one; used as the 'set' operator
    alongside builtin `min`/`max` in Zone.
    NOTE(review): this intentionally shadows the builtin `set` in this module."""
    return value
28 |
29 |
class Zone:
    """Zone delimited by two 3D polylines.

    Bottom elevations inside the zone are linearly interpolated between the two
    boundary polylines and combined with the previous bottom value through an
    operator ('min', 'max' or 'set').
    """

    def __init__(self, polyline_1, polyline_2, operator_str):
        # Shapely LineString representations of the two boundary polylines
        self.polyline_1 = polyline_1.polyline()
        self.polyline_2 = polyline_2.polyline()
        if operator_str == 'min':
            self.operator = min
        elif operator_str == 'max':
            self.operator = max
        elif operator_str == 'set':
            self.operator = set  # module-level `set` function above, not the builtin
        else:
            raise NotImplementedError
        self.polygon = None
        self._build_polygon()

    def _build_polygon(self):
        # Close the outline: first polyline followed by the second one reversed
        outline_pts = list(self.polyline_1.coords) + list(reversed(self.polyline_2.coords))
        self.polygon = geo.Polygon(outline_pts)
        if not self.polygon.is_simple:  # FIXME: it should be "if not self.polygon.is_valid"
            print("Distance ligne = %s" % self.polyline_1.distance(self.polyline_2))
            print("Distance début = %s" % self.polyline_1.interpolate(0, normalized=True).distance(
                self.polyline_2.interpolate(0, normalized=True)))
            print("Distance fin = %s" % self.polyline_1.interpolate(1, normalized=True).distance(
                self.polyline_2.interpolate(1, normalized=True)))
            # Dump the invalid outline for inspection in BlueKenue
            with bk.Write('debug.i3s') as out_i3s:
                out_i3s.write_header()
                out_i3s.write_lines([Polyline(self.polygon.exterior.coords)], [0.0])
            sys.exit("ERROR: Zone is invalid. Check polyline direction consistancy!")

    def contains(self, point):
        """Return True if `point` lies strictly inside the zone polygon."""
        return self.polygon.contains(point)

    def interpolate(self, point):
        """Interpolate z at `point`: inverse-distance weighting between the z
        values of the projections of `point` on the two boundary polylines."""
        a = self.polyline_1
        b = self.polyline_2
        za = a.interpolate(a.project(point)).z
        zb = b.interpolate(b.project(point)).z
        da = point.distance(a)
        db = point.distance(b)
        return (db*za + da*zb)/(da + db)

    def get_closest_point(self, point):
        """Return the projection of `point` on the zone outline."""
        outline = self.polygon.exterior
        return outline.interpolate(outline.project(point))

    @staticmethod
    def get_zones_from_i3s_file(shp_name, threshold, operator_str):
        """Build zones from consecutive pairs of polylines read in `shp_name`.
        NOTE(review): despite its name this reads a shapefile, and it relies on
        the module-level CLI namespace `args` — confirm before reusing it."""
        polylines = []

        attributes = shp.get_numeric_attribute_names(shp_name)
        if args.attr_to_shift_z is not None:
            try:
                index_attr = [attr for _, attr in attributes].index(args.attr_to_shift_z)
            except ValueError:
                logger.critical('Attribute "%s" is not found.' % args.attr_to_shift_z)
                sys.exit(1)

        for polyline in shp.get_open_polylines(shp_name):
            if not polyline.polyline().is_valid:
                sys.exit("ERROR: polyline is not valid (probably because it intersects itself)!")

            # Shift z (if requested)
            if args.attr_to_shift_z is not None:
                dz = polyline.attributes()[index_attr]
                print(dz)

                polyline = polyline.apply_transformations([Transformation(0.0, 1.0, 1.0, 0.0, 0.0, dz)])

            # Linear interpolation along the line for values below the threshold
            if threshold is not None:
                np_coord = np.array(polyline.coords())
                # Cumulative curvilinear abscissa along the polyline
                Xt = np.sqrt(np.power(np.ediff1d(np_coord[:, 0], to_begin=0.), 2) +
                             np.power(np.ediff1d(np_coord[:, 1], to_begin=0.), 2))
                Xt = Xt.cumsum()
                ref_rows = np_coord[:, 2] > args.threshold
                np_coord[:, 2] = np.interp(Xt, Xt[ref_rows], np_coord[ref_rows, 2])
                # NOTE(review): a shapely LineString is stored here while other
                # entries are project polylines — Zone.__init__ calls .polyline();
                # confirm both types are handled upstream
                polyline = geo.LineString(np_coord)
            polylines.append(polyline)

        zones = []
        # Consecutive pairs: (line 0, line 1), (line 1, line 2), ...
        for prev_line, next_line in zip(polylines[:-1], polylines[1:]):
            zones.append(Zone(prev_line, next_line, operator_str))
        return zones
113 |
114 |
def bottom(args):
    """
    Overwrite bottom elevation B inside zones built from consecutive pairs of
    3D polylines, then write the modified Serafin file.
    @param args <argparse.Namespace>: CLI arguments (see parser in __main__)
    """
    if args.operations is None:
        # Default: apply the 'set' operation for every input polyline file
        args.operations = ['set'] * len(args.in_i3s_paths)
    if len(args.in_i3s_paths) != len(args.operations):
        raise RuntimeError

    # global prev_line, zones, np_coord, Xt, Z, ref_rows, polyline
    with Serafin.Read(args.in_slf, 'fr') as resin:
        resin.read_header()

        if not resin.header.is_2d:
            sys.exit("The current script is working only with 2D meshes !")

        resin.get_time()

        # Define zones from polylines
        zones = []
        for i3s_path, operator_str in zip(args.in_i3s_paths, args.operations):
            zones += Zone.get_zones_from_i3s_file(i3s_path, args.threshold, operator_str)

        with Serafin.Write(args.out_slf, 'fr', args.force) as resout:
            # The input header is reused as-is (same mesh and variables)
            output_header = resin.header
            resout.write_header(output_header)
            pos_B = output_header.var_IDs.index('B')

            for time_index, time in enumerate(resin.time):
                var = resin.read_vars_in_frame(time_index)

                # Replace bottom locally
                nmodif = 0
                for i in range(output_header.nb_nodes):  # iterate over all nodes
                    x, y = output_header.x[i], output_header.y[i]
                    pt = geo.Point(x, y)
                    old_z = var[pos_B, i]

                    found = False
                    # Check if it is inside a zone
                    for j, zone in enumerate(zones):
                        if zone.contains(pt):
                            # Current point is inside zone number j and is between polylines a and b
                            z_int = zone.interpolate(pt)
                            new_z = zone.operator(z_int, old_z)
                            var[pos_B, i] = new_z

                            # NOTE(review): `operator_str` is the leftover loop variable
                            # from the zone-building loop above, not the operator of
                            # zone `j` — the printed label may be wrong
                            print("BOTTOM at node {} (zone n°{}) {} to {} (dz={})".format(
                                i + 1, j, operator_str, new_z, new_z - old_z
                            ))

                            nmodif += 1
                            found = True
                            break

                    if not found and args.rescue_distance > 0.0:
                        # Try to rescue some very close nodes
                        for j, zone in enumerate(zones):
                            if zone.polygon.distance(pt) < args.rescue_distance:
                                pt_projected = zone.get_closest_point(pt)

                                # Replace value by a linear interpolation
                                z_int = zone.interpolate(pt_projected)
                                new_z = zone.operator(z_int, old_z)
                                var[pos_B, i] = new_z

                                print("BOTTOM at node {} (zone n°{}, rescued) {} to {} (dz={})".format(
                                    i + 1, j, operator_str, new_z, new_z - old_z
                                ))

                                nmodif += 1
                                break

                resout.write_entire_frame(output_header, time, var)
                print("{} nodes were overwritten".format(nmodif))
187 |
188 |
if __name__ == '__main__':
    parser = PyTelToolsArgParse(description=__doc__, add_args=['in_slf', 'out_slf'])
    parser.add_argument("in_i3s_paths", help="i3s BlueKenue 3D polyline file", nargs='+')
    parser.add_argument("--operations", help="list of operations (set is used by default)", nargs='+',
                        choices=('set', 'max', 'min'))
    parser.add_argument("--threshold", type=float, help="value from which to interpolate")
    parser.add_argument('--attr_to_shift_z', help='attribute to shift z')
    # `type=float` is required: without it a user-supplied value stays a str and the
    # comparison against a shapely distance in `bottom` raises a TypeError
    parser.add_argument('--rescue_distance', type=float, default=0.1,
                        help='distance buffer (in m) to match nodes close to a zone but not inside')

    parser.add_group_general(['force', 'verbose'])
    args = parser.parse_args()

    bottom(args)
203 |
--------------------------------------------------------------------------------
/cli/slf_flux2d.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
3 | Compute (liquid or solid) 2D fluxes over time across sections
4 | """
5 |
6 | from tqdm import tqdm
7 | from shapefile import ShapefileException
8 | import sys
9 |
10 | from pyteltools.conf import settings
11 | from pyteltools.geom import BlueKenue, Shapefile
12 | from pyteltools.slf import Serafin
13 | from pyteltools.slf.flux import FluxCalculator, PossibleFluxComputation
14 | from pyteltools.utils.cli_base import logger, PyTelToolsArgParse
15 |
16 |
def slf_flux2d(args):
    """
    Compute (liquid or solid) 2D fluxes across a set of open polylines for
    every frame and write the results in a CSV file.
    @param args <argparse.Namespace>: CLI arguments (see parser below)
    """
    if len(args.scalars) > 2:
        logger.critical('Only two scalars can be integrated!')
        sys.exit(2)

    # Read set of lines from input file
    polylines = []
    if args.in_sections.endswith('.i2s'):
        with BlueKenue.Read(args.in_sections) as f:
            f.read_header()
            for polyline in f.get_open_polylines():
                polylines.append(polyline)
    elif args.in_sections.endswith('.shp'):
        try:
            for polyline in Shapefile.get_open_polylines(args.in_sections):
                polylines.append(polyline)
        except ShapefileException as e:
            logger.critical(e)
            sys.exit(3)
    else:
        logger.critical('File "%s" is not a i2s or shp file.' % args.in_sections)
        sys.exit(2)

    if not polylines:
        logger.critical('The file does not contain any open polyline.')
        sys.exit(1)
    logger.debug('The file contains {} open polyline{}.'.format(len(polylines), 's' if len(polylines) > 1 else ''))

    # Read Serafin file
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        if not resin.header.is_2d:
            logger.critical('The file has to be a 2D Serafin!')
            sys.exit(3)

        # Determine flux computations properties
        var_IDs = args.vectors + args.scalars
        variables_missing = [var_ID for var_ID in var_IDs if var_ID not in resin.header.var_IDs]
        if variables_missing:
            if len(variables_missing) > 1:
                logger.critical('Variables {} are not present in the Serafin file'.format(variables_missing))
            else:
                logger.critical('Variable {} is not present in the Serafin file'.format(variables_missing[0]))
            logger.critical('Check also `--lang` argument for variable detection.')
            sys.exit(1)
        if var_IDs not in PossibleFluxComputation.common_fluxes():
            # `Logger.warn` is a deprecated alias, use `warning`
            logger.warning('Flux computations is not common. Check what you are doing (or the language).')

        flux_type = PossibleFluxComputation.get_flux_type(var_IDs)

        section_names = ['Section %i' % (i + 1) for i in range(len(polylines))]
        calculator = FluxCalculator(flux_type, var_IDs, resin, section_names, polylines, args.ech)
        calculator.construct_triangles(tqdm)
        calculator.construct_intersections()
        result = []
        for time_index, time in enumerate(tqdm(resin.time, unit='frame')):
            i_result = [str(time)]
            values = []

            for var_ID in calculator.var_IDs:
                values.append(resin.read_var_in_frame(time_index, var_ID))

            for j in range(len(polylines)):
                intersections = calculator.intersections[j]
                flux = calculator.flux_in_frame(intersections, values)
                i_result.append(settings.FMT_FLOAT.format(flux))

            result.append(i_result)

    # Write CSV
    mode = 'w' if args.force else 'x'
    with open(args.out_csv, mode) as out_csv:
        calculator.write_csv(result, out_csv, args.sep)
93 |
94 |
# Command-line definition
parser = PyTelToolsArgParse(description=__doc__, add_args=['in_slf'])
parser.add_argument('in_sections', help='set of lines file (*.shp, *.i2s)')
parser.add_argument('--ech', type=int, help='frequency sampling of input', default=1)
parser.add_argument('--scalars', nargs='*', help='scalars to integrate (up to 2)', default=[], metavar=('VA', 'VB'))
parser.add_argument('--vectors', nargs=2, help='couple of vectors to integrate (X and Y vectors)', default=[],
                    metavar=('VX', 'VY'))

parser.add_known_argument('out_csv')
parser.add_group_general(['force', 'verbose'])


if __name__ == '__main__':
    args = parser.parse_args()

    try:
        slf_flux2d(args)
    except (Serafin.SerafinRequestError, Serafin.SerafinValidationError):
        # Message is already reported by slf logger
        sys.exit(1)
    except FileNotFoundError as e:
        logger.critical('Input file %s not found.' % e.filename)
        sys.exit(3)
    except FileExistsError as e:
        logger.critical('Output file %s already exists. Remove it or add `--force` argument' % e.filename)
        sys.exit(3)
120 |
--------------------------------------------------------------------------------
/cli/slf_int2d.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
3 | Interpolate on a set of points for every frame
4 | """
5 |
6 | import csv
7 | import sys
8 | from tqdm import tqdm
9 | from shapefile import ShapefileException
10 |
11 | from pyteltools.conf import settings
12 | from pyteltools.geom import Shapefile
13 | from pyteltools.slf import Serafin
14 | from pyteltools.slf.interpolation import MeshInterpolator
15 | from pyteltools.utils.cli_base import logger, PyTelToolsArgParse
16 |
17 |
def slf_int2d(args):
    """
    Interpolate variables at a set of points for every frame and write the
    results in a CSV file (wide or long format).
    @param args <argparse.Namespace>: CLI arguments (see parser below)
    """
    # Read set of points file
    fields, indices = Shapefile.get_attribute_names(args.in_points)
    points = []
    attributes = []
    for point, attribute in Shapefile.get_points(args.in_points, indices):
        points.append(point)
        attributes.append(attribute)

    if not points:
        logger.critical('The Shapefile does not contain any point.')
        sys.exit(1)

    # Read Serafin file
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())

        if not resin.header.is_2d:
            logger.critical('The file has to be a 2D Serafin!')
            sys.exit(3)

        resin.get_time()

        output_header = resin.header.copy()

        mesh = MeshInterpolator(output_header, True)
        is_inside, point_interpolators = mesh.get_point_interpolators(points)
        nb_inside = sum(map(int, is_inside))

        if nb_inside == 0:
            logger.critical('No point inside the mesh.')
            sys.exit(3)
        logger.debug('The file contains {} point{}. {} point{} inside the mesh'.format(
            len(points), 's' if len(points) > 1 else '',
            nb_inside, 's are' if nb_inside > 1 else ' is'))

        var_IDs = output_header.var_IDs if args.vars is None else args.vars

        mode = 'w' if args.force else 'x'
        with open(args.out_csv, mode, newline='') as csvfile:
            csvwriter = csv.writer(csvfile, delimiter=args.sep)

            header = ['time_id', 'time']
            if args.long:
                header = header + ['point_id', 'point_x', 'point_y', 'variable', 'value']
            else:
                # Wide format: columns must follow the order in which values are
                # appended below (variable-major, then point); the previous
                # point-major loop mislabeled the columns when several variables
                # and several points were requested
                for var in var_IDs:
                    for pt_id, (x, y) in enumerate(points):
                        header.append('Point %d %s (%s|%s)' % (pt_id + 1, var, settings.FMT_COORD.format(x),
                                                               settings.FMT_COORD.format(y)))
            csvwriter.writerow(header)

            for time_index, time in enumerate(tqdm(resin.time, unit='frame')):
                values = [time_index, time]

                for var_ID in var_IDs:
                    var = resin.read_var_in_frame(time_index, var_ID)
                    for pt_id, (point, point_interpolator) in enumerate(zip(points, point_interpolators)):
                        if args.long:
                            values_long = values + [str(pt_id + 1)] + [settings.FMT_COORD.format(x) for x in point]

                        if point_interpolator is None:
                            # Point outside the mesh: report NaN
                            if args.long:
                                csvwriter.writerow(values_long + [var_ID, settings.NAN_STR])
                            else:
                                values.append(settings.NAN_STR)
                        else:
                            (i, j, k), interpolator = point_interpolator
                            int_value = settings.FMT_FLOAT.format(interpolator.dot(var[[i, j, k]]))
                            if args.long:
                                csvwriter.writerow(values_long + [var_ID, int_value])
                            else:
                                values.append(int_value)

                if not args.long:
                    # Wide format: one row per frame
                    csvwriter.writerow(values)
94 |
95 |
# Command-line definition
parser = PyTelToolsArgParse(description=__doc__, add_args=['in_slf'])
parser.add_argument('in_points', help='set of points file (*.shp)')
parser.add_known_argument('out_csv')
parser.add_argument('--long', help='write CSV with long format (variables are also in rows) instead of wide format',
                    action='store_true')
parser.add_argument('--vars', nargs='+', help='variable(s) to extract (by default: every variables)', default=None,
                    metavar=('VA', 'VB'))
parser.add_group_general(['force', 'verbose'])


if __name__ == '__main__':
    args = parser.parse_args()

    try:
        slf_int2d(args)
    except (Serafin.SerafinRequestError, Serafin.SerafinValidationError):
        # Message is already reported by slf logger
        sys.exit(1)
    except ShapefileException as e:
        logger.critical(e)
        sys.exit(3)
    except FileNotFoundError as e:
        logger.critical('Input file %s not found.' % e.filename)
        sys.exit(3)
    except FileExistsError as e:
        logger.critical('Output file %s already exists. Remove it or add `--force` argument' % e.filename)
        sys.exit(3)
123 |
--------------------------------------------------------------------------------
/cli/slf_last.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
3 | Extract last temporal frame of a 2D/3D Serafin file
4 | """
5 |
6 | import numpy as np
7 | import sys
8 |
9 | from pyteltools.geom.transformation import Transformation
10 | from pyteltools.slf import Serafin
11 | from pyteltools.utils.cli_base import logger, PyTelToolsArgParse
12 |
13 |
def slf_last(args):
    """
    Write the last temporal frame of the input Serafin file into a new single-frame Serafin file.

    @param args <argparse.Namespace>: CLI arguments (in_slf, out_slf, lang, shift,
        toggle_endianness, to_single_precision, time, force)
    """
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        output_header = resin.header.copy()
        # Shift mesh coordinates if necessary
        if args.shift:
            output_header.transform_mesh([Transformation(0, 1, 1, args.shift[0], args.shift[1], 0)])

        # Toggle output file endianness if necessary
        if args.toggle_endianness:
            output_header.toggle_endianness()

        # Convert to single precision
        if args.to_single_precision:
            if resin.header.is_double_precision():
                output_header.to_single_precision()
            else:
                # Fix: `Logger.warn` is deprecated, `Logger.warning` is the supported spelling
                logger.warning('Input file is already single precision! Argument `--to_single_precision` is ignored')

        with Serafin.Write(args.out_slf, args.lang, overwrite=args.force) as resout:
            resout.write_header(output_header)

            # Last frame, optionally re-stamped with a user-provided time value
            time_index = len(resin.time) - 1
            time = resin.time[-1] if args.time is None else args.time

            values = resin.read_vars_in_frame(time_index)
            resout.write_entire_frame(output_header, time, values)
44 |
45 |
# Command-line parser definition
parser = PyTelToolsArgParse(description=__doc__, add_args=['in_slf', 'out_slf', 'shift'])
parser.add_argument('--time', help='time in seconds to write last frame (set to frame time by default)', type=float)
parser.add_group_general(['force', 'verbose'])


if __name__ == '__main__':
    args = parser.parse_args()

    # Exit code 1 on Serafin errors, per the project CLI conventions
    try:
        slf_last(args)
    except (Serafin.SerafinRequestError, Serafin.SerafinValidationError):
        # Message is already reported by slf logger
        sys.exit(1)
59 |
--------------------------------------------------------------------------------
/cli/slf_max_over_files.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
Compute the max or min over some variables for a series of files (which may contain several frames).
4 | This operation can be performed only on some zones defined by polygon(s).
5 | In this case the values in the area not covered by the polygons are set to the first file and first frame.
6 | The output file contains a single frame with the variables.
7 |
8 | This tool has some limitations:
9 | - vectors are not considered as such
10 | - meshes have to be similar
11 | """
12 | import numpy as np
13 | from shapefile import ShapefileException
14 | from shapely.geometry import Point
15 | import sys
16 |
17 | from pyteltools.conf import settings
18 | from pyteltools.geom import Shapefile
19 | from pyteltools.slf import Serafin
20 | from pyteltools.utils.cli_base import logger, PyTelToolsArgParse
21 |
22 |
def slf_max_over_files(args):
    """
    Compute the node-wise min or max of selected variables over a series of Serafin
    files (considering all their frames), optionally restricted to zones defined by
    polygons. Outside the polygons, values are those of the first frame of the first
    file. All input files must share the same 2D mesh.

    @param args <argparse.Namespace>: CLI arguments (in_slfs, out_slf, operation,
        in_polygons, vars, lang, force)
    """
    if args.vars is None:
        # Default: use every variable found in the first input file
        # (fix: the inner `if args.vars is None` ternary was always true here)
        with Serafin.Read(args.in_slfs[0], args.lang) as resin:
            resin.read_header()
            var_IDs = resin.header.var_IDs
    else:
        var_IDs = args.vars

    if args.operation == 'max':
        fun = np.maximum
    elif args.operation == 'min':
        fun = np.minimum
    else:
        raise NotImplementedError

    # Read polygons
    if args.in_polygons is not None:
        if not args.in_polygons.endswith('.shp'):
            logger.critical('File "%s" is not a shp file.' % args.in_polygons)
            sys.exit(3)
        polygons = []
        try:
            for polygon in Shapefile.get_polygons(args.in_polygons):
                polygons.append(polygon)
        except ShapefileException as e:
            logger.error(e)
            sys.exit(3)

        if not polygons:
            logger.error('The file does not contain any polygon.')
            sys.exit(1)
        logger.info('The file contains {} polygon{}.'.format(len(polygons), 's' if len(polygons) > 1 else ''))
    else:
        polygons = None

    output_header = None
    out_values = None  # min or max values
    mask_nodes = None  # True for nodes where the min/max is applied
    for i, in_slf in enumerate(args.in_slfs):
        with Serafin.Read(in_slf, args.lang) as resin:
            resin.read_header()
            logger.info(resin.header.summary())
            if not resin.header.is_2d:
                logger.critical('The file has to be a 2D Serafin!')
                sys.exit(3)
            resin.get_time()

            # Every requested variable must be present in every input file
            for var_ID in var_IDs:
                if var_ID not in resin.header.var_IDs:
                    logger.critical('The variable %s is missing in %s' % (var_ID, in_slf))
                    sys.exit(3)

            if i == 0:
                # Build the output header and the node mask from the first file
                output_header = resin.header.copy()
                output_header.empty_variables()
                for var_ID in var_IDs:
                    output_header.add_variable_from_ID(var_ID)
                out_values = np.empty((output_header.nb_var, output_header.nb_nodes),
                                      dtype=output_header.np_float_type)
                if polygons is not None:
                    mask_nodes = np.zeros(output_header.nb_nodes, dtype=bool)
                    for idx_node, (x, y) in enumerate(zip(output_header.x, output_header.y)):
                        point = Point(x, y)
                        for polygon in polygons:
                            if polygon.contains(point):
                                mask_nodes[idx_node] = True
                                break
                    logger.info('Number of nodes inside polygon(s): %i (over %i)'
                                % (mask_nodes.sum(), output_header.nb_nodes))
                else:
                    mask_nodes = np.ones(output_header.nb_nodes, dtype=bool)
            else:
                if not resin.header.same_2d_mesh(output_header):
                    logger.critical('The mesh of %s is different from the first one' % in_slf)
                    sys.exit(1)

            for time_index, time in enumerate(resin.time):
                for j, var_ID in enumerate(var_IDs):
                    values = resin.read_var_in_frame(time_index, var_ID)
                    if time_index == 0 and i == 0:
                        # Initialize with the first frame of the first file (all nodes)
                        out_values[j, :] = values
                    else:
                        out_values[j, mask_nodes] = fun(out_values[j, mask_nodes], values[mask_nodes])

    with Serafin.Write(args.out_slf, args.lang, overwrite=args.force) as resout:
        resout.write_header(output_header)
        resout.write_entire_frame(output_header, 0.0, out_values)
110 |
111 |
# Command-line parser definition
parser = PyTelToolsArgParse(description=__doc__)
parser.add_argument('in_slfs', help='List of Serafin input filenames', nargs='+')
parser.add_argument('out_slf', help='Serafin output filename')
parser.add_argument('--operation', help='min or max function selector', choices=('min', 'max'), default='max')
parser.add_argument('--in_polygons', help='file containing polygon(s)')
parser.add_argument('--vars', nargs='+', help='variable(s) to extract (by default: all variables)', default=None,
                    metavar=('VA', 'VB'))
parser.add_argument('--lang', help="Serafin language for variables detection: 'fr' or 'en'",
                    default=settings.LANG)
parser.add_group_general(['force', 'verbose'])


if __name__ == '__main__':
    args = parser.parse_args()

    # Exit code 1 on Serafin errors, per the project CLI conventions
    try:
        slf_max_over_files(args)
    except (Serafin.SerafinRequestError, Serafin.SerafinValidationError):
        # Message is already reported by slf logger
        sys.exit(1)
132 |
--------------------------------------------------------------------------------
/cli/slf_sedi_chain.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
3 | Estimate roughly bottom evolution from a 2D result in case of cohesive sediments
4 | Basic implementation of Krone/Partheniades laws
5 | """
6 | from copy import copy
7 | import numpy as np
8 | import sys
9 |
10 | from pyteltools.geom.transformation import Transformation
11 | from pyteltools.slf import Serafin
12 | from pyteltools.slf.variable.variables_2d import FRICTION_LAWS, get_US_equation, STRICKLER_ID
13 | from pyteltools.slf.variables import do_calculations_in_frame, get_necessary_equations
14 | from pyteltools.utils.cli_base import logger, PyTelToolsArgParse
15 |
16 |
def slf_sedi_chain(args):
    """
    Estimate bottom evolution for cohesive sediments with basic Krone (deposition)
    and Partheniades (erosion) laws, driven by the bed shear stress TAU.

    Writes a Serafin file containing bottom level (B) and bottom evolution (EV).

    @param args <argparse.Namespace>: CLI arguments (in_slf, out_slf, lang, shift,
        friction_law, Cmud, Tcd, ws, C, Tce, M, force, ...)
    """
    # Check that float parameters are positive (especially ws!)
    for arg in ('Cmud', 'ws', 'C', 'M'):
        value = getattr(args, arg)
        # `ws`, `C` and `M` have no default: they are None when their law is disabled,
        # and comparing None < 0 would raise a TypeError
        if value is not None and value < 0:
            # Bug fix: report the offending argument name (was `% args`, which dumped the whole namespace)
            logger.critical('The argument %s has to be positive' % arg)
            sys.exit(1)

    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        us_equation = get_US_equation(args.friction_law)
        necessary_equations = get_necessary_equations(resin.header.var_IDs, ['TAU'],
                                                      is_2d=True, us_equation=us_equation)

        if resin.header.nb_frames < 1:
            logger.critical('The input file must have at least one frame!')
            sys.exit(1)

        output_header = resin.header.copy()
        # Shift mesh coordinates if necessary
        if args.shift:
            output_header.transform_mesh([Transformation(0, 1, 1, args.shift[0], args.shift[1], 0)])

        # Toggle output file endianness if necessary
        if args.toggle_endianness:
            output_header.toggle_endianness()

        # Convert to single precision
        if args.to_single_precision:
            if resin.header.is_double_precision():
                output_header.to_single_precision()
            else:
                # Fix: `Logger.warn` is deprecated, `Logger.warning` is the supported spelling
                logger.warning('Input file is already single precision! Argument `--to_single_precision` is ignored')

        # Output only bottom level and its evolution
        output_header.empty_variables()
        output_header.add_variable_from_ID('B')
        output_header.add_variable_from_ID('EV')

        with Serafin.Write(args.out_slf, args.lang, overwrite=args.force) as resout:
            resout.write_header(output_header)

            prev_time = None
            prev_tau = None
            initial_bottom = resin.read_var_in_frame(0, 'B')
            bottom = copy(initial_bottom)
            for time_index, time in enumerate(resin.time):
                tau = do_calculations_in_frame(necessary_equations, resin, time_index, ['TAU'],
                                               output_header.np_float_type, is_2d=True, us_equation=us_equation,
                                               ori_values={})[0]
                if prev_time is not None:
                    dt = time - prev_time
                    # Time integration uses the mean shear stress between consecutive frames
                    mean_tau = (prev_tau + tau)/2
                    if args.Tcd > 0:
                        # Krone deposition law: active where mean_tau < Tcd (clip cancels it elsewhere)
                        bottom += args.ws * args.C * \
                                  (1 - np.clip(mean_tau/args.Tcd, a_min=None, a_max=1.)) * dt / args.Cmud
                    if args.Tce > 0:
                        # Partheniades erosion law: active where mean_tau > Tce (clip cancels it elsewhere)
                        bottom -= args.M * (np.clip(mean_tau/args.Tce, a_min=1., a_max=None) - 1.) * dt / args.Cmud

                evol_bottom = bottom - initial_bottom
                resout.write_entire_frame(output_header, time, np.vstack((bottom, evol_bottom)))

                prev_time = time
                prev_tau = tau
82 |
83 |
# Command-line parser definition
parser = PyTelToolsArgParse(description=__doc__, add_args=['in_slf', 'out_slf', 'shift'])
help_friction_laws = ', '.join(['%i=%s' % (i, law) for i, law in enumerate(FRICTION_LAWS)])
parser.add_argument('--friction_law', type=int, help='friction law identifier: %s' % help_friction_laws,
                    choices=range(len(FRICTION_LAWS)), default=STRICKLER_ID)
parser.add_argument('--Cmud', help='mud concentration (liquid) [kg/m³]', type=float, default=1200)
group_deposition = parser.add_argument_group('Deposition', 'Parameters of Krone deposition law')
group_deposition.add_argument('--Tcd', help='critical Shear Stress for Deposition [Pa]', type=float, default=0.0)
group_deposition.add_argument('--ws', help='settling velocity [m/s]', type=float)
group_deposition.add_argument('--C', help='concentration (for deposition law) [kg/m³]', type=float)
group_erosion = parser.add_argument_group('Erosion', 'Parameters of Partheniades erosion law')
group_erosion.add_argument('--Tce', help='critical Shear Stress for Erosion [Pa]', type=float, default=0.0)
group_erosion.add_argument('--M', help='Partheniades coefficient', type=float)
parser.add_group_general(['force', 'verbose'])


if __name__ == '__main__':
    args = parser.parse_args()

    # Exit code 1 on Serafin errors, per the project CLI conventions
    try:
        slf_sedi_chain(args)
    except (Serafin.SerafinRequestError, Serafin.SerafinValidationError):
        # Message is already reported by slf logger
        sys.exit(1)
107 |
--------------------------------------------------------------------------------
/cli/slf_to_raster.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
3 | Convert all variables of a single frame (from a Serafin file) to a tif raster (one band per variable)
4 | Beware: Output file is overwritten if already present
5 | """
6 | from osgeo import gdal, osr
7 | import matplotlib.tri as mtri
8 | import numpy as np
9 | import sys
10 |
11 | from pyteltools.geom.transformation import Transformation
12 | from pyteltools.slf import Serafin
13 | from pyteltools.utils.cli_base import logger, PyTelToolsArgParse
14 |
15 |
16 | WATER_DEPTH_ID = 'H'
17 |
18 |
def arrays2raster(raster_filename, xy_raster_origin, dx, dy, array_list, epsg=None):
    """
    Write a list of 2D arrays as a multi-band GeoTIFF raster (one band per variable).

    @param raster_filename <str>: output GeoTIFF file path
    @param xy_raster_origin <(float, float)>: (x, y) coordinates of the raster origin
    @param dx <float>: pixel size along x
    @param dy <float>: pixel size along y (negative for a north-up raster)
    @param array_list <[(str, numpy.ndarray)]>: (variable identifier, 2D array) couples
    @param epsg <int>: optional EPSG code to assign to the raster
    @raises RuntimeError: if an array does not match the shape of the first one
    """
    nb_var = len(array_list)
    nb_rows, nb_cols = array_list[0][1].shape
    logger.info("Regular grid size : %i rows x %i columns" % (nb_rows, nb_cols))

    origin_x = xy_raster_origin[0]
    origin_y = xy_raster_origin[1]

    driver = gdal.GetDriverByName('GTiff')
    out_raster = driver.Create(raster_filename, nb_cols, nb_rows, nb_var, gdal.GDT_Float64)

    # Set grid and EPSG if necessary
    out_raster.SetGeoTransform((origin_x, dx, 0, origin_y, 0, dy))
    if epsg is not None:  # EPSG attribution seems buggy
        out_raster_srs = osr.SpatialReference()
        out_raster_srs.ImportFromEPSG(epsg)
        out_raster.SetProjection(out_raster_srs.ExportToWkt())

    # Add one band per variable
    for i_var, (var_ID, array) in enumerate(array_list):
        if array.shape != (nb_rows, nb_cols):
            # Fix: the bare RuntimeError gave no hint about which band was inconsistent
            raise RuntimeError('Array shape %s for variable %s differs from expected shape %s'
                               % (array.shape, var_ID, (nb_rows, nb_cols)))
        outband = out_raster.GetRasterBand(i_var + 1)
        outband.SetDescription(var_ID)
        outband.WriteArray(array)
        outband.FlushCache()

    # Dereference the dataset so GDAL flushes pending writes and closes the file
    out_raster = None
45 |
46 |
def slf_to_raster(args):
    """
    Interpolate variables of a single frame of a Serafin file onto a regular grid
    and write them as a multi-band GeoTIFF raster (one band per variable).

    @param args <argparse.Namespace>: CLI arguments (in_slf, out_tif, resolution,
        vars, frame_index, epsg, Hmin_to_clip, shift, lang)
    """
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        header = resin.header
        logger.info(header.summary())
        resin.get_time()

        # Select variables to rasterize (all variables by default)
        if args.vars is None:
            var_names = [var_name.decode('utf-8') for var_name in header.var_names]
            var_IDs = header.var_IDs
        else:
            var_names = []
            var_IDs = []
            for var_ID, var_name in zip(header.var_IDs, header.var_names):
                if var_ID in args.vars:
                    var_names.append(var_name.decode('utf-8'))
                    var_IDs.append(var_ID)

        # Shift mesh coordinates if necessary
        if args.shift:
            header.transform_mesh([Transformation(0, 1, 1, args.shift[0], args.shift[1], 0)])

        # Build output regular grid and matplotlib triangulation of the mesh
        m_xi, m_yi = np.meshgrid(np.arange(header.x.min(), header.x.max(), args.resolution),
                                 np.arange(header.y.min(), header.y.max(), args.resolution))
        triang = mtri.Triangulation(header.x, header.y, triangles=header.ikle_2d - 1)

        # Build mask to clip values where water depth is below Hmin_to_clip
        if args.Hmin_to_clip is not None:
            values = resin.read_var_in_frame(args.frame_index, WATER_DEPTH_ID)
            interp = mtri.LinearTriInterpolator(triang, values)
            data = interp(m_xi, m_yi)[::-1]  # reverse array so the tif looks like the array
            with np.errstate(invalid='ignore'):
                mask = data <= args.Hmin_to_clip
        else:
            mask = None

        # Build list containing all interpolated variables on the regular grid
        array_list = []
        # Idiom fix: the enumerate index was unused
        for var_ID, var_name in zip(var_IDs, var_names):
            values = resin.read_var_in_frame(args.frame_index, var_ID)
            interp = mtri.LinearTriInterpolator(triang, values)
            data = interp(m_xi, m_yi)[::-1]  # reverse array so the tif looks like the array

            if mask is not None:
                data = np.where(mask, np.nan, data)

            array_list.append((var_name, data))
            logger.info("Min and max values for interpolated %s variable: [%f, %f]"
                        % (var_name, np.nanmin(data), np.nanmax(data)))

        # Write data in the raster output file (origin at top-left corner, hence negative dy)
        arrays2raster(args.out_tif, (header.x.min(), header.y.max()),
                      args.resolution, -args.resolution, array_list, epsg=args.epsg)
101 |
102 |
# Command-line parser definition
parser = PyTelToolsArgParse(description=__doc__, add_args=['in_slf', 'shift'])
parser.add_argument('out_tif', help='output GeoTIFF raster file (with .tif extension)')
parser.add_argument('resolution', type=float, help='sampling space step (in meters)')
# Consistency fix: other CLI tools (e.g. slf_max_over_files) phrase this default as "all variables"
parser.add_argument('--vars', nargs='+', help='variable(s) to extract (by default: all variables)', default=None,
                    metavar=('VA', 'VB'))
parser.add_argument('--frame_index', type=int, help='index of the target temporal frame (0-indexed integer)', default=0)
parser.add_argument('--epsg', type=int, help='EPSG code for output file', default=None)
parser.add_argument('--Hmin_to_clip', type=float,
                    help='set to NaN all values where water depth (H) is below this threshold', default=None)
parser.add_group_general(['verbose'])


if __name__ == '__main__':
    args = parser.parse_args()

    # Exit code 1 on Serafin errors, per the project CLI conventions
    try:
        slf_to_raster(args)
    except (Serafin.SerafinRequestError, Serafin.SerafinValidationError):
        # Message is already reported by slf logger
        sys.exit(1)
123 |
--------------------------------------------------------------------------------
/cli/slf_volume.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
3 | Compute volumes (between upper and lower variables) within polygons
4 | """
5 | from shapefile import ShapefileException
6 | import sys
7 | from tqdm import tqdm
8 |
9 | from pyteltools.conf import settings
10 | from pyteltools.geom import BlueKenue, Shapefile
11 | from pyteltools.slf import Serafin
12 | from pyteltools.slf.volume import VolumeCalculator
13 | from pyteltools.utils.cli_base import logger, PyTelToolsArgParse
14 |
15 |
def slf_volume(args):
    """
    Compute volumes between an upper and a lower variable within polygons, for every
    sampled frame, and write the results to a CSV file.

    @param args <argparse.Namespace>: CLI arguments (in_slf, in_polygons, upper_var,
        lower_var, detailed, ech, out_csv, sep, lang, force)
    """
    # Load the polygons from the geometry file (BlueKenue i2s or shapefile)
    polygons = []
    if args.in_polygons.endswith('.i2s'):
        with BlueKenue.Read(args.in_polygons) as f:
            f.read_header()
            polygons.extend(f.get_polygons())
    elif args.in_polygons.endswith('.shp'):
        try:
            polygons.extend(Shapefile.get_polygons(args.in_polygons))
        except ShapefileException as e:
            logger.error(e)
            sys.exit(3)
    else:
        logger.error('File "%s" is not a i2s or shp file.' % args.in_polygons)
        sys.exit(2)

    if not polygons:
        logger.error('The file does not contain any polygon.')
        sys.exit(1)
    logger.debug('The file contains {} polygon{}.'.format(len(polygons), 's' if len(polygons) > 1 else ''))

    names = ['Polygon %d' % (i + 1) for i in range(len(polygons))]

    # Read Serafin file
    with Serafin.Read(args.in_slf, args.lang) as resin:
        resin.read_header()
        logger.info(resin.header.summary())
        resin.get_time()

        if not resin.header.is_2d:
            logger.error('The file has to be a 2D Serafin!')
            sys.exit(3)

        # Check variables consistency
        if args.upper_var not in resin.header.var_IDs:
            logger.error('Upper variable "%s" is not in Serafin file' % args.upper_var)
            sys.exit(1)
        upper_var = args.upper_var
        lower_var = args.lower_var
        if lower_var == 'init':
            # Special keyword: use the initial value of the upper variable as lower bound
            lower_var = VolumeCalculator.INIT_VALUE
        elif lower_var is not None:
            if lower_var not in resin.header.var_IDs:
                logger.error('Lower variable "%s" is not in Serafin file' % lower_var)
                sys.exit(1)

        volume_type = VolumeCalculator.POSITIVE if args.detailed else VolumeCalculator.NET
        calculator = VolumeCalculator(volume_type, upper_var, lower_var, resin, names, polygons, args.ech)
        calculator.construct_triangles(tqdm)
        calculator.construct_weights(tqdm)

        result = []
        for time_index in tqdm(calculator.time_indices, unit='frame'):
            row = [str(resin.time[time_index])]
            values = calculator.read_values_in_frame(time_index)

            for weight, polygon in zip(calculator.weights, calculator.polygons):
                volume = calculator.volume_in_frame_in_polygon(weight, values, polygon)
                if calculator.volume_type == VolumeCalculator.POSITIVE:
                    # Detailed mode yields several values (net/positive/negative) per polygon
                    row.extend(settings.FMT_FLOAT.format(v) for v in volume)
                else:
                    row.append(settings.FMT_FLOAT.format(volume))
            result.append(row)

        # Write CSV ('x' mode refuses to overwrite unless --force was given)
        with open(args.out_csv, 'w' if args.force else 'x') as out_csv:
            calculator.write_csv(result, out_csv, args.sep)
93 |
94 |
# Command-line parser definition
parser = PyTelToolsArgParse(description=__doc__, add_args=['in_slf'])
parser.add_argument('in_polygons', help='set of polygons file (*.shp, *.i2s)')
parser.add_argument('--ech', type=int, help='frequency sampling of input', default=1)
parser.add_argument('--upper_var', help='upper variable', metavar='VA', required=True)
parser.add_argument('--lower_var', help='lower variable', metavar='VB', default=None)
parser.add_argument('--detailed', help='add positive and negative volumes', action='store_true')

parser.add_known_argument('out_csv')
parser.add_group_general(['force', 'verbose'])


if __name__ == '__main__':
    args = parser.parse_args()

    # Exit codes follow the project conventions: 1 = computation error, 3 = file error
    try:
        slf_volume(args)
    except (Serafin.SerafinRequestError, Serafin.SerafinValidationError):
        # Message is already reported by slf logger
        sys.exit(1)
    except FileNotFoundError as e:
        logger.error('Input file %s not found.' % e.filename)
        sys.exit(3)
    except FileExistsError as e:
        logger.error('Output file %s already exists. Remove it or add `--force` argument' % e.filename)
        sys.exit(3)
120 |
--------------------------------------------------------------------------------
/cli/update_culverts_file.py:
--------------------------------------------------------------------------------
1 | """
2 | Update node numbering in a file describing culverts
3 | """
4 |
5 | import sys
6 |
7 | from pyteltools.conf import settings
8 | from pyteltools.slf import Serafin
9 | from pyteltools.utils.cli_base import PyTelToolsArgParse
10 |
11 |
def update_culverts_file(args):
    """
    Rewrite a culverts description file, renumbering its node pairs from an original
    mesh to a new mesh by nearest-node matching on the original coordinates.

    @param args <argparse.Namespace>: CLI arguments (in_txt, out_txt, in_slf_ori,
        in_slf_new, lang)
    """
    with Serafin.Read(args.in_slf_ori, args.lang) as mesh_ori:
        mesh_ori.read_header()
        with Serafin.Read(args.in_slf_new, args.lang) as mesh_new:
            mesh_new.read_header()
            x_ori, y_ori = mesh_ori.header.x, mesh_ori.header.y
            nearest = mesh_new.header.nearest_node
            with open(args.in_txt, 'r') as in_txt, open(args.out_txt, 'w', newline='') as out_txt:
                for line_index, line in enumerate(in_txt):
                    # The first three lines form a header that is copied verbatim
                    if line_index < 3:
                        out_txt.write(line)
                        continue
                    # Node numbers in the file are 1-based
                    node1, node2, remainder = line.split(maxsplit=2)
                    new_node1 = nearest(x_ori[int(node1) - 1], y_ori[int(node1) - 1])
                    new_node2 = nearest(x_ori[int(node2) - 1], y_ori[int(node2) - 1])
                    out_txt.write('%i %i %s' % (new_node1, new_node2, remainder))
29 |
30 |
# Command-line parser definition
parser = PyTelToolsArgParse(description=__doc__, add_args=[])
parser.add_argument('in_txt', help='Original input culverts file')
parser.add_argument('out_txt', help='New output culverts file')
parser.add_argument('in_slf_ori', help='Original Serafin file')
parser.add_argument('in_slf_new', help='New Serafin file')
parser.add_argument('--lang', help="Serafin language for variables detection: 'fr' or 'en'", default=settings.LANG)
parser.add_group_general(['force', 'verbose'])


if __name__ == '__main__':
    args = parser.parse_args()

    # Exit code 1 on Serafin errors, per the project CLI conventions
    try:
        update_culverts_file(args)
    except (Serafin.SerafinRequestError, Serafin.SerafinValidationError):
        # Message is already reported by slf logger
        sys.exit(1)
48 |
--------------------------------------------------------------------------------
/conventions.md:
--------------------------------------------------------------------------------
1 | Conventions
2 | ===========
3 |
4 | ## Coding conventions
5 | * encoding: utf-8
6 | * linux line breaking
7 | * indent: 4 spaces
8 | * comment language: English
9 | * shebang: `#!/usr/bin/env python`
10 |
11 | ## pylint
12 | 
13 |
14 | Simply run pylint with:
15 | ```
16 | pylint pyteltools
17 | ```
18 | The configuration file `.pylintrc` will be used.
19 |
20 | ## Module imports
21 | Avoid any wildcard imports.
22 |
23 | ### Group and order imports
24 | Three groups for line imports are separated by an empty line:
25 | 1. internal Python imports
26 | 2. imports from PyTelTools
27 | 3. relative imports (within a PyTelTools module)
28 |
29 | Imports are sorted by alphabetic order.
30 |
31 | Example:
32 | ```python
33 | import sys
34 | from time import time
35 |
36 | from pyteltools.conf import settings
37 | from pyteltools.slf import Serafin
38 |
39 | from .Node import Box, Link, Port
40 | from .util import logger
41 | ```
42 |
43 | ### Common abbreviations
44 | Some common import renamings:
45 | ```python
46 | import numpy as np
47 | import pyteltools.slf.misc as operations
48 | ```
49 |
50 | ## Naming conventions
51 | * variables, functions, methods: lowercase_with_underscores
52 | * class: CapWords
53 |
54 | PyQt element prefixes :
55 | * `qcb` = QCheckBox
56 | * `qds` = QDoubleSpinBox
57 | * `qle` = QLineEdit
58 | * `qpb` = QPushButton
59 |
60 | ### Common custom methods for PyQt5
61 | * `_initWidgets()`: fix element sizes, tooltips, ...
62 | * `_setLayout()`: add widgets, items and finally calls `setLayouts()`
63 | * `_bindEvents()`: bind events with methods
64 |
65 | ## Logging
66 | Use with following logging levels (with corresponding numeric value) :
* `CRITICAL` (50)
68 | * `WARNING` (30)
69 | * `INFO` (20)
70 | * `DEBUG` (10)
71 |
72 | ## CLI exiting code
73 | * 0 = successful termination
74 | * 1 = different kind of errors/inconsistencies: in input/output, error during computation, ...
75 | * 2 = error or inconsistencies with command-line arguments
76 | * 3 = file error (parser, writer)
77 |
78 | ## Code documentation
Developer documentation is generated with doxygen and provided on https://cnr-engineering.github.io/PyTelTools.
80 |
81 | Doxygen will extract preformatted comments following [some conventions](https://www.stack.nl/~dimitri/doxygen/manual/docblocks.html#pythonblocks).
82 |
--------------------------------------------------------------------------------
/notebook/Handle Serafin files.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "Main classes to deal with:\n",
8 | "- `SerafinHeader`\n",
9 | "- `Read` (derived from `Serafin`)\n",
10 | "- `Write` (derived from `Serafin`)"
11 | ]
12 | },
13 | {
14 | "cell_type": "markdown",
15 | "metadata": {},
16 | "source": [
17 | "# Read Telemac file"
18 | ]
19 | },
20 | {
21 | "cell_type": "markdown",
22 | "metadata": {},
23 | "source": [
24 | "Read a binary Selafin file.\n",
25 | "Automatic dectection of **precision** (single or double) and **endianness** (big or little endian)."
26 | ]
27 | },
28 | {
29 | "cell_type": "code",
30 | "execution_count": 5,
31 | "metadata": {},
32 | "outputs": [
33 | {
34 | "name": "stderr",
35 | "output_type": "stream",
36 | "text": [
37 | "Reading the input file: \"../scripts_PyTelTools_validation/data/Yen/fis_yen-exp.slf\" of size 2078184 bytes\n",
38 | "WARNING: The 2D variable name \"C 1ST CLASS\" is not known (lang=en). The complete name will be used as ID\n",
39 | "WARNING: The 2D variable name \"PRIVE 1\" is not known (lang=en). The complete name will be used as ID\n",
40 | "WARNING: The 2D variable name \"PRIVE 2\" is not known (lang=en). The complete name will be used as ID\n"
41 | ]
42 | },
43 | {
44 | "name": "stdout",
45 | "output_type": "stream",
46 | "text": [
47 | "The file is of type SERAPHIN 2D. It has 14 variables,\n",
48 | "on 1894 nodes and 3093 elements for 19 time frames.\n",
49 | "[0.0, 1000.0, 2000.0, 3000.0, 4000.0, 5000.0, 6000.0, 7000.0, 8000.0, 9000.0, 10000.0, 11000.0, 12000.0, 13000.0, 14000.0, 15000.0, 16000.0, 17000.0, 18000.0]\n"
50 | ]
51 | }
52 | ],
53 | "source": [
54 | "from pyteltools.slf import Serafin\n",
55 | "\n",
56 | "with Serafin.Read('../scripts_PyTelTools_validation/data/Yen/fis_yen-exp.slf', 'en') as resin:\n",
57 | " # Read header (SerafinHeader is stored in `header` attribute of `Serafin` class)\n",
58 | " resin.read_header()\n",
59 | " \n",
60 | " # Display a summary\n",
61 | " print(resin.header.summary())\n",
62 | " \n",
63 | " # Get time (in seconds) and display it\n",
64 | " resin.get_time()\n",
65 | " print(resin.time)"
66 | ]
67 | },
68 | {
69 | "cell_type": "markdown",
70 | "metadata": {},
71 | "source": [
72 | "# Write Telemac file"
73 | ]
74 | },
75 | {
76 | "cell_type": "code",
77 | "execution_count": 2,
78 | "metadata": {},
79 | "outputs": [
80 | {
81 | "name": "stderr",
82 | "output_type": "stream",
83 | "text": [
84 | "Reading the input file: \"../scripts_PyTelTools_validation/data/Yen/fis_yen-exp.slf\" of size 2078184 bytes\n",
85 | "WARNING: The 2D variable name \"C 1ST CLASS\" is not known (lang=en). The complete name will be used as ID\n",
86 | "WARNING: The 2D variable name \"PRIVE 1\" is not known (lang=en). The complete name will be used as ID\n",
87 | "WARNING: The 2D variable name \"PRIVE 2\" is not known (lang=en). The complete name will be used as ID\n",
88 | "Writing the output file: \"/tmp/test.slf\"\n"
89 | ]
90 | }
91 | ],
92 | "source": [
93 | "import numpy as np\n",
94 | "\n",
95 | "from pyteltools.slf import Serafin\n",
96 | "\n",
97 | "\n",
98 | "with Serafin.Read('../scripts_PyTelTools_validation/data/Yen/fis_yen-exp.slf', 'en') as resin:\n",
99 | " resin.read_header()\n",
100 | " # Copy header\n",
101 | " output_header = resin.header.copy()\n",
102 | " \n",
103 | " # Change some header attributes if required\n",
104 | " #output_header.toggle_endianness()\n",
105 | " #output_header.to_single_precision()\n",
106 | "\n",
107 | " values = np.empty((output_header.nb_var, output_header.nb_nodes), dtype=output_header.np_float_type)\n",
108 | " with Serafin.Write('/tmp/test.slf', 'fr', overwrite=True) as resout:\n",
109 | " resout.write_header(output_header)\n",
110 | "\n",
111 | " # Copy all frames\n",
112 | " for time_index, time in enumerate(resin.time):\n",
113 | " for i, var_ID in enumerate(output_header.var_IDs):\n",
114 | " values[i, :] = resin.read_var_in_frame(time_index, var_ID)\n",
115 | " resout.write_entire_frame(output_header, time, values)"
116 | ]
117 | },
118 | {
119 | "cell_type": "markdown",
120 | "metadata": {},
121 | "source": [
122 | "# Handle exceptions"
123 | ]
124 | },
125 | {
126 | "cell_type": "markdown",
127 | "metadata": {},
128 | "source": [
129 | "Some custom exceptions are defined in `pyteltools.slf.Serafin`:\n",
130 | "* `SerafinRequestError`: Serafin file content checking\n",
131 | "* `SerafinValidationError`: requesting invalid values from Serafin object"
132 | ]
133 | }
134 | ],
135 | "metadata": {
136 | "kernelspec": {
137 | "display_name": "Python 3",
138 | "language": "python",
139 | "name": "python3"
140 | },
141 | "language_info": {
142 | "codemirror_mode": {
143 | "name": "ipython",
144 | "version": 3
145 | },
146 | "file_extension": ".py",
147 | "mimetype": "text/x-python",
148 | "name": "python",
149 | "nbconvert_exporter": "python",
150 | "pygments_lexer": "ipython3",
151 | "version": "3.7.3"
152 | }
153 | },
154 | "nbformat": 4,
155 | "nbformat_minor": 2
156 | }
157 |
--------------------------------------------------------------------------------
/notebook/Post-processing examples.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Extract at points from Serafin 2D"
8 | ]
9 | },
10 | {
11 | "cell_type": "code",
12 | "execution_count": 6,
13 | "metadata": {},
14 | "outputs": [
15 | {
16 | "name": "stderr",
17 | "output_type": "stream",
18 | "text": [
19 | "Reading the input file: \"../scripts_PyTelTools_validation/data/Yen/fis_yen-exp.slf\" of size 2078184 bytes\n",
20 | "WARNING: The 2D variable name \"C 1ST CLASS\" is not known (lang=en). The complete name will be used as ID\n",
21 | "WARNING: The 2D variable name \"PRIVE 1\" is not known (lang=en). The complete name will be used as ID\n",
22 | "WARNING: The 2D variable name \"PRIVE 2\" is not known (lang=en). The complete name will be used as ID\n"
23 | ]
24 | },
25 | {
26 | "name": "stdout",
27 | "output_type": "stream",
28 | "text": [
29 | "2 points are inside the mesh\n",
30 | "[10.052406803669136, 10.050774145984624]\n"
31 | ]
32 | }
33 | ],
34 | "source": [
35 | "from pyteltools.geom import Shapefile\n",
36 | "from pyteltools.slf.interpolation import MeshInterpolator\n",
37 | "from pyteltools.slf import Serafin\n",
38 | "\n",
39 | "\n",
40 | "points = [(97.0, 32.5), (97.5, 33.5)]\n",
41 | "\n",
42 | "with Serafin.Read('../scripts_PyTelTools_validation/data/Yen/fis_yen-exp.slf', 'en') as resin:\n",
43 | " resin.read_header()\n",
44 | " resin.get_time()\n",
45 | "\n",
46 | " # Determine mesh interpolation\n",
47 | " mesh = MeshInterpolator(resin.header, True)\n",
48 | " is_inside, point_interpolators = mesh.get_point_interpolators(points)\n",
49 | " nb_inside = sum(map(int, is_inside))\n",
50 | " print(\"%i points are inside the mesh\" % nb_inside)\n",
51 | "\n",
52 |     "    # Interpolate one variable (S: free surface) and one frame only (the first)\n",
53 | " values = resin.read_var_in_frame(0, 'S')\n",
54 | "\n",
55 | " results = []\n",
56 | " for pt_id, (point, point_interpolator) in enumerate(zip(points, point_interpolators)):\n",
57 | " if point_interpolator is not None:\n",
58 | " (i, j, k), interpolator = point_interpolator\n",
59 | " results.append(interpolator.dot(values[[i, j, k]]))\n",
60 | " print(results)\n"
61 | ]
62 | }
63 | ],
64 | "metadata": {
65 | "kernelspec": {
66 | "display_name": "Python 3",
67 | "language": "python",
68 | "name": "python3"
69 | },
70 | "language_info": {
71 | "codemirror_mode": {
72 | "name": "ipython",
73 | "version": 3
74 | },
75 | "file_extension": ".py",
76 | "mimetype": "text/x-python",
77 | "name": "python",
78 | "nbconvert_exporter": "python",
79 | "pygments_lexer": "ipython3",
80 | "version": "3.7.3"
81 | }
82 | },
83 | "nbformat": 4,
84 | "nbformat_minor": 2
85 | }
86 |
--------------------------------------------------------------------------------
/pyteltools/__init__.py:
--------------------------------------------------------------------------------
# PyTelTools package version string
VERSION = '0.3'
2 |
--------------------------------------------------------------------------------
/pyteltools/__main__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CNR-Engineering/PyTelTools/1bcba819144ddcff28dadb2c3d6d6fcc5db20142/pyteltools/__main__.py
--------------------------------------------------------------------------------
/pyteltools/arcpy_scripts/landxml_to_tin.py:
--------------------------------------------------------------------------------
"""Convert a LandXML file to an ArcGIS TIN (requires arcpy with 3D Analyst).

Usage: landxml_to_tin.py <xml_name> <tin_folder> <tin_name>
Exit codes: 0 = success, 1 = arcpy unavailable, 2 = no 3D Analyst license,
3 = conversion failed.
"""
import sys


try:
    import arcpy
except ModuleNotFoundError:
    # Exit code 1: not running under an ArcGIS Python interpreter
    sys.exit(1)

# Positional arguments: input LandXML file, output folder and TIN name
xml_name, tin_folder, tin_name = sys.argv[1], sys.argv[2], sys.argv[3]

try:
    arcpy.CheckOutExtension('3D')  # obtain a license for the ArcGIS 3D Analyst extension
except Exception:
    # Exit code 2: no 3D Analyst license could be checked out.
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are not swallowed.
    sys.exit(2)

try:
    arcpy.LandXMLToTin_3d(xml_name, tin_folder, tin_name, '1')
except Exception as e:
    sys.stderr.write(str(e))
    # Exit code 3: the LandXML -> TIN conversion itself failed
    sys.exit(3)

sys.exit(0)
23 |
--------------------------------------------------------------------------------
/pyteltools/arcpy_scripts/mxd_to_png.py:
--------------------------------------------------------------------------------
"""Export an ArcGIS map document (.mxd) to a PNG image (requires arcpy).

Usage: mxd_to_png.py <mxd_path> <png_name> <resolution_dpi>
Exit codes: 0 = success, 1 = arcpy unavailable, 2 = document unreadable,
3 = export failed.
"""
import sys


try:
    import arcpy.mapping
except ModuleNotFoundError:
    # Exit code 1: not running under an ArcGIS Python interpreter
    sys.exit(1)

# Positional arguments: input .mxd document, output .png path, export resolution (dpi)
mxd_path, png_name, resolution = sys.argv[1], sys.argv[2], int(sys.argv[3])

try:
    mxd = arcpy.mapping.MapDocument(mxd_path)
    # Listing the layers validates that the document can actually be read;
    # the result itself is not needed (the previous binding was unused).
    arcpy.mapping.ListLayers(mxd)
except Exception as e:
    sys.stderr.write(str(e))
    # Exit code 2: the map document could not be opened/read
    sys.exit(2)

try:
    arcpy.mapping.ExportToPNG(mxd, png_name, resolution=resolution)
except Exception as e:
    sys.stderr.write(str(e))
    # Exit code 3: the PNG export failed
    sys.exit(3)

sys.exit(0)
25 |
--------------------------------------------------------------------------------
/pyteltools/conf/__init__.py:
--------------------------------------------------------------------------------
import os
from simple_settings import LazySettings
import sys


# Default configuration is always loaded first
settings_list = ['pyteltools.conf.default_settings']

# Add user configuration if `PYTELTOOLS_SETTINGS` environment variable is present and not empty
settings_env = os.environ.get('PYTELTOOLS_SETTINGS')
if settings_env:  # single truthiness test covers both the unset (None) and empty-string cases
    settings_list.append(settings_env)

try:
    settings = LazySettings(*settings_list)
    settings.as_dict()  # Only check if settings could be read
except FileNotFoundError:
    sys.stderr.write('User configuration file could not be found\n')
    # Message fixed: PYTELTOOLS_SETTINGS is an environment *variable*, not a file
    sys.stderr.write('File "%s" does not exist (or check `PYTELTOOLS_SETTINGS` environment variable)\n'
                     % settings_env)
    sys.exit(1)
22 |
--------------------------------------------------------------------------------
/pyteltools/conf/default_settings.py:
--------------------------------------------------------------------------------
from collections import OrderedDict
import logging
from multiprocessing import cpu_count


# ~> GENERAL CONFIGURATION

# Logging level
# logging.DEBUG, logging.INFO, logging.ERROR
LOGGING_LEVEL = logging.INFO

# Logging formats
LOGGING_FMT_CLI = '%(message)s'
LOGGING_FMT_GUI = '%(asctime)s - [%(levelname)s] - \n%(message)s'

# Color logging messages (requires coloredlogs package)
COLOR_LOGS = True

# CPU Cores for parallel computation (workflow multi-folder view)
NCSIZE = cpu_count()

# Path to ArcGIS Python executable (for `outil_carto.py`)
PY_ARCGIS = 'C:\\Python27\\ArcGIS10.8\\python.exe'

# ~> SERAFIN

# Use to define mesh origin coordinates (in iparam array)
ENABLE_MESH_ORIGIN = True

# Serafin extensions for file name filtering (default extension is the first)
SERAFIN_EXT = ['.srf', '.slf', '.res', '.geo']

# Language (for variables detection)
LANG = 'fr'

# ~> INPUTS/OUTPUTS

# Format to write float values (in CSV, LandXML, VTK)
FMT_FLOAT = '{:.5e}' # 1.53849e5 (6 significant numbers)

# Format to write x, y (and z) coordinates (in CSV, LandXML, VTK)
FMT_COORD = '{:.4f}' # 153849.2841

# Representation of a "Not A Number" value (to write in CSV files)
NAN_STR = '#N/A'

# CSV column delimiter
CSV_SEPARATOR = ';'

# Write XYZ header
WRITE_XYZ_HEADER = True

# Arcpy png resolution
ARCPY_PNG_DPI = 192

# ~> VISUALIZATION

# Figure size (in inches)
FIG_SIZE = (8, 6)

# Figure output dot density
FIG_OUT_DPI = 100

# Map size (in inches)
MAP_SIZE = (10, 10)

# Map output dot density
MAP_OUT_DPI = 100

# Window size (in pixels) for workflow scheme interface
SCENE_SIZE = (2400, 1000)

# Number of color levels to plot
NB_COLOR_LEVELS = 512

# Color style
## Discrete color map (loop over the list if more are required)
DEFAULT_COLORS = OrderedDict([('Blue', '#1f77b4'), ('Orange', '#ff7f0e'), ('Green', '#2ca02c'), ('Red', '#d62728'),
                              ('Purple', '#9467bd'), ('Brown', '#8c564b'), ('Pink', '#e377c2'), ('DarkGray', '#7f7f7f'),
                              ('Yellow', '#bcbd22'), ('Cyan', '#17becf')])

## Continuous color map
## See https://matplotlib.org/examples/color/colormaps_reference.html to preview color rendering
DEFAULT_COLOR_STYLE = 'coolwarm'
# NOTE(review): name contains a typo ("SYLES" for "STYLES") but is kept,
# as renaming this public setting would break existing importers/user settings
COLOR_SYLES = ['ocean', 'gist_earth', 'terrain', 'gnuplot', 'gnuplot2', 'CMRmap',
               'gist_rainbow', 'rainbow', 'jet', # Miscellaneous colormaps
               'viridis', 'plasma', 'inferno', 'magma', # Perceptually Uniform Sequential colormaps
               'Spectral', 'coolwarm', 'seismic', # Diverging colormaps
               'Greys', 'Purples', 'Blues', 'Greens', 'Oranges', 'Reds', # Sequential colormaps
               'YlOrBr', 'YlOrRd', 'OrRd', 'PuRd', 'RdPu', 'BuPu', 'GnBu', 'PuBu',
               'YlGnBu', 'PuBuGn', 'BuGn', 'YlGn']

# Default axis label for coordinates
X_AXIS_LABEL, Y_AXIS_LABEL = 'X (m)', 'Y (m)'
X_AXIS_LABEL_DISTANCE = 'Distance (m)'
Y_AXIS_LABEL_CROSS_SECTION = '' # If empty then it is automatically computed from input Serafin language
TITLE_CROSS_SECTION = ''

# Number of bins for EWSD distribution (for GUI `Compare results`)
NB_BINS_EWSD = 100
101 |
--------------------------------------------------------------------------------
/pyteltools/geom/BlueKenue.py:
--------------------------------------------------------------------------------
1 | """!
2 | Read and write BlueKenue files (.i2s/.i3s/.xyz)
3 | """
4 |
5 | import numpy as np
6 |
7 | from .geometry import Polyline
8 |
9 |
class BlueKenue:
    def __init__(self, filename, mode):
        """!
        @brief Generic handler for a BlueKenue text file
        @param filename : path to the BlueKenue file
        @param mode : opening mode: `r` to read, `w` or `x` to write
        """
        self.filename = filename
        self.mode = mode
        # Handle and parsed header are populated later (context entry / header read)
        self.header = None
        self.file = None

    def __enter__(self):
        # BlueKenue files are Latin-1 (ISO-8859-1) encoded text files
        self.file = open(self.filename, self.mode, encoding='ISO-8859-1')
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.file.close()
28 |
29 |
class Read(BlueKenue):
    """!
    BlueKenue file reader (.i2s/.i3s polylines and .xyz points)
    """
    def __init__(self, filename):
        super().__init__(filename, 'r')

    def read_header(self):
        """!
        @brief Read the header lines up to (and including) ':EndHeader'
        @return <bool>: True if a complete header was found, False otherwise
            (in which case the file is rewound to its beginning)
        """
        self.header = []
        while True:
            line = self.file.readline()
            self.header.append(line)
            if line == ':EndHeader\n':
                break
            if not line:  # EOF reached without ':EndHeader': not a valid header
                self.header = []
                self.file.seek(0)
                return False
        return True

    def get_lines(self):
        """!
        @brief Iterate over all polylines of the file
        @return <geom.geometry.Polyline>: polylines carrying their attribute value
        """
        while True:
            line = self.file.readline()
            if not line:  # EOF
                break
            if line == '\n':  # there could be blank lines between line sets
                continue
            line_header = tuple(line.rstrip().split())
            try:
                nb_points = int(line_header[0])
            except ValueError:
                # Not a "<nb_points> <attribute>" set header line: skip it
                continue
            coordinates = []
            for i in range(nb_points):
                line = self.file.readline()
                coordinates.append(tuple(map(float, line.rstrip().split())))
            poly = Polyline(coordinates)
            # NOTE(review): assumes the set header always carries an attribute
            # value as its second token (IndexError otherwise) -- TODO confirm
            poly.add_attribute(float(line_header[1]))
            yield poly

    def get_polygons(self):
        # Closed polylines only
        for poly in self.get_lines():
            if poly.is_closed():
                yield poly

    def get_open_polylines(self):
        # Open polylines only
        for poly in self.get_lines():
            if not poly.is_closed():
                yield poly

    def get_points(self):
        """!
        @brief Iterate over the (x, y, z) points of a .xyz file
        @return <numpy.ndarray>: arrays of point coordinates
        """
        for line in self.file.readlines():
            if line == '\n':
                continue
            try:
                x, y, z = tuple(map(float, line.rstrip().split()))
            except ValueError:
                # Skip any line that is not exactly three floats (e.g. header lines)
                continue
            yield np.array([x, y, z])
89 |
90 |
class Write(BlueKenue):
    """!
    BlueKenue file writer (.i2s/.i3s polylines and .xyz points)
    """
    XYZ_HEADER = ':FileType xyz ASCII EnSim 1.0\n'
    I2S_HEADER = ':FileType i2s ASCII EnSim 1.0\n'
    I3S_HEADER = ':FileType i3s ASCII EnSim 1.0\n'

    def __init__(self, filename):
        super().__init__(filename, 'w')

    def write_header(self, header=None):
        """!
        @brief Write the file header
        @param header <[str]>: header lines to copy verbatim; if None or empty,
            a minimal header is deduced from the file extension
        """
        # Fixed: mutable default argument ([]) replaced by None (same behavior)
        if header:
            for line in header:
                self.file.write(line)
        else:
            if self.filename.endswith('i2s'):
                self.file.write(Write.I2S_HEADER)
            elif self.filename.endswith('i3s'):
                self.file.write(Write.I3S_HEADER)
            elif self.filename.endswith('xyz'):
                self.file.write(Write.XYZ_HEADER)
            self.file.write(':EndHeader\n')

    def write_lines(self, lines, attributes):
        """!
        @brief Write polylines, one "<nb_points> <attribute>" header per line set
        @param lines <[geom.geometry.Polyline]>: polylines to write
        @param attributes : attribute values, one per polyline
        """
        for poly, attribute in zip(lines, attributes):
            nb_points = len(list(poly.coords()))
            self.file.write('%d %s\n' % (nb_points, str(attribute)))
            for p in poly.coords():
                self.file.write(' '.join(map(str, p)))
                self.file.write('\n')

    def write_points(self, points):
        """!
        @brief Write point coordinates, one point per line
        """
        for p in points:
            self.file.write(' '.join(map(str, p)))
            self.file.write('\n')
127 |
--------------------------------------------------------------------------------
/pyteltools/geom/Shapefile.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=C0326
2 | """!
3 | Read and write .shp files
4 | """
5 | import numpy as np
6 | import shapefile
7 | from shapefile import ShapefileException as ShpException
8 | from struct import error
9 |
10 | from .geometry import Polyline
11 |
12 |
def get_shape_type(input_filename):
    """!
    Return the shape type code of the given shapefile
    """
    return shapefile.Reader(input_filename).shapeType
16 |
17 |
def get_lines(input_filename, shape_type):
    """!
    Iterate over the shapes of a shapefile matching `shape_type`,
    yielding them as Polyline objects carrying the record attributes
    """
    reader = shapefile.Reader(input_filename)
    for record in reader.shapeRecords():
        shape = record.shape
        if shape.shapeType != shape_type:
            continue
        if shape_type == shapefile.POLYLINEZ:
            # Z shapes keep their elevation array
            yield Polyline(shape.points, record.record, shape.z)
        else:
            yield Polyline(shape.points, record.record)
28 |
29 |
def get_open_polylines(input_filename):
    """!
    Iterate over the polylines of a POLYLINE/POLYLINEZ/POLYLINEM shapefile
    """
    try:
        shape_type = get_shape_type(input_filename)
        if shape_type in (shapefile.POLYLINE, shapefile.POLYLINEZ, shapefile.POLYLINEM):
            yield from get_lines(input_filename, shape_type)
    except error:
        raise ShpException('Error while reading Shapefile. Inconsistent bytes.')
38 |
39 |
def get_polygons(input_filename):
    """!
    Iterate over the polygons of a POLYGON/POLYGONZ/POLYGONM shapefile
    """
    try:
        shape_type = get_shape_type(input_filename)
        if shape_type in (shapefile.POLYGON, shapefile.POLYGONZ, shapefile.POLYGONM):
            yield from get_lines(input_filename, shape_type)
    except error:
        raise ShpException('Error while reading Shapefile. Inconsistent bytes.')
48 |
49 |
def get_all_fields(input_filename):
    """!
    Get all fields characteristics of a shapefile
    @param input_filename : path to shapefile
    @return : list composed of a tuple (attribute name, attribute type, length and
        precision) for each field
    """
    reader = shapefile.Reader(input_filename)
    # The first entry is skipped (pyshp's DeletionFlag pseudo-field)
    return reader.fields[1:]
59 |
60 |
def get_attribute_names(input_filename):
    """!
    Get attributes (except the M value) of a shapefile
    @param input_filename : path to shapefile
    @return <[str], [int]>: list of field names and indices
    """
    names, indices = [], []
    for i, (field_name, field_type, _, _) in enumerate(get_all_fields(input_filename)):
        # M (measure) fields are not considered attributes
        if field_type == 'M':
            continue
        indices.append(i)
        if isinstance(field_name, bytes):  # pyshp may return byte strings
            field_name = field_name.decode('latin-1')
        names.append(field_name)
    return names, indices
78 |
79 |
def get_numeric_attribute_names(input_filename):
    """!
    Get all numeric attributes of a shapefile
    @param input_filename : path to shapefile
    @return <(int, str)>: generator of (field index, field name) pairs
    """
    for i, (field_name, field_type, _, _) in enumerate(get_all_fields(input_filename)):
        # 'N' (numeric) and 'F' (float) are the numeric DBF field types
        if field_type in ('N', 'F'):
            if isinstance(field_name, bytes):  # pyshp may return byte strings
                field_name = field_name.decode('latin-1')
            yield i, field_name
91 |
92 |
def get_points(input_filename, indices=None, with_z=False):
    """!
    Get specific points (coordinates and attributes) from a shapefile
    @param input_filename : path to shapefile
    @param indices <[int]>: indices of the attributes to keep (all if None)
    @param with_z : extract z coordinate (only POINTZ shapes are yielded then)
    @return : tuple of coordinates and list of corresponding field values
    """
    try:
        sf = shapefile.Reader(input_filename)
        for record in sf.shapeRecords():
            if record.shape.shapeType in (shapefile.POINT, shapefile.POINTZ, shapefile.POINTM):
                decoded_attributes = []
                for attribute in record.record:
                    if isinstance(attribute, bytes):  # pyshp may return byte strings
                        attribute = attribute.decode('latin-1')
                    decoded_attributes.append(attribute)
                if indices is not None:
                    decoded_attributes = [decoded_attributes[i] for i in indices]
                if not with_z:
                    yield tuple(record.shape.points[0]), decoded_attributes
                elif record.shape.shapeType == shapefile.POINTZ:
                    # Only POINTZ shapes carry an elevation; others are skipped when with_z
                    x, y = record.shape.points[0]
                    yield (x, y, record.shape.z[0]), decoded_attributes
    except error:
        raise ShpException('Error while reading Shapefile. Inconsistent bytes.')
123 |
124 |
def write_shp_points_z(output_filename, z_name, points):
    """!
    Write a POINTZ shapefile with a single numeric attribute
    @param output_filename : path to output shapefile
    @param z_name : name of the attribute carrying the z value
    @param points : iterable of (x, y, z) tuples
    """
    w = shapefile.Writer(output_filename, shapefile.POINTZ)
    w.field(z_name, 'N', decimal=6)

    for (x, y, z) in points:
        w.pointz(x, y, z)
        w.record(z)
    # Close the writer explicitly so the .shp/.shx/.dbf files are flushed
    # (pyshp 2.x only guarantees complete output after close())
    w.close()
132 |
133 |
def write_shp_lines(output_filename, shape_type, lines, attribute_name):
    """!
    Write polylines in a shapefile of the requested shape type
    @param output_filename : path to output shapefile
    @param shape_type : target shape type (POLYLINE/POLYGON and their Z/M variants)
    @param lines <[geom.geometry.Polyline]>: polylines to write
    @param attribute_name : name of the single numeric attribute
    """
    w = shapefile.Writer(output_filename, shapeType=shape_type)
    w.field(attribute_name, 'N', decimal=6)

    for poly in lines:
        coords = np.array(poly.coords())
        if shape_type < 10 and not poly.is_2d():
            coords = np.delete(coords, 2, 1)  # remove Z array
        if 10 < shape_type < 20 and poly.is_2d():
            # Z variants require an elevation column: pad with zeros
            coords = np.hstack((coords, np.zeros((poly.nb_points(), 1))))
        if shape_type > 10:
            # Z and M variants carry a measure column
            m = np.array(poly.m, dtype=float).reshape(coords.shape[0], 1)
            coords = np.hstack((coords, m))
        if shape_type == shapefile.POLYLINE:
            w.line([list(map(tuple, coords))])
        elif shape_type == shapefile.POLYGON:
            w.poly([list(map(tuple, coords))])
        elif shape_type == shapefile.POLYLINEZ:
            w.linez([list(map(tuple, coords))])
        elif shape_type == shapefile.POLYGONZ:
            w.polyz([list(map(tuple, coords))])
        elif shape_type == shapefile.POLYLINEM:
            w.linem([list(map(tuple, coords))])
        else:
            w.polym([list(map(tuple, coords))])
        w.record(poly.attributes()[0])
    # Close the writer explicitly so the output files are flushed (pyshp 2.x)
    w.close()
160 |
--------------------------------------------------------------------------------
/pyteltools/geom/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CNR-Engineering/PyTelTools/1bcba819144ddcff28dadb2c3d6d6fcc5db20142/pyteltools/geom/__init__.py
--------------------------------------------------------------------------------
/pyteltools/geom/geometry.py:
--------------------------------------------------------------------------------
1 | """!
2 | Geometrical objects
3 | """
4 |
5 | import numpy as np
6 | from shapely.geometry import Point, MultiPolygon, LineString as OpenPolyline, Polygon as ClosedPolyline
7 |
8 |
class Polyline:
    """!
    @brief Custom (open or closed) polyline class
    """
    def __init__(self, coordinates, attributes=None, z_array=None, m_array=None, id=None):
        """!
        @param coordinates <[tuple]>: (x, y) or (x, y, z) vertex coordinates
        @param attributes : attribute values attached to the line
        @param z_array : optional z coordinates (makes the line 3D)
        @param m_array : optional measure (M) values, one per vertex
        @param id : optional line identifier
        """
        self._nb_points = len(coordinates)
        self._is_2d = len(coordinates[0]) == 2
        if z_array is not None:
            self._is_2d = False

        self._is_closed = False
        if tuple(coordinates[0]) == tuple(coordinates[-1]):
            self._is_closed = len(coordinates) > 2  # line with 2 coordinates which are identical can not be a polygon
            if z_array is not None:
                self._is_closed = z_array[-1] == z_array[0]
        if z_array is None:
            coord = coordinates
        else:
            coord = [(x, y, z) for (x, y), z in zip(coordinates, z_array)]
        if self._is_closed:
            self._polyline = ClosedPolyline(coord)
        else:
            self._polyline = OpenPolyline(coord)
        if attributes is None:
            self._attributes = []
        else:
            self._attributes = attributes[:]

        if m_array is None:
            self.m = [None] * self._nb_points
        else:
            if m_array:
                self.m = m_array[:]
            else:
                self.m = [None] * self._nb_points
        self.id = id

    def set_id(self, id):
        self.id = id

    def to_3d(self, z_array):
        """Return a 3D copy of this line using the given z values."""
        return Polyline(self.coords(), self.attributes(), z_array)

    def to_2d(self):
        """Return a 2D copy of this line (z coordinate dropped, M values kept)."""
        if self.is_2d():
            return Polyline(self.coords(), self.attributes(), m_array=self.m)
        return Polyline(list(map(tuple, np.array(self.coords())[:, :2])), self.attributes(), m_array=self.m)

    def is_2d(self):
        return self._is_2d

    def is_closed(self):
        return self._is_closed

    def nb_points(self):
        return self._nb_points

    def attributes(self):
        return self._attributes

    def add_attribute(self, attribute):
        self._attributes.append(attribute)

    def coords(self):
        # Closed lines are backed by a shapely Polygon whose vertices live on the exterior ring
        if self.is_closed():
            return self._polyline.exterior.coords
        return self._polyline.coords

    def polyline(self):
        return self._polyline

    def project(self, x, y):
        """Return the distance along the line of the point projected from (x, y)."""
        return self._polyline.project(Point(x, y))

    def segments(self):
        """Yield (x_increasing, y_increasing, segment) for every consecutive vertex pair."""
        prev_x, prev_y = None, None
        for coord in self.coords():
            x, y = coord[:2]  # ignore elevation if 3D
            if prev_x is None:
                prev_x, prev_y = x, y
            else:
                yield x > prev_x, y > prev_y, Polyline([(prev_x, prev_y), (x, y)])
                prev_x, prev_y = x, y

    def __str__(self):
        return ['Open', 'Closed'][self.is_closed()] + ' polyline with coordinates %s' % str(list(self.coords()))

    def contains(self, item):
        return self._polyline.contains(item)

    def bounds(self):
        return self._polyline.bounds

    def length(self):
        return self._polyline.length

    def polygon_intersection(self, triangle):
        """!
        @brief (Used in volume calculation) Return the polygon or multipolygon intersection with the triangle
        @param triangle : A triangle
        @return : The intersection with the triangle
        """
        inter = self._polyline.intersection(triangle)
        if inter.geom_type == 'Polygon' or inter.geom_type == 'MultiPolygon':
            return True, inter
        elif inter.geom_type == 'GeometryCollection':
            # Keep only the polygonal parts of a mixed intersection result
            poly = list(filter(lambda x: x.geom_type == 'Polygon', inter.geoms))
            if not poly:
                return False, None
            return True, MultiPolygon(poly)
        return False, None

    @staticmethod
    def triangle_difference(triangle, polygon):
        """!
        @brief (Used in volume calculation) Return the polygon or multipolygon in triangle but not in polygon
        @param triangle : A triangle
        @param polygon : A polygon
        @return :
            The difference between triangle and polygon
        """
        diff = triangle.difference(polygon.polyline())
        if diff.geom_type == 'Polygon' or diff.geom_type == 'MultiPolygon':
            return True, diff
        elif diff.geom_type == 'GeometryCollection':
            # Keep only the polygonal parts of a mixed difference result
            poly = list(filter(lambda x: x.geom_type == 'Polygon', diff.geoms))
            if not poly:
                return False, None
            return True, MultiPolygon(poly)
        return False, None

    def linestring_intersection(self, triangle):
        """!
        @brief (Used in flux calculation) Returns the LineString intersection with the triangle
        @param triangle : A triangle
        @return : The intersection with the triangle
        """
        inter = triangle.intersection(self._polyline)
        if inter.geom_type == 'LineString':
            return True, [inter]
        elif inter.geom_type == 'MultiLineString':
            return True, list(inter.geoms)
        elif inter.geom_type == 'GeometryCollection':
            return True, list(filter(lambda x: x.geom_type == 'LineString', inter.geoms))
        return False, None

    def apply_transformations(self, transformations):
        """Return a copy of this line with every coordinate transformation applied in order."""
        new_coords = np.array(list(self.coords()))
        if self.is_2d():
            # Transformations act on (x, y, z): pad 2D lines with a zero z column
            new_coords = np.hstack((new_coords, np.zeros((self.nb_points(), 1))))

        for t in transformations:
            new_coords = np.apply_along_axis(t, 1, new_coords)
        if self.is_2d():
            new_coords = new_coords[:, :2]

        return Polyline(list(map(tuple, new_coords)), self.attributes(), m_array=self.m)

    def resample(self, max_len):
        """!
        @brief Return a new polyline with vertices inserted so no segment is longer than max_len
        @param max_len : maximal segment length after resampling
        @return <Polyline>: resampled polyline (attributes kept, M values linearly interpolated)
        """
        new_coords = []
        new_m = []
        coords = list(self.coords())

        new_coords.append(coords[0])
        new_m.append(self.m[0])

        for i in range(self.nb_points()-1):
            first_point, second_point = coords[i], coords[i+1]
            segment = OpenPolyline([first_point, second_point])
            # Guard against zero-length segments (duplicated vertices) which
            # previously caused a division by zero below
            nb_segments = max(1, int(np.ceil(segment.length / max_len)))
            inv_nb_segments = 1/nb_segments
            first_m, second_m = self.m[i], self.m[i+1]
            if first_m is None or second_m is None:
                interpolate_m = False
            else:
                interpolate_m = True

            for j in range(1, nb_segments):
                new_point = list(segment.interpolate(j*inv_nb_segments, normalized=True).coords)[0]
                new_coords.append(new_point)
                if interpolate_m:
                    # Linear interpolation of M at fraction j/nb_segments.
                    # Fixed: the previous formula used (1-j) instead of
                    # (nb_segments-j), which is wrong whenever intermediate
                    # points are inserted (nb_segments >= 2).
                    m = ((nb_segments-j) * first_m + j * second_m) * inv_nb_segments
                    new_m.append(m)
                else:
                    new_m.append(None)
            new_coords.append(second_point)
            new_m.append(second_m)
        return Polyline(new_coords, self.attributes(), m_array=new_m)

    def __repr__(self):
        return "%sPolyline with %i vertices" % ('Closed ' if self.is_closed() else '', len(self.coords()))
200 |
--------------------------------------------------------------------------------
/pyteltools/geom/util.py:
--------------------------------------------------------------------------------
from pyteltools.utils.log import new_logger

# Module-level logger shared by the modules of the geom sub-package
logger = new_logger(__name__)
4 |
--------------------------------------------------------------------------------
/pyteltools/gui/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CNR-Engineering/PyTelTools/1bcba819144ddcff28dadb2c3d6d6fcc5db20142/pyteltools/gui/__init__.py
--------------------------------------------------------------------------------
/pyteltools/gui/classic_gui.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from PyQt5.QtCore import Qt
3 | from PyQt5.QtWidgets import (QApplication, QComboBox, QDialog, QDialogButtonBox, QFrame, QGroupBox, QHBoxLayout,
4 | QLabel, QListWidget, QListView, QPushButton, QRadioButton, QSplitter, QStackedLayout,
5 | QVBoxLayout, QWidget)
6 |
7 | from pyteltools.conf import settings
8 |
9 | from .CalculatorGUI import CalculatorGUI
10 | from .CompareResultsGUI import CompareResultsGUI
11 | from .ComputeFluxGUI import ComputeFluxGUI
12 | from .ComputeVolumeGUI import ComputeVolumeGUI
13 | from .ConfigTransformation import TransformationMap
14 | from .ExtractVariablesGUI import ExtractVariablesGUI
15 | from .GeometryConverterGUI import FileConverterGUI
16 | from .LinesGUI import LinesGUI
17 | from .MaxMinMeanGUI import MaxMinMeanGUI
18 | from .PointsGUI import PointsGUI
19 | from .ProjectLinesGUI import ProjectLinesGUI
20 | from .ProjectMeshGUI import ProjectMeshGUI
21 |
22 |
class GlobalConfigDialog(QDialog):
    """!
    Dialog for editing the global options (input Serafin language and CSV
    separator); the selected values are exposed through `new_options`.
    """
    def __init__(self, language, csv_separator):
        super().__init__()
        buttons = QDialogButtonBox(QDialogButtonBox.Ok, Qt.Horizontal, self)
        buttons.accepted.connect(self._select)
        self.new_options = tuple()

        self.lang_box = QGroupBox('Input Serafin language')
        hlayout = QHBoxLayout()
        self.french_button = QRadioButton('French')
        english_button = QRadioButton('English')
        hlayout.addWidget(self.french_button)
        hlayout.addWidget(english_button)
        self.lang_box.setLayout(hlayout)
        self.lang_box.setMaximumHeight(80)
        if language == 'fr':
            self.french_button.setChecked(True)
        else:
            english_button.setChecked(True)

        self.csv_box = QComboBox()
        self.csv_box.setFixedHeight(30)
        for sep in ['Semicolon ;', 'Comma ,', 'Tab']:
            self.csv_box.addItem(sep)
        if csv_separator == ';':
            self.csv_box.setCurrentIndex(0)
        elif csv_separator == ',':
            self.csv_box.setCurrentIndex(1)
        else:
            self.csv_box.setCurrentIndex(2)

        layout = QVBoxLayout()
        layout.addWidget(self.lang_box)
        hlayout = QHBoxLayout()
        hlayout.addWidget(QLabel('CSV separator'))
        hlayout.addWidget(self.csv_box, Qt.AlignLeft)
        layout.addLayout(hlayout)
        layout.setSpacing(20)
        layout.addStretch()
        layout.addWidget(buttons)
        self.setLayout(layout)

        self.setWindowTitle('PyTelTools global configuration')
        self.resize(300, 200)

    def _current_options(self):
        """Return the (language, separator) pair currently selected in the widgets."""
        separator = {0: ';', 1: ',', 2: '\t'}[self.csv_box.currentIndex()]
        language = ['en', 'fr'][self.french_button.isChecked()]
        return language, separator

    def _select(self):
        # OK pressed: store the selection and close with "accepted" status
        self.new_options = self._current_options()
        self.accept()

    def closeEvent(self, event):
        # Window closed without OK: still expose the current widget state
        # (option-reading logic was previously duplicated here and in _select)
        self.new_options = self._current_options()
78 |
79 |
class MainPanel(QWidget):
    """!
    Stacked panel holding one page per tool GUI; the page order must match
    the page list built in ClassicMainWindow.
    """
    def __init__(self, parent):
        super().__init__()
        self.extract = ExtractVariablesGUI(parent)
        self.maxmin = MaxMinMeanGUI(parent)
        self.points = PointsGUI(parent)
        self.lines = LinesGUI(parent)
        self.project = ProjectLinesGUI(parent)
        self.mesh = ProjectMeshGUI(parent)
        self.volume = ComputeVolumeGUI(parent)
        self.compare = CompareResultsGUI(parent)
        self.flux = ComputeFluxGUI(parent)

        trans = TransformationMap()
        self.conv = FileConverterGUI(parent)
        self.calc = CalculatorGUI(parent)

        # Pages are stacked in the exact order expected by the page list
        self.stackLayout = QStackedLayout()
        pages = (QLabel('Hello! This is the start page (TODO)'), self.extract, self.maxmin,
                 self.points, self.lines, self.project, self.mesh, self.volume, self.flux,
                 self.compare, trans, self.conv, self.calc)
        for page in pages:
            self.stackLayout.addWidget(page)
        self.setLayout(self.stackLayout)

        self.stackLayout.currentChanged.connect(parent.autoResize)

    def switch_language(self, language):
        """Propagate the language change to every tool page."""
        pages = (self.extract, self.maxmin, self.points, self.lines, self.project,
                 self.mesh, self.volume, self.compare, self.flux, self.conv, self.calc)
        for page in pages:
            page.switch_language(language)
119 |
120 |
class ClassicMainWindow(QWidget):
    """!
    Main window of the classic interface: a page list on the left drives the
    stacked panel of tool GUIs on the right.
    """
    def __init__(self):
        super().__init__()
        # Global options initialized from the settings, editable at runtime
        self.language = settings.LANG
        self.csv_separator = settings.CSV_SEPARATOR
        self.fmt_float = settings.FMT_FLOAT
        self.logging_level = settings.LOGGING_LEVEL
        self.panel = MainPanel(self)

        config_button = QPushButton('Global\nConfiguration')
        config_button.setMinimumHeight(40)
        config_button.clicked.connect(self.global_config)

        # One entry per page, in the same order as the MainPanel stack
        pageList = QListWidget()
        for name in ['Start', 'Extract variables', 'Max/Min/Mean/Arrival/Duration', 'Interpolate on points',
                     'Interpolate along lines', 'Project along lines', 'Project mesh',
                     'Compute volume', 'Compute flux', 'Compare two results',
                     'Transform coordinate systems', 'Convert geom file formats', 'Variable Calculator']:
            pageList.addItem('\n' + name + '\n')
        pageList.setFlow(QListView.TopToBottom)
        # Selecting a row switches the visible page of the stacked panel
        pageList.currentRowChanged.connect(self.panel.layout().setCurrentIndex)

        pageList.setCurrentRow(0)

        splitter = QSplitter()
        left_widget = QWidget()
        vlayout = QVBoxLayout()
        vlayout.addWidget(config_button)
        vlayout.addWidget(pageList)
        left_widget.setLayout(vlayout)
        splitter.addWidget(left_widget)
        splitter.addWidget(self.panel)
        splitter.setHandleWidth(5)
        splitter.setCollapsible(0, False)
        splitter.setCollapsible(1, False)

        # Draw a thin sunken vertical line on the splitter handle
        handle = splitter.handle(1)
        layout = QVBoxLayout()
        layout.setSpacing(0)
        layout.setContentsMargins(0, 0, 0, 0)
        line = QFrame()
        line.setFrameShape(QFrame.VLine)
        line.setFrameShadow(QFrame.Sunken)
        layout.addWidget(line)
        handle.setLayout(layout)

        mainLayout = QHBoxLayout()
        mainLayout.addWidget(splitter)
        mainLayout.setContentsMargins(0, 0, 0, 0)
        self.setLayout(mainLayout)

        self.setWindowTitle('PyTelTools :: Classic interface')
        self.setWindowFlags(self.windowFlags() | Qt.CustomizeWindowHint)
        # NOTE(review): this moves the top-left corner to the frame's centre
        # point -- presumably intended to roughly centre the window; confirm
        self.frameGeom = self.frameGeometry()
        self.move(self.frameGeom.center())

    def global_config(self):
        """!
        @brief Open the global configuration dialog and apply the chosen options
        """
        dlg = GlobalConfigDialog(self.language, self.csv_separator)
        value = dlg.exec_()
        if value == QDialog.Accepted:
            self.language, self.csv_separator = dlg.new_options
            self.panel.switch_language(self.language)

    def autoResize(self, index):
        # Fit the window to the newly displayed page, unless maximized
        if not self.isMaximized():
            self.resize(self.panel.stackLayout.widget(index).sizeHint())

    def inDialog(self):
        # Disable the main window while a child dialog is open
        self.setWindowFlags(self.windowFlags() & ~Qt.WindowCloseButtonHint)
        self.setEnabled(False)
        self.show()

    def outDialog(self):
        # Re-enable the main window once the child dialog is closed
        self.setWindowFlags(self.windowFlags() | Qt.WindowCloseButtonHint)
        self.setEnabled(True)
        self.show()
197 |
198 |
def exception_hook(exctype, value, traceback):
    """!
    @brief Needed for suppressing traceback silencing in newer version of PyQt5
    """
    # Delegate to the default hook saved in sys._excepthook at startup,
    # then abort with a non-zero exit code so the failure is visible.
    original_hook = sys._excepthook
    original_hook(exctype, value, traceback)
    sys.exit(1)
205 |
206 |
if __name__ == '__main__':
    # suppress explicitly traceback silencing
    # keep a reference to the default hook so exception_hook can delegate to it
    sys._excepthook = sys.excepthook
    sys.excepthook = exception_hook

    # launch the classic interface as a standalone application
    app = QApplication(sys.argv)
    window = ClassicMainWindow()
    window.show()
    app.exec_()
216 |
--------------------------------------------------------------------------------
/pyteltools/main_interface.py:
--------------------------------------------------------------------------------
1 | from PyQt5.QtWidgets import (QApplication, QDialog, QHBoxLayout, QPushButton)
2 | import sys
3 |
4 | from pyteltools.gui.classic_gui import ClassicMainWindow as GUIWindow
5 | from pyteltools.workflow.workflow_gui import WorkflowWelcomeWindow
6 |
7 |
class HelloWorld(QDialog):
    """Start-up dialog asking the user to pick the classic or the workflow interface."""
    def __init__(self):
        super().__init__()
        self.choice = None  # 1 = classic interface, 2 = workflow interface

        left_button = QPushButton('Classic\nInterface')
        right_button = QPushButton('Workflow\nInterface')
        left_button.setFixedSize(150, 200)
        right_button.setFixedSize(150, 200)

        left_button.clicked.connect(self.choose_left)
        right_button.clicked.connect(self.choose_right)

        layout = QHBoxLayout()
        layout.addWidget(left_button)
        layout.addWidget(right_button)
        self.setLayout(layout)
        self.setWindowTitle('PyTelTools')

    def choose_left(self):
        """Record the classic-interface choice and close the dialog."""
        self.choice = 1
        self.accept()

    def choose_right(self):
        """Record the workflow-interface choice and close the dialog."""
        self.choice = 2
        self.accept()
33 |
34 |
def run_gui_app():
    """Show the interface chooser, then launch the selected interface's main window."""
    app = QApplication(sys.argv)
    chooser = HelloWorld()
    if chooser.exec_() != QDialog.Accepted:
        # dialog closed without a choice: quit immediately
        sys.exit(0)
    if chooser.choice == 1:
        window = GUIWindow()
        window.showMaximized()
    else:
        window = WorkflowWelcomeWindow()
        window.show()
    app.exec_()
49 |
50 |
if __name__ == '__main__':
    # entry point when the module is executed directly
    run_gui_app()
53 |
--------------------------------------------------------------------------------
/pyteltools/slf/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CNR-Engineering/PyTelTools/1bcba819144ddcff28dadb2c3d6d6fcc5db20142/pyteltools/slf/__init__.py
--------------------------------------------------------------------------------
/pyteltools/slf/comparison.py:
--------------------------------------------------------------------------------
1 | """!
2 | Comparison between two Serafin files with identical meshes
3 | """
4 |
5 | import numpy as np
6 |
7 | from pyteltools.slf.interpolation import Interpolator
8 | from pyteltools.slf.volume import TruncatedTriangularPrisms
9 |
10 |
class ReferenceMesh(TruncatedTriangularPrisms):
    """!
    @brief Wrapper for computing error measures when comparing a test mesh to a reference mesh

    The test mesh should have identical geometry to the reference mesh. Only the values are different.
    The comparison region can be the whole mesh or the interior of a polygon.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.area = {}               # (i, j, k) -> triangle area, for triangles fully inside the region
        self.point_weight = []       # nodal weights: sum of adjacent included-triangle areas / 3
        self.inverse_total_area = 1  # 1 / total area of the comparison region

        self.nb_triangles_inside = 0
        self.inside_polygon = False  # True when the region is restricted to a polygon
        self.polygon = None
        # partially-covered triangle -> (intersection area, barycentric interpolator at its centroid)
        self.triangle_polygon_intersection = {}

    def add_polygon(self, polygon):
        """!
        @brief Initialize the weight on all points of the mesh depending on the comparison region
        @param polygon : A polygon defining the comparison region or None if it is the whole mesh
        """
        # Fix: point_weight was redundantly re-initialized a second time in the polygon branch.
        self.area = {}
        self.point_weight = np.zeros((self.nb_points,), dtype=np.float64)
        total_area = 0

        if polygon is None:  # the comparison region is the entire mesh
            self.inside_polygon = False
            self.triangle_polygon_intersection = {}
            self.nb_triangles_inside = self.nb_triangles
            for (i, j, k), t in self.triangles.items():
                area = t.area
                self.area[i, j, k] = area
                total_area += area
                self.point_weight[[i, j, k]] += area
        else:
            self.inside_polygon = True
            self.polygon = polygon
            self.nb_triangles_inside = 0
            self.triangle_polygon_intersection = {}

            # only triangles intersecting the polygon's bounding box can contribute
            potential_elements = self.get_intersecting_elements(polygon.bounds())
            for i, j, k in potential_elements:
                t = self.triangles[i, j, k]
                if polygon.contains(t):
                    # fully-contained triangle: contributes through nodal weights
                    self.nb_triangles_inside += 1
                    area = t.area
                    total_area += area
                    self.point_weight[[i, j, k]] += area
                    self.area[i, j, k] = area
                else:
                    is_intersected, intersection = polygon.polygon_intersection(t)
                    if is_intersected:
                        # partially-covered triangle: integrate via the intersection centroid
                        self.nb_triangles_inside += 1
                        area = intersection.area
                        total_area += area
                        centroid = intersection.centroid
                        interpolator = Interpolator(t).get_interpolator_at(centroid.x, centroid.y)
                        self.triangle_polygon_intersection[i, j, k] = (area, interpolator)
        # each node receives one third of each adjacent triangle's area
        self.point_weight /= 3.0
        self.inverse_total_area = 1 / total_area

    def _integral(self, values):
        """!
        @brief Integrate nodal values over the comparison region (truncated prism volume)
        @param values : The nodal values to integrate
        @return : The integral over the region, including partially-covered triangles
        """
        result = self.point_weight.dot(values)
        if self.inside_polygon:
            result += TruncatedTriangularPrisms.boundary_volume_in_polygon(self.triangle_polygon_intersection,
                                                                           values)
        return result

    def mean_signed_deviation(self, values):
        """!
        @brief Compute the mean signed deviation between two meshes
        @param values : The difference between the test mesh and the reference mesh
        @return : The value of the mean signed deviation
        """
        return self._integral(values) * self.inverse_total_area

    def mean_absolute_deviation(self, values):
        """!
        @brief Compute the mean absolute deviation between two meshes
        @param values : The difference between the test mesh and the reference mesh
        @return : The value of the mean absolute deviation
        """
        return self._integral(np.abs(values)) * self.inverse_total_area

    def root_mean_square_deviation(self, values):
        """!
        @brief Compute the root mean square deviation between two meshes
        @param values : The difference between the test mesh and the reference mesh
        @return : The value of the root mean square deviation
        """
        return np.sqrt(self._integral(np.square(values)) * self.inverse_total_area)

    def element_wise_signed_deviation(self, values):
        """!
        @brief Compute the element wise signed deviation (signed deviation distribution) between two meshes
        @param values : The difference between the test mesh and the reference mesh
        @return : The value of the signed deviation for every triangles in the comparison area
        """
        ewsd = {}
        for (i, j, k), area in self.area.items():
            ewsd[i, j, k] = sum(values[[i, j, k]]) * area / 3.0 * self.nb_triangles_inside \
                            * self.inverse_total_area
        if self.inside_polygon:
            for (i, j, k), (area, interpolator) in self.triangle_polygon_intersection.items():
                ewsd[i, j, k] = interpolator.dot(values[[i, j, k]]) * area * self.nb_triangles_inside \
                                * self.inverse_total_area
        return ewsd

    def quadratic_volume(self, values):
        """!
        @brief (Used in BSS calculations) Compute the quadratic volume between two meshes
        @param values : The difference between the test mesh and the reference mesh
        @return : The value of the quadratic volume
        """
        return self._integral(np.square(values))
147 |
--------------------------------------------------------------------------------
/pyteltools/slf/data/Serafin_var2D.csv:
--------------------------------------------------------------------------------
1 | varID;fr;en;unit
2 | U;VITESSE U;VELOCITY U;M/S
3 | V;VITESSE V;VELOCITY V;M/S
4 | C;CELERITE;CELERITY;M/S
5 | H;HAUTEUR D'EAU;WATER DEPTH;M
6 | S;SURFACE LIBRE;FREE SURFACE;M
7 | B;FOND;BOTTOM;M
8 | F;FROUDE;FROUDE NUMBER;
9 | Q;DEBIT SCALAIRE;SCALAR FLOWRATE;M2/S
10 | Q;DEBIT Q;FLOWRATE Q;M2/S
11 | I;DEBIT SUIVANT X;FLOWRATE ALONG X;M2/S
12 | I;DEBIT QX;FLOWRATE QX;M2/S
13 | J;DEBIT SUIVANT Y;FLOWRATE ALONG Y;M2/S
14 | J;DEBIT QY;FLOWRATE QY;M2/S
15 | E;DISSIPATION;DISSIPATION;WATT/KG
16 | D;VISCOSITE TURB.;VISCOSITY;M2/S
17 | K;ENERGIE TURBUL.;TURBULENT ENERG.;JOULE/KG
18 | P;PRESSION ATMOS.;AIR PRESSURE;PASCAL
19 | M;VITESSE SCALAIRE;SCALAR VELOCITY;M/S
20 | X;VENT X;WIND ALONG X;M/S
21 | Y;VENT Y;WIND ALONG Y;M/S
22 | W;FROTTEMENT;BOTTOM FRICTION;
23 | W;COEFT FROTTEMENT;FRICTION COEFT;
24 | A;DERIVE EN X;DRIFT ALONG X;M
25 | G;DERIVE EN Y;DRIFT ALONG Y;M
26 | L;NBRE DE COURANT;COURANT NUMBER;
27 | MAXZ;COTE MAXIMUM;HIGH WATER MARK;M
28 | TMXZ;TEMPS COTE MAXI;HIGH WATER TIME;S
29 | MAXV;VITESSE MAXIMUM;HIGHEST VELOCITY;M/S
30 | TMXV;T VITESSE MAXI;TIME OF HIGH VEL;S
31 | US;VITESSE DE FROT.;FRICTION VEL.;M/S
32 | US;VITESSE DE FROT;FRICTION VELOCIT;M/S
33 | TAU;CONTRAINTE;BED SHEAR STRESS;PASCAL
34 | TAU;FROTTEMENT TOB;BED SHEAR STRESS;PASCAL
35 | DMAX;DIAMETRE;DIAMETER;MM
36 | WH;HAUTEUR HM0;WAVE HEIGHT HM0;M
37 | PPT;PERIODE PIC TPR5;PEAK PERIOD TPR5;S
38 | RB;FOND RIGIDE;RIGID BED;M
39 | THETAW;DIRECTION MOY;MEAN DIRECTION;DEG
40 | QS;DEBIT SOLIDE;SOLID DISCH;M2/S
41 | QSX;DEBIT SOLIDE X;SOLID DISCH X;M2/S
42 | QSY;DEBIT SOLIDE Y;SOLID DISCH Y;M2/S
43 | QS;DEBIT SOLIDE;SOLID DISCHARGE;M2/S
44 | QSX;DEBIT SOL EN X;SOLID DIS IN X;M2/S
45 | QSY;DEBIT SOL EN Y;SOLID DIS IN Y;M2/S
46 | EV;EVOLUTION;EVOLUTION;M
47 | EV;EVOLUTION FOND;BED EVOLUTION;M
48 | KS;RUGOSITE TOTALE.;RUGOSITE TOTALE;M
49 | KS;RUGOSITE TOTALE;RUGOSITE TOTALE;M
50 | MU;CORR FROTT PEAU;FROT. PEAU MU;
51 | D50;DIAMETRE MOYEN;MEAN DIAMETER;M
52 | UWB;VITESSE FOND;BOTTOM VELOCITY;M/S
53 | QSBL;QS CHARRIAGE;QS BEDLOAD;M2/S
54 | QSBLX;QS CHARRIAGE X;QS BEDLOAD X;M2/S
55 | QSBLY;QS CHARRIAGE Y;QS BEDLOAD Y;M2/S
56 | QSSUSP;QS SUSPENSION;QS SUSPENSION;M2/S
57 | QSSUSPX;QS SUSPENSION X;QS SUSPENSION X;M2/S
58 | QSSUSPY;QS SUSPENSION Y;QS SUSPENSION Y;M2/S
59 | HD;EPAISSEUR DU LIT;BED THICKNESS;M
60 | EF;FLUX D'EROSION;EROSION FLUX;KG/M2/S
61 | DF;FLUX DE DEPOT;DEPOSITION FLUX;KG/M2/S
62 | FROTP;FROT. PEAU;FROT. PEAU;PASCAL
63 |
--------------------------------------------------------------------------------
/pyteltools/slf/data/Serafin_var3D.csv:
--------------------------------------------------------------------------------
1 | varID;fr;en;unit
2 | Z;COTE Z;ELEVATION Z;M
3 | U;VITESSE U;VELOCITY U;M/S
4 | V;VITESSE V;VELOCITY V;M/S
5 | W;VITESSE W;VELOCITY W;M/S
6 | NUX;NUX POUR VITESSE;NUX FOR VELOCITY;M2/S
7 | NUY;NUY POUR VITESSE;NUY FOR VELOCITY;M2/S
8 | NUZ;NUZ POUR VITESSE;NUZ FOR VELOCITY;M2/S
9 | K;ENERGIE TURBULEN;TURBULENT ENERGY;JOULE/KG
10 | EPS;DISSIPATION;DISSIPATION;WATT/KG
11 | RI;NB DE RICHARDSON;RICHARDSON NUMB;
12 | RHO;DENSITE RELATIVE;RELATIVE DENSITY;
13 | DP;PRESSION DYNAMIQ;DYNAMIC PRESSURE;PA
14 | PH;PRESSION HYDROST;HYDROSTATIC PRES;PA
15 | UCONV;U CONVECTION;U ADVECTION;M/S
16 | VCONV;V CONVECTION;V ADVECTION;M/S
17 | WCONV;W CONVECTION;W ADVECTION;M/S
18 | DM1;DM1;DM1;
19 | DHHN;DHHN;DHHN;M
20 | UCONVC;UCONVC;UCONVC;M/S
21 | VCONVC;VCONVC;VCONVC;M/S
22 | UD;UD;UD;M/S
23 | VD;VD;VD;M/S
24 | WD;WD;WD;M/S
25 | M;VITESSE SCALAIRE;SCALAR VELOCITY;M/S
26 | NU;NU POUR VITESSE;NU FOR VELOCITY;M2/S
27 |
--------------------------------------------------------------------------------
/pyteltools/slf/datatypes.py:
--------------------------------------------------------------------------------
1 | from copy import deepcopy
2 | import datetime
3 |
4 | from . import Serafin
5 | from .util import logger
6 |
7 |
class SerafinData:
    """In-memory handle to a Serafin result file, holding its header, time frames
    and the user's current variable/frame selection."""
    def __init__(self, job_id, filename, language):
        self.job_id = job_id
        self.language = language  # variable-name language passed to Serafin.Read
        self.filename = filename
        self.index = None
        self.triangles = {}
        self.header = None
        self.time = []  # <[float]>
        self.time_second = []  # <[datetime.timedelta]> FIXME: should be renamed differently!
        self.start_time = None

        self.selected_vars = []
        self.selected_vars_names = {}  # var_ID -> (name, unit)
        self.selected_time_indices = []
        self.equations = []
        self.us_equation = None
        self.to_single = False  # convert output to single precision

        self.operator = None
        self.metadata = {}
        # * Select single layer: 'layer_selection': an integer
        # * SynchMax: 'var': a string (variable identifier)
        # * Vertical aggregation: 'vertical_operator': 'Min', 'Max' or 'Mean'

    def read(self):
        """Read the file's header and time frames, then select all variables and frames.

        @return <bool>: True when the file is 2D
        @raise Serafin.SerafinRequestError: when the file cannot be opened for reading
        """
        try:
            with Serafin.Read(self.filename, self.language) as input_stream:
                input_stream.read_header()
                input_stream.get_time()

                self.header = input_stream.header.copy()
                self.time = input_stream.time[:]
        except PermissionError:
            raise Serafin.SerafinRequestError('Permission denied (Is the file opened by another application?).')

        # derive the simulation start time from the header date, falling back to 1900-01-01
        if self.header.date is not None:
            try:
                year, month, day, hour, minute, second = self.header.date
                self.start_time = datetime.datetime(year, month, day, hour, minute, second)
            except ValueError:
                logger.warning('Date seems invalid, replaced by default date.')
        if self.start_time is None:
            self.start_time = datetime.datetime(1900, 1, 1, 0, 0, 0)
        self.time_second = list(map(lambda x: datetime.timedelta(seconds=x), self.time))
        self.selected_vars = self.header.var_IDs[:]
        self.selected_vars_names = {var_id: (var_name, var_unit) for (var_id, var_name, var_unit)
                                    in zip(self.header.var_IDs, self.header.var_names, self.header.var_units)}
        self.selected_time_indices = list(range(len(self.time)))
        return self.header.is_2d

    def copy(self):
        """Return a shallow copy sharing the header/mesh but with independent selections."""
        copy_data = SerafinData(self.job_id, self.filename, self.language)
        copy_data.index = self.index
        copy_data.triangles = self.triangles
        copy_data.header = self.header
        copy_data.time = self.time
        copy_data.start_time = self.start_time
        copy_data.time_second = self.time_second
        copy_data.metadata = self.metadata

        copy_data.selected_vars = self.selected_vars[:]
        copy_data.selected_vars_names = deepcopy(self.selected_vars_names)
        copy_data.selected_time_indices = self.selected_time_indices[:]
        copy_data.equations = self.equations[:]
        copy_data.us_equation = self.us_equation
        copy_data.to_single = self.to_single
        copy_data.operator = self.operator
        return copy_data

    def _fill_output_header(self, output_header):
        """Populate a header copy with the selected variables and requested precision."""
        output_header.empty_variables()
        for var_ID in self.selected_vars:
            var_name, var_unit = self.selected_vars_names[var_ID]
            output_header.add_variable(var_ID, var_name, var_unit)
        if self.to_single:
            output_header.to_single_precision()
        return output_header

    def default_output_header(self):
        """Return a copy of the input header restricted to the selected variables."""
        return self._fill_output_header(self.header.copy())

    def build_2d_output_header(self):
        """Return a 2D copy of the input header restricted to the selected variables."""
        return self._fill_output_header(self.header.copy_as_2d())

    def transform_mesh(self, transformations):
        """!
        @brief Apply transformations on mesh nodes (only in 2D)
        @param transformations <[geom.transformation.Transformation]>: list of successive transformations
        """
        self.header.transform_mesh(transformations)
        # invalidate cached spatial index and triangles, they no longer match the mesh
        self.index = None
        self.triangles = {}
106 |
107 |
class CSVData:
    """Simple CSV table kept in memory as a list of rows (each row a list of strings)."""
    def __init__(self, filename, header=None, out_name='', separator=''):
        # filename: path of the associated source file; NOTE(review): it is stored but
        # never read here — when header is None the table is read from `out_name`.
        # Confirm against callers that this asymmetry is intended.
        self.filename = filename
        self.out_name = ''
        self.metadata = {}
        self.separator = ''

        if header is None:  # read existing file
            self.separator = separator
            self.table = []
            self.out_name = out_name
            with open(out_name, 'r') as f:
                for line in f.readlines():
                    # split each line into cells; trailing whitespace/newline stripped
                    self.table.append(line.rstrip().split(self.separator))
        else:
            # start a new table containing only the header row
            self.table = [header]

    def add_row(self, row):
        # row: a list of string cells, appended as-is
        self.table.append(row)

    def write(self, filename, separator):
        # Write every row joined by `separator`, then remember where and how it was written
        with open(filename, 'w') as output_stream:
            for line in self.table:
                output_stream.write(separator.join(line))
                output_stream.write('\n')
        self.out_name = filename
        self.separator = separator
135 |
136 |
class PolylineData:
    """Container for polylines read from a geometry file, with their attribute fields."""
    def __init__(self):
        self.lines = []   # polyline objects
        self.fields = []  # attribute field definitions

    def __len__(self):
        return len(self.lines)

    def add_line(self, line):
        """Append one polyline to the container."""
        self.lines.append(line)

    def set_fields(self, fields):
        """Store a copy of the attribute field definitions."""
        self.fields = fields[:]

    def is_empty(self):
        """Return True when no polyline has been added yet."""
        return not self.lines

    def id_are_unique(self):
        """Return True when every stored polyline carries a distinct id."""
        seen = set()
        for line in self.lines:
            if line.id in seen:
                return False
            seen.add(line.id)
        return True
157 |
158 |
class PointData:
    """Container for points read from a geometry file, with shapefile-style
    attributes and field definitions (bytes values decoded to latin-1 strings)."""
    def __init__(self):
        self.points = []
        self.attributes = []
        self.fields = []
        self.fields_name = []
        self.attributes_decoded = []

    def __len__(self):
        return len(self.points)

    def add_point(self, point):
        """Append one point to the container."""
        self.points.append(point)

    def add_attribute(self, attribute):
        """Store an attribute record and a decoded (all-str) version of it.

        Fix: the bytes branch was missing an `else`, so bytes values were appended
        twice — once decoded and once as their repr (e.g. "b'abc'").
        """
        self.attributes.append(attribute)
        decoded = []
        for a in attribute:
            if type(a) == bytes:
                decoded.append(a.decode('latin-1'))
            else:
                decoded.append(str(a))
        self.attributes_decoded.append(decoded)

    def set_fields(self, fields):
        """Store field definitions and their (decoded) names."""
        self.fields = fields[:]
        for f in fields:
            name = f[0]
            if type(name) == bytes:
                self.fields_name.append(name.decode('latin-1'))
            else:
                self.fields_name.append(name)

    def is_empty(self):
        """Return True when no point has been added yet."""
        return len(self.points) == 0
193 |
--------------------------------------------------------------------------------
/pyteltools/slf/expression/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CNR-Engineering/PyTelTools/1bcba819144ddcff28dadb2c3d6d6fcc5db20142/pyteltools/slf/expression/__init__.py
--------------------------------------------------------------------------------
/pyteltools/slf/expression/condition.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
class ComplexCondition:
    """Base node for conditions built in an expression pool."""
    def __init__(self, index):
        self.index = index
        self.text = ''
        self.polygonal = False
        self.masked = False  # placeholder
        self.mask_id = 0

    def __str__(self):
        return f'C{self.index:d}: {self.text}'

    def code(self):
        """Short identifier used as a key in evaluation dictionaries."""
        return f'C{self.index:d}'
17 |
18 |
class SimpleCondition(ComplexCondition):
    """Condition comparing a single expression against a numeric threshold."""
    def __init__(self, index, expression, comparator, threshold):
        super().__init__(index)
        self.expression = expression
        self.text = f'{self.expression!r} {comparator} {threshold}'
        self.polygonal = expression.polygonal
        self.mask_id = expression.mask_id

        # bind the comparison once; any comparator other than >, < and >= is treated as <=
        if comparator == '>':
            self._evaluate = lambda v: v > threshold
        elif comparator == '<':
            self._evaluate = lambda v: v < threshold
        elif comparator == '>=':
            self._evaluate = lambda v: v >= threshold
        else:
            self._evaluate = lambda v: v <= threshold

    def evaluate(self, current_values):
        """Apply the bound comparison to the expression's cached values."""
        return self._evaluate(current_values[self.expression.code()])
38 |
39 |
class AndOrCondition(ComplexCondition):
    """Boolean combination (AND / OR) of two existing conditions."""
    def __init__(self, index, first_condition, second_condition, is_and):
        super().__init__(index)
        self.first_condition = first_condition
        self.second_condition = second_condition
        operator_name = 'AND' if is_and else 'OR'
        self.text = '(%s) %s (%s)' % (first_condition.text, operator_name, second_condition.text)
        self.func = np.logical_and if is_and else np.logical_or

    def evaluate(self, current_values):
        """Combine the two conditions' cached boolean values element-wise."""
        first = current_values[self.first_condition.code()]
        second = current_values[self.second_condition.code()]
        return self.func(first, second)
51 |
--------------------------------------------------------------------------------
/pyteltools/slf/expression/expression.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from pyteltools.slf.misc import OPERATIONS, OPERATORS, tighten_expression
4 |
5 |
class ComplexExpression:
    """!
    Base node for expressions stored in an expression pool.
    """
    def __init__(self, index):
        self.index = index
        self.polygonal = False  # polygonal expression can only be evaluated inside a masked expression
        self.masked = False  # masked expression cannot be composed to create new expression
        self.mask_id = 0

    def __repr__(self):
        return ''

    def __str__(self):
        return f'E{self.index:d}: {self!r}'

    def code(self):
        """Short identifier used as a key in evaluation dictionaries."""
        return f'E{self.index:d}'

    def evaluate(self, values, mask=None):
        """Base implementation: no values to produce."""
        return []
27 |
28 |
class PolygonalMask:
    """Mask built from a polygon, tracking the values and child expressions attached to it."""
    def __init__(self, index, mask, values):
        self.index = index
        self.mask = mask
        self.values = values
        self.children = []
        self.nb_children = 0

    def code(self):
        """Short identifier used as a key in evaluation dictionaries."""
        return f'POLY{self.index:d}'

    def add_child(self, child):
        """Register a masked expression as depending on this mask."""
        self.children.append(child.code())
        self.nb_children += 1
43 |
44 |
class SimpleExpression(ComplexExpression):
    """!
    expression object in an expression pool
    """
    def __init__(self, index, postfix, literal_expression):
        # postfix: token list in postfix (RPN) order, evaluated by the stack machine below
        super().__init__(index)
        self.expression = postfix
        self.tight_expression = tighten_expression(literal_expression)

    def __repr__(self):
        return self.tight_expression

    def evaluate(self, values, mask=None):
        """Evaluate the postfix token list against a dict of variable arrays."""
        stack = []
        for symbol in self.expression:
            if symbol in OPERATORS:
                if symbol in ('sqrt', 'sin', 'cos', 'atan'):
                    # unary operator: consumes the top of the stack
                    operand = stack.pop()
                    stack.append(OPERATIONS[symbol](operand))
                else:
                    # binary operator. NOTE(review): the first pop is the stack top,
                    # i.e. the right-hand operand in infix order — OPERATIONS must be
                    # defined with the matching argument convention; confirm in slf.misc.
                    first_operand = stack.pop()
                    second_operand = stack.pop()
                    stack.append(OPERATIONS[symbol](first_operand, second_operand))
            else:
                if symbol[0] == '[':
                    # '[VAR]' token: push the variable's value array
                    stack.append(values[symbol[1:-1]])
                else:
                    # numeric literal token
                    stack.append(float(symbol))
        return stack.pop()
74 |
75 |
class ConditionalExpression(ComplexExpression):
    """Ternary expression: evaluates to true_expression where the condition holds,
    false_expression elsewhere."""
    def __init__(self, index, condition, true_expression, false_expression):
        super().__init__(index)
        self.condition = condition
        self.true_expression = true_expression
        self.false_expression = false_expression

    def __repr__(self):
        return f'IF ({self.condition.text}) THEN ({self.true_expression!r}) ELSE ({self.false_expression!r})'

    def evaluate(self, values, mask=None):
        """Select element-wise between the two branches' cached values."""
        branch_test = values[self.condition.code()]
        when_true = values[self.true_expression.code()]
        when_false = values[self.false_expression.code()]
        return np.where(branch_test, when_true, when_false)
90 |
91 |
class MaxMinExpression(ComplexExpression):
    """Element-wise maximum or minimum of two expressions."""
    def __init__(self, index, first_expression, second_expression, is_max):
        super().__init__(index)
        self.first_expression = first_expression
        self.second_expression = second_expression
        self.is_max = is_max

    def __repr__(self):
        name = 'MAX' if self.is_max else 'MIN'
        return f'{name}({self.first_expression!r}, {self.second_expression!r})'

    def evaluate(self, values, mask=None):
        """Combine the two operands' cached values element-wise."""
        combine = np.maximum if self.is_max else np.minimum
        return combine(values[self.first_expression.code()], values[self.second_expression.code()])
108 |
109 |
class MaskedExpression(ComplexExpression):
    """Expression evaluated inside a polygonal mask, with a fallback expression outside it."""
    def __init__(self, index, inside_expression, outside_expression):
        super().__init__(index)
        self.inside_expression = inside_expression
        self.outside_expression = outside_expression
        self.masked = True
        self.polygonal = True
        self.mask_id = self.inside_expression.mask_id

    def __repr__(self):
        return f'IF (POLY{self.mask_id}) THEN ({self.inside_expression!r}) ELSE ({self.outside_expression!r})'

    def evaluate(self, values, mask=None):
        """Select element-wise: inside values where the mask holds, outside values elsewhere."""
        inside = values[self.inside_expression.code()]
        outside = values[self.outside_expression.code()]
        return np.where(mask, inside, outside)
126 |
--------------------------------------------------------------------------------
/pyteltools/slf/interpolation.py:
--------------------------------------------------------------------------------
1 | """!
2 | Barycentric interpolation in triangles
3 | """
4 |
5 | import numpy as np
6 |
7 | from .mesh2D import Mesh2D
8 |
9 |
class Interpolator:
    """!
    Wrapper for calculating the barycentric coordinates of 2d points in a 2d triangle
    """
    def __init__(self, triangle):
        # drop the repeated closing vertex of the exterior ring, keep the three corners
        (self.x1, self.y1), (x2, y2), (x3, y3) = [tuple(pt) for pt in list(triangle.exterior.coords)[:-1]]
        self.vec_x = np.array([x2 - x3, x3 - self.x1, self.x1 - x2])
        self.vec_y = np.array([y2 - y3, y3 - self.y1, self.y1 - y2])
        # twice the signed triangle area; its inverse normalizes the coordinates
        norm_z = (x2 - self.x1) * (y3 - self.y1) - (y2 - self.y1) * (x3 - self.x1)
        self.vec_norm_z = np.array([norm_z, 0, 0])
        self.inv_norm_z = 1 / norm_z

    def get_interpolator_at(self, x, y):
        """!
        @brief Return the barycentric coordinates of the point (x, y)
        """
        return (self.vec_norm_z + (x - self.x1) * self.vec_y - (y - self.y1) * self.vec_x) * self.inv_norm_z

    def is_in_triangle(self, x, y):
        """!
        @brief Return a boolean indicating if the point (x, y) is in the triangle, and its barycentric coordinates
        """
        coord = self.get_interpolator_at(x, y)
        return np.all(coord >= 0) and np.all(coord <= 1), coord
37 |
38 |
39 | class MeshInterpolator(Mesh2D):
    def __init__(self, *args, **kwargs):
        # Pure pass-through constructor: all mesh construction is handled by Mesh2D
        super().__init__(*args, **kwargs)
42 |
43 | def get_point_interpolators(self, points):
44 | nb_points = len(points)
45 | is_inside = [False] * nb_points
46 | point_interpolators = [None] * nb_points
47 |
48 | for index, (x, y) in enumerate(points):
49 | potential_elements = self.get_intersecting_elements((x, y, x, y))
50 | if not potential_elements:
51 | continue
52 | for i, j, k in potential_elements:
53 | t = self.triangles[i, j, k]
54 | is_in, point_interpolator = Interpolator(t).is_in_triangle(x, y)
55 | if is_in:
56 | is_inside[index] = True
57 | point_interpolators[index] = ((i, j, k), point_interpolator)
58 | break
59 |
60 | return is_inside, point_interpolators
61 |
    def _get_line_interpolators(self, line):
        """Compute interpolators and cumulative distances along one open polyline.

        @return (intersections, distances, internal_points, distances_internal):
        intersections are (x, y, (i, j, k), barycentric coords) tuples in line order;
        all four lists are empty when the line/mesh intersection is discontinuous.
        """
        intersections = []
        internal_points = []  # line interpolators without intersections

        # record the distance offset before the first intersection point
        offset = 0
        found_intersection = False

        for right, up, segment in line.segments():  # for every segment, sort intersection points
            segment_intersections = []
            potential_elements = self.get_intersecting_elements(segment.bounds())
            for i, j, k in potential_elements:
                t = self.triangles[i, j, k]
                is_intersected, t_intersections = segment.linestring_intersection(t)
                if is_intersected:
                    interpolator = Interpolator(t)
                    for intersection in t_intersections:
                        for x, y in intersection.coords:
                            segment_intersections.append((x, y, (i, j, k), interpolator.get_interpolator_at(x, y)))

            # first sort by y, then sort by x
            if up:
                segment_intersections.sort(key=lambda x: x[1])
            else:
                segment_intersections.sort(key=lambda x: x[1], reverse=True)
            if right:
                segment_intersections.sort()
            else:
                segment_intersections.sort(reverse=True)

            intersections.extend(segment_intersections)
            if not segment_intersections:
                continue

            # keep the segment's first/last hits as "internal" points (trimmed below)
            internal_points.append(segment_intersections[0])
            internal_points.append(segment_intersections[-1])

            if not found_intersection:
                first_point, second_point = list(segment.coords())
                # NOTE(review): this branch is unreachable — segments without
                # intersections `continue` above, so their lengths never reach the
                # offset; confirm whether the `continue` should come after this block.
                if not segment_intersections:
                    offset += np.linalg.norm(np.array(second_point) - np.array(first_point))
                else:
                    found_intersection = True
                    first_intersection = np.array(segment_intersections[0][:2])
                    offset += np.linalg.norm(first_intersection - np.array(first_point))

        # if the intersection is continuous, every internal point or turning point has at least two duplicates
        prev_x, prev_y = None, None
        duplicates = 0
        to_remove = [False] * len(intersections)
        for i, (x, y, _, __) in enumerate(intersections):
            if i == 0:  # the start and end points are not duplicated
                continue
            if prev_x is None:
                prev_x, prev_y = x, y
                continue
            if x == prev_x and y == prev_y:
                to_remove[i] = True
                duplicates += 1
            else:
                if duplicates == 0:  # no duplicate found, the intersection is discontinuous
                    return [], [], [], []
                duplicates = 0
                prev_x, prev_y = x, y

        intersections = [intersections[i] for i in range(len(intersections)) if not to_remove[i]]

        # trim internal points from 2n+2 to n+1
        if internal_points:
            internal_points = internal_points[0:-1:2] + [internal_points[-1]]

        # compute cumulative distance
        distance = offset
        distances = [offset]
        for i in range(len(intersections)-1):
            first, second = intersections[i+1], intersections[i]
            distance += np.linalg.norm([second[0] - first[0], second[1] - first[1]])
            distances.append(distance)

        distance = offset
        distances_internal = [offset]
        for i in range(len(internal_points)-1):
            first, second = internal_points[i+1], internal_points[i]
            distance += np.linalg.norm([second[0] - first[0], second[1] - first[1]])
            distances_internal.append(distance)

        return intersections, distances, internal_points, distances_internal
149 |
150 | def get_line_interpolators(self, lines):
151 | nb_nonempty = 0
152 | indices_nonempty = []
153 | line_interpolators = []
154 | line_interpolators_internal = []
155 |
156 | for i, line in enumerate(lines):
157 | line_interpolator, distance, line_interpolator_internal, distance_internal = self._get_line_interpolators(line)
158 |
159 | if line_interpolator:
160 | nb_nonempty += 1
161 | indices_nonempty.append(i)
162 |
163 | line_interpolators.append((line_interpolator, distance))
164 | line_interpolators_internal.append((line_interpolator_internal, distance_internal))
165 |
166 | return nb_nonempty, indices_nonempty, line_interpolators, line_interpolators_internal
167 |
168 | @staticmethod
169 | def interpolate_along_lines(input_stream, selected_vars, selected_time_indices, indices_nonempty,
170 | line_interpolators, fmt_float):
171 | for u, id_line in enumerate(indices_nonempty):
172 | line_interpolator, distances = line_interpolators[id_line]
173 |
174 | for v, time_index in enumerate(selected_time_indices):
175 | time_value = input_stream.time[time_index]
176 | var_values = []
177 | for var in selected_vars:
178 | var_values.append(input_stream.read_var_in_frame(time_index, var))
179 |
180 | for (x, y, (i, j, k), interpolator), distance in zip(line_interpolator, distances):
181 | row = [str(id_line+1), str(time_value), fmt_float.format(x), fmt_float.format(y),
182 | fmt_float.format(distance)]
183 |
184 | for i_var, var in enumerate(selected_vars):
185 | values = var_values[i_var]
186 | row.append(fmt_float.format(interpolator.dot(values[[i, j, k]])))
187 | yield u, v, row
188 |
189 | @staticmethod
190 | def project_lines(input_stream, selected_vars, time_index, indices_nonempty, max_distance,
191 | reference, line_interpolators, fmt_float):
192 | var_values = []
193 | for var in selected_vars:
194 | var_values.append(input_stream.read_var_in_frame(time_index, var))
195 |
196 | for u, id_line in enumerate(indices_nonempty):
197 | line_interpolator, _ = line_interpolators[id_line]
198 |
199 | for x, y, (i, j, k), interpolator in line_interpolator:
200 | d = reference.project(x, y)
201 | if d <= 0 or d >= max_distance:
202 | continue
203 | row = [str(id_line+1), fmt_float.format(x), fmt_float.format(y), fmt_float.format(d)]
204 | for i_var, var in enumerate(selected_vars):
205 | values = var_values[i_var]
206 | row.append(fmt_float.format(interpolator.dot(values[[i, j, k]])))
207 | yield u, row
208 |
--------------------------------------------------------------------------------
/pyteltools/slf/mesh2D.py:
--------------------------------------------------------------------------------
1 | """!
2 | Representation of the 2D mesh in a 2D Serafin file.
3 | """
4 |
5 | import numpy as np
6 | from rtree.index import Index
7 | from shapely.geometry import Polygon
8 |
9 |
class Mesh2D:
    """!
    The general representation of mesh in Serafin 2D.
    The basis for interpolation, volume calculations etc.
    """
    def __init__(self, input_header, construct_index=False, iter_pbar=lambda x, unit: x):
        """!
        @param input_header : input Serafin header
        @param construct_index : perform the index construction
        @param iter_pbar: iterable progress bar
        """
        nb_nodes = input_header.nb_nodes_2d
        self.x = input_header.x[:nb_nodes]
        self.y = input_header.y[:nb_nodes]
        self.ikle = input_header.ikle_2d - 1  # back to 0-based indexing
        self.triangles = {}  # (i, j, k) -> shapely Polygon, filled by _construct_index
        self.nb_points = self.x.shape[0]
        self.nb_triangles = self.ikle.shape[0]
        self.points = np.stack([self.x, self.y], axis=1)
        if construct_index:
            self._construct_index(iter_pbar)
        else:
            self.index = Index()  # empty spatial index

    def _construct_index(self, iter_pbar):
        """!
        Separate the index construction from the constructor, allowing a GUI override
        @param iter_pbar: iterable progress bar
        """
        self.index = Index()
        for i, j, k in iter_pbar(self.ikle, unit='elements'):
            triangle = Polygon([self.points[i], self.points[j], self.points[k]])
            self.triangles[i, j, k] = triangle
            self.index.insert(i, triangle.bounds, obj=(i, j, k))

    def get_intersecting_elements(self, bounding_box):
        """!
        @brief Return the triangles in the mesh intersecting the bounding box
        @param bounding_box : (left, bottom, right, top) of a 2d geometrical object
        @return <[tuple]>: The list of triangles (i,j,k) intersecting the bounding box
        Beware: The returned list is not sorted
        """
        return list(self.index.intersection(bounding_box, objects='raw'))
51 |
--------------------------------------------------------------------------------
/pyteltools/slf/util.py:
--------------------------------------------------------------------------------
from pyteltools.utils.log import new_logger

# Shared module-level logger for the slf sub-package
logger = new_logger(__name__)
4 |
--------------------------------------------------------------------------------
/pyteltools/slf/variable/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CNR-Engineering/PyTelTools/1bcba819144ddcff28dadb2c3d6d6fcc5db20142/pyteltools/slf/variable/__init__.py
--------------------------------------------------------------------------------
/pyteltools/slf/variable/variables_3d.py:
--------------------------------------------------------------------------------
1 | """!
2 | Handle 3D variables and their relationships in Serafin files for additional variable computation
3 | """
4 |
5 | from .variables_utils import *
6 |
7 |
# define variables
# CSV-like table, one variable per line: ID, French name, English name, unit
spec = """Z,COTE Z,ELEVATION Z,M
U,VITESSE U,VELOCITY U,M/S
V,VITESSE V,VELOCITY V,M/S
W,VITESSE W,VELOCITY W,M/S
NUX,NUX POUR VITESSE,NUX FOR VELOCITY,M2/S
NUY,NUY POUR VITESSE,NUY FOR VELOCITY,M2/S
NUZ,NUZ POUR VITESSE,NUZ FOR VELOCITY,M2/S
M,VITESSE SCALAIRE,SCALAR VELOCITY,M/S
NU,NU POUR VITESSE,NU FOR VELOCITY,M2/S"""

# IDs of the variables that may be stored directly in a 3D Serafin file
basic_3D_vars_IDs = ['Z', 'U', 'V', 'W', 'NUX', 'NUY', 'NUZ', 'M', 'NU']
VARIABLES_3D = build_variables(spec)

# Module-level handles on the Variable objects, in spec order
Z, U, V, W, NUX, NUY, NUZ, M, NU = [VARIABLES_3D[var] for var in basic_3D_vars_IDs]


# define equations
# Both derived variables use the 3-component Euclidean norm of their inputs
BASIC_3D_EQUATIONS = {
    'M': Equation((U, V, W), M, NORM2_3D),
    'NU': Equation((NUX, NUY, NUZ), NU, NORM2_3D),
}
30 |
31 |
def is_basic_3d_variable(var_ID):
    """!
    @brief Determine if the input variable is a basic 3D variable
    @param var_ID <str>: the ID (short name) of the variable
    @return <bool>: True if the variable is one of the basic variables (readable directly from a 3D file)
    """
    return var_ID in basic_3D_vars_IDs
39 |
40 |
def get_available_3d_variables(input_var_IDs):
    """!
    @brief Determine the list of new 3D variables computable from the input variables by basic relations
    @param input_var_IDs <[str]>: the list of 3D variable IDs contained in the input file
    @return <[Variable]>: the list of variables computable from the input variables by basic relations
    """
    # keep only the basic variables, converted to their Variable objects
    computables = [VARIABLES_3D[var_ID] for var_ID in input_var_IDs if is_basic_3d_variable(var_ID)]
    return get_available_variables(computables, BASIC_3D_EQUATIONS)
49 |
50 |
def get_necessary_3d_equations(known_var_IDs, needed_var_IDs):
    """!
    @brief Determine the list of 3D equations needed to compute all user-selected variables, with precedence handling
    @param known_var_IDs <[str]>: the list of variable IDs contained in the input file
    @param needed_var_IDs <[str]>: the list of variable IDs selected by the user
    @return <[Equation]>: the list of equations needed to compute all user-selected variables
    """
    known = set(known_var_IDs)
    # only M and NU can be derived in 3D; keep the (M, NU) evaluation order
    return [BASIC_3D_EQUATIONS[var_ID] for var_ID in ('M', 'NU')
            if var_ID in needed_var_IDs and var_ID not in known]
70 |
--------------------------------------------------------------------------------
/pyteltools/slf/variable/variables_utils.py:
--------------------------------------------------------------------------------
1 | """!
2 | Handle 2D and 3D additional variables
3 | """
4 |
5 | import numpy as np
6 |
7 |
# define constants
KARMAN = 0.4  # von Karman constant (dimensionless)
RHO_WATER = 1000.  # water density (kg/m3)
GRAVITY = 9.80665  # standard gravity (m/s2)
12 |
13 |
class Variable:
    """!
    @brief Data type for a single variable with ID (short name), Name (fr or en) and Unit
    """
    def __init__(self, ID, name_fr, name_en, unit, order):
        self._ID = ID
        self.name_fr = name_fr
        self.name_en = name_en
        self._unit = unit
        self.order = order  # position in the specification table

    def __repr__(self):
        return ', '.join([self.ID(), self.name_fr, self.name_en, self.unit()])

    def name(self, language):
        """Return the display name for 'fr', English otherwise."""
        return self.name_fr if language == 'fr' else self.name_en

    def ID(self):
        """Return the short name (Serafin variable ID)."""
        return self._ID

    def unit(self):
        """Return the unit string."""
        return self._unit
39 |
40 |
class Equation:
    """!
    @brief Data type for an equation consisting of N input variables, 1 output variables and (N-1) operators
    """
    def __init__(self, input_variables, output_variable, operator):
        self.input = input_variables  # tuple of input Variable objects
        self.output = output_variable  # computed Variable
        self.operator = operator  # key into the OPERATIONS table

    def __repr__(self):
        return "%s -> %s (%s)" % (self.input, self.output, self.operator)
52 |
53 |
def build_variables(spec):
    """!
    @brief Build the {ID: Variable} table from a CSV-like specification string
    @param spec <str>: one variable per line, as 'ID,name_fr,name_en,unit'
    @return <{str: Variable}>: mapping from variable ID to its Variable object
    """
    variables = {}
    for order, line in enumerate(spec.split('\n')):
        ID, name_fr, name_en, unit = line.split(',')
        variables[ID] = Variable(ID, name_fr, name_en, unit, order)
    return variables
63 |
64 |
def square_root(x):
    """Element-wise square root; negative entries yield NaN without emitting a warning."""
    with np.errstate(invalid='ignore'):
        result = np.sqrt(x)
    return result
68 |
69 |
def cubic_root(x):
    """Element-wise |x|**(1/3).

    NOTE(review): for negative inputs this returns the POSITIVE cube root of |x|
    (e.g. cubic_root(-8) == 2), exactly reproducing the original np.where form —
    confirm a signed cube root is not intended.
    """
    with np.errstate(invalid='ignore'):
        return np.power(np.abs(x), 1/3.)
73 |
74 |
def compute_NIKURADSE(w, h, m):
    """Friction velocity from the Nikuradse law.

    Assumes w is the roughness length, h the water depth and m the scalar
    velocity (consistent with the other friction operators) — TODO confirm.
    Division-by-zero and invalid values are silenced (yield inf/NaN).
    """
    with np.errstate(divide='ignore', invalid='ignore'):
        log_term = np.log(30 * h / np.exp(1) / w)
        return np.sqrt(np.power(m, 2) * KARMAN**2 / np.power(log_term, 2))
78 |
79 |
def compute_DMAX(tau):
    """Piecewise empirical diameter law: three regimes split at tau = 0.34 and tau = 0.1."""
    return np.select(
        [tau > 0.34, tau > 0.1],
        [1.4593 * np.power(tau, 0.979),
         1.2912 * np.power(tau, 2) + 1.3572 * tau - 0.1154],
        default=0.9055 * np.power(tau, 1.3178),
    )
84 |
85 |
def compute_COMPONENT_X(scalar, x, y):
    """Project a scalar magnitude onto the x-direction of vector (x, y); 0 where the vector is null."""
    norm = np.sqrt(np.power(x, 2) + np.power(y, 2))
    return np.where(norm > 0, scalar * x / norm, 0)
89 |
90 |
def compute_COMPONENT_Y(scalar, x, y):
    """Project a scalar magnitude onto the y-direction of vector (x, y); 0 where the vector is null."""
    # same as compute_COMPONENT_X with the roles of x and y exchanged
    norm = np.sqrt(np.power(y, 2) + np.power(x, 2))
    return np.where(norm > 0, scalar * y / norm, 0)
93 |
94 |
# Operator codes referenced by Equation.operator
PLUS, MINUS, TIMES, NORM2, NORM2_3D = 1, 2, 3, 4, 104
COMPUTE_TAU, COMPUTE_DMAX = 5, 6
COMPUTE_CHEZY, COMPUTE_STRICKLER, COMPUTE_MANNING, COMPUTE_NIKURADSE = 7, 8, 9, 10
COMPUTE_C, COMPUTE_F = 11, 12
COMPUTE_COMPONENT_X, COMPUTE_COMPONENT_Y = 20, 21

# Operator code -> callable applied to the input arrays, in Equation.input order
OPERATIONS = {
    PLUS: lambda a, b: a + b,
    MINUS: lambda a, b: a-b,
    TIMES: lambda a, b: a*b,
    NORM2: lambda a, b: np.sqrt(np.square(a) + np.square(b)),
    NORM2_3D: lambda a, b, c: np.sqrt(np.square(a) + np.square(b) + np.square(c)),
    COMPUTE_TAU: lambda x: RHO_WATER * np.square(x),  # shear stress from squared friction velocity
    COMPUTE_DMAX: compute_DMAX,
    # friction laws: arguments are (friction variable w, depth h, scalar velocity m)
    COMPUTE_CHEZY: lambda w, h, m: np.sqrt(np.power(m, 2) * GRAVITY / np.square(w)),
    COMPUTE_STRICKLER: lambda w, h, m: np.sqrt(np.power(m, 2) * GRAVITY / np.square(w) / cubic_root(h)),
    COMPUTE_MANNING: lambda w, h, m: np.sqrt(np.power(m, 2) * GRAVITY * np.power(w, 2) / cubic_root(h)),
    COMPUTE_NIKURADSE: compute_NIKURADSE,
    COMPUTE_COMPONENT_X: compute_COMPONENT_X,
    COMPUTE_COMPONENT_Y: compute_COMPONENT_Y,
    COMPUTE_C: lambda h: square_root(GRAVITY * h),  # wave celerity sqrt(g*h)
    COMPUTE_F: lambda m, c: m / c  # Froude number
}
118 |
119 |
def do_calculation(equation, input_values):
    """!
    @brief Apply an equation on input values
    @param equation : an equation object
    @param input_values <[numpy 1D-array]>: the values of the input variables
    @return : the values of the output variable
    """
    operation = OPERATIONS[equation.operator]
    nb_operands = len(input_values)
    # silence divide/invalid warnings (NaN/inf propagate to the result)
    with np.errstate(divide='ignore', invalid='ignore'):
        if nb_operands == 1:
            return operation(input_values[0])
        if nb_operands == 2:
            return operation(input_values[0], input_values[1])
        return operation(input_values[0], input_values[1], input_values[2])
137 |
138 |
def get_available_variables(computables, basic_equations):
    """!
    @brief Determine the list of new variables (2D or 3D) computable from the input variables by basic relations
    @param computables <[Variable]>: variables known so far (extended in place as new ones become computable)
    @param basic_equations <{str: Equation}>: candidate equations, keyed by output variable ID
    @return <[Variable]>: the list of variables computable from the input variables by basic relations
    """
    available_vars = []
    # fixed-point iteration: keep sweeping the equations until no new output becomes computable
    progress = True
    while progress:
        progress = False
        for equation in basic_equations.values():
            if equation.output in computables:  # not a new variable
                continue
            if all(variable in computables for variable in equation.input):
                computables.append(equation.output)
                available_vars.append(equation.output)
                progress = True
    return available_vars
160 |
--------------------------------------------------------------------------------
/pyteltools/slf/variables.py:
--------------------------------------------------------------------------------
1 | """
2 | API for computing additional variables
3 | """
4 |
5 | import numpy as np
6 |
7 | from .variable.variables_2d import get_available_2d_variables, get_necessary_2d_equations, \
8 | get_US_equation, new_variables_from_US
9 | # Beware: `get_US_equation` and `new_variables_from_US` are imported indirectly
10 | from .variable.variables_3d import get_available_3d_variables, get_necessary_3d_equations
11 | from .variable.variables_utils import do_calculation
12 |
13 |
def get_available_variables(input_variables, is_2d):
    """Dispatch to the 2D or 3D computable-variable detection depending on the file dimension."""
    selector = get_available_2d_variables if is_2d else get_available_3d_variables
    return selector(input_variables)
18 |
19 |
def get_necessary_equations(known_var_IDs, needed_var_IDs, is_2d, us_equation=None):
    """Dispatch to the 2D or 3D equation resolution (us_equation is only meaningful in 2D)."""
    if not is_2d:
        return get_necessary_3d_equations(known_var_IDs, needed_var_IDs)
    return get_necessary_2d_equations(known_var_IDs, needed_var_IDs, us_equation)
24 |
25 |
def do_calculations_in_frame(equations, input_serafin, time_index, selected_output_IDs,
                             output_float_type, is_2d, us_equation, ori_values=None):
    """!
    @brief Return the selected 2D variables values in a single time frame
    @param equations <[slf.variables_utils.Equation]>: list of all equations necessary to compute selected variables
    @param input_serafin : input stream for reading necessary variables
    @param time_index : the index of the frame (0-based)
    @param selected_output_IDs <[str]>: the short names of the selected output variables
    @param output_float_type : float32 or float64 according to the output file type
    @param is_2d : True if input data is 2D
    @param us_equation : user-specified friction law equation
    @param ori_values <{str: numpy.ndarray}>: known values before calculations (None for none)
    @return <numpy.ndarray>: the values of the selected output variables
    """
    # Bug fix: the signature previously used `ori_values={}`; that shared mutable default
    # was mutated below, leaking computed values from one call into every later call.
    computed_values = {} if ori_values is None else ori_values
    for equation in equations:
        input_var_IDs = [variable.ID() for variable in equation.input]

        # read (if needed) input variables values; ROUSE pseudo-variables are never read from file
        for input_var_ID in input_var_IDs:
            if input_var_ID not in computed_values and input_var_ID[:5] != 'ROUSE':
                computed_values[input_var_ID] = input_serafin.read_var_in_frame(time_index, input_var_ID)

        if is_2d:
            # handle the special case for US (user-specified equation)
            if equation.output.ID() == 'US':
                computed_values['US'] = do_calculation(us_equation, [computed_values['W'],
                                                                    computed_values['H'],
                                                                    computed_values['M']])
                # Clean US values in case of negative or null water depth
                computed_values['US'] = np.where(computed_values['H'] > 0, computed_values['US'],
                                                 np.zeros(input_serafin.header.nb_nodes, dtype=output_float_type))

            # handle the very special case for ROUSE (equation depending on user-specified value)
            elif equation.output.ID() == 'ROUSE':
                computed_values[equation.input[0].ID()] = equation.operator(computed_values['US'])
                continue

        # handle the normal case (if not already done)
        if equation.output.ID() not in computed_values:
            computed_values[equation.output.ID()] = do_calculation(equation,
                                                                   [computed_values[var_ID]
                                                                    for var_ID in input_var_IDs])

    # reconstruct the output values array in the order of the selected IDs
    nb_selected_vars = len(selected_output_IDs)

    output_values = np.empty((nb_selected_vars, input_serafin.header.nb_nodes),
                             dtype=output_float_type)
    for i, var_ID in enumerate(selected_output_IDs):
        if var_ID not in computed_values:
            output_values[i, :] = input_serafin.read_var_in_frame(time_index, var_ID)
        else:
            output_values[i, :] = computed_values[var_ID]
    return output_values
81 |
--------------------------------------------------------------------------------
/pyteltools/tests/__init__.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from pyteltools.slf import Serafin
4 |
5 |
class TestHeader(Serafin.SerafinHeader):
    """Minimal in-memory Serafin header (4 nodes, 3 triangles) used as a fixture by the unit tests."""
    def __init__(self):
        super().__init__(title='DUMMY SERAFIN', format_type='SERAFIND')

        # Mesh size: 3 triangular elements over 4 nodes, 2D only
        self.nb_elements = 3
        self.nb_nodes = 4
        self.nb_nodes_2d = self.nb_nodes
        self.nb_nodes_per_elem = 3

        # Connectivity table (1-based node indices, flattened element by element)
        self.ikle = np.array([1, 2, 4, 1, 3, 4, 2, 3, 4], dtype=np.int64)
        self.x_stored = np.array([3, 0, 6, 3], dtype=np.float64)
        self.y_stored = np.array([6, 0, 0, 2], dtype=np.float64)

        # Derive coordinates, 2D connectivity and boundary table via SerafinHeader helpers
        self._compute_mesh_coordinates()
        self._build_ikle_2d()
        self.build_ipobo()
22 |
--------------------------------------------------------------------------------
/pyteltools/tests/test_variables.py:
--------------------------------------------------------------------------------
1 | """!
2 | Unittest for slf.variables module
3 | """
4 |
5 | import unittest
6 |
7 | from pyteltools.slf.variables import get_necessary_equations
8 | from pyteltools.slf.variable.variables_2d import get_US_equation, CHEZY_ID, MANNING_ID, NIKURADSE_ID, STRICKLER_ID
9 |
10 |
def eq_name(eqs):
    """Return the output-variable IDs of a list of equations (def instead of assigned lambda, PEP 8 E731)."""
    return [equation.output.ID() for equation in eqs]
12 |
13 |
class VariablesTestCase(unittest.TestCase):
    """Check that get_necessary_equations returns exactly the expected equations, in order.

    Every assertion compares the output-variable IDs (via eq_name) of the equations
    returned for a given set of known and requested variable IDs, in 2D mode.
    """
    # requested variables already present in the file: nothing to compute
    def test_no_equation(self):
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V'], ['U'], True, None)), [])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'Q'], ['Q'], True, None)), [])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'M', 'H', 'US'], ['US'], True, None)), [])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'M', 'H', 'US'], ['US', 'U'], True, None)), [])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'M', 'H', 'US'], ['V', 'US', 'M'], True, None)), [])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'M', 'RANDOM NAME', 'H', 'US'], ['RANDOM NAME'], True, None)), [])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'M', 'H', 'C', 'Q', 'F', 'US'], ['C'], True, None)), [])

    # water depth H derived from free surface S and bottom B
    def test_H_equation(self):
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'S', 'B'], ['U', 'H'], True, None)), ['H'])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'S', 'V', 'B'], ['S', 'U', 'H'], True, None)), ['H'])
        self.assertEqual(eq_name(get_necessary_equations(['S', 'B'], ['H', 'U', 'S'], True, None)), ['H'])

    # free surface S derived from H and B
    def test_S_equation(self):
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'H', 'B'], ['U', 'S'], True, None)), ['S'])
        self.assertEqual(eq_name(get_necessary_equations(['H', 'Q', 'B'], ['S'], True, None)), ['S'])
        self.assertEqual(eq_name(get_necessary_equations(['Q', 'I', 'H', 'U', 'V', 'B'], ['S', 'U', 'H'], True, None)), ['S'])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'B', 'J', 'I', 'Q', 'TAU', 'H', 'B'], ['H', 'S'], True, None)), ['S'])

    # bottom B derived from the other elevation variables
    def test_B_equation(self):
        self.assertEqual(eq_name(get_necessary_equations(['HD', 'RB', 'H'], ['B', 'H'], True, None)), ['B'])
        self.assertEqual(eq_name(get_necessary_equations(['HD', 'H', 'S', 'TAU'], ['B', 'S'], True, None)), ['B'])
        self.assertEqual(eq_name(get_necessary_equations(['S', 'U', 'H', 'V', 'H'], ['S', 'B', 'H'], True, None)), ['B'])
        self.assertEqual(eq_name(get_necessary_equations(['DMAX', 'US', 'RB', 'H', 'Q', 'TAU', 'S', 'I'], ['H', 'B'], True, None)), ['B'])

    # wave celerity C (requires H, possibly computed first)
    def test_C_equation(self):
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'H', 'B'], ['U', 'C', 'H'], True, None)), ['C'])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'S', 'Q', 'B'], ['U', 'C', 'H'], True, None)), ['H', 'C'])

    # Froude number F (requires M and C, pulled in transitively)
    def test_F_equation(self):
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'H', 'M', 'B'], ['U', 'F', 'H'], True, None)), ['C', 'F'])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'S', 'B'], ['U', 'C', 'F'], True, None)), ['H', 'M', 'C', 'F'])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'S', 'B'], ['U', 'C', 'F', 'I', 'Q'], True, None)), ['H', 'M', 'C', 'F', 'I', 'J', 'Q'])

    # unit flow components I and J
    def test_IJ_equation(self):
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'H', 'B'], ['I'], True, None)), ['I'])
        self.assertEqual(eq_name(get_necessary_equations(['I', 'J'], ['Q'], True, None)), ['Q'])
        self.assertEqual(eq_name(get_necessary_equations(['H', 'U', 'Q', 'J'], ['H', 'I', 'Q'], True, None)), ['I'])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'H', 'B', 'M'], ['J'], True, None)), ['J'])
        self.assertEqual(eq_name(get_necessary_equations(['H', 'U', 'Q', 'V', 'M'], ['M', 'Q', 'J', 'U'], True, None)), ['J'])
        self.assertEqual(eq_name(get_necessary_equations(['S', 'U', 'B', 'V', 'I'], ['M', 'Q', 'U'], True, None)), ['H', 'M', 'J', 'Q'])
        self.assertEqual(eq_name(get_necessary_equations(['S', 'U', 'B', 'V', 'M'], ['M', 'Q', 'J', 'U'], True, None)), ['H', 'I', 'J', 'Q'])

    # scalar flow Q (requires I and J)
    def test_Q_equation(self):
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'H'], ['Q'], True, None)), ['I', 'J', 'Q'])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'H'], ['U', 'Q', 'V'], True, None)), ['I', 'J', 'Q'])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'I', 'H'], ['Q', 'I', 'U'], True, None)), ['J', 'Q'])
        self.assertEqual(eq_name(get_necessary_equations(['I', 'J', 'H', 'U', 'V', 'TAU'], ['I', 'J', 'Q'], True, None)), ['Q'])
        self.assertEqual(eq_name(get_necessary_equations(['I', 'S', 'B', 'V'], ['Q'], True, None)), ['H', 'J', 'Q'])

    # scalar velocity M
    def test_M_equation(self):
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'J', 'H', 'B', 'I'], ['U', 'H', 'V', 'M'], True, None)), ['M'])
        self.assertEqual(eq_name(get_necessary_equations(['H', 'Q', 'I', 'J', 'B'], ['M', 'U', 'B', 'H', 'Q'], True, None)), ['M'])
        self.assertEqual(eq_name(get_necessary_equations(['H', 'U', 'V', 'S', 'J', 'B'], ['Q', 'U', 'M', 'H'], True, None)), ['M', 'I', 'Q'])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'S', 'J', 'B'], ['Q', 'U', 'M', 'H'], True, None)), ['H', 'M', 'I', 'Q'])

    # bed shear stress TAU (requires US from a user-specified friction law)
    def test_TAU_equation(self):
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'J', 'W', 'H', 'B', 'I'], ['TAU'], True, get_US_equation(CHEZY_ID))), ['M', 'US', 'TAU'])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'M', 'Q', 'W', 'H'], ['TAU', 'Q', 'U', 'V'], True, get_US_equation(NIKURADSE_ID))), ['US', 'TAU'])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'M', 'US', 'Q', 'W', 'H'], ['I', 'TAU', 'Q', 'U', 'V'], True, get_US_equation(STRICKLER_ID))), ['I', 'TAU'])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'M', 'US', 'W', 'H'], ['Q', 'TAU', 'US'], True, get_US_equation(MANNING_ID))), ['I', 'J', 'Q', 'TAU'])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'M', 'US', 'W', 'S', 'B'], ['Q', 'TAU'], True, get_US_equation(STRICKLER_ID))), ['H', 'I', 'J', 'Q', 'TAU'])

    # maximum diameter DMAX (requires TAU)
    def test_DMAX_equation(self):
        self.assertEqual(eq_name(get_necessary_equations(['US'], ['DMAX'], True, None)), ['TAU', 'DMAX'])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'H', 'W', 'TAU', 'US'], ['DMAX'], True, None)), ['DMAX'])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'J', 'W', 'H', 'B', 'I'], ['DMAX'], True, get_US_equation(CHEZY_ID))), ['M', 'US', 'TAU', 'DMAX'])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'M', 'B', 'Q', 'W', 'S'], ['DMAX', 'H', 'Q', 'TAU', 'U', 'V'], True, get_US_equation(STRICKLER_ID))), ['H', 'US', 'TAU', 'DMAX'])
        self.assertEqual(eq_name(get_necessary_equations(['US', 'I', 'H', 'U', 'V'], ['M', 'DMAX', 'Q'], True, None)), ['M', 'J', 'Q', 'TAU', 'DMAX'])
        self.assertEqual(eq_name(get_necessary_equations(['US', 'I', 'H', 'U', 'V'], ['M', 'DMAX', 'Q'], True, None)), ['M', 'J', 'Q', 'TAU', 'DMAX'])

    # FROTP (requires TAU and MU)
    def test_FROPT_equation(self):
        self.assertEqual(eq_name(get_necessary_equations(['US', 'MU'], ['FROTP'], True, None)), ['TAU', 'FROTP'])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'MU', 'H', 'W', 'TAU', 'US'], ['FROTP'], True, None)), ['FROTP'])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'V', 'J', 'W', 'H', 'MU', 'I'], ['FROTP'], True, get_US_equation(CHEZY_ID))), ['M', 'US', 'TAU', 'FROTP'])
        self.assertEqual(eq_name(get_necessary_equations(['U', 'MU', 'M', 'B', 'Q', 'W', 'S'], ['FROTP', 'DMAX', 'H', 'Q', 'TAU', 'U', 'V'], True, get_US_equation(STRICKLER_ID))), ['H', 'US', 'TAU', 'DMAX', 'FROTP'])
        self.assertEqual(eq_name(get_necessary_equations(['US', 'I', 'H', 'MU', 'V'], ['M', 'DMAX', 'Q', 'FROTP'], True, None)), ['M', 'J', 'Q', 'TAU', 'DMAX', 'FROTP'])
        self.assertEqual(eq_name(get_necessary_equations(['US', 'MU', 'H', 'U', 'V'], ['FROTP', 'M', 'DMAX', 'Q'], True, None)), ['M', 'I', 'J', 'Q', 'TAU', 'DMAX', 'FROTP'])

    # solid discharge QS from its components
    def test_QS_equation(self):
        self.assertEqual(eq_name(get_necessary_equations(['HD', 'EF', 'B', 'DF'], ['B', 'QS'], True, None)), ['QS'])
        self.assertEqual(eq_name(get_necessary_equations(['EF', 'H', 'S', 'DF'], ['QS', 'S'], True, None)), ['QS'])
        self.assertEqual(eq_name(get_necessary_equations(['QSX', 'EF', 'H', 'DF', 'QSY', 'B'], ['S', 'QS', 'H'], True, None)), ['S', 'QS'])
        self.assertEqual(eq_name(get_necessary_equations(['DMAX', 'US', 'QSX', 'EF', 'Q', 'DF', 'S', 'B'], ['H', 'QS'], True, None)), ['H', 'QS'])
100 |
--------------------------------------------------------------------------------
/pyteltools/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CNR-Engineering/PyTelTools/1bcba819144ddcff28dadb2c3d6d6fcc5db20142/pyteltools/utils/__init__.py
--------------------------------------------------------------------------------
/pyteltools/utils/cli_base.py:
--------------------------------------------------------------------------------
1 | """
2 | Prepare module logger
3 |
4 | Handles some exceptions (if `in_slf` or `out_slf` arguments are present)
5 | """
6 |
7 | import argparse
8 | import coloredlogs
9 | import logging
10 | import os.path
11 | import sys
12 |
13 | from pyteltools.conf import settings
14 | from pyteltools.utils.log import new_logger
15 |
16 |
# Width (in characters) of the horizontal rules drawn around the tool description
LINE_WIDTH = 80

logger = new_logger(__name__)
20 |
21 |
class CustomFormatter(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
    """Show argument default values in help while keeping the description's raw formatting."""
    pass
24 |
25 |
class PyTelToolsArgParse(argparse.ArgumentParser):
    """
    Derived ArgumentParser with improved help message rendering

    Also provides pre-defined arguments, automatic derived arguments and common
    validations on input/output file arguments.
    """
    def __init__(self, add_args=(), description=None, *args, **kwargs):
        """!
        @param add_args <[str]>: identifiers of pre-defined arguments to add (see `add_known_argument`)
        @param description <str>: tool description shown in the help message
        """
        kwargs['formatter_class'] = CustomFormatter
        # Fix: guard against a missing description (concatenating None raised TypeError);
        # also `add_args` default is now an immutable tuple instead of a shared mutable list.
        new_description = '_' * LINE_WIDTH + '\n' + (description or '') + '_' * LINE_WIDTH + '\n'
        super().__init__(add_help=False, description=new_description, *args, **kwargs)
        self._positionals.title = self._title_group('Positional and compulsory arguments')
        self._optionals.title = self._title_group('Optional arguments')
        self.args_known_ids = []  # identifiers of the pre-defined arguments added so far
        self.group_general = None  # created lazily by add_group_general / parse_args
        for arg_id in add_args:
            self.add_known_argument(arg_id)

    def add_known_argument(self, arg_id):
        """!
        Add pre-defined command line arguments
        @param arg_id : argument identifier
        @raise NotImplementedError: if the argument identifier is unknown
        """
        if arg_id == 'in_slf':
            self.add_argument('in_slf', help='Serafin input filename')
        elif arg_id == 'out_slf':
            self.add_argument('out_slf', help='Serafin output filename')
        elif arg_id == 'out_csv':
            self.add_argument('out_csv', help='output csv file')
        elif arg_id == 'shift':
            self.add_argument('--shift', type=float, nargs=2, help='translation (x_distance, y_distance)',
                              metavar=('X', 'Y'))
        else:
            # Fix: the exception was previously instantiated but never raised,
            # so unknown identifiers were silently accepted.
            raise NotImplementedError('Argument "%s" is unknown.' % arg_id)
        self.args_known_ids.append(arg_id)

    def _add_auto_arguments(self):
        """Add automatic derived command line arguments"""
        if 'out_slf' in self.args_known_ids:
            self.add_argument('--to_single_precision', help='force Serafin output to be single precision',
                              action='store_true')
            self.add_argument('--toggle_endianness', help='toggle output file endianness (between big/little endian)',
                              action='store_true')
        if any(arg in self.args_known_ids for arg in ('in_slf', 'out_slf')):
            self.add_argument('--lang', help="Serafin language for variables detection: 'fr' or 'en'",
                              default=settings.LANG)
        if 'out_csv' in self.args_known_ids:
            self.group_general.add_argument('--sep', help='csv column delimiter', default=settings.CSV_SEPARATOR)

    @staticmethod
    def _title_group(label):
        """Decorates group title label"""
        return '~> ' + label

    def add_argument_group(self, name, *args, **kwargs):
        """Add title group decoration"""
        return super().add_argument_group(self._title_group(name), *args, **kwargs)

    def add_group_general(self, add_args=()):
        """!
        Add group for optional general arguments (commonly used in PyTelTools)
        @param add_args <[str]>: identifiers among ('force', 'verbose') to include
        """
        self.group_general = self.add_argument_group('General optional arguments')
        if 'force' in add_args:
            self.group_general.add_argument('-f', '--force', help='force output overwrite', action='store_true')
            self.args_known_ids.append('force')
        if 'verbose' in add_args:
            self.group_general.add_argument('-v', '--verbose', help='increase output verbosity', action='store_true')
            self.args_known_ids.append('verbose')

    def parse_args(self, *args, **kwargs):
        """!
        Parse arguments, then apply the common validations (existing input file,
        no overwrite of the input, output overwrite only with --force) and verbosity setup.
        @return <argparse.Namespace>: parsed arguments
        """
        if self.group_general is None:
            self.add_group_general()  # add only help message
        self._add_auto_arguments()
        self.group_general.add_argument('-h', '--help', action='help', default=argparse.SUPPRESS,
                                        help='show this help message and exit')

        new_args = super().parse_args(*args, **kwargs)

        if 'verbose' in new_args:
            # Change verbosity globally
            if new_args.verbose:
                if settings.COLOR_LOGS:
                    coloredlogs.set_level(logging.DEBUG)
                else:
                    logging.basicConfig(level=logging.DEBUG)

        # Input Serafin file: fail early if it cannot be opened
        if 'in_slf' in new_args:
            try:
                with open(new_args.in_slf):
                    pass
            except FileNotFoundError:
                logger.critical('No such file or directory: %s' % new_args.in_slf)
                sys.exit(3)

            if 'out_slf' in new_args:  # avoid input file overwriting
                if os.path.isfile(new_args.out_slf):
                    if os.path.samefile(new_args.in_slf, new_args.out_slf):
                        logger.critical('Cannot overwrite to the input file.')
                        sys.exit(3)

        # Output files: refuse to overwrite existing files unless --force was given
        if 'force' in self.args_known_ids:
            if not new_args.force:
                for out_arg in ('out_csv', 'out_slf'):
                    if out_arg in new_args:
                        out_path = getattr(new_args, out_arg)
                        if os.path.isfile(out_path):
                            logger.critical('Output file already exists: %s' % out_path)
                            sys.exit(3)

        if any(arg in new_args for arg in ('in_slf', 'out_slf')):
            if 'slf_lang' not in new_args:
                # NOTE(review): 'slf_lang' is never defined by this parser and --lang is stored
                # as `new_args.lang`, so this branch always sets `in_lang` to the default
                # regardless of --lang — confirm the intended attribute names.
                new_args.in_lang = settings.LANG

        return new_args
138 |
--------------------------------------------------------------------------------
/pyteltools/utils/log.py:
--------------------------------------------------------------------------------
1 | import coloredlogs
2 | import logging
3 |
4 | from pyteltools.conf import settings
5 |
6 |
# Overwrite some default styles
try:
    has_bold = coloredlogs.CAN_USE_BOLD_FONT  # coloredlogs<14.0 (old versions)
except AttributeError:
    has_bold = True  # flag removed in coloredlogs>=14.0: bold is then always available
LEVEL_STYLES = coloredlogs.DEFAULT_LEVEL_STYLES
# Copy before customizing: assigning into `coloredlogs.DEFAULT_FIELD_STYLES` directly would
# mutate the library-wide default dict and leak our override to every other user of coloredlogs
# in this process. This module passes FIELD_STYLES explicitly to `coloredlogs.install`.
FIELD_STYLES = dict(coloredlogs.DEFAULT_FIELD_STYLES)
FIELD_STYLES['levelname'] = {'color': 'white', 'bold': has_bold}  # Avoid 'black' color for Windows
15 |
16 |
# Create a logger object
def new_logger(name):
    """!
    Get a new logger configured with the PyTelTools settings
    @param name : logger name
    @return <logging.Logger>: the configured logger
    """
    created_logger = logging.getLogger(name)
    if settings.COLOR_LOGS:
        coloredlogs.install(logger=created_logger, level=settings.LOGGING_LEVEL, fmt=settings.LOGGING_FMT_CLI,
                            level_styles=LEVEL_STYLES, field_styles=FIELD_STYLES)
    else:
        # Plain (non-colored) stream handler with the same CLI format
        stream_handler = logging.StreamHandler()
        stream_handler.setFormatter(logging.Formatter(settings.LOGGING_FMT_CLI))
        created_logger.addHandler(stream_handler)
        created_logger.setLevel(settings.LOGGING_LEVEL)
    return created_logger
33 |
34 |
def set_logger_level(level):
    """
    Overwrite level of all loggers of PyTelTools (only `outil_carto` is ignored)
    Useful for external calling without having to modify settings
    """
    # Imports are local to avoid circular imports at module load time
    from pyteltools.geom.util import logger as geom_logger
    from pyteltools.slf.util import logger as slf_logger
    from pyteltools.utils.cli_base import logger as cli_logger
    from pyteltools.workflow.util import logger as wk_logger
    for module_logger in (geom_logger, slf_logger, cli_logger, wk_logger):
        module_logger.setLevel(level)
48 |
--------------------------------------------------------------------------------
/pyteltools/utils/write_cli_usage.py:
--------------------------------------------------------------------------------
1 | """
2 | Write a markdown documentation file for command line scripts.
3 |
4 | Beware: Should be run from pyteltools repository root folder (same level as Makefile).
5 | """
6 |
7 | import importlib
8 | from glob import glob
9 | import os.path
10 | import sys
11 |
12 |
13 | sys.path.append(sys.argv[1]) # dirty method to import modules easily
14 |
15 |
class CommandLineScript:
    """A command line script whose argparse help message can be extracted"""

    def __init__(self, path):
        """
        @param path <str>: path to the Python script file
        """
        self.path = path
        basename = os.path.basename(self.path)
        # Script name without its `.py` extension (also used as importable module name)
        self.name = os.path.splitext(basename)[0]

    def help_msg(self):
        """Returns help message with description and usage"""
        # The script's folder was appended to sys.path above, so its bare name is importable.
        # (Fixed: the original formatted the name through a no-op '%s' % self.name.)
        mod = importlib.import_module(self.name)
        return getattr(mod, 'parser').format_help()
26 |
27 |
# Build sorted list of CLI scripts (every *.py except package markers)
cli_scripts = [CommandLineScript(script_path)
               for script_path in sorted(glob(os.path.join(sys.argv[1], '*.py')))
               if not script_path.endswith('__init__.py')]
33 |
34 |
# Write a markdown file (to be integrated within github wiki)
with open(sys.argv[2], 'w') as fileout:
    # Write TOC: one anchor link per script, then a short intro paragraph
    header_lines = ['Available **command line** scripts are:\n']
    header_lines += ['* [%s.py](#%spy)\n' % (script.name, script.name) for script in cli_scripts]
    header_lines.append('\n')
    header_lines.append('A **help message** is provided for each script with the argument `-h` (e.g. `slf_base.py -h`).\n')
    header_lines.append('Help messages are gathered below for each script '
                        '(and this file was in fact automatically generated).\n')
    header_lines.append('\n')
    fileout.writelines(header_lines)

    # Write help message for each script (one fenced code section per script)
    for script in cli_scripts:
        print(script.name)
        fileout.writelines(['# %s.py\n' % script.name,
                            '```\n',
                            script.help_msg(),
                            '```\n',
                            '\n'])
55 |
--------------------------------------------------------------------------------
/pyteltools/workflow/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CNR-Engineering/PyTelTools/1bcba819144ddcff28dadb2c3d6d6fcc5db20142/pyteltools/workflow/__init__.py
--------------------------------------------------------------------------------
/pyteltools/workflow/workflow_gui.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os.path
3 | from PyQt5.QtWidgets import (QApplication, QFileDialog, QHBoxLayout, QMessageBox,
4 | QPushButton, QTabWidget, QVBoxLayout, QWidget)
5 | import sys
6 |
7 | from .mono_gui import MonoWidget
8 | from .multi_gui import MultiWidget
9 | from .util import logger
10 |
11 |
class ProjectWindow(QWidget):
    """Workflow project window holding the Mono (single run) and Multi (batch) tabs"""

    def __init__(self, welcome):
        """
        @param welcome <QWidget>: welcome window, shown again when this window closes
        """
        super().__init__()
        self.welcome = welcome
        self.mono = MonoWidget(self)
        self.multi = MultiWidget(self)
        self.tab = QTabWidget()
        self.tab.setStyleSheet('QTabBar::tab { height: 40px; min-width: 150px; }')

        self.tab.addTab(self.mono, 'Mono')
        self.tab.addTab(self.multi, 'Multi')

        self.tab.currentChanged.connect(self.switch_tab)
        layout = QVBoxLayout()
        layout.addWidget(self.tab)
        self.setLayout(layout)

        # Current project file path ('' until a project is created or loaded)
        self.filename = ''

    def load(self, filename):
        """Load a project file into both scenes; returns True on success"""
        if not self.mono.scene.load(filename):
            return False
        if not self.multi.scene.load(filename):
            return False
        self.filename = filename
        self.setWindowTitle('PyTelTools :: Workflow :: %s' % filename)
        return True

    def save(self):
        """Write the project file; returns False (writing nothing) on duplicated suffixes"""
        suffix = self.mono.scene.suffix_pool()
        if len(suffix) != len(set(suffix)):
            return False

        with open(self.filename, 'w') as f:
            for line in self.mono.scene.save():
                f.write(line)
                f.write('\n')
            # Multi scene is only saved once the mono scene is fully connected
            if not self.mono.scene.not_connected():
                for line in self.multi.scene.save():
                    f.write(line)
                    f.write('\n')
        return True

    def create_new(self, filename):
        """Initialize a new project file from the current mono scene"""
        self.filename = filename
        with open(self.filename, 'w') as f:
            for line in self.mono.scene.save():
                f.write(line)
                f.write('\n')

    def switch_tab(self, index):
        """Persist and reload scenes when switching between Mono (0) and Multi (1) tabs"""
        if index == 1:
            self.check_and_reload()
        else:
            self.reload()

    def check_and_reload(self):
        """Validate the mono scene before entering the multi tab; stay on Mono tab on error"""
        if not self.save():
            self.tab.setCurrentIndex(0)
            QMessageBox.critical(None, 'Error', 'You have duplicated suffix.', QMessageBox.Ok)
            return
        if self.mono.scene.not_connected():
            self.tab.setCurrentIndex(0)
            QMessageBox.critical(None, 'Error', 'You have disconnected nodes.', QMessageBox.Ok)
            return
        self.load(self.filename)

    def reload(self):
        """Save (best effort, result deliberately ignored) then reload the project file"""
        self.save()
        self.load(self.filename)

    def closeEvent(self, event):
        """Confirm exit when the project cannot be saved, then return to the welcome window"""
        if not self.save():
            value = QMessageBox.question(None, 'Confirm exit', 'Are you sure to exit?\n'
                                         '(The project cannot be saved because it has duplicated suffix)',
                                         QMessageBox.Ok | QMessageBox.Cancel)
            if value == QMessageBox.Cancel:
                # Fixed: without event.ignore() the window closed even though the user cancelled
                event.ignore()
                return
        self.welcome.show()
91 |
92 |
class WorkflowWelcomeWindow(QWidget):
    """Entry window offering to create a new workflow project or to load an existing one"""

    def __init__(self):
        super().__init__()
        self.window = ProjectWindow(self)

        create_button = QPushButton('Create New\nProject')
        open_button = QPushButton('Load\nExisting\nProject')
        for button in (create_button, open_button):
            button.setFixedSize(200, 150)

        create_button.clicked.connect(self.choose_left)
        open_button.clicked.connect(self.choose_right)

        button_layout = QHBoxLayout()
        button_layout.addWidget(create_button)
        button_layout.addWidget(open_button)
        self.setLayout(button_layout)
        self.setWindowTitle('PyTelTools :: Workflow interface')

        # Selection state filled by the file dialogs below
        self.new = False
        self.filename = ''

    def choose_left(self):
        """Ask for a new project file name, then open it as a fresh project"""
        filename, _ = QFileDialog.getSaveFileName(None, 'Choose the project file name', '',
                                                  'All Files (*)',
                                                  options=QFileDialog.Options() | QFileDialog.DontUseNativeDialog)
        if not filename:
            return
        self.new = True
        self.filename = filename
        self.open_project()

    def choose_right(self):
        """Ask for an existing project file, then open it"""
        filename, _ = QFileDialog.getOpenFileName(None, 'Choose the project file', '', 'All files (*)',
                                                  options=QFileDialog.Options() | QFileDialog.DontUseNativeDialog)
        if not filename:
            return
        self.new = False
        self.filename = filename
        self.open_project()

    def open_project(self):
        """Open the selected project (creating it first for a new one) and hide this window"""
        if self.new:
            self.window.mono.scene.reinit()
            self.window.multi.scene.reinit()
            self.window.create_new(self.filename)
            self.window.tab.setCurrentIndex(0)
            self.window.showMaximized()
            self.hide()
            return
        if not self.window.load(self.filename):
            QMessageBox.critical(None, 'Error', 'The project file is not valid.', QMessageBox.Ok)
            return
        self.window.tab.setCurrentIndex(0)
        self.window.showMaximized()
        self.hide()
149 |
150 |
def exception_hook(exctype, value, traceback):
    """!
    @brief Needed for suppressing traceback silencing in newer version of PyQt5
    """
    # Delegate to the original hook (saved on sys at startup), then abort with a failure code
    original_hook = sys._excepthook
    original_hook(exctype, value, traceback)
    sys.exit(1)
157 |
158 |
if __name__ == '__main__':
    # suppress explicitly traceback silencing
    sys._excepthook = sys.excepthook
    sys.excepthook = exception_hook

    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--workspace', help='workflow project file')
    parser.add_argument('-v', '--verbose', action='store_true')
    # Fixed: the original called parser.parse_args() twice and discarded the first result
    args = parser.parse_args()

    if args.verbose:
        logger.setLevel(logging.DEBUG)

    app = QApplication(sys.argv)
    widget = WorkflowWelcomeWindow()

    # Try to open the project given on the command line; fall back to the welcome window
    loaded = False
    if args.workspace is not None:
        if os.path.exists(args.workspace):
            window = widget.window
            if window.load(args.workspace):
                window.tab.setCurrentIndex(0)
                window.showMaximized()
                loaded = True
            else:
                QMessageBox.critical(None, 'Error', 'The project file is not valid.', QMessageBox.Ok)
        else:
            QMessageBox.critical(None, 'Error', "The project file '%s' could not be found." % args.workspace,
                                 QMessageBox.Ok)

    if not loaded:
        widget.show()

    app.exec_()
195 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | coloredlogs
2 | descartes
3 | matplotlib
4 | numpy
5 | pyproj
6 | PyQt5
7 | pyshp>=2.0
8 | pytest
9 | Rtree
10 | scipy
11 | shapely
12 | simple-settings
13 | tqdm
14 | unittest2
15 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from glob import glob
4 | from setuptools import find_packages, setup
5 |
6 | from pyteltools import VERSION
7 |
8 |
EXCLUDE_FROM_PACKAGES = ['cli', 'tests']


# Runtime dependencies are maintained in a single place: requirements.txt
with open('requirements.txt') as f:
    requirements = f.read().splitlines()

# Command line scripts to install: every cli/*.py except package markers
cli_files = [script for script in glob('cli/*.py') if not script.endswith('__init__.py')]

setup(
    name='PyTelTools',
    version=VERSION,
    author='Luc Duron',
    author_email='l.duron@cnr.tm.fr',
    packages=find_packages(exclude=EXCLUDE_FROM_PACKAGES),
    include_package_data=True,  # see data files in `MANIFEST.in`
    scripts=cli_files,
    install_requires=requirements,
    description='Python library for Telemac post-processing tasks',
    url='https://github.com/CNR-Engineering/PyTelTools',
)
32 |
--------------------------------------------------------------------------------