├── .github
│   └── workflows
│       └── python-publish.yml
├── .gitignore
├── .readthedocs.yml
├── LICENSE
├── MANIFEST.in
├── README.md
├── ToDo.md
├── docs
│   ├── Makefile
│   ├── _config.yml
│   ├── make.bat
│   ├── requirements.txt
│   └── source
│       ├── api_reference.rst
│       ├── conf.py
│       ├── index.rst
│       ├── intro.rst
│       ├── notebook.ipynb
│       ├── out.png
│       └── tutorial.ipynb
├── examples
│   └── notebooks
│       ├── Gamma1_Gamma2_tutorial.ipynb
│       ├── Getting_Started.ipynb
│       ├── VortexFittingTutorial.ipynb
│       ├── example_methods.ipynb
│       ├── load_txt_plot_vorticity.ipynb
│       ├── parse_header.ipynb
│       ├── test_4columns.ipynb
│       ├── test_graphics.ipynb
│       ├── test_interpolate_nan.ipynb
│       ├── test_reading_different_files.ipynb
│       ├── test_strain.ipynb
│       ├── test_urban_canopy.ipynb
│       ├── vortex.ipynb
│       └── example_read_VC7.ipynb
├── mypy.ini
├── pivpy
│   ├── __init__.py
│   ├── compute_funcs.py
│   ├── data
│   │   ├── Insight
│   │   │   ├── Run000001.T000.D000.P000.H001.L.vec
│   │   │   ├── Run000002.T000.D000.P000.H001.L.vec
│   │   │   ├── Run000003.T000.D000.P000.H001.L.vec
│   │   │   ├── Run000004.T000.D000.P000.H001.L.vec
│   │   │   └── Run000005.T000.D000.P000.H001.L.vec
│   │   ├── PIVMAT_jet
│   │   │   ├── B00001.VC7
│   │   │   ├── B00002.VC7
│   │   │   ├── B00003.VC7
│   │   │   └── readme.txt
│   │   ├── PIV_Challenge
│   │   │   ├── B00001.txt
│   │   │   └── B00002.txt
│   │   ├── day2
│   │   │   ├── day2a005000.T000.D000.P003.H001.L.vec
│   │   │   ├── day2a005001.T000.D000.P003.H001.L.vec
│   │   │   ├── day2a005002.T000.D000.P003.H001.L.vec
│   │   │   ├── day2a005003.T000.D000.P003.H001.L.vec
│   │   │   ├── day2a005004.T000.D000.P003.H001.L.vec
│   │   │   └── day2a005005.T000.D000.P003.H001.L.vec
│   │   ├── interTest
│   │   │   └── testInterCreates_nc.nc
│   │   ├── openpiv_txt
│   │   │   ├── Gamma1_Gamma2_tutorial_notebook
│   │   │   │   ├── OpenPIVtxtFilePair0.txt
│   │   │   │   ├── OpenPIVtxtFilePair1.txt
│   │   │   │   └── StrongVortex
│   │   │   │       ├── A
│   │   │   │       │   ├── PIVchallengeCaseAframeA.jpg
│   │   │   │       │   ├── PIVchallengeCaseAframeB.jpg
│   │   │   │       │   ├── PIVchallengeCaseAvelField.txt
│   │   │   │       │   └── readme.txt
│   │   │   │       ├── B
│   │   │   │       │   ├── PIVchallengeCaseBframeA.jpg
│   │   │   │       │   ├── PIVchallengeCaseBframeB.jpg
│   │   │   │       │   ├── PIVchallengeCaseBvelField.txt
│   │   │   │       │   └── readme.txt
│   │   │   │       └── CREDIT.txt
│   │   │   ├── exp1_001_b.txt
│   │   │   └── interTest.txt
│   │   ├── openpiv_vec
│   │   │   └── exp1_001_b.vec
│   │   └── urban_canopy
│   │       ├── B00001.vc7
│   │       ├── B00002.vc7
│   │       ├── B00003.vc7
│   │       ├── B00004.vc7
│   │       └── B00005.vc7
│   ├── davis_readim.py
│   ├── graphics.py
│   ├── inter.py
│   ├── io.py
│   └── pivpy.py
├── pivpy_logo.png
├── pyproject.toml
├── requirements.txt
├── tests
│   ├── __init__.py
│   ├── test_graphics.py
│   ├── test_inter.py
│   ├── test_io.py
│   └── test_methods.py
└── try_filter.ipynb
/.github/workflows/python-publish.yml:
--------------------------------------------------------------------------------
1 | # This workflow will upload a Python Package using Twine when a release is created
2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
3 |
4 | # This workflow uses actions that are not certified by GitHub.
5 | # They are provided by a third-party and are governed by
6 | # separate terms of service, privacy policy, and support
7 | # documentation.
8 |
9 | name: Upload Python Package
10 |
11 | on:
12 | release:
13 | types: [published]
14 |
15 | permissions:
16 | contents: read
17 |
18 | jobs:
19 | deploy:
20 |
21 | runs-on: ubuntu-latest
22 |
23 | steps:
24 | - uses: actions/checkout@v3
25 | - name: Set up Python
26 | uses: actions/setup-python@v3
27 | with:
28 | python-version: '3.x'
29 | - name: Install dependencies
30 | run: |
31 | python -m pip install --upgrade pip
32 | pip install build
33 | - name: Build package
34 | run: python -m build
35 | - name: Publish package
36 | uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
37 | with:
38 | user: __token__
39 | password: ${{ secrets.PYPI_TOKEN }}
40 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | examples/data/.ipynb_checkpoints/*
3 | .pytest_cache/*
4 | .pytest_cache/*
5 | build/*
6 | dist/*
7 | PIVPy.egg-info/*
8 | tests/PIVPy.egg-info/*
9 | .vscode/
10 | docs/build/*
11 | docs/build/doctrees/*
12 | docs/build/html/*
13 | docs/source/.ipynb_checkpoints/*
14 | examples/notebooks/.ipynb_checkpoints/*
15 | pivpy.egg-info/PKG-INFO
16 | pivpy/data/openpiv/.ipynb_checkpoints/*
17 | .ipynb_checkpoints/*
18 | pivpy.egg-info/dependency_links.txt
19 | pivpy.egg-info/requires.txt
20 | pivpy.egg-info/SOURCES.txt
21 | pivpy.egg-info/top_level.txt
22 | tmp.nc
23 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yaml
2 | # Read the Docs configuration file
3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 |
5 | # Required
6 | version: 2
7 |
8 | # Set the version of Python and other tools you might need
9 | build:
10 | os: ubuntu-22.04
11 | tools:
12 | python: "3.10"
13 | # You can also specify other tool versions:
14 | # nodejs: "16"
15 | # rust: "1.55"
16 | # golang: "1.17"
17 |
18 | # Build documentation in the docs/ directory with Sphinx
19 | sphinx:
20 | configuration: docs/source/conf.py
21 |
22 | # If using Sphinx, optionally build your docs in additional formats such as PDF
23 | # formats:
24 | # - pdf
25 |
26 | # Optionally declare the Python requirements required to build your docs
27 | python:
28 | install:
29 | - requirements: docs/requirements.txt
30 | - method: pip
31 | path: .
32 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2014- Turbulence Structure Laboratory, Tel Aviv University
2 |
3 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
4 |
5 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
6 |
7 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
8 |
9 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
10 |
11 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include requirements.txt
2 | recursive-include pivpy/data *
3 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | # PIVPy
5 |
6 | Python-based post-processing and analysis of PIV data
7 |
8 |
9 | [](https://badge.fury.io/py/pivpy)
10 | [](https://pivpy.readthedocs.io/en/latest/?badge=latest)
11 | [](https://mybinder.org/v2/gh/alexlib/pivpy/master?filepath=examples%2Fnotebooks%2FGetting_Started.ipynb)
12 |
13 |
14 |
15 | Merging the three packages:
16 | 1. https://github.com/tomerast/Vecpy
17 | 2. https://github.com/alexlib/pivpy/tree/xarray
18 | 3. https://github.com/ronshnapp/vecpy
19 |
20 |
21 | ### How do I get set up? ###
22 |
23 | Use `pip`:
24 |
25 | pip install pivpy[all]
26 |
27 | to also install `lvpyio` if you work with LaVision files, or
28 |
29 | pip install pivpy
30 |
31 | if you use OpenPIV, PIVlab, etc.
32 |
33 | #### For developers, local use:
34 |
35 | git clone https://github.com/alexlib/pivpy
36 | cd pivpy
37 | conda create -n pivpy python=3.11
38 | conda activate pivpy
39 | conda install pip
40 | pip install -e .
41 |
42 |
43 | ### What packages are required and which are optional
44 |
45 | 1. `lvpyio` by LaVision Inc. if you use `.vc7` files
46 | 2. `netcdf4` if you want to store NetCDF4 files via xarray
47 | 3. `pyarrow` if you want to store parquet files
48 | 4. `vortexfitting` if you want to do vortex analysis ($\lambda_2$ and $Q$ criteria, vortex fitting)
49 | 5. `numpy`, `scipy`, `matplotlib`, `xarray` are required and are installed together with `pivpy`
50 |
51 |
52 | ### Contributors
53 |
54 | 1. @alexlib
55 | 2. @ronshnapp - original steps
56 | 3. @liorshig - LVreader and great visualization for LaVision
57 | 4. @nepomnyi - connection to VortexFitting and new algorithms
58 |
59 |
60 | ### How to get started?
61 |
62 | Look into the [getting started Jupyter notebook](https://github.com/alexlib/pivpy/blob/master/examples/notebooks/Getting_Started.ipynb)
63 |
64 | and additional notebooks:
65 | [Notebooks](https://github.com/alexlib/pivpy/blob/master/examples/notebooks/)
66 |
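A minimal usage sketch, adapted from `examples/notebooks/test_reading_different_files.ipynb` (the sample `.vec` path below is one of the files bundled in `pivpy/data`; adjust it to your own data):

```python
import pathlib
import matplotlib.pyplot as plt
from pivpy import io, pivpy  # importing pivpy.pivpy provides the .piv dataset accessor

# one of the Insight sample files shipped with the package
fname = pathlib.Path("pivpy/data/Insight/Run000001.T000.D000.P000.H001.L.vec")

# parse_header returns the variable names, units, grid size, dt, frame number
# and the loader function that matches this file type
variables, units, rows, cols, dt, frame, method = io.parse_header(fname)
ds = method(fname)  # load the file into an xarray Dataset

ds.isel(t=0).piv.quiver(arrScale=5)  # quiver plot of the first frame
plt.show()
```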
67 | ### How to test? ###
68 |
69 | From a command line just use:
70 |
71 | pip install pytest
72 | pytest
73 |
74 | ### Documentation on Github:
75 |
76 | [PIVPy on ReadTheDocs](http://pivpy.readthedocs.io)
77 |
78 | ### How to help? ###
79 |
80 | Read the ToDo.md file and pick one item to work on. Use the Fork-Develop-Pull Request model to
81 | contribute.
82 |
83 | ### How to write tutorials and add those to the documentation ###
84 |
85 | Following the tutorial at http://sphinx-ipynb.readthedocs.org/en/latest/howto.html we can now
86 | prepare IPython notebooks (see /docs/source) and convert them to .rst files, then
87 |
88 | python setup.py sphinx-build
89 | sphinx-build -b html docs/source/ docs/build/html
90 |
91 | generate the ```docs/build/html``` directory with the documentation.
92 |
--------------------------------------------------------------------------------
/ToDo.md:
--------------------------------------------------------------------------------
1 | ### To Do ideas ###
2 |
3 | 1. animation of quiver, contour, pcolor
4 | 2. transfer pivmat showvec, showscal, filterf, averf, etc.
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/_config.yml:
--------------------------------------------------------------------------------
1 | theme: jekyll-theme-cayman
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | pandoc
2 | myst-docutils
3 | myst-parser
4 | sphinx
5 | ipykernel
6 | nbsphinx
7 | pivpy
8 |
--------------------------------------------------------------------------------
/docs/source/api_reference.rst:
--------------------------------------------------------------------------------
1 | .. _api_reference:
2 |
3 | API reference
4 | =============
5 |
6 | This is the complete API reference for the PIVPy package.
7 |
8 | The ``pivpy.io`` module
9 | ----------------------------------
10 | .. automodule:: pivpy.io
11 | :members:
12 |
13 | The ``pivpy.graphics`` module
14 | -----------------------------
15 | .. automodule:: pivpy.graphics
16 | :members:
17 |
18 | The ``pivpy.pivpy`` module
19 | ----------------------------------
20 | .. automodule:: pivpy.pivpy
21 | :members:
22 |
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # http://www.sphinx-doc.org/en/master/config
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | import os
14 | import sys
15 |
16 | # sys.path.insert(0, os.path.abspath('.'))
17 | # sys.path.insert(0, os.path.abspath('..'))
18 |
19 | sys.path.append(os.path.abspath("sphinxext"))
20 |
21 | # -- Project information -----------------------------------------------------
22 |
23 | project = "pivpy"
24 | copyright = "2019, Turbulence Structure Laboratory"
25 | author = "Turbulence Structure Laboratory"
26 |
27 | # The full version, including alpha/beta/rc tags
28 | release = "0.0.16"
29 |
30 |
31 | # -- General configuration ---------------------------------------------------
32 |
33 | # Add any Sphinx extension module names here, as strings. They can be
34 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
35 | # ones.
36 | extensions = [
37 | "sphinx.ext.duration",
38 | "sphinx.ext.doctest",
39 | "sphinx.ext.autodoc",
40 | "sphinx.ext.autosummary",
41 | "sphinx.ext.intersphinx",
42 | "sphinx.ext.mathjax",
43 | "nbsphinx",
44 | "sphinx.ext.napoleon",
45 | "sphinx.ext.viewcode",
46 | "myst_parser"
47 | ]
48 |
49 |
50 | # Add any paths that contain templates here, relative to this directory.
51 | templates_path = ["_templates"]
52 |
53 | # List of patterns, relative to source directory, that match files and
54 | # directories to ignore when looking for source files.
55 | # This pattern also affects html_static_path and html_extra_path.
56 | exclude_patterns = ["_build"]
57 |
58 |
59 | # -- Options for HTML output -------------------------------------------------
60 |
61 | # The theme to use for HTML and HTML Help pages. See the documentation for
62 | # a list of builtin themes.
63 | #
64 | html_theme = "alabaster"
65 |
66 | # Add any paths that contain custom static files (such as style sheets) here,
67 | # relative to this directory. They are copied after the builtin static files,
68 | # so a file named "default.css" will overwrite the builtin "default.css".
69 | html_static_path = ["_static"]
70 |
71 | master_doc = "index"
72 |
73 | source_suffix = {
74 | ".rst": "restructuredtext",
75 | ".txt": "restructuredtext",
76 | ".md": "markdown",
77 | }
78 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../../README.md
2 | :parser: myst_parser.sphinx_
3 |
4 | PIVPy
5 | =====
6 |
7 |
8 | .. toctree::
9 | :maxdepth: 2
10 |
11 | Home
12 | intro
13 | notebook
14 | tutorial
15 | api_reference
16 |
17 |
18 |
19 |
20 | Indices and tables
21 | ==================
22 |
23 | * :ref:`genindex`
24 | * :ref:`modindex`
25 | * :ref:`search`
26 |
27 |
--------------------------------------------------------------------------------
/docs/source/intro.rst:
--------------------------------------------------------------------------------
1 | =========================
2 | PIVPy introduction
3 | =========================
4 |
5 | ----------------------
6 | History and motivation
7 | ----------------------
8 |
9 |
10 | PIVPy is inspired by Frédéric Moisy's Matlab package PIVMAT: http://www.fast.u-psud.fr/pivmat/
11 |
12 |
13 | Furthermore, although OpenPIV is our main PIV analysis tool, we work with multiple software vendors in
14 | our research projects: TSI, LaVision, Dantec, PIVLab, etc. PIVPy, like PIVMAT, reads all these
15 | file formats and unifies the post-processing. The main underlying Python packages are:
16 | - xarray
17 | - numpy
18 | - scipy
19 | - matplotlib
20 | - jupyter
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/docs/source/out.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexlib/pivpy/aa4200c0cf19e2f49a12f799fa54b0e8cc96fab7/docs/source/out.png
--------------------------------------------------------------------------------
/examples/notebooks/parse_header.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 9,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import re\n",
10 | "from typing import Dict, Tuple\n",
11 | "\n",
12 | "\n"
13 | ]
14 | },
15 | {
16 | "cell_type": "code",
17 | "execution_count": 10,
18 | "metadata": {},
19 | "outputs": [],
20 | "source": [
21 | "\n",
22 | "def datasetauxdata_to_dict(text: str) -> Dict:\n",
23 | " result = re.findall(r\"DATASETAUXDATA \\w+=\\\"[\\w\\.]+\\\"\", text)\n",
24 | " dict = {}\n",
25 | "\n",
26 | " for item in result:\n",
27 | " pair = item.replace(\"DATASETAUXDATA \", \"\")\n",
28 | " key_value = pair.split(\"=\")\n",
29 | " dict[key_value[0]] = key_value[1].replace(\"\\\"\", \"\")\n",
30 | "\n",
31 | " # print(dict)\n",
32 | " return dict\n",
33 | "\n"
34 | ]
35 | },
36 | {
37 | "cell_type": "code",
38 | "execution_count": 11,
39 | "metadata": {},
40 | "outputs": [
41 | {
42 | "data": {
43 | "text/plain": [
44 | "{'Application': 'PIV',\n",
45 | " 'SourceImageWidth': '4008',\n",
46 | " 'SourceImageHeight': '2672',\n",
47 | " 'MicrometersPerPixelX': '85.809998',\n",
48 | " 'MicrometersPerPixelY': '85.809998',\n",
49 | " 'LengthUnit': 'mm',\n",
50 | " 'OriginInImageX': '0.000000',\n",
51 | " 'OriginInImageY': '0.000000',\n",
52 | " 'MicrosecondsPerDeltaT': '50.000000',\n",
53 | " 'TimeUnit': 'ms',\n",
54 | " 'SecondaryPeakNumber': '0',\n",
55 | " 'DewarpedImageSource': '0'}"
56 | ]
57 | },
58 | "execution_count": 11,
59 | "metadata": {},
60 | "output_type": "execute_result"
61 | }
62 | ],
63 | "source": [
64 | "text=\"\"\"\n",
65 | "TITLE=\"D:\\Experiments2021\\iai_0107_\\exp1_\\Analysis\\exp1_006014.T000.D000.P001.H000.L.vec\" VARIABLES=\"X mm\", \"Y mm\", \"U m/s\", \"V m/s\", \"CHC\", DATASETAUXDATA Application=\"PIV\" DATASETAUXDATA SourceImageWidth=\"4008\" DATASETAUXDATA SourceImageHeight=\"2672\" DATASETAUXDATA MicrometersPerPixelX=\"85.809998\" DATASETAUXDATA MicrometersPerPixelY=\"85.809998\" DATASETAUXDATA LengthUnit=\"mm\" DATASETAUXDATA OriginInImageX=\"0.000000\" DATASETAUXDATA OriginInImageY=\"0.000000\" DATASETAUXDATA MicrosecondsPerDeltaT=\"50.000000\" DATASETAUXDATA TimeUnit=\"ms\" DATASETAUXDATA SecondaryPeakNumber=\"0\" DATASETAUXDATA DewarpedImageSource=\"0\" ZONE I=124, J=82, F=POINT\n",
66 | "\"\"\"\n",
67 | "\n",
68 | "datasetauxdata_to_dict(text)"
69 | ]
70 | },
71 | {
72 | "cell_type": "code",
73 | "execution_count": 12,
74 | "metadata": {},
75 | "outputs": [],
76 | "source": [
77 | "import pathlib\n",
78 | "file = pathlib.Path('../../pivpy/data/Insight/Run000001.T000.D000.P000.H001.L.vec')"
79 | ]
80 | },
81 | {
82 | "cell_type": "code",
83 | "execution_count": 13,
84 | "metadata": {},
85 | "outputs": [
86 | {
87 | "name": "stdout",
88 | "output_type": "stream",
89 | "text": [
90 | "TITLE=\"E:\\2CM_FP500_5%G_68K\\C001H001S0015CC\\Soapfilmone\\Analysis\\Run000001.T000.D000.P000.H001.L.vec\" VARIABLES=\"X mm\", \"Y mm\", \"U m/s\", \"V m/s\", \"CHC\", DATASETAUXDATA Application=\"PIV\" DATASETAUXDATA SourceImageWidth=\"1024\" DATASETAUXDATA SourceImageHeight=\"1024\" DATASETAUXDATA MicrometersPerPixelX=\"19.530001\" DATASETAUXDATA MicrometersPerPixelY=\"19.530001\" DATASETAUXDATA LengthUnit=\"mm\" DATASETAUXDATA OriginInImageX=\"0.000000\" DATASETAUXDATA OriginInImageY=\"0.000000\" DATASETAUXDATA MicrosecondsPerDeltaT=\"2000.000000\" DATASETAUXDATA TimeUnit=\"ms\" DATASETAUXDATA SecondaryPeakNumber=\"0\" DATASETAUXDATA DewarpedImageSource=\"0\" ZONE I=63, J=63, F=POINT\n",
91 | "\n"
92 | ]
93 | },
94 | {
95 | "data": {
96 | "text/plain": [
97 | "{'Application': 'PIV',\n",
98 | " 'SourceImageWidth': '1024',\n",
99 | " 'SourceImageHeight': '1024',\n",
100 | " 'MicrometersPerPixelX': '19.530001',\n",
101 | " 'MicrometersPerPixelY': '19.530001',\n",
102 | " 'LengthUnit': 'mm',\n",
103 | " 'OriginInImageX': '0.000000',\n",
104 | " 'OriginInImageY': '0.000000',\n",
105 | " 'MicrosecondsPerDeltaT': '2000.000000',\n",
106 | " 'TimeUnit': 'ms',\n",
107 | " 'SecondaryPeakNumber': '0',\n",
108 | " 'DewarpedImageSource': '0'}"
109 | ]
110 | },
111 | "execution_count": 13,
112 | "metadata": {},
113 | "output_type": "execute_result"
114 | }
115 | ],
116 | "source": [
117 | "with open(file, \"r\") as f:\n",
118 | " header = f.readline()\n",
119 | "\n",
120 | "print(header)\n",
121 | "\n",
122 | "datasetauxdata_to_dict(header)"
123 | ]
124 | },
125 | {
126 | "cell_type": "code",
127 | "execution_count": 14,
128 | "metadata": {},
129 | "outputs": [],
130 | "source": [
131 | "def parse_header(filename: pathlib.Path)-> Tuple:\n",
132 | " \"\"\"\n",
133 | " parse_header ( filename)\n",
134 | " Parses header of the file (.vec) to get the variables (typically X,Y,U,V)\n",
135 | " and units (can be m,mm, pix/DELTA_T or mm/sec, etc.), and the size of the\n",
136 | " Dataset by the number of rows and columns.\n",
137 | " Input:\n",
138 | " filename : complete path of the file to read, pathlib.Path\n",
139 | " Returns:\n",
140 | " variables : list of strings\n",
141 | " units : list of strings\n",
142 | " rows : number of rows of the Dataset\n",
143 | " cols : number of columns of the Dataset\n",
144 | " DELTA_T : time interval between the two PIV frames in microseconds\n",
145 | " \"\"\"\n",
146 | "\n",
147 | " # defaults\n",
148 | " frame = 0\n",
149 | "\n",
150 | " # split path from the filename\n",
151 | " fname = filename.name\n",
152 | " # get the number in a filename if it's a .vec file from Insight\n",
153 | " if \".\" in fname[:-4]: # day2a005003.T000.D000.P003.H001.L.vec\n",
154 | " frame = int(re.findall(r\"\\d+\", fname.split(\".\")[0])[-1])\n",
155 | " elif \"_\" in filename[:-4]:\n",
156 | " frame = int(\n",
157 | " re.findall(r\"\\d+\", fname.split(\"_\")[1])[-1]\n",
158 | " ) # exp1_001_b.vec, .txt\n",
159 | "\n",
160 | " with open(filename,\"r\") as fid:\n",
161 | " header = fid.readline()\n",
162 | "\n",
163 | " # if the file does not have a header, can be from OpenPIV or elsewhere\n",
164 | " # return None\n",
165 | " if header[:5] != \"TITLE\":\n",
166 | " variables = [\"x\", \"y\", \"u\", \"v\"]\n",
167 | " units = [\"pix\", \"pix\", \"pix\", \"pix\"]\n",
168 | " rows = None\n",
169 | " cols = None\n",
170 | " dt = 0.0\n",
171 | " return (variables, units, rows, cols, dt, frame)\n",
172 | "\n",
173 | " header_list = (\n",
174 | " header.replace(\",\", \" \").replace(\"=\", \" \").replace('\"', \" \").split()\n",
175 | " )\n",
176 | "\n",
177 | " # get variable names, typically X,Y,U,V\n",
178 | " variables = header_list[3:12][::2]\n",
179 | "\n",
180 | " # get units - this is important if it's mm or m/s\n",
181 | " units = header_list[4:12][::2]\n",
182 | "\n",
183 | " # get the size of the PIV grid in rows x cols\n",
184 | " rows = int(header_list[-5])\n",
185 | " cols = int(header_list[-3])\n",
186 | "\n",
187 | " # this is also important to know the time interval, DELTA_T\n",
188 | " ind1 = header.find(\"MicrosecondsPerDeltaT\")\n",
189 | " dt = float(header[ind1:].split('\"')[1])\n",
190 | "\n",
191 | " return (variables, units, rows, cols, dt, frame)"
192 | ]
193 | },
194 | {
195 | "cell_type": "code",
196 | "execution_count": 15,
197 | "metadata": {},
198 | "outputs": [
199 | {
200 | "data": {
201 | "text/plain": [
202 | "(['X', 'Y', 'U', 'V', 'CHC'], ['mm', 'mm', 'm/s', 'm/s'], 63, 63, 2000.0, 1)"
203 | ]
204 | },
205 | "execution_count": 15,
206 | "metadata": {},
207 | "output_type": "execute_result"
208 | }
209 | ],
210 | "source": [
211 | "parse_header(file)"
212 | ]
213 | },
214 | {
215 | "cell_type": "code",
216 | "execution_count": 16,
217 | "metadata": {},
218 | "outputs": [
219 | {
220 | "data": {
221 | "text/plain": [
222 | "'TITLE=\"E:\\\\2CM_FP500_5%G_68K\\\\C001H001S0015CC\\\\Soapfilmone\\\\Analysis\\\\Run000001.T000.D000.P000.H001.L.vec\" VARIABLES=\"X mm\", \"Y mm\", \"U m/s\", \"V m/s\", \"CHC\", DATASETAUXDATA Application=\"PIV\" DATASETAUXDATA SourceImageWidth=\"1024\" DATASETAUXDATA SourceImageHeight=\"1024\" DATASETAUXDATA MicrometersPerPixelX=\"19.530001\" DATASETAUXDATA MicrometersPerPixelY=\"19.530001\" DATASETAUXDATA LengthUnit=\"mm\" DATASETAUXDATA OriginInImageX=\"0.000000\" DATASETAUXDATA OriginInImageY=\"0.000000\" DATASETAUXDATA MicrosecondsPerDeltaT=\"2000.000000\" DATASETAUXDATA TimeUnit=\"ms\" DATASETAUXDATA SecondaryPeakNumber=\"0\" DATASETAUXDATA DewarpedImageSource=\"0\" ZONE I=63, J=63, F=POINT\\n'"
223 | ]
224 | },
225 | "execution_count": 16,
226 | "metadata": {},
227 | "output_type": "execute_result"
228 | }
229 | ],
230 | "source": [
231 | "header"
232 | ]
233 | },
234 | {
235 | "cell_type": "code",
236 | "execution_count": null,
237 | "metadata": {},
238 | "outputs": [],
239 | "source": []
240 | }
241 | ],
242 | "metadata": {
243 | "kernelspec": {
244 | "display_name": "Python 3.8.12 64-bit",
245 | "language": "python",
246 | "name": "python3"
247 | },
248 | "language_info": {
249 | "codemirror_mode": {
250 | "name": "ipython",
251 | "version": 3
252 | },
253 | "file_extension": ".py",
254 | "mimetype": "text/x-python",
255 | "name": "python",
256 | "nbconvert_exporter": "python",
257 | "pygments_lexer": "ipython3",
258 | "version": "3.8.12"
259 | },
260 | "orig_nbformat": 4,
261 | "vscode": {
262 | "interpreter": {
263 | "hash": "03e34814937a156905f166f184e2fc1bbb91f2c043c514292aed6abea91ed019"
264 | }
265 | }
266 | },
267 | "nbformat": 4,
268 | "nbformat_minor": 2
269 | }
270 |
--------------------------------------------------------------------------------
/examples/notebooks/test_reading_different_files.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "from pivpy import io, pivpy, graphics"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": null,
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "import xarray as xr\n",
19 | "from typing import List \n",
20 | "import numpy as np\n",
21 | "import pandas as pd\n",
22 | "import pkg_resources as pkg\n",
23 | "import matplotlib.pyplot as plt\n",
24 | "%matplotlib inline\n"
25 | ]
26 | },
27 | {
28 | "cell_type": "code",
29 | "execution_count": null,
30 | "metadata": {},
31 | "outputs": [],
32 | "source": [
33 | "import pathlib\n",
34 | "\n",
35 | "path = pathlib.Path(pkg.resource_filename(\"pivpy\", \"data\"))\n",
36 | "subdirs = [x for x in sorted(path.glob('**/*')) if x.is_dir()]\n",
37 | "subdirs = [s for s in subdirs if s.stem != '.ipynb_checkpoints']\n",
38 | "\n",
39 | "test_files = []\n",
40 | "for d in subdirs:\n",
41 | " files = [x for x in sorted(d.glob('[!.]*')) if not x.is_dir() ]\n",
42 | " test_files.append(files[0])\n",
43 | " \n",
44 | "print(test_files)"
45 | ]
46 | },
47 | {
48 | "cell_type": "code",
49 | "execution_count": null,
50 | "metadata": {},
51 | "outputs": [],
52 | "source": [
53 | "from pivpy.io import *\n",
54 | "\n",
55 | "for file in test_files:\n",
56 | " #print(file)\n",
57 | " # print(file.stem)\n",
58 | " variables, units, rows, cols, dt, frame, method = parse_header(file)\n",
59 | " # print(f'frame = {frame}')\n",
60 | " print(file.stem, method)\n",
61 | " ds = method(file)\n",
62 | " plt.figure()\n",
63 | " ds.isel(t=0).piv.quiver(arrScale=5)\n",
64 | " plt.title(file.stem)\n",
65 | " \n",
66 | " \n",
67 | " if file.suffix == '.vec':\n",
68 | " try:\n",
69 | " # try another method\n",
70 | " ds = load_insight_vec_as_csv(file)\n",
71 | " plt.figure()\n",
72 | " ds.isel(t=0).piv.quiver(arrScale=5)\n",
73 | " plt.title(file.stem)\n",
74 | " except:\n",
75 | " pass\n",
76 | " elif file.suffix == '.txt':\n",
77 | " try:\n",
78 | " ds = load_openpiv_txt_as_csv(file)\n",
79 | " plt.figure()\n",
80 | " ds.isel(t=0).piv.quiver(arrScale=5)\n",
81 | " plt.title(file.stem)\n",
82 | " except:\n",
83 | " pass\n"
84 | ]
85 | }
86 | ],
87 | "metadata": {
88 | "kernelspec": {
89 | "display_name": "Python 3 (ipykernel)",
90 | "language": "python",
91 | "name": "python3"
92 | },
93 | "language_info": {
94 | "codemirror_mode": {
95 | "name": "ipython",
96 | "version": 3
97 | },
98 | "file_extension": ".py",
99 | "mimetype": "text/x-python",
100 | "name": "python",
101 | "nbconvert_exporter": "python",
102 | "pygments_lexer": "ipython3",
103 | "version": "3.8.12"
104 | },
105 | "vscode": {
106 | "interpreter": {
107 | "hash": "cc9f4a0bb10ffa66460a048df631f1f9f016c2b1c927bb3119f96382f3a023b9"
108 | }
109 | }
110 | },
111 | "nbformat": 4,
112 | "nbformat_minor": 4
113 | }
114 |
--------------------------------------------------------------------------------
/mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 | plugins = numpy.typing.mypy_plugin
--------------------------------------------------------------------------------
/pivpy/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import xarray as xr
3 |
4 | xr.set_options(keep_attrs=True, display_expand_attrs=False)
5 |
--------------------------------------------------------------------------------
/pivpy/compute_funcs.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import xarray as xr
3 |
4 |
5 | def Γ1_moving_window_function(
6 | fWin: xr.Dataset,
7 | n: int,
8 | ) -> xr.DataArray:
9 | """
10 | This is the implementation of Γ1 function given by equation 9 in L.Graftieaux, M. Michard,
11 | N. Grosjean, "Combining PIV, POD and vortex identification algorithms for the study of
12 | unsteady turbulent swirling flows", Meas.Sci.Technol., 12(2001), p.1422-1429.
13 | Γ1 function is used to identify the locations of the centers of the vortices (which are
14 | given by the Γ1 peak values within the velocity field).
15 | IMPORTANT NOTICE: even though this function, theoretically, can be used on its own,
16 | it is not supposed to. It is designed to be used with Dask for big
17 | PIV datasets. The recommendation is not to use this function on its own, but rather use
18 | Γ1 attribute of piv class of PIVPY package (example of usage in this case would be: ds.piv.Γ1())
19 | This function accepts a (2*n+1)x(2*n+1) neighborhood of one velocity vector from the
20 | entire velocity field in the form of Xarray dataset. And for this neighborhood only,
21 | it calculates the value of Γ1.
22 | Also, note this function is designed in a way that assumes point P (see the referenced
23 | article) to coincide with the center for fWin.
24 | This function works only for 2D velocity field.
25 |
26 | Args:
27 | fWin (xarray.Dataset) - a moving window of the dataset (fWin = field rolling window)
28 | n (int) - the rolling window size (n=1 means a 3x3 rolling window)
29 |
30 | Returns:
31 | xr.DataArray(Γ1) (xr.DataArray) - an xarray DataArray object with Γ1 calculated
32 | for the given rolling window
33 | """
34 | # We must convert fWin to numpy, because when this function was originally implemented
35 | # with fWin being an xr.Dataset, it was unbelievably slow! Conversion of fWin to numpy
36 | # proved to give an incredible boost in speed.
37 | # To speed things up even more I put everything in one line, which is unreadable.
38 | # Thus, to understand what is going on, here is a break-up of the line
39 | # (the names of the variables are taken from the referenced article):
40 | # PMx = fWin['xCoordinates'].to_numpy() - float(fWin['xCoordinates'][n,n])
41 | # PMy = fWin['yCoordinates'].to_numpy() - float(fWin['yCoordinates'][n,n])
42 | # PM = np.sqrt(np.add(np.square(PMx), np.square(PMy)))
43 | # u = fWin['u'].to_numpy()
44 | # v = fWin['v'].to_numpy()
45 | # U = (u**2 + v**2)**(0.5)
46 | # The external tensor product (PM ^ U) * z (see the referenced article) can be simplified as a
47 | # cross product for the case of the 2D velocity field: (PM x U). According to the rules of
48 | # cross product, for the 2D velocity field, we have (PM x U) = PM_x * v - PM_y * u. In other
49 | # words, we don't have to compute the sin given in the referenced article. But I am, still,
50 | # going to use sin down below as the variable to be consistent with the expression given
51 | # in the referenced article.
52 | # sinΘM_Γ1 = (PMx*v - PMy*u) / PM / U
53 | # Γ1 = np.nansum(sinΘM_Γ1) / (((2*n+1)**2))
54 | # And now here goes my one-liner. Note, that I didn't put PMx, PMy, u and v calculations
55 | # into my line. That's because I figured out empirically that it would slow down the calculations.
56 | # n always points to the central interrogation window (just think of it). It gives me point P.
57 | PMx = np.subtract(fWin['xCoordinates'].to_numpy(), float(fWin['xCoordinates'][n,n]))
58 | PMy = np.subtract(fWin['yCoordinates'].to_numpy(), float(fWin['yCoordinates'][n,n]))
59 | u = fWin['u'].to_numpy()
60 | v = fWin['v'].to_numpy()
61 | # Since for the case when point M coincides with point P we have a 0/0 situation, we'll
62 | # receive a warning. To temporarily suspend that warning do the following (credit goes to
63 | # https://stackoverflow.com/a/29950752/10073233):
64 | with np.errstate(divide='ignore', invalid='ignore'):
65 | Γ1 = np.nanmean(np.divide(np.subtract(np.multiply(PMx,v), np.multiply(PMy,u)), np.multiply(np.sqrt(np.add(np.square(PMx), np.square(PMy))), np.sqrt(np.add(np.square(u), np.square(v))))))
66 |
67 | return xr.DataArray(Γ1).fillna(0.0) # fillna(0) is necessary for plotting
68 |
69 |
70 | def Γ2_moving_window_function(
71 | fWin: xr.Dataset,
72 | n: int,
73 | ) -> xr.DataArray:
74 | """
75 | This is the implementation of Γ2 function given by equation 11 in L.Graftieaux, M. Michard,
76 | N. Grosjean, "Combining PIV, POD and vortex identification algorithms for the study of
77 | unsteady turbulent swirling flows", Meas.Sci.Technol., 12(2001), p.1422-1429.
78 | Γ2 function is used to identify the boundaries of the vortices in a velocity field.
79 | IMPORTANT NOTICE: even though this function, theoretically, can be used on its own,
80 | it is not supposed to. It is designed to be used with Dask for big
81 | PIV datasets. The recommendation is not to use this function on its own, but rather use
82 | Γ2 attribute of piv class of PIVPY package (example of usage in this case would be: ds.piv.Γ2())
83 | This function accepts a (2*n+1)x(2*n+1) neighborhood of one velocity vector from the
84 | entire velocity field in the form of Xarray dataset. And for this neighborhood only,
85 | it calculates the value of Γ2.
86 | Also, note this function is designed in a way that assumes point P (see the referenced
87 | article) to coincide with the center for fWin.
88 | And finally, the choice of convective velocity (see the referenced article) is made in the
89 | article: it is the average velocity within fWin.
90 | This function works only for 2D velocity field.
91 |
92 | Args:
93 | fWin (xarray.Dataset) - a moving window of the dataset (fWin = field rolling window)
94 | n (int) - the rolling window size (n=1 means a 3x3 rolling window)
95 |
96 | Returns:
97 | xr.DataArray(Γ2) (xr.DataArray) - an xarray DataArray object with Γ2 calculated
98 | for the given rolling window
99 | """
100 | # We must convert fWin to numpy, because when this function was originally implemented
101 | # with fWin being an xr.Dataset, it was unbelievably slow! Conversion of fWin to numpy
102 | # proved to give an incredible boost in speed.
103 | # To speed things up even more I put everything in one line, which is unreadable.
104 | # Thus, to understand what is going on, here is a break-up of the line
105 | # (the names of the variables are taken from the referenced article):
106 | # PMx = fWin['xCoordinates'].to_numpy() - float(fWin['xCoordinates'][n,n])
107 | # PMy = fWin['yCoordinates'].to_numpy() - float(fWin['yCoordinates'][n,n])
108 | # PM = np.sqrt(np.add(np.square(PMx), np.square(PMy)))
109 | # u = fWin['u'].to_numpy()
110 | # v = fWin['v'].to_numpy()
111 | # We are going to include point P into the calculations of velocity UP_tilde
112 | # uP_tilde = np.nanmean(u)
113 | # vP_tilde = np.nanmean(v)
114 | # uDif = u - uP_tilde
115 | # vDif = v - vP_tilde
116 | # UDif = (uDif**2 + vDif**2)**(0.5)
117 | # The external tensor product (PM ^ UDif) * z (see the referenced article) can be simplified as a
118 | # cross product for the case of the 2D velocity field: (PM x UDif). According to the rules of
119 | # cross product, for the 2D velocity field, we have (PM x UDif) = PM_x * vDif - PM_y * uDif.
120 | # I am going to use sin down below as the variable to be consistent with the expression given
121 | # for Γ1 function.
122 | # sinΘM_Γ2 = (PMx*vDif - PMy*uDif) / PM / UDif
123 | # Γ2 = np.nansum(sinΘM_Γ2) / (((2*n+1)**2))
124 | # And now here goes my one-liner. Note, that I didn't put PMx, PMy, u and v calculations
125 | # into my line. That's because I figured out empirically that it would slow down the calculations.
126 | # n always points to the central interrogation window (just think of it). It gives me point P.
127 | PMx = np.subtract(fWin['xCoordinates'].to_numpy(), float(fWin['xCoordinates'][n,n]))
128 | PMy = np.subtract(fWin['yCoordinates'].to_numpy(), float(fWin['yCoordinates'][n,n]))
129 | u = fWin['u'].to_numpy()
130 | v = fWin['v'].to_numpy()
131 | uDif = u - np.nanmean(u)
132 | vDif = v - np.nanmean(v)
133 | # Since for the case when point M coincides with point P, we have a 0/0 situation and we'll
134 | # receive a warning. To temporarily suspend that warning do the following (credit goes to
135 | # https://stackoverflow.com/a/29950752/10073233):
136 | with np.errstate(divide='ignore', invalid='ignore'):
137 | Γ2 = np.nanmean(np.divide(np.subtract(np.multiply(PMx,vDif), np.multiply(PMy,uDif)), np.multiply(np.sqrt(np.add(np.square(PMx), np.square(PMy))), np.sqrt(np.add(np.square(uDif), np.square(vDif))))))
138 |
139 | return xr.DataArray(Γ2).fillna(0.0) # fillna(0) is necessary for plotting
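# ---------------------------------------------------------------------------
# Usage note (a sketch, not part of the public API): both window functions are
# intended to be mapped over a rolling window of a full PIV field, normally via
# the PIVPy accessor mentioned in the docstrings above, e.g. ds.piv.Γ1() and
# ds.piv.Γ2().  If called directly, fWin must be a (2*n+1) x (2*n+1) window of
# a dataset carrying the variables 'xCoordinates', 'yCoordinates', 'u' and 'v',
# with the probed point P sitting at index [n, n]; the scalar returned for each
# window is the Γ value attributed to that point.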
--------------------------------------------------------------------------------
/pivpy/data/PIVMAT_jet/B00001.VC7:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexlib/pivpy/aa4200c0cf19e2f49a12f799fa54b0e8cc96fab7/pivpy/data/PIVMAT_jet/B00001.VC7
--------------------------------------------------------------------------------
/pivpy/data/PIVMAT_jet/B00002.VC7:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexlib/pivpy/aa4200c0cf19e2f49a12f799fa54b0e8cc96fab7/pivpy/data/PIVMAT_jet/B00002.VC7
--------------------------------------------------------------------------------
/pivpy/data/PIVMAT_jet/B00003.VC7:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexlib/pivpy/aa4200c0cf19e2f49a12f799fa54b0e8cc96fab7/pivpy/data/PIVMAT_jet/B00003.VC7
--------------------------------------------------------------------------------
/pivpy/data/PIVMAT_jet/readme.txt:
--------------------------------------------------------------------------------
1 | Sample Directory - PIVMat Toolbox
2 |
3 | F. Moisy
4 | _______________________________________________________________________
5 |
6 |
7 | *** JET *** (Experiment at Lab FAST, Univ. Paris-Sud, CNRS)
8 | This directory contains 3 velocity fields in the Davis 7 format (*.VC7).
9 | These velocity fields have been obtained from a 12 m/s air jet
10 | with a diameter of 15 cm. The flow is seeded by 11 microns hollow glass
11 | spheres (Sphericel), and lighted by a pulsed 25 mJ Yag laser.
12 | Pictures are taken with a 2048x2048 14 bits camera (ImagerProPlus),
13 | imaging a vertical plane of dimension 23x23 cm, using an interframe
14 | time of 40 microseconds. Velocity fields have been computed using
15 | interrogation windows of size 32x32 pixels, 50% overlap. A triangular
16 | zone on the bottom left has not been computed due to insufficient
17 | lighting.
18 |
--------------------------------------------------------------------------------
/pivpy/data/interTest/testInterCreates_nc.nc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexlib/pivpy/aa4200c0cf19e2f49a12f799fa54b0e8cc96fab7/pivpy/data/interTest/testInterCreates_nc.nc
--------------------------------------------------------------------------------
/pivpy/data/openpiv_txt/Gamma1_Gamma2_tutorial_notebook/StrongVortex/A/PIVchallengeCaseAframeA.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexlib/pivpy/aa4200c0cf19e2f49a12f799fa54b0e8cc96fab7/pivpy/data/openpiv_txt/Gamma1_Gamma2_tutorial_notebook/StrongVortex/A/PIVchallengeCaseAframeA.jpg
--------------------------------------------------------------------------------
/pivpy/data/openpiv_txt/Gamma1_Gamma2_tutorial_notebook/StrongVortex/A/PIVchallengeCaseAframeB.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexlib/pivpy/aa4200c0cf19e2f49a12f799fa54b0e8cc96fab7/pivpy/data/openpiv_txt/Gamma1_Gamma2_tutorial_notebook/StrongVortex/A/PIVchallengeCaseAframeB.jpg
--------------------------------------------------------------------------------
/pivpy/data/openpiv_txt/Gamma1_Gamma2_tutorial_notebook/StrongVortex/A/readme.txt:
--------------------------------------------------------------------------------
1 | Instructions for the analysis of case A :
2 | Loss of seeding in the core of a tip vortex
3 | 26.10.2000 christian.kaehler@dlr.de
4 |
5 | Image A001_1.tif and A001_2.tif were recorded at the DNW-LLF in order
6 | to study experimentally the wake vortex formation behind a transport
7 | aircraft (DLR ALVAST half model) in landing configuration (U=60 m/s,
8 | main flow direction is perpendicular to the light-sheet plane). The
9 | measurement position was 1.64 m behind the wing tip and the field of
10 | view is 170 mm by 140 mm. The images were selected as strong gradients,
11 | loss of image density, and varying particle image sizes are common
12 | problems for many PIV applications in large wind tunnels.
13 |
14 |
15 | Camera characteristics:
16 |
17 | Type PCO SensiCam, see http://www.pco.de
18 | Sensor technology Progressive Scan
19 | Resolution 1280 pixel x1024 pixel.
20 | Pixel size 6.7 micrometer x 6.7 micrometer
21 | Dynamic range 12 bits (Peltier cooled)
22 | Quantum efficiency typ 40 %
23 | Full well capacity 25000 e
24 | Readout noise @ 12.5MHz 7 ... 8 e
25 |
26 |
27 | The reference analysis for this case is :
28 |
29 | :ev_IS_size_x = 32;
30 | :ev_IS_size_y = 32;
31 | :ev_IS_size_unit = "pixel";
32 | :ev_IS_grid_distance_x = 16;
33 | :ev_IS_grid_distance_y = 16;
34 | :ev_IS_grid_distance_unit = "pixel";
35 | :ev_origin_of_evaluation = 16, 16;
36 | :ev_origin_of_evaluation_units = "pixel";
37 | :ev_IS_offset = 0, 0;
38 | :ev_IS_offset_units = "pixel";
39 |
40 |
41 |
42 | The mandatory data to be provided are :
43 |
44 | Raw data : A001_team_ref_raw.nc
45 |
46 | - the raw displacement field corresponding to the highest correlation peak
47 | - the value of the highest correlation peak for all interrogation positions
48 | - the correlation values used for the sub-pixel interpolation at the
49 | following locations (see table) ; eg. 3 x 3 neighbourhood
50 | - the whole correlation plane (extra file in tecplot format!) calculated
51 | at the following locations (see table)
52 |
53 | x (pixels) y (pixels)
54 | 512 640
55 | 528 528
56 | 800 512
57 | ----------------------------------------------------------------------------
58 |
59 |
--------------------------------------------------------------------------------
/pivpy/data/openpiv_txt/Gamma1_Gamma2_tutorial_notebook/StrongVortex/B/PIVchallengeCaseBframeA.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexlib/pivpy/aa4200c0cf19e2f49a12f799fa54b0e8cc96fab7/pivpy/data/openpiv_txt/Gamma1_Gamma2_tutorial_notebook/StrongVortex/B/PIVchallengeCaseBframeA.jpg
--------------------------------------------------------------------------------
/pivpy/data/openpiv_txt/Gamma1_Gamma2_tutorial_notebook/StrongVortex/B/PIVchallengeCaseBframeB.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexlib/pivpy/aa4200c0cf19e2f49a12f799fa54b0e8cc96fab7/pivpy/data/openpiv_txt/Gamma1_Gamma2_tutorial_notebook/StrongVortex/B/PIVchallengeCaseBframeB.jpg
--------------------------------------------------------------------------------
/pivpy/data/openpiv_txt/Gamma1_Gamma2_tutorial_notebook/StrongVortex/B/readme.txt:
--------------------------------------------------------------------------------
1 | Instructions for the analysis of case B:
2 | Strong vortex
3 | 28.10.2000 okamoto@tokai.t.u-tokyo.ac.jp
4 |
5 | Flow Field
6 | * The flow field was strong vortex generated on the computer.
7 | * The field parameter is not known.
8 |
9 | Image generation
10 | * The particle scatter was assumed to be the gaussian distribution.
11 | * The oclusion particle was considered.
12 | The intensity of the particles were linerly added.
13 | * Fill factor was assumed to be 0.7.
14 | * Particle diameters were randomly determined.
15 | The histogram of the particle diameter was gaussian distribution.
16 | * Laser light sheet intensity was assumed to be gaussian.
17 | Maximum out-of-plane velocity was set to be about 30% of
18 | laser light thickness.
19 |
20 |
21 | The reference analysis for this case is :
22 |
23 | :ev_IS_size_x = 32;
24 | :ev_IS_size_y = 32;
25 | :ev_IS_size_unit = "pixel";
26 | :ev_IS_grid_distance_x = 16;
27 | :ev_IS_grid_distance_y = 16;
28 | :ev_IS_grid_distance_unit = "pixel";
29 | :ev_origin_of_evaluation = 16, 16;
30 | :ev_origin_of_evaluation_units = "pixel";
31 | :ev_IS_offset = 0, 0;
32 | :ev_IS_offset_units = "pixel";
33 | :ev_cf_fill_ratio = 0.7;
34 |
35 |
36 |
37 | The mandatory data to be provided are :
38 |
39 | Raw data : B00?_team_ref_raw.nc
40 |
41 | - Location of the Vortex center (pixel unit)
42 | - Vorticity at the vortex center (pixel/pixel unit)
43 | - Circulation (Gamma) at infinity (pixel^2 unit)
44 |
45 |
46 |
--------------------------------------------------------------------------------
/pivpy/data/openpiv_txt/Gamma1_Gamma2_tutorial_notebook/StrongVortex/CREDIT.txt:
--------------------------------------------------------------------------------
1 | The images in these folders (A and B) are copied from the PIV challenged website, cases A and B (https://www.pivchallenge.org/pub/index.html#a). The credit for case A images goes to Christian Kaehler (christian.kaehler@dlr.de), the credit for case B goes to Dr. K. Okamoto (okamoto@tokai.t.u-tokyo.ac.jp).
2 |
--------------------------------------------------------------------------------
/pivpy/data/openpiv_txt/exp1_001_b.txt:
--------------------------------------------------------------------------------
1 | 16 16 -0.2660 0.0000 51.1181
2 | 32 16 -0.2141 5.3176 46.4536
3 | 48 16 -0.4586 5.7992 55.6021
4 | 64 16 -0.5912 6.0680 47.6212
5 | 80 16 -0.2630 6.1598 65.8990
6 | 96 16 -0.1837 6.1231 70.8474
7 | 112 16 -0.2679 6.0351 58.2946
8 | 128 16 -0.2422 5.8967 49.7150
9 | 144 16 -0.0622 5.7204 34.5564
10 | 160 16 0.4454 5.6087 41.3563
11 | 176 16 0.6786 6.0622 47.7402
12 | 192 16 0.6172 5.8701 46.2573
13 | 208 16 0.7675 5.8869 65.0185
14 | 224 16 0.9790 5.9975 65.6787
15 | 240 16 0.7577 5.5986 35.6282
16 | 256 16 0.2276 4.9226 44.3142
17 | 272 16 0.1492 5.0652 45.3810
18 | 288 16 -0.0808 6.1374 56.8346
19 | 304 16 -0.0227 6.1563 52.7018
20 | 320 16 0.1914 6.0492 72.5240
21 | 336 16 -0.1206 6.0055 84.4970
22 | 352 16 0.3862 0.0000 80.1964
23 | 368 16 -0.2476 5.5832 77.3720
24 | 384 16 -0.0213 5.7747 77.9097
25 | 400 16 -0.2147 5.8245 64.0071
26 | 416 16 -0.3278 5.3193 63.5844
27 | 432 16 -0.1431 5.1407 76.1815
28 | 448 16 -0.0173 5.6647 52.5568
29 | 464 16 -0.0477 5.8587 47.8998
30 | 480 16 -0.0650 0.0000 44.2146
31 | 16 32 0.0064 5.2251 50.8890
32 | 32 32 -0.0452 5.4580 48.9003
33 | 48 32 -0.2837 5.8483 44.2996
34 | 64 32 -0.2052 6.1281 54.3368
35 | 80 32 -0.1307 6.1289 80.3487
36 | 96 32 -0.0326 6.0519 74.2154
37 | 112 32 -0.0088 6.0541 63.6525
38 | 128 32 0.2888 6.0093 50.6823
39 | 144 32 0.3962 5.7483 35.7608
40 | 160 32 0.2854 5.7557 42.1634
41 | 176 32 0.3748 5.8814 57.5531
42 | 192 32 0.1709 5.9295 57.5336
43 | 208 32 0.2072 5.6714 43.5565
44 | 224 32 0.6548 5.9067 48.6017
45 | 240 32 0.0671 5.2663 48.4627
46 | 256 32 -0.3258 4.8332 58.4985
47 | 272 32 -0.3548 4.8791 63.9009
48 | 288 32 -0.2062 5.6049 66.2173
49 | 304 32 -0.1466 5.7260 71.6143
50 | 320 32 0.0721 5.6302 54.5876
51 | 336 32 0.1054 5.9497 71.3703
52 | 352 32 -0.1260 5.9474 73.8987
53 | 368 32 -0.3777 5.6985 68.0953
54 | 384 32 -0.3922 5.6590 55.7711
55 | 400 32 -0.2208 5.7520 53.9397
56 | 416 32 -0.2667 5.1896 66.9407
57 | 432 32 -0.2931 5.1421 69.0080
58 | 448 32 0.1729 5.8981 58.2380
59 | 464 32 0.2029 6.1702 58.3279
60 | 480 32 -0.6287 5.8920 31.8780
61 | 16 48 -0.4655 5.2019 58.2460
62 | 32 48 -0.5002 5.4836 49.5299
63 | 48 48 -0.2977 5.9401 46.8446
64 | 64 48 -0.0723 6.1756 64.6580
65 | 80 48 0.0786 6.2294 59.7890
66 | 96 48 0.1047 6.1059 66.4062
67 | 112 48 0.1668 6.0707 69.0342
68 | 128 48 0.4799 5.8547 59.1761
69 | 144 48 0.3254 5.7814 62.7016
70 | 160 48 0.2867 5.7473 55.5559
71 | 176 48 0.6210 5.8875 52.3961
72 | 192 48 0.6193 6.2079 41.9424
73 | 208 48 0.5715 6.1286 30.8576
74 | 224 48 -0.1336 6.0203 49.2268
75 | 240 48 -0.2729 5.2728 39.1695
76 | 256 48 -0.4753 4.7960 55.7315
77 | 272 48 -0.6022 4.8062 55.6717
78 | 288 48 -0.4667 5.1426 50.9104
79 | 304 48 -0.2287 5.2272 67.2784
80 | 320 48 -0.1121 5.3400 65.8739
81 | 336 48 -0.0420 5.6569 61.3309
82 | 352 48 0.0849 6.0505 79.6654
83 | 368 48 -0.0731 5.8903 86.0525
84 | 384 48 -0.2801 5.6874 70.4638
85 | 400 48 -0.2899 5.5411 50.8946
86 | 416 48 -0.3216 5.2359 65.0437
87 | 432 48 -0.3299 5.2054 67.9761
88 | 448 48 -0.3523 5.4542 44.1385
89 | 464 48 -0.1810 0.0000 37.6503
90 | 480 48 -0.3744 6.6045 35.3481
91 | 16 64 -0.7385 5.0593 59.5238
92 | 32 64 -0.7008 5.4059 54.7859
93 | 48 64 -0.9805 5.9006 56.3929
94 | 64 64 -0.9263 6.0362 47.2399
95 | 80 64 -0.1637 6.1761 52.2556
96 | 96 64 0.0151 6.1211 78.1697
97 | 112 64 0.1124 6.0504 77.1044
98 | 128 64 0.3338 5.9139 70.5350
99 | 144 64 0.4800 5.6979 52.4861
100 | 160 64 0.8346 5.1059 50.4549
101 | 176 64 0.6858 4.9009 36.7723
102 | 192 64 0.4987 5.9797 29.1122
103 | 208 64 -0.3838 5.8055 55.7042
104 | 224 64 -0.6037 5.6838 63.8988
105 | 240 64 -0.3047 5.1405 57.7131
106 | 256 64 -0.6022 4.8513 63.4930
107 | 272 64 -0.8022 4.8721 54.9374
108 | 288 64 -0.6288 5.1016 55.0225
109 | 304 64 -0.5329 5.0973 58.9885
110 | 320 64 -0.3274 5.3054 63.4912
111 | 336 64 -0.3195 5.3638 62.2323
112 | 352 64 0.0163 5.8789 72.8784
113 | 368 64 0.0351 5.9212 83.7439
114 | 384 64 -0.1466 5.7818 70.8392
115 | 400 64 -0.3353 5.6378 51.8190
116 | 416 64 -0.4600 5.1945 57.3202
117 | 432 64 -0.4982 5.1622 59.4559
118 | 448 64 -0.5834 5.3690 36.0925
119 | 464 64 -0.3970 6.8731 30.9554
120 | 480 64 -0.2853 6.6865 32.7726
121 | 16 80 0.0709 5.1342 69.9682
122 | 32 80 -0.1251 5.3110 51.4841
123 | 48 80 -0.8699 5.8676 69.5896
124 | 64 80 -0.7932 5.9873 62.2195
125 | 80 80 -0.0378 5.4531 50.1021
126 | 96 80 0.0781 5.5939 62.0878
127 | 112 80 0.2746 5.6289 65.3885
128 | 128 80 0.4857 5.5005 48.2245
129 | 144 80 0.7461 5.2295 54.0916
130 | 160 80 1.0009 5.0488 71.9296
131 | 176 80 0.7769 5.2617 50.0544
132 | 192 80 0.2014 0.0000 39.1063
133 | 208 80 -0.6664 5.7750 43.1688
134 | 224 80 -0.3054 5.4495 55.4279
135 | 240 80 -0.1018 5.3874 55.2668
136 | 256 80 -0.5186 4.8639 49.3664
137 | 272 80 -0.4082 4.9106 48.8141
138 | 288 80 -0.4252 4.9162 55.9226
139 | 304 80 -0.4223 5.0897 57.4123
140 | 320 80 -0.2758 5.4734 47.8075
141 | 336 80 -0.4578 5.5731 53.1943
142 | 352 80 -0.3256 5.6820 59.8204
143 | 368 80 -0.2372 5.9677 64.4187
144 | 384 80 -0.3192 6.1168 69.8310
145 | 400 80 -0.3613 5.9959 55.0152
146 | 416 80 0.0249 5.1259 50.3079
147 | 432 80 -0.1776 5.2281 61.7613
148 | 448 80 -0.5675 5.7967 55.4182
149 | 464 80 -0.6098 6.2219 47.8443
150 | 480 80 -0.1962 6.0653 46.9738
151 | 16 96 0.1161 5.0734 47.7976
152 | 32 96 0.1216 5.3353 35.6936
153 | 48 96 0.1049 5.3309 38.0820
154 | 64 96 0.0201 5.3996 60.3155
155 | 80 96 0.0926 5.2116 73.2102
156 | 96 96 0.3167 5.0949 70.5032
157 | 112 96 0.5662 5.2474 61.5167
158 | 128 96 0.6800 5.1259 75.0395
159 | 144 96 0.5406 4.9486 77.9302
160 | 160 96 0.5343 5.0728 68.2106
161 | 176 96 0.2285 5.8365 56.0530
162 | 192 96 0.1057 6.2786 55.0662
163 | 208 96 0.0355 6.0533 54.7341
164 | 224 96 0.0740 5.5984 47.8343
165 | 240 96 -0.0498 5.5599 52.9922
166 | 256 96 -0.3739 5.1663 53.0011
167 | 272 96 -0.2880 4.9432 50.0684
168 | 288 96 -0.5060 4.9439 60.4973
169 | 304 96 -0.4924 5.0473 52.3125
170 | 320 96 -0.6698 5.5144 53.5522
171 | 336 96 -0.6881 5.6059 67.6154
172 | 352 96 -0.4882 5.7390 28.5008
173 | 368 96 -0.7915 5.7603 39.8743
174 | 384 96 -0.5084 5.7725 52.0187
175 | 400 96 0.1458 5.3867 64.4981
176 | 416 96 0.2476 5.1994 62.3539
177 | 432 96 0.0538 5.3192 55.5802
178 | 448 96 -0.3632 5.7312 50.2395
179 | 464 96 -0.2593 5.8910 51.1988
180 | 480 96 -0.0720 5.5690 35.3455
181 | 16 112 0.0422 4.9986 52.8963
182 | 32 112 -0.0666 5.6660 59.4415
183 | 48 112 -0.1273 5.5896 49.1123
184 | 64 112 0.0723 5.5320 38.1991
185 | 80 112 0.5504 5.1496 41.7486
186 | 96 112 0.7096 5.1277 55.5575
187 | 112 112 0.8148 5.3060 50.0394
188 | 128 112 0.7320 5.2277 59.3002
189 | 144 112 0.4240 5.0620 56.5876
190 | 160 112 0.2032 5.1877 57.7365
191 | 176 112 0.0494 6.2009 58.3323
192 | 192 112 -0.0598 6.3403 53.1708
193 | 208 112 -0.1044 5.9932 49.4238
194 | 224 112 -0.1182 5.7949 60.5025
195 | 240 112 -0.1755 5.6541 63.7128
196 | 256 112 -0.4813 5.2914 54.7768
197 | 272 112 -0.2851 4.8089 50.9113
198 | 288 112 -0.3041 4.8402 55.0572
199 | 304 112 -0.2678 4.7911 48.2611
200 | 320 112 -0.5532 5.2881 55.9914
201 | 336 112 -0.7162 5.4427 64.8990
202 | 352 112 -0.9156 5.6392 59.7510
203 | 368 112 -0.7979 5.6964 53.7576
204 | 384 112 -0.5185 5.2893 41.6099
205 | 400 112 -0.2262 4.9300 50.7459
206 | 416 112 -0.1667 5.3487 46.7669
207 | 432 112 -0.6834 5.5993 47.8077
208 | 448 112 -0.4467 5.6145 41.5128
209 | 464 112 -0.2503 5.6618 60.5462
210 | 480 112 -0.2896 5.2969 53.2187
211 | 16 128 -0.3406 5.1627 38.6863
212 | 32 128 0.0394 5.7058 59.8673
213 | 48 128 -0.0935 5.8704 54.4378
214 | 64 128 0.0371 5.9148 58.6590
215 | 80 128 0.3022 5.8286 53.7933
216 | 96 128 0.1984 5.6698 47.1724
217 | 112 128 0.3191 5.5812 42.2102
218 | 128 128 0.4118 5.5829 43.6476
219 | 144 128 0.1636 5.6212 62.7516
220 | 160 128 -0.2118 5.7357 43.5904
221 | 176 128 -0.0068 0.0000 34.7308
222 | 192 128 -0.0035 6.2362 63.4686
223 | 208 128 -0.0231 6.1844 62.1597
224 | 224 128 -0.1613 6.1338 62.5439
225 | 240 128 -0.2006 5.8065 56.3123
226 | 256 128 -0.0149 5.4621 42.4235
227 | 272 128 -0.0371 4.7212 53.8304
228 | 288 128 -0.0736 4.7946 56.9295
229 | 304 128 -0.3203 4.5530 47.8522
230 | 320 128 -0.4787 4.7652 46.6277
231 | 336 128 -0.2402 5.4391 56.2421
232 | 352 128 -0.2307 5.5064 58.6148
233 | 368 128 -0.5473 5.1417 41.5416
234 | 384 128 -0.7953 5.1070 53.8764
235 | 400 128 -0.9090 5.5516 55.7792
236 | 416 128 -0.7632 5.6729 51.6068
237 | 432 128 -0.3900 5.2423 43.6209
238 | 448 128 -0.3047 5.0729 60.4482
239 | 464 128 -0.3450 5.2726 56.3936
240 | 480 128 -0.3408 5.2755 60.9409
241 | 16 144 -0.6770 5.1447 39.6429
242 | 32 144 0.0658 5.7065 52.1289
243 | 48 144 0.0989 5.9948 62.7155
244 | 64 144 0.1594 5.8616 53.5199
245 | 80 144 -0.0208 5.5039 44.3911
246 | 96 144 -0.0676 5.6624 49.1162
247 | 112 144 0.0345 5.5074 45.0069
248 | 128 144 0.1864 5.3700 45.0706
249 | 144 144 -0.0975 5.7869 58.4125
250 | 160 144 -0.7475 6.0953 64.0778
251 | 176 144 -0.8534 6.2139 69.1094
252 | 192 144 -0.1849 5.9691 59.5815
253 | 208 144 -0.0590 6.0081 79.9533
254 | 224 144 0.1492 5.9567 79.2342
255 | 240 144 0.2949 5.7268 64.5413
256 | 256 144 0.4353 5.3682 45.2594
257 | 272 144 0.1805 4.6946 45.0206
258 | 288 144 -0.2376 4.8287 55.5592
259 | 304 144 -0.6585 4.7478 65.4530
260 | 320 144 -0.8702 4.7243 67.0225
261 | 336 144 -0.5227 4.7978 50.1696
262 | 352 144 -0.1937 5.3971 55.8021
263 | 368 144 -0.4382 5.4220 42.9269
264 | 384 144 -0.4115 5.4681 32.1767
265 | 400 144 -0.2793 5.5809 52.9775
266 | 416 144 -0.1413 5.4871 51.3583
267 | 432 144 0.0258 5.0589 47.1058
268 | 448 144 -0.1072 4.9170 72.1918
269 | 464 144 -0.2023 5.0517 67.7849
270 | 480 144 -0.2530 5.2184 60.3220
271 | 16 160 -0.9754 4.8125 54.9724
272 | 32 160 -0.7306 4.9266 44.6643
273 | 48 160 -0.1235 5.9868 85.5887
274 | 64 160 0.0146 5.8070 63.5004
275 | 80 160 0.0910 5.3332 63.0269
276 | 96 160 0.0820 5.3529 63.0063
277 | 112 160 0.0564 5.3364 59.4003
278 | 128 160 -0.0652 5.3556 50.6909
279 | 144 160 -0.3074 5.6434 45.6468
280 | 160 160 -0.5193 5.9199 37.4299
281 | 176 160 -0.5098 5.7307 37.5054
282 | 192 160 -0.4049 5.6111 43.8793
283 | 208 160 -0.3096 5.7634 51.4526
284 | 224 160 0.1639 5.8264 71.8900
285 | 240 160 0.2998 5.7313 74.5491
286 | 256 160 0.4334 5.2237 62.0465
287 | 272 160 0.2729 4.8203 64.3813
288 | 288 160 -0.4021 4.7949 46.3945
289 | 304 160 -0.7353 4.8530 58.0406
290 | 320 160 -0.8973 4.7342 73.6246
291 | 336 160 -0.9429 4.7596 76.0339
292 | 352 160 -0.8108 5.0254 68.5296
293 | 368 160 -0.5593 5.0998 49.3527
294 | 384 160 -0.3743 5.0602 53.7334
295 | 400 160 -0.3153 5.2283 45.8568
296 | 416 160 0.0911 5.3078 48.7039
297 | 432 160 0.0167 5.1273 68.5061
298 | 448 160 -0.0010 4.9397 79.4898
299 | 464 160 0.0510 5.0150 81.1628
300 | 480 160 0.1160 5.0457 69.2515
301 | 16 176 -1.0731 4.7265 39.3656
302 | 32 176 -0.3648 5.3754 32.9446
303 | 48 176 -0.2206 5.4612 49.6006
304 | 64 176 0.2394 5.2173 59.5809
305 | 80 176 0.2357 5.1468 61.6475
306 | 96 176 0.1382 5.1702 56.6626
307 | 112 176 0.0735 5.3594 56.4787
308 | 128 176 -0.3254 5.5533 56.6658
309 | 144 176 -0.1501 0.0000 67.4252
310 | 160 176 -0.0058 6.0701 74.9075
311 | 176 176 0.0937 5.5931 48.1617
312 | 192 176 0.1242 5.1645 76.1594
313 | 208 176 0.1743 5.5158 49.1661
314 | 224 176 0.1905 5.7268 55.4372
315 | 240 176 0.3467 5.6808 48.6333
316 | 256 176 0.3537 5.0730 59.8030
317 | 272 176 -0.0889 4.9699 55.9956
318 | 288 176 -0.8531 5.0490 62.8180
319 | 304 176 -0.8719 4.8771 70.8287
320 | 320 176 -0.8273 4.6753 64.1508
321 | 336 176 -0.8075 4.6278 61.1485
322 | 352 176 -0.7162 4.5716 62.2615
323 | 368 176 -0.6430 4.5734 60.4824
324 | 384 176 -0.5755 4.8012 62.1145
325 | 400 176 -0.5551 4.9505 62.1056
326 | 416 176 -0.1808 5.1572 52.2497
327 | 432 176 -0.0517 5.1283 72.8003
328 | 448 176 0.0501 4.9346 86.9850
329 | 464 176 0.1011 4.8870 79.8012
330 | 480 176 0.0420 4.8546 70.8767
331 | 16 192 -1.0584 4.6749 43.2086
332 | 32 192 -0.7973 4.8093 49.8236
333 | 48 192 -0.7441 5.2135 56.2283
334 | 64 192 -0.6347 5.1567 49.7924
335 | 80 192 -0.0105 5.1709 46.6826
336 | 96 192 0.1598 5.2442 51.7202
337 | 112 192 -0.1334 5.3942 56.0792
338 | 128 192 -0.4536 5.6367 56.5837
339 | 144 192 -0.2405 5.8859 62.7588
340 | 160 192 0.0633 5.8881 77.3091
341 | 176 192 0.0302 5.5775 63.3350
342 | 192 192 -0.0023 5.2894 61.4061
343 | 208 192 0.3194 5.6590 49.6767
344 | 224 192 0.1632 5.8216 64.4586
345 | 240 192 0.1069 5.6836 52.3813
346 | 256 192 0.0884 5.3771 56.5989
347 | 272 192 -0.3301 5.1447 54.8337
348 | 288 192 -0.7684 4.9272 52.9415
349 | 304 192 -0.7886 4.7337 63.1890
350 | 320 192 -0.7926 4.6578 66.1385
351 | 336 192 -0.8067 4.6529 69.3144
352 | 352 192 -0.7035 4.6491 68.5333
353 | 368 192 -0.7293 4.6633 56.9297
354 | 384 192 -0.6559 4.7757 49.6149
355 | 400 192 -0.4141 4.7971 54.0245
356 | 416 192 -0.2129 5.0016 61.8819
357 | 432 192 -0.0726 5.0765 70.0041
358 | 448 192 0.0361 4.9974 79.1331
359 | 464 192 0.0616 4.8021 72.2203
360 | 480 192 -0.0311 4.7913 87.0358
361 | 16 208 -0.6859 4.6013 34.6090
362 | 32 208 -0.4258 4.7225 42.5834
363 | 48 208 -0.5754 5.1140 57.3562
364 | 64 208 -0.8977 5.1711 52.7958
365 | 80 208 -0.8088 4.9475 39.7291
366 | 96 208 -0.7044 5.2637 47.4013
367 | 112 208 -0.7497 5.7620 59.5666
368 | 128 208 -0.6399 6.1032 61.7170
369 | 144 208 -0.3788 5.6982 48.1300
370 | 160 208 -0.2395 5.5735 52.2425
371 | 176 208 -0.2081 5.4753 50.4366
372 | 192 208 -0.1834 5.2595 60.3038
373 | 208 208 0.0518 5.3449 45.4584
374 | 224 208 0.0492 5.6735 51.1487
375 | 240 208 -0.3026 5.5178 58.4484
376 | 256 208 -0.1529 5.3229 55.2810
377 | 272 208 -0.1739 4.9957 54.0236
378 | 288 208 -0.2163 4.7185 47.8606
379 | 304 208 -0.4185 4.6619 53.2879
380 | 320 208 -0.5887 5.0745 51.9375
381 | 336 208 -0.7071 5.2125 54.7770
382 | 352 208 -0.7922 5.0402 72.2163
383 | 368 208 -0.7992 5.0371 62.2521
384 | 384 208 -0.7599 5.0130 51.2524
385 | 400 208 -0.6036 4.8993 54.6402
386 | 416 208 -0.1665 4.8828 65.1331
387 | 432 208 -0.0500 4.9806 73.0756
388 | 448 208 0.0265 5.0231 67.3886
389 | 464 208 0.0606 4.8844 56.0320
390 | 480 208 0.0040 4.7824 71.5273
391 | 16 224 -0.0840 0.0000 38.4215
392 | 32 224 0.2530 4.7209 43.3990
393 | 48 224 0.0054 5.0770 52.8939
394 | 64 224 -0.0946 5.4262 38.8268
395 | 80 224 -0.6325 5.1901 43.5436
396 | 96 224 -0.6644 5.1689 48.7335
397 | 112 224 -0.6129 5.6253 41.6707
398 | 128 224 -0.3024 6.1597 64.2200
399 | 144 224 -0.4331 5.2086 46.3742
400 | 160 224 -0.4113 5.0656 54.4137
401 | 176 224 -0.1445 5.5454 49.2113
402 | 192 224 -0.1831 5.2078 58.5719
403 | 208 224 -0.0142 5.0976 60.4116
404 | 224 224 0.1058 5.3902 41.1214
405 | 240 224 -0.0350 5.8133 58.6442
406 | 256 224 -0.0450 5.6207 60.5875
407 | 272 224 0.1225 4.5535 39.5417
408 | 288 224 0.3254 4.3373 51.9140
409 | 304 224 0.1856 4.6772 52.8313
410 | 320 224 -0.1257 4.9474 64.9597
411 | 336 224 -0.3442 5.1445 61.7517
412 | 352 224 -0.5434 5.1023 54.8203
413 | 368 224 -0.6314 5.0112 65.7632
414 | 384 224 -0.7008 5.1517 66.9553
415 | 400 224 -0.7820 5.1082 67.1196
416 | 416 224 -0.4950 5.0227 51.5161
417 | 432 224 -0.3333 5.1172 64.3597
418 | 448 224 -0.2500 5.1974 71.5327
419 | 464 224 -0.2529 5.2684 59.3291
420 | 480 224 -0.0800 4.7845 60.3724
421 | 16 240 -0.0403 0.0000 49.2493
422 | 32 240 0.3878 4.9067 48.9877
423 | 48 240 -0.0911 5.1633 51.3906
424 | 64 240 -0.1341 5.5404 41.0196
425 | 80 240 -0.4567 5.4113 31.1567
426 | 96 240 -0.7087 5.1745 38.3841
427 | 112 240 -0.1954 5.0745 39.4395
428 | 128 240 -0.1164 5.3217 51.1129
429 | 144 240 0.1217 4.7177 44.5506
430 | 160 240 0.4478 4.8447 43.9649
431 | 176 240 0.2902 5.5620 41.5339
432 | 192 240 0.2995 5.4882 37.8493
433 | 208 240 0.1837 5.1260 52.1479
434 | 224 240 0.4063 4.9714 63.6502
435 | 240 240 0.1645 5.3380 59.3534
436 | 256 240 0.0100 5.3307 57.8375
437 | 272 240 0.2630 4.1851 55.4257
438 | 288 240 0.2935 4.1716 55.2668
439 | 304 240 0.1397 4.6236 48.6988
440 | 320 240 0.0395 4.9661 60.3977
441 | 336 240 0.0001 5.0994 69.2723
442 | 352 240 -0.1180 5.1313 60.9265
443 | 368 240 -0.3475 5.0426 57.7417
444 | 384 240 -0.3004 4.9984 53.9129
445 | 400 240 -0.2992 5.0590 51.1451
446 | 416 240 -0.3350 5.1513 72.3706
447 | 432 240 -0.2783 5.2730 75.0144
448 | 448 240 -0.1356 5.2567 71.9383
449 | 464 240 -0.1582 5.2556 66.8552
450 | 480 240 -0.2291 4.4251 42.1827
451 | 16 256 0.1133 0.0000 55.9860
452 | 32 256 0.9023 5.2478 54.6713
453 | 48 256 0.1044 5.3779 41.2308
454 | 64 256 0.0664 5.3613 36.8194
455 | 80 256 0.0000 5.4699 39.4053
456 | 96 256 0.4935 5.2149 49.4042
457 | 112 256 0.5812 4.8392 56.7306
458 | 128 256 0.8307 4.9369 58.5180
459 | 144 256 0.7345 4.5291 48.4412
460 | 160 256 0.6801 4.7601 52.0054
461 | 176 256 0.5233 5.2517 44.9008
462 | 192 256 0.6079 5.6410 39.3121
463 | 208 256 0.2809 4.9941 47.7975
464 | 224 256 -0.0298 5.0761 58.9682
465 | 240 256 -0.1943 5.1449 57.0000
466 | 256 256 -0.2420 4.8671 51.1640
467 | 272 256 -0.5109 4.1609 56.9009
468 | 288 256 -0.6167 4.3257 54.3138
469 | 304 256 -0.6800 4.4795 45.4827
470 | 320 256 -0.2271 4.9911 50.5440
471 | 336 256 -0.2049 5.2401 60.1803
472 | 352 256 -0.5907 5.1848 54.8287
473 | 368 256 -0.6740 5.0309 54.6526
474 | 384 256 -0.1441 4.9462 41.0573
475 | 400 256 -0.0735 5.0750 65.0365
476 | 416 256 -0.1140 5.1354 78.3979
477 | 432 256 0.0644 5.0520 88.3049
478 | 448 256 0.0934 5.0084 84.1060
479 | 464 256 -0.1019 5.0900 70.0286
480 | 480 256 -0.2026 4.9086 50.5502
481 | 16 272 1.1394 5.2390 52.2339
482 | 32 272 0.9359 5.3289 57.6903
483 | 48 272 0.6402 5.5312 60.3927
484 | 64 272 0.6657 5.3558 46.4200
485 | 80 272 0.6677 5.1054 49.1493
486 | 96 272 0.9104 4.8024 62.5267
487 | 112 272 0.8406 4.6705 67.1575
488 | 128 272 0.8724 4.7281 66.9460
489 | 144 272 0.7654 4.7352 62.2809
490 | 160 272 0.4341 4.8295 55.7493
491 | 176 272 0.1432 5.0413 63.3973
492 | 192 272 -0.0475 5.1024 59.6103
493 | 208 272 -0.3590 5.0053 50.6115
494 | 224 272 -0.3462 5.1451 56.1493
495 | 240 272 -0.5591 5.1373 68.0656
496 | 256 272 -0.6196 4.8120 66.3093
497 | 272 272 -0.4618 4.6174 58.4653
498 | 288 272 -0.5254 4.6693 57.1364
499 | 304 272 -0.5373 4.6528 45.9647
500 | 320 272 -0.5343 4.7557 47.5385
501 | 336 272 -0.5339 5.1415 55.9344
502 | 352 272 -0.6859 5.0322 53.3477
503 | 368 272 -0.5926 5.0226 59.1168
504 | 384 272 -0.3841 4.9982 53.1053
505 | 400 272 -0.3731 5.0280 56.6566
506 | 416 272 -0.1608 4.9593 70.4962
507 | 432 272 -0.0042 4.8795 86.2875
508 | 448 272 -0.0525 4.9046 79.6074
509 | 464 272 -0.1551 4.9126 76.7173
510 | 480 272 -0.0927 4.8744 83.3895
511 | 16 288 0.4141 0.0000 50.6960
512 | 32 288 0.9064 5.1557 52.2524
513 | 48 288 0.5838 5.3130 55.0396
514 | 64 288 0.6234 5.2902 44.9918
515 | 80 288 0.6061 4.9747 38.8801
516 | 96 288 0.7623 4.6086 44.4744
517 | 112 288 0.2836 4.6985 58.2723
518 | 128 288 0.1958 4.7204 63.4938
519 | 144 288 0.4180 4.8970 52.6428
520 | 160 288 0.3728 4.9247 50.9420
521 | 176 288 0.1228 5.1363 45.3892
522 | 192 288 0.0850 5.1921 42.8073
523 | 208 288 -0.1523 4.8170 46.4004
524 | 224 288 -0.0701 4.7534 60.4402
525 | 240 288 -0.2473 5.0569 53.7415
526 | 256 288 -0.4906 4.9397 66.4541
527 | 272 288 -0.4361 4.8189 65.5591
528 | 288 288 -0.3296 4.6785 41.2407
529 | 304 288 -0.3772 4.8700 44.3624
530 | 320 288 -0.3360 4.9373 51.7251
531 | 336 288 -0.2970 5.0087 61.9837
532 | 352 288 -0.2729 4.9802 64.3242
533 | 368 288 -0.3467 4.8930 70.0626
534 | 384 288 -0.2869 4.8720 72.7136
535 | 400 288 -0.2618 4.9333 68.1550
536 | 416 288 -0.2123 4.9312 72.0006
537 | 432 288 -0.2521 4.8315 74.4442
538 | 448 288 -0.2362 4.8136 75.0918
539 | 464 288 -0.1283 4.7249 65.6701
540 | 480 288 -0.0264 4.7636 65.6672
541 | 16 304 0.5007 0.0000 37.3625
542 | 32 304 1.2941 5.2451 34.9666
543 | 48 304 0.7590 5.3123 41.9720
544 | 64 304 0.4413 5.3162 36.9817
545 | 80 304 0.7329 4.8422 38.9633
546 | 96 304 0.6281 4.8320 39.5705
547 | 112 304 0.1429 4.9219 65.4833
548 | 128 304 -0.0470 4.9060 72.7646
549 | 144 304 0.0575 4.7817 66.0999
550 | 160 304 0.1140 4.6296 46.2446
551 | 176 304 0.0442 4.5447 45.3353
552 | 192 304 0.2107 4.8478 56.0612
553 | 208 304 0.1757 4.6214 51.1958
554 | 224 304 0.1164 4.6485 56.8246
555 | 240 304 0.1643 5.2446 59.3918
556 | 256 304 0.0046 4.9746 55.5154
557 | 272 304 0.0294 5.0045 63.8814
558 | 288 304 -0.0041 4.9986 52.5303
559 | 304 304 -0.2750 4.7873 55.9384
560 | 320 304 -0.2307 4.7703 70.4011
561 | 336 304 -0.3801 4.8281 75.4647
562 | 352 304 -0.4767 5.1388 74.3375
563 | 368 304 -0.5142 5.2192 63.7760
564 | 384 304 -0.4449 5.0279 73.6193
565 | 400 304 -0.3511 4.9611 74.0519
566 | 416 304 -0.2663 4.9347 56.5493
567 | 432 304 -0.2370 4.7709 63.4332
568 | 448 304 0.0016 4.6141 60.1646
569 | 464 304 0.2142 4.6024 61.4815
570 | 480 304 0.3521 4.6723 50.0092
571 | 16 320 0.9360 5.5031 48.0808
572 | 32 320 0.7162 5.6918 50.3185
573 | 48 320 0.7539 5.1336 46.1426
574 | 64 320 0.7603 4.6730 59.4677
575 | 80 320 1.0498 4.9602 65.8221
576 | 96 320 0.7548 5.1086 52.1524
577 | 112 320 0.1300 5.1020 71.1568
578 | 128 320 -0.1156 4.9625 77.0975
579 | 144 320 -0.0569 4.8675 79.0932
580 | 160 320 -0.0455 4.7261 60.7643
581 | 176 320 0.1795 4.4875 46.5323
582 | 192 320 0.1947 4.6553 53.7557
583 | 208 320 0.3188 4.5401 48.6664
584 | 224 320 0.1086 4.5668 49.0946
585 | 240 320 -0.1049 4.8267 56.3132
586 | 256 320 0.3458 4.6519 63.3709
587 | 272 320 0.2784 4.8801 64.4557
588 | 288 320 0.0273 4.9140 61.6293
589 | 304 320 -0.4334 4.7458 65.3551
590 | 320 320 -0.4503 4.7412 73.6247
591 | 336 320 -0.3973 4.8657 68.2134
592 | 352 320 -0.4588 5.0676 54.8998
593 | 368 320 -0.4829 5.2036 58.6884
594 | 384 320 -0.4964 5.2380 59.0459
595 | 400 320 -0.5068 5.1721 49.7864
596 | 416 320 -0.3641 4.9931 51.4353
597 | 432 320 -0.1508 4.8077 62.5661
598 | 448 320 0.0714 4.7602 63.6848
599 | 464 320 0.3239 4.8533 63.7127
600 | 480 320 0.5881 5.0504 58.7281
601 | 16 336 0.4053 0.0000 38.9827
602 | 32 336 0.6284 5.5676 41.1950
603 | 48 336 0.0881 4.8763 49.2116
604 | 64 336 0.0300 4.5275 55.4407
605 | 80 336 0.7332 5.1582 56.3783
606 | 96 336 0.5959 5.3143 55.7506
607 | 112 336 0.2676 5.1219 66.1471
608 | 128 336 0.0325 5.0475 76.5907
609 | 144 336 -0.0829 4.9741 84.3262
610 | 160 336 -0.0376 4.6990 76.3266
611 | 176 336 0.1783 4.2114 58.5704
612 | 192 336 0.3626 4.5265 50.4267
613 | 208 336 0.5715 4.7207 65.2604
614 | 224 336 0.4399 4.7704 62.8460
615 | 240 336 -0.0646 4.5986 52.1476
616 | 256 336 -0.0178 4.6366 66.1381
617 | 272 336 -0.0453 4.9274 74.2459
618 | 288 336 -0.2823 4.8546 61.2342
619 | 304 336 -0.4401 4.8206 50.8114
620 | 320 336 -0.3721 4.9350 53.8702
621 | 336 336 -0.3862 5.0728 76.0751
622 | 352 336 -0.4362 5.0584 62.6611
623 | 368 336 -0.2971 5.0987 70.3864
624 | 384 336 -0.2897 5.4030 59.6053
625 | 400 336 -0.3135 5.3965 54.2405
626 | 416 336 -0.1186 5.2520 53.5424
627 | 432 336 -0.1244 5.2028 62.6046
628 | 448 336 0.0048 5.0909 80.1851
629 | 464 336 0.2388 5.1166 76.2977
630 | 480 336 0.4834 5.1726 56.9579
631 | 16 352 0.9210 5.3338 51.6804
632 | 32 352 0.4237 5.6644 35.4753
633 | 48 352 0.0156 4.8801 38.9518
634 | 64 352 -0.1638 4.4779 57.5189
635 | 80 352 0.2961 5.1829 45.3429
636 | 96 352 0.3966 5.4163 55.9499
637 | 112 352 0.2937 5.1985 56.1881
638 | 128 352 0.1241 5.1335 73.3088
639 | 144 352 0.2530 0.0000 86.6670
640 | 160 352 0.1070 4.8219 69.5149
641 | 176 352 0.1242 4.4992 54.6415
642 | 192 352 0.1699 4.4801 64.0207
643 | 208 352 0.4217 4.7326 62.0050
644 | 224 352 0.6073 4.9082 65.5903
645 | 240 352 0.1271 4.5972 61.2682
646 | 256 352 -0.2065 4.6559 73.9387
647 | 272 352 -0.3774 4.9668 75.5837
648 | 288 352 -0.3718 4.9239 73.7407
649 | 304 352 -0.3042 4.9453 74.0412
650 | 320 352 -0.2143 5.1311 86.4621
651 | 336 352 -0.1501 5.1828 71.8513
652 | 352 352 -0.2617 5.1976 56.6460
653 | 368 352 -0.2194 5.2424 71.9666
654 | 384 352 -0.1588 5.3797 63.4101
655 | 400 352 -0.2006 5.3477 63.3137
656 | 416 352 -0.0557 5.3045 62.6248
657 | 432 352 -0.0370 5.2936 56.6157
658 | 448 352 -0.2387 5.2766 55.5884
659 | 464 352 -0.1232 5.2683 56.6393
660 | 480 352 0.1291 0.0000 65.5129
661 |
--------------------------------------------------------------------------------
/pivpy/data/openpiv_vec/exp1_001_b.vec:
--------------------------------------------------------------------------------
1 | 16 16 -0.2660 0.0000 51.1181
2 | 32 16 -0.2141 5.3176 46.4536
3 | 48 16 -0.4586 5.7992 55.6021
4 | 64 16 -0.5912 6.0680 47.6212
5 | 80 16 -0.2630 6.1598 65.8990
6 | 96 16 -0.1837 6.1231 70.8474
7 | 112 16 -0.2679 6.0351 58.2946
8 | 128 16 -0.2422 5.8967 49.7150
9 | 144 16 -0.0622 5.7204 34.5564
10 | 160 16 0.4454 5.6087 41.3563
11 | 176 16 0.6786 6.0622 47.7402
12 | 192 16 0.6172 5.8701 46.2573
13 | 208 16 0.7675 5.8869 65.0185
14 | 224 16 0.9790 5.9975 65.6787
15 | 240 16 0.7577 5.5986 35.6282
16 | 256 16 0.2276 4.9226 44.3142
17 | 272 16 0.1492 5.0652 45.3810
18 | 288 16 -0.0808 6.1374 56.8346
19 | 304 16 -0.0227 6.1563 52.7018
20 | 320 16 0.1914 6.0492 72.5240
21 | 336 16 -0.1206 6.0055 84.4970
22 | 352 16 0.3862 0.0000 80.1964
23 | 368 16 -0.2476 5.5832 77.3720
24 | 384 16 -0.0213 5.7747 77.9097
25 | 400 16 -0.2147 5.8245 64.0071
26 | 416 16 -0.3278 5.3193 63.5844
27 | 432 16 -0.1431 5.1407 76.1815
28 | 448 16 -0.0173 5.6647 52.5568
29 | 464 16 -0.0477 5.8587 47.8998
30 | 480 16 -0.0650 0.0000 44.2146
31 | 16 32 0.0064 5.2251 50.8890
32 | 32 32 -0.0452 5.4580 48.9003
33 | 48 32 -0.2837 5.8483 44.2996
34 | 64 32 -0.2052 6.1281 54.3368
35 | 80 32 -0.1307 6.1289 80.3487
36 | 96 32 -0.0326 6.0519 74.2154
37 | 112 32 -0.0088 6.0541 63.6525
38 | 128 32 0.2888 6.0093 50.6823
39 | 144 32 0.3962 5.7483 35.7608
40 | 160 32 0.2854 5.7557 42.1634
41 | 176 32 0.3748 5.8814 57.5531
42 | 192 32 0.1709 5.9295 57.5336
43 | 208 32 0.2072 5.6714 43.5565
44 | 224 32 0.6548 5.9067 48.6017
45 | 240 32 0.0671 5.2663 48.4627
46 | 256 32 -0.3258 4.8332 58.4985
47 | 272 32 -0.3548 4.8791 63.9009
48 | 288 32 -0.2062 5.6049 66.2173
49 | 304 32 -0.1466 5.7260 71.6143
50 | 320 32 0.0721 5.6302 54.5876
51 | 336 32 0.1054 5.9497 71.3703
52 | 352 32 -0.1260 5.9474 73.8987
53 | 368 32 -0.3777 5.6985 68.0953
54 | 384 32 -0.3922 5.6590 55.7711
55 | 400 32 -0.2208 5.7520 53.9397
56 | 416 32 -0.2667 5.1896 66.9407
57 | 432 32 -0.2931 5.1421 69.0080
58 | 448 32 0.1729 5.8981 58.2380
59 | 464 32 0.2029 6.1702 58.3279
60 | 480 32 -0.6287 5.8920 31.8780
61 | 16 48 -0.4655 5.2019 58.2460
62 | 32 48 -0.5002 5.4836 49.5299
63 | 48 48 -0.2977 5.9401 46.8446
64 | 64 48 -0.0723 6.1756 64.6580
65 | 80 48 0.0786 6.2294 59.7890
66 | 96 48 0.1047 6.1059 66.4062
67 | 112 48 0.1668 6.0707 69.0342
68 | 128 48 0.4799 5.8547 59.1761
69 | 144 48 0.3254 5.7814 62.7016
70 | 160 48 0.2867 5.7473 55.5559
71 | 176 48 0.6210 5.8875 52.3961
72 | 192 48 0.6193 6.2079 41.9424
73 | 208 48 0.5715 6.1286 30.8576
74 | 224 48 -0.1336 6.0203 49.2268
75 | 240 48 -0.2729 5.2728 39.1695
76 | 256 48 -0.4753 4.7960 55.7315
77 | 272 48 -0.6022 4.8062 55.6717
78 | 288 48 -0.4667 5.1426 50.9104
79 | 304 48 -0.2287 5.2272 67.2784
80 | 320 48 -0.1121 5.3400 65.8739
81 | 336 48 -0.0420 5.6569 61.3309
82 | 352 48 0.0849 6.0505 79.6654
83 | 368 48 -0.0731 5.8903 86.0525
84 | 384 48 -0.2801 5.6874 70.4638
85 | 400 48 -0.2899 5.5411 50.8946
86 | 416 48 -0.3216 5.2359 65.0437
87 | 432 48 -0.3299 5.2054 67.9761
88 | 448 48 -0.3523 5.4542 44.1385
89 | 464 48 -0.1810 0.0000 37.6503
90 | 480 48 -0.3744 6.6045 35.3481
91 | 16 64 -0.7385 5.0593 59.5238
92 | 32 64 -0.7008 5.4059 54.7859
93 | 48 64 -0.9805 5.9006 56.3929
94 | 64 64 -0.9263 6.0362 47.2399
95 | 80 64 -0.1637 6.1761 52.2556
96 | 96 64 0.0151 6.1211 78.1697
97 | 112 64 0.1124 6.0504 77.1044
98 | 128 64 0.3338 5.9139 70.5350
99 | 144 64 0.4800 5.6979 52.4861
100 | 160 64 0.8346 5.1059 50.4549
101 | 176 64 0.6858 4.9009 36.7723
102 | 192 64 0.4987 5.9797 29.1122
103 | 208 64 -0.3838 5.8055 55.7042
104 | 224 64 -0.6037 5.6838 63.8988
105 | 240 64 -0.3047 5.1405 57.7131
106 | 256 64 -0.6022 4.8513 63.4930
107 | 272 64 -0.8022 4.8721 54.9374
108 | 288 64 -0.6288 5.1016 55.0225
109 | 304 64 -0.5329 5.0973 58.9885
110 | 320 64 -0.3274 5.3054 63.4912
111 | 336 64 -0.3195 5.3638 62.2323
112 | 352 64 0.0163 5.8789 72.8784
113 | 368 64 0.0351 5.9212 83.7439
114 | 384 64 -0.1466 5.7818 70.8392
115 | 400 64 -0.3353 5.6378 51.8190
116 | 416 64 -0.4600 5.1945 57.3202
117 | 432 64 -0.4982 5.1622 59.4559
118 | 448 64 -0.5834 5.3690 36.0925
119 | 464 64 -0.3970 6.8731 30.9554
120 | 480 64 -0.2853 6.6865 32.7726
121 | 16 80 0.0709 5.1342 69.9682
122 | 32 80 -0.1251 5.3110 51.4841
123 | 48 80 -0.8699 5.8676 69.5896
124 | 64 80 -0.7932 5.9873 62.2195
125 | 80 80 -0.0378 5.4531 50.1021
126 | 96 80 0.0781 5.5939 62.0878
127 | 112 80 0.2746 5.6289 65.3885
128 | 128 80 0.4857 5.5005 48.2245
129 | 144 80 0.7461 5.2295 54.0916
130 | 160 80 1.0009 5.0488 71.9296
131 | 176 80 0.7769 5.2617 50.0544
132 | 192 80 0.2014 0.0000 39.1063
133 | 208 80 -0.6664 5.7750 43.1688
134 | 224 80 -0.3054 5.4495 55.4279
135 | 240 80 -0.1018 5.3874 55.2668
136 | 256 80 -0.5186 4.8639 49.3664
137 | 272 80 -0.4082 4.9106 48.8141
138 | 288 80 -0.4252 4.9162 55.9226
139 | 304 80 -0.4223 5.0897 57.4123
140 | 320 80 -0.2758 5.4734 47.8075
141 | 336 80 -0.4578 5.5731 53.1943
142 | 352 80 -0.3256 5.6820 59.8204
143 | 368 80 -0.2372 5.9677 64.4187
144 | 384 80 -0.3192 6.1168 69.8310
145 | 400 80 -0.3613 5.9959 55.0152
146 | 416 80 0.0249 5.1259 50.3079
147 | 432 80 -0.1776 5.2281 61.7613
148 | 448 80 -0.5675 5.7967 55.4182
149 | 464 80 -0.6098 6.2219 47.8443
150 | 480 80 -0.1962 6.0653 46.9738
151 | 16 96 0.1161 5.0734 47.7976
152 | 32 96 0.1216 5.3353 35.6936
153 | 48 96 0.1049 5.3309 38.0820
154 | 64 96 0.0201 5.3996 60.3155
155 | 80 96 0.0926 5.2116 73.2102
156 | 96 96 0.3167 5.0949 70.5032
157 | 112 96 0.5662 5.2474 61.5167
158 | 128 96 0.6800 5.1259 75.0395
159 | 144 96 0.5406 4.9486 77.9302
160 | 160 96 0.5343 5.0728 68.2106
161 | 176 96 0.2285 5.8365 56.0530
162 | 192 96 0.1057 6.2786 55.0662
163 | 208 96 0.0355 6.0533 54.7341
164 | 224 96 0.0740 5.5984 47.8343
165 | 240 96 -0.0498 5.5599 52.9922
166 | 256 96 -0.3739 5.1663 53.0011
167 | 272 96 -0.2880 4.9432 50.0684
168 | 288 96 -0.5060 4.9439 60.4973
169 | 304 96 -0.4924 5.0473 52.3125
170 | 320 96 -0.6698 5.5144 53.5522
171 | 336 96 -0.6881 5.6059 67.6154
172 | 352 96 -0.4882 5.7390 28.5008
173 | 368 96 -0.7915 5.7603 39.8743
174 | 384 96 -0.5084 5.7725 52.0187
175 | 400 96 0.1458 5.3867 64.4981
176 | 416 96 0.2476 5.1994 62.3539
177 | 432 96 0.0538 5.3192 55.5802
178 | 448 96 -0.3632 5.7312 50.2395
179 | 464 96 -0.2593 5.8910 51.1988
180 | 480 96 -0.0720 5.5690 35.3455
181 | 16 112 0.0422 4.9986 52.8963
182 | 32 112 -0.0666 5.6660 59.4415
183 | 48 112 -0.1273 5.5896 49.1123
184 | 64 112 0.0723 5.5320 38.1991
185 | 80 112 0.5504 5.1496 41.7486
186 | 96 112 0.7096 5.1277 55.5575
187 | 112 112 0.8148 5.3060 50.0394
188 | 128 112 0.7320 5.2277 59.3002
189 | 144 112 0.4240 5.0620 56.5876
190 | 160 112 0.2032 5.1877 57.7365
191 | 176 112 0.0494 6.2009 58.3323
192 | 192 112 -0.0598 6.3403 53.1708
193 | 208 112 -0.1044 5.9932 49.4238
194 | 224 112 -0.1182 5.7949 60.5025
195 | 240 112 -0.1755 5.6541 63.7128
196 | 256 112 -0.4813 5.2914 54.7768
197 | 272 112 -0.2851 4.8089 50.9113
198 | 288 112 -0.3041 4.8402 55.0572
199 | 304 112 -0.2678 4.7911 48.2611
200 | 320 112 -0.5532 5.2881 55.9914
201 | 336 112 -0.7162 5.4427 64.8990
202 | 352 112 -0.9156 5.6392 59.7510
203 | 368 112 -0.7979 5.6964 53.7576
204 | 384 112 -0.5185 5.2893 41.6099
205 | 400 112 -0.2262 4.9300 50.7459
206 | 416 112 -0.1667 5.3487 46.7669
207 | 432 112 -0.6834 5.5993 47.8077
208 | 448 112 -0.4467 5.6145 41.5128
209 | 464 112 -0.2503 5.6618 60.5462
210 | 480 112 -0.2896 5.2969 53.2187
211 | 16 128 -0.3406 5.1627 38.6863
212 | 32 128 0.0394 5.7058 59.8673
213 | 48 128 -0.0935 5.8704 54.4378
214 | 64 128 0.0371 5.9148 58.6590
215 | 80 128 0.3022 5.8286 53.7933
216 | 96 128 0.1984 5.6698 47.1724
217 | 112 128 0.3191 5.5812 42.2102
218 | 128 128 0.4118 5.5829 43.6476
219 | 144 128 0.1636 5.6212 62.7516
220 | 160 128 -0.2118 5.7357 43.5904
221 | 176 128 -0.0068 0.0000 34.7308
222 | 192 128 -0.0035 6.2362 63.4686
223 | 208 128 -0.0231 6.1844 62.1597
224 | 224 128 -0.1613 6.1338 62.5439
225 | 240 128 -0.2006 5.8065 56.3123
226 | 256 128 -0.0149 5.4621 42.4235
227 | 272 128 -0.0371 4.7212 53.8304
228 | 288 128 -0.0736 4.7946 56.9295
229 | 304 128 -0.3203 4.5530 47.8522
230 | 320 128 -0.4787 4.7652 46.6277
231 | 336 128 -0.2402 5.4391 56.2421
232 | 352 128 -0.2307 5.5064 58.6148
233 | 368 128 -0.5473 5.1417 41.5416
234 | 384 128 -0.7953 5.1070 53.8764
235 | 400 128 -0.9090 5.5516 55.7792
236 | 416 128 -0.7632 5.6729 51.6068
237 | 432 128 -0.3900 5.2423 43.6209
238 | 448 128 -0.3047 5.0729 60.4482
239 | 464 128 -0.3450 5.2726 56.3936
240 | 480 128 -0.3408 5.2755 60.9409
241 | 16 144 -0.6770 5.1447 39.6429
242 | 32 144 0.0658 5.7065 52.1289
243 | 48 144 0.0989 5.9948 62.7155
244 | 64 144 0.1594 5.8616 53.5199
245 | 80 144 -0.0208 5.5039 44.3911
246 | 96 144 -0.0676 5.6624 49.1162
247 | 112 144 0.0345 5.5074 45.0069
248 | 128 144 0.1864 5.3700 45.0706
249 | 144 144 -0.0975 5.7869 58.4125
250 | 160 144 -0.7475 6.0953 64.0778
251 | 176 144 -0.8534 6.2139 69.1094
252 | 192 144 -0.1849 5.9691 59.5815
253 | 208 144 -0.0590 6.0081 79.9533
254 | 224 144 0.1492 5.9567 79.2342
255 | 240 144 0.2949 5.7268 64.5413
256 | 256 144 0.4353 5.3682 45.2594
257 | 272 144 0.1805 4.6946 45.0206
258 | 288 144 -0.2376 4.8287 55.5592
259 | 304 144 -0.6585 4.7478 65.4530
260 | 320 144 -0.8702 4.7243 67.0225
261 | 336 144 -0.5227 4.7978 50.1696
262 | 352 144 -0.1937 5.3971 55.8021
263 | 368 144 -0.4382 5.4220 42.9269
264 | 384 144 -0.4115 5.4681 32.1767
265 | 400 144 -0.2793 5.5809 52.9775
266 | 416 144 -0.1413 5.4871 51.3583
267 | 432 144 0.0258 5.0589 47.1058
268 | 448 144 -0.1072 4.9170 72.1918
269 | 464 144 -0.2023 5.0517 67.7849
270 | 480 144 -0.2530 5.2184 60.3220
271 | 16 160 -0.9754 4.8125 54.9724
272 | 32 160 -0.7306 4.9266 44.6643
273 | 48 160 -0.1235 5.9868 85.5887
274 | 64 160 0.0146 5.8070 63.5004
275 | 80 160 0.0910 5.3332 63.0269
276 | 96 160 0.0820 5.3529 63.0063
277 | 112 160 0.0564 5.3364 59.4003
278 | 128 160 -0.0652 5.3556 50.6909
279 | 144 160 -0.3074 5.6434 45.6468
280 | 160 160 -0.5193 5.9199 37.4299
281 | 176 160 -0.5098 5.7307 37.5054
282 | 192 160 -0.4049 5.6111 43.8793
283 | 208 160 -0.3096 5.7634 51.4526
284 | 224 160 0.1639 5.8264 71.8900
285 | 240 160 0.2998 5.7313 74.5491
286 | 256 160 0.4334 5.2237 62.0465
287 | 272 160 0.2729 4.8203 64.3813
288 | 288 160 -0.4021 4.7949 46.3945
289 | 304 160 -0.7353 4.8530 58.0406
290 | 320 160 -0.8973 4.7342 73.6246
291 | 336 160 -0.9429 4.7596 76.0339
292 | 352 160 -0.8108 5.0254 68.5296
293 | 368 160 -0.5593 5.0998 49.3527
294 | 384 160 -0.3743 5.0602 53.7334
295 | 400 160 -0.3153 5.2283 45.8568
296 | 416 160 0.0911 5.3078 48.7039
297 | 432 160 0.0167 5.1273 68.5061
298 | 448 160 -0.0010 4.9397 79.4898
299 | 464 160 0.0510 5.0150 81.1628
300 | 480 160 0.1160 5.0457 69.2515
301 | 16 176 -1.0731 4.7265 39.3656
302 | 32 176 -0.3648 5.3754 32.9446
303 | 48 176 -0.2206 5.4612 49.6006
304 | 64 176 0.2394 5.2173 59.5809
305 | 80 176 0.2357 5.1468 61.6475
306 | 96 176 0.1382 5.1702 56.6626
307 | 112 176 0.0735 5.3594 56.4787
308 | 128 176 -0.3254 5.5533 56.6658
309 | 144 176 -0.1501 0.0000 67.4252
310 | 160 176 -0.0058 6.0701 74.9075
311 | 176 176 0.0937 5.5931 48.1617
312 | 192 176 0.1242 5.1645 76.1594
313 | 208 176 0.1743 5.5158 49.1661
314 | 224 176 0.1905 5.7268 55.4372
315 | 240 176 0.3467 5.6808 48.6333
316 | 256 176 0.3537 5.0730 59.8030
317 | 272 176 -0.0889 4.9699 55.9956
318 | 288 176 -0.8531 5.0490 62.8180
319 | 304 176 -0.8719 4.8771 70.8287
320 | 320 176 -0.8273 4.6753 64.1508
321 | 336 176 -0.8075 4.6278 61.1485
322 | 352 176 -0.7162 4.5716 62.2615
323 | 368 176 -0.6430 4.5734 60.4824
324 | 384 176 -0.5755 4.8012 62.1145
325 | 400 176 -0.5551 4.9505 62.1056
326 | 416 176 -0.1808 5.1572 52.2497
327 | 432 176 -0.0517 5.1283 72.8003
328 | 448 176 0.0501 4.9346 86.9850
329 | 464 176 0.1011 4.8870 79.8012
330 | 480 176 0.0420 4.8546 70.8767
331 | 16 192 -1.0584 4.6749 43.2086
332 | 32 192 -0.7973 4.8093 49.8236
333 | 48 192 -0.7441 5.2135 56.2283
334 | 64 192 -0.6347 5.1567 49.7924
335 | 80 192 -0.0105 5.1709 46.6826
336 | 96 192 0.1598 5.2442 51.7202
337 | 112 192 -0.1334 5.3942 56.0792
338 | 128 192 -0.4536 5.6367 56.5837
339 | 144 192 -0.2405 5.8859 62.7588
340 | 160 192 0.0633 5.8881 77.3091
341 | 176 192 0.0302 5.5775 63.3350
342 | 192 192 -0.0023 5.2894 61.4061
343 | 208 192 0.3194 5.6590 49.6767
344 | 224 192 0.1632 5.8216 64.4586
345 | 240 192 0.1069 5.6836 52.3813
346 | 256 192 0.0884 5.3771 56.5989
347 | 272 192 -0.3301 5.1447 54.8337
348 | 288 192 -0.7684 4.9272 52.9415
349 | 304 192 -0.7886 4.7337 63.1890
350 | 320 192 -0.7926 4.6578 66.1385
351 | 336 192 -0.8067 4.6529 69.3144
352 | 352 192 -0.7035 4.6491 68.5333
353 | 368 192 -0.7293 4.6633 56.9297
354 | 384 192 -0.6559 4.7757 49.6149
355 | 400 192 -0.4141 4.7971 54.0245
356 | 416 192 -0.2129 5.0016 61.8819
357 | 432 192 -0.0726 5.0765 70.0041
358 | 448 192 0.0361 4.9974 79.1331
359 | 464 192 0.0616 4.8021 72.2203
360 | 480 192 -0.0311 4.7913 87.0358
361 | 16 208 -0.6859 4.6013 34.6090
362 | 32 208 -0.4258 4.7225 42.5834
363 | 48 208 -0.5754 5.1140 57.3562
364 | 64 208 -0.8977 5.1711 52.7958
365 | 80 208 -0.8088 4.9475 39.7291
366 | 96 208 -0.7044 5.2637 47.4013
367 | 112 208 -0.7497 5.7620 59.5666
368 | 128 208 -0.6399 6.1032 61.7170
369 | 144 208 -0.3788 5.6982 48.1300
370 | 160 208 -0.2395 5.5735 52.2425
371 | 176 208 -0.2081 5.4753 50.4366
372 | 192 208 -0.1834 5.2595 60.3038
373 | 208 208 0.0518 5.3449 45.4584
374 | 224 208 0.0492 5.6735 51.1487
375 | 240 208 -0.3026 5.5178 58.4484
376 | 256 208 -0.1529 5.3229 55.2810
377 | 272 208 -0.1739 4.9957 54.0236
378 | 288 208 -0.2163 4.7185 47.8606
379 | 304 208 -0.4185 4.6619 53.2879
380 | 320 208 -0.5887 5.0745 51.9375
381 | 336 208 -0.7071 5.2125 54.7770
382 | 352 208 -0.7922 5.0402 72.2163
383 | 368 208 -0.7992 5.0371 62.2521
384 | 384 208 -0.7599 5.0130 51.2524
385 | 400 208 -0.6036 4.8993 54.6402
386 | 416 208 -0.1665 4.8828 65.1331
387 | 432 208 -0.0500 4.9806 73.0756
388 | 448 208 0.0265 5.0231 67.3886
389 | 464 208 0.0606 4.8844 56.0320
390 | 480 208 0.0040 4.7824 71.5273
391 | 16 224 -0.0840 0.0000 38.4215
392 | 32 224 0.2530 4.7209 43.3990
393 | 48 224 0.0054 5.0770 52.8939
394 | 64 224 -0.0946 5.4262 38.8268
395 | 80 224 -0.6325 5.1901 43.5436
396 | 96 224 -0.6644 5.1689 48.7335
397 | 112 224 -0.6129 5.6253 41.6707
398 | 128 224 -0.3024 6.1597 64.2200
399 | 144 224 -0.4331 5.2086 46.3742
400 | 160 224 -0.4113 5.0656 54.4137
401 | 176 224 -0.1445 5.5454 49.2113
402 | 192 224 -0.1831 5.2078 58.5719
403 | 208 224 -0.0142 5.0976 60.4116
404 | 224 224 0.1058 5.3902 41.1214
405 | 240 224 -0.0350 5.8133 58.6442
406 | 256 224 -0.0450 5.6207 60.5875
407 | 272 224 0.1225 4.5535 39.5417
408 | 288 224 0.3254 4.3373 51.9140
409 | 304 224 0.1856 4.6772 52.8313
410 | 320 224 -0.1257 4.9474 64.9597
411 | 336 224 -0.3442 5.1445 61.7517
412 | 352 224 -0.5434 5.1023 54.8203
413 | 368 224 -0.6314 5.0112 65.7632
414 | 384 224 -0.7008 5.1517 66.9553
415 | 400 224 -0.7820 5.1082 67.1196
416 | 416 224 -0.4950 5.0227 51.5161
417 | 432 224 -0.3333 5.1172 64.3597
418 | 448 224 -0.2500 5.1974 71.5327
419 | 464 224 -0.2529 5.2684 59.3291
420 | 480 224 -0.0800 4.7845 60.3724
421 | 16 240 -0.0403 0.0000 49.2493
422 | 32 240 0.3878 4.9067 48.9877
423 | 48 240 -0.0911 5.1633 51.3906
424 | 64 240 -0.1341 5.5404 41.0196
425 | 80 240 -0.4567 5.4113 31.1567
426 | 96 240 -0.7087 5.1745 38.3841
427 | 112 240 -0.1954 5.0745 39.4395
428 | 128 240 -0.1164 5.3217 51.1129
429 | 144 240 0.1217 4.7177 44.5506
430 | 160 240 0.4478 4.8447 43.9649
431 | 176 240 0.2902 5.5620 41.5339
432 | 192 240 0.2995 5.4882 37.8493
433 | 208 240 0.1837 5.1260 52.1479
434 | 224 240 0.4063 4.9714 63.6502
435 | 240 240 0.1645 5.3380 59.3534
436 | 256 240 0.0100 5.3307 57.8375
437 | 272 240 0.2630 4.1851 55.4257
438 | 288 240 0.2935 4.1716 55.2668
439 | 304 240 0.1397 4.6236 48.6988
440 | 320 240 0.0395 4.9661 60.3977
441 | 336 240 0.0001 5.0994 69.2723
442 | 352 240 -0.1180 5.1313 60.9265
443 | 368 240 -0.3475 5.0426 57.7417
444 | 384 240 -0.3004 4.9984 53.9129
445 | 400 240 -0.2992 5.0590 51.1451
446 | 416 240 -0.3350 5.1513 72.3706
447 | 432 240 -0.2783 5.2730 75.0144
448 | 448 240 -0.1356 5.2567 71.9383
449 | 464 240 -0.1582 5.2556 66.8552
450 | 480 240 -0.2291 4.4251 42.1827
451 | 16 256 0.1133 0.0000 55.9860
452 | 32 256 0.9023 5.2478 54.6713
453 | 48 256 0.1044 5.3779 41.2308
454 | 64 256 0.0664 5.3613 36.8194
455 | 80 256 0.0000 5.4699 39.4053
456 | 96 256 0.4935 5.2149 49.4042
457 | 112 256 0.5812 4.8392 56.7306
458 | 128 256 0.8307 4.9369 58.5180
459 | 144 256 0.7345 4.5291 48.4412
460 | 160 256 0.6801 4.7601 52.0054
461 | 176 256 0.5233 5.2517 44.9008
462 | 192 256 0.6079 5.6410 39.3121
463 | 208 256 0.2809 4.9941 47.7975
464 | 224 256 -0.0298 5.0761 58.9682
465 | 240 256 -0.1943 5.1449 57.0000
466 | 256 256 -0.2420 4.8671 51.1640
467 | 272 256 -0.5109 4.1609 56.9009
468 | 288 256 -0.6167 4.3257 54.3138
469 | 304 256 -0.6800 4.4795 45.4827
470 | 320 256 -0.2271 4.9911 50.5440
471 | 336 256 -0.2049 5.2401 60.1803
472 | 352 256 -0.5907 5.1848 54.8287
473 | 368 256 -0.6740 5.0309 54.6526
474 | 384 256 -0.1441 4.9462 41.0573
475 | 400 256 -0.0735 5.0750 65.0365
476 | 416 256 -0.1140 5.1354 78.3979
477 | 432 256 0.0644 5.0520 88.3049
478 | 448 256 0.0934 5.0084 84.1060
479 | 464 256 -0.1019 5.0900 70.0286
480 | 480 256 -0.2026 4.9086 50.5502
481 | 16 272 1.1394 5.2390 52.2339
482 | 32 272 0.9359 5.3289 57.6903
483 | 48 272 0.6402 5.5312 60.3927
484 | 64 272 0.6657 5.3558 46.4200
485 | 80 272 0.6677 5.1054 49.1493
486 | 96 272 0.9104 4.8024 62.5267
487 | 112 272 0.8406 4.6705 67.1575
488 | 128 272 0.8724 4.7281 66.9460
489 | 144 272 0.7654 4.7352 62.2809
490 | 160 272 0.4341 4.8295 55.7493
491 | 176 272 0.1432 5.0413 63.3973
492 | 192 272 -0.0475 5.1024 59.6103
493 | 208 272 -0.3590 5.0053 50.6115
494 | 224 272 -0.3462 5.1451 56.1493
495 | 240 272 -0.5591 5.1373 68.0656
496 | 256 272 -0.6196 4.8120 66.3093
497 | 272 272 -0.4618 4.6174 58.4653
498 | 288 272 -0.5254 4.6693 57.1364
499 | 304 272 -0.5373 4.6528 45.9647
500 | 320 272 -0.5343 4.7557 47.5385
501 | 336 272 -0.5339 5.1415 55.9344
502 | 352 272 -0.6859 5.0322 53.3477
503 | 368 272 -0.5926 5.0226 59.1168
504 | 384 272 -0.3841 4.9982 53.1053
505 | 400 272 -0.3731 5.0280 56.6566
506 | 416 272 -0.1608 4.9593 70.4962
507 | 432 272 -0.0042 4.8795 86.2875
508 | 448 272 -0.0525 4.9046 79.6074
509 | 464 272 -0.1551 4.9126 76.7173
510 | 480 272 -0.0927 4.8744 83.3895
511 | 16 288 0.4141 0.0000 50.6960
512 | 32 288 0.9064 5.1557 52.2524
513 | 48 288 0.5838 5.3130 55.0396
514 | 64 288 0.6234 5.2902 44.9918
515 | 80 288 0.6061 4.9747 38.8801
516 | 96 288 0.7623 4.6086 44.4744
517 | 112 288 0.2836 4.6985 58.2723
518 | 128 288 0.1958 4.7204 63.4938
519 | 144 288 0.4180 4.8970 52.6428
520 | 160 288 0.3728 4.9247 50.9420
521 | 176 288 0.1228 5.1363 45.3892
522 | 192 288 0.0850 5.1921 42.8073
523 | 208 288 -0.1523 4.8170 46.4004
524 | 224 288 -0.0701 4.7534 60.4402
525 | 240 288 -0.2473 5.0569 53.7415
526 | 256 288 -0.4906 4.9397 66.4541
527 | 272 288 -0.4361 4.8189 65.5591
528 | 288 288 -0.3296 4.6785 41.2407
529 | 304 288 -0.3772 4.8700 44.3624
530 | 320 288 -0.3360 4.9373 51.7251
531 | 336 288 -0.2970 5.0087 61.9837
532 | 352 288 -0.2729 4.9802 64.3242
533 | 368 288 -0.3467 4.8930 70.0626
534 | 384 288 -0.2869 4.8720 72.7136
535 | 400 288 -0.2618 4.9333 68.1550
536 | 416 288 -0.2123 4.9312 72.0006
537 | 432 288 -0.2521 4.8315 74.4442
538 | 448 288 -0.2362 4.8136 75.0918
539 | 464 288 -0.1283 4.7249 65.6701
540 | 480 288 -0.0264 4.7636 65.6672
541 | 16 304 0.5007 0.0000 37.3625
542 | 32 304 1.2941 5.2451 34.9666
543 | 48 304 0.7590 5.3123 41.9720
544 | 64 304 0.4413 5.3162 36.9817
545 | 80 304 0.7329 4.8422 38.9633
546 | 96 304 0.6281 4.8320 39.5705
547 | 112 304 0.1429 4.9219 65.4833
548 | 128 304 -0.0470 4.9060 72.7646
549 | 144 304 0.0575 4.7817 66.0999
550 | 160 304 0.1140 4.6296 46.2446
551 | 176 304 0.0442 4.5447 45.3353
552 | 192 304 0.2107 4.8478 56.0612
553 | 208 304 0.1757 4.6214 51.1958
554 | 224 304 0.1164 4.6485 56.8246
555 | 240 304 0.1643 5.2446 59.3918
556 | 256 304 0.0046 4.9746 55.5154
557 | 272 304 0.0294 5.0045 63.8814
558 | 288 304 -0.0041 4.9986 52.5303
559 | 304 304 -0.2750 4.7873 55.9384
560 | 320 304 -0.2307 4.7703 70.4011
561 | 336 304 -0.3801 4.8281 75.4647
562 | 352 304 -0.4767 5.1388 74.3375
563 | 368 304 -0.5142 5.2192 63.7760
564 | 384 304 -0.4449 5.0279 73.6193
565 | 400 304 -0.3511 4.9611 74.0519
566 | 416 304 -0.2663 4.9347 56.5493
567 | 432 304 -0.2370 4.7709 63.4332
568 | 448 304 0.0016 4.6141 60.1646
569 | 464 304 0.2142 4.6024 61.4815
570 | 480 304 0.3521 4.6723 50.0092
571 | 16 320 0.9360 5.5031 48.0808
572 | 32 320 0.7162 5.6918 50.3185
573 | 48 320 0.7539 5.1336 46.1426
574 | 64 320 0.7603 4.6730 59.4677
575 | 80 320 1.0498 4.9602 65.8221
576 | 96 320 0.7548 5.1086 52.1524
577 | 112 320 0.1300 5.1020 71.1568
578 | 128 320 -0.1156 4.9625 77.0975
579 | 144 320 -0.0569 4.8675 79.0932
580 | 160 320 -0.0455 4.7261 60.7643
581 | 176 320 0.1795 4.4875 46.5323
582 | 192 320 0.1947 4.6553 53.7557
583 | 208 320 0.3188 4.5401 48.6664
584 | 224 320 0.1086 4.5668 49.0946
585 | 240 320 -0.1049 4.8267 56.3132
586 | 256 320 0.3458 4.6519 63.3709
587 | 272 320 0.2784 4.8801 64.4557
588 | 288 320 0.0273 4.9140 61.6293
589 | 304 320 -0.4334 4.7458 65.3551
590 | 320 320 -0.4503 4.7412 73.6247
591 | 336 320 -0.3973 4.8657 68.2134
592 | 352 320 -0.4588 5.0676 54.8998
593 | 368 320 -0.4829 5.2036 58.6884
594 | 384 320 -0.4964 5.2380 59.0459
595 | 400 320 -0.5068 5.1721 49.7864
596 | 416 320 -0.3641 4.9931 51.4353
597 | 432 320 -0.1508 4.8077 62.5661
598 | 448 320 0.0714 4.7602 63.6848
599 | 464 320 0.3239 4.8533 63.7127
600 | 480 320 0.5881 5.0504 58.7281
601 | 16 336 0.4053 0.0000 38.9827
602 | 32 336 0.6284 5.5676 41.1950
603 | 48 336 0.0881 4.8763 49.2116
604 | 64 336 0.0300 4.5275 55.4407
605 | 80 336 0.7332 5.1582 56.3783
606 | 96 336 0.5959 5.3143 55.7506
607 | 112 336 0.2676 5.1219 66.1471
608 | 128 336 0.0325 5.0475 76.5907
609 | 144 336 -0.0829 4.9741 84.3262
610 | 160 336 -0.0376 4.6990 76.3266
611 | 176 336 0.1783 4.2114 58.5704
612 | 192 336 0.3626 4.5265 50.4267
613 | 208 336 0.5715 4.7207 65.2604
614 | 224 336 0.4399 4.7704 62.8460
615 | 240 336 -0.0646 4.5986 52.1476
616 | 256 336 -0.0178 4.6366 66.1381
617 | 272 336 -0.0453 4.9274 74.2459
618 | 288 336 -0.2823 4.8546 61.2342
619 | 304 336 -0.4401 4.8206 50.8114
620 | 320 336 -0.3721 4.9350 53.8702
621 | 336 336 -0.3862 5.0728 76.0751
622 | 352 336 -0.4362 5.0584 62.6611
623 | 368 336 -0.2971 5.0987 70.3864
624 | 384 336 -0.2897 5.4030 59.6053
625 | 400 336 -0.3135 5.3965 54.2405
626 | 416 336 -0.1186 5.2520 53.5424
627 | 432 336 -0.1244 5.2028 62.6046
628 | 448 336 0.0048 5.0909 80.1851
629 | 464 336 0.2388 5.1166 76.2977
630 | 480 336 0.4834 5.1726 56.9579
631 | 16 352 0.9210 5.3338 51.6804
632 | 32 352 0.4237 5.6644 35.4753
633 | 48 352 0.0156 4.8801 38.9518
634 | 64 352 -0.1638 4.4779 57.5189
635 | 80 352 0.2961 5.1829 45.3429
636 | 96 352 0.3966 5.4163 55.9499
637 | 112 352 0.2937 5.1985 56.1881
638 | 128 352 0.1241 5.1335 73.3088
639 | 144 352 0.2530 0.0000 86.6670
640 | 160 352 0.1070 4.8219 69.5149
641 | 176 352 0.1242 4.4992 54.6415
642 | 192 352 0.1699 4.4801 64.0207
643 | 208 352 0.4217 4.7326 62.0050
644 | 224 352 0.6073 4.9082 65.5903
645 | 240 352 0.1271 4.5972 61.2682
646 | 256 352 -0.2065 4.6559 73.9387
647 | 272 352 -0.3774 4.9668 75.5837
648 | 288 352 -0.3718 4.9239 73.7407
649 | 304 352 -0.3042 4.9453 74.0412
650 | 320 352 -0.2143 5.1311 86.4621
651 | 336 352 -0.1501 5.1828 71.8513
652 | 352 352 -0.2617 5.1976 56.6460
653 | 368 352 -0.2194 5.2424 71.9666
654 | 384 352 -0.1588 5.3797 63.4101
655 | 400 352 -0.2006 5.3477 63.3137
656 | 416 352 -0.0557 5.3045 62.6248
657 | 432 352 -0.0370 5.2936 56.6157
658 | 448 352 -0.2387 5.2766 55.5884
659 | 464 352 -0.1232 5.2683 56.6393
660 | 480 352 0.1291 0.0000 65.5129
661 |
--------------------------------------------------------------------------------
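The exp1_001_b.vec listing above is a plain whitespace-separated table with five columns per vector. Below is a minimal reading sketch with numpy; interpreting the columns as x, y, u, v plus a per-vector quality value is an assumption made here for illustration, not something stated in the file itself.

import numpy as np

# Sketch only: read the five whitespace-separated columns of the .vec listing above.
# Treating the fifth column as a per-vector quality value is an assumption.
x, y, u, v, q = np.loadtxt("pivpy/data/openpiv_vec/exp1_001_b.vec", unpack=True)
print(x.min(), x.max(), u.mean(), v.mean())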
/pivpy/data/urban_canopy/B00001.vc7:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexlib/pivpy/aa4200c0cf19e2f49a12f799fa54b0e8cc96fab7/pivpy/data/urban_canopy/B00001.vc7
--------------------------------------------------------------------------------
/pivpy/data/urban_canopy/B00002.vc7:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexlib/pivpy/aa4200c0cf19e2f49a12f799fa54b0e8cc96fab7/pivpy/data/urban_canopy/B00002.vc7
--------------------------------------------------------------------------------
/pivpy/data/urban_canopy/B00003.vc7:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexlib/pivpy/aa4200c0cf19e2f49a12f799fa54b0e8cc96fab7/pivpy/data/urban_canopy/B00003.vc7
--------------------------------------------------------------------------------
/pivpy/data/urban_canopy/B00004.vc7:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexlib/pivpy/aa4200c0cf19e2f49a12f799fa54b0e8cc96fab7/pivpy/data/urban_canopy/B00004.vc7
--------------------------------------------------------------------------------
/pivpy/data/urban_canopy/B00005.vc7:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexlib/pivpy/aa4200c0cf19e2f49a12f799fa54b0e8cc96fab7/pivpy/data/urban_canopy/B00005.vc7
--------------------------------------------------------------------------------
/pivpy/davis_readim.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Created on Tue Jul 23 19:33:20 2019
4 |
5 | @author: lior
6 | """
7 | import numpy as np
8 | import xarray as xr
9 | import os
10 | import ReadIM
11 | from pivpy.io import parse_header, loadvec
12 |
13 |
14 | def ReadDavis(path, time=0):
15 | """
 16 |     Input: path to a DaVis file (tested for .im7 and .vc7 formats)
 17 |     Output: [X Y U V mask]
 18 |     Valid only for 2D PIV cases.
19 | RETURN:
20 | in case of images (image type=0):
21 | X = scaled x-coordinates
22 | Y = scaled y-coordinates
23 | U = scaled image intensities
24 | v=0
25 | MASK=0
26 | in case of 2D vector fields (A.IType = 1,2 or 3):
27 | X = scaled x-coordinates
28 | Y = scaled y-coordinates
29 | U = scaled vx-components of vectors
30 | V = scaled vy-components of vectors
31 |
32 | """
 33 |     # the buffers and attribute list must be destroyed below to prevent memory leaks
34 | buff, vatts = ReadIM.extra.get_Buffer_andAttributeList(path)
35 | v_array, buff1 = ReadIM.extra.buffer_as_array(buff)
36 | nx = buff.nx
37 | # nz = buff.nz #flake8 claims it's not used
38 | ny = buff.ny
39 | # set data range:
40 | baseRangeX = np.arange(nx)
41 | baseRangeY = np.arange(ny)
42 | # baseRangeZ = np.arange(nz) #flake8 recognized it's not used
43 | lhs1 = (
44 | baseRangeX + 0.5
45 | ) * buff.vectorGrid * buff.scaleX.factor + buff.scaleX.offset # x-range
46 | lhs2 = (
47 | baseRangeY + 0.5
48 | ) * buff.vectorGrid * buff.scaleY.factor + buff.scaleY.offset # y-range
49 | lhs3 = 0
50 | lhs4 = 0
51 | mask = 0
52 | if buff.image_sub_type <= 0: # grayvalue image format
53 | lhs3 = v_array[0, :, :]
54 | lhs4 = v_array[1, :, :]
55 | Im = xr.DataArray(
56 | v_array,
57 | dims=("frame", "z", "x"),
58 | coords={"x": lhs1[0, :], "z": lhs2[:, 0], "frame": [0, 1]},
59 | )
60 | data = xr.Dataset({"Im": Im})
61 |
62 | elif buff.image_sub_type == 2: # simple 2D vector format: (vx,vy)
63 | # Calculate vector position and components
64 | [lhs1, lhs2] = np.meshgrid(lhs1, lhs2)
65 | # lhs1=np.transpose(lhs1)
66 | # lhs2=np.transpose(lhs2)
67 | lhs3 = v_array[0, :, :] * buff.scaleI.factor + buff.scaleI.offset
68 | lhs4 = v_array[1, :, :] * buff.scaleI.factor + buff.scaleI.offset
69 | if buff.scaleY.factor < 0.0:
70 | lhs4 = -lhs4
71 | lhs3 = lhs3[:, :, np.newaxis]
72 | lhs4 = lhs4[:, :, np.newaxis]
73 | u = xr.DataArray(
74 | lhs3,
75 | dims=("z", "x", "t"),
76 | coords={"x": lhs1[0, :], "z": lhs2[:, 0], "t": [time]},
77 | )
78 | v = xr.DataArray(
79 | lhs4,
80 | dims=("z", "x", "t"),
81 | coords={"x": lhs1[0, :], "z": lhs2[:, 0], "t": [time]},
82 | )
83 | data = xr.Dataset({"u": u, "v": v})
84 | # plt.quiver(lhs1,lhs2,lhs3,lhs4);
85 | elif buff.image_sub_type == 3 or buff.image_sub_type == 1:
86 | # normal 2D vector format + peak: sel+4*(vx,vy) (+peak)
87 | # Calculate vector position and components
88 | [lhs1, lhs2] = np.meshgrid(lhs1, lhs2)
89 | # lhs1=np.transpose(lhs1)
90 | # lhs2=np.transpose(lhs2)
91 | lhs3 = lhs1 * 0
92 | lhs4 = lhs2 * 0
93 | # Get choice
94 | maskData = v_array[0, :, :]
95 | # Build best vectors from choice field
96 | for i in range(5):
97 | mask = maskData == (i + 1)
98 | if i < 4: # get best vectors
99 | dat = v_array[2 * i + 1, :, :]
100 | lhs3[mask] = dat[mask]
101 | dat = v_array[2 * i + 2, :, :]
102 | lhs4[mask] = dat[mask]
103 | else: # get interpolated vectors
104 | dat = v_array[7, :, :]
105 | lhs3[mask] = dat[mask]
106 | dat = v_array[8, :, :]
107 | lhs4[mask] = dat[mask]
108 | lhs3 = lhs3 * buff.scaleI.factor + buff.scaleI.offset
109 | lhs4 = lhs4 * buff.scaleI.factor + buff.scaleI.offset
110 | # Display vector field
111 | if buff.scaleY.factor < 0.0:
112 | lhs4 = -1 * lhs4
113 | mask = maskData == 0
114 | lhs3 = lhs3[:, :, np.newaxis]
115 | lhs4 = lhs4[:, :, np.newaxis]
116 | maskData = maskData[:, :, np.newaxis]
117 | u = xr.DataArray(
118 | lhs3,
119 | dims=("z", "x", "t"),
120 | coords={"x": lhs1[0, :], "z": lhs2[:, 0], "t": [time]},
121 | )
122 | v = xr.DataArray(
123 | lhs4,
124 | dims=("z", "x", "t"),
125 | coords={"x": lhs1[0, :], "z": lhs2[:, 0], "t": [time]},
126 | )
127 | chc = xr.DataArray(
128 | maskData,
129 | dims=("z", "x", "t"),
130 | coords={"x": lhs1[0, :], "z": lhs2[:, 0], "t": [time]},
131 | )
132 | data = xr.Dataset({"u": u, "v": v, "chc": chc})
133 | data.attrs["Info"] = ReadIM.extra.att2dict(vatts)
134 | # clean memory
135 | ReadIM.DestroyBuffer(buff1)
136 | del buff1
137 | ReadIM.DestroyBuffer(buff)
138 | del buff
139 | ReadIM.DestroyAttributeListSafe(vatts)
140 | del vatts
141 | return data
142 |
143 |
144 | def load_directory(path, basename=""):
145 | """
146 | load_directory (path)
147 |
148 |     Loads all the .vc7 files in the directory into a single
149 |     xarray dataset with variables and units added as attributes
150 |
151 |     Input:
152 |         path : path to the directory with .vc7 files
153 |
154 | Output:
155 | data : xarray DataSet with dimensions: x,y,t and
156 | data arrays of u,v,
157 | attributes of variables and units
158 |
159 |
160 | See more: loadvec
161 | """
162 | files = [f for f in os.listdir(path) if f.endswith(".vc7")]
163 | variables, units, rows, cols, dt, frame = parse_header(files[0])
164 |
165 | data = []
166 | for i, f in enumerate(files):
167 | data.append(loadvec(f, rows, cols, variables, units, dt, frame + i - 1))
168 |
169 | combined = xr.concat(data, dim="t")
170 | combined.attrs["variables"] = variables
171 | combined.attrs["units"] = units
172 | combined.attrs["dt"] = dt
173 | combined.attrs["files"] = files
174 | return combined
175 |
176 |
177 | path = "C:\\Users\\lior\\Documents\\ibrrTau\\timeDependedVecMaps"
178 | files = [f for f in os.listdir(path) if f.endswith(".vc7")]
179 | data = []
180 | data.append(ReadDavis(path + "\\" + files[-1], 1))
181 | data.append(ReadDavis(path + "\\" + files[-2], 2))
182 | combined = xr.concat(data, dim="t")
183 | # x =x.flatten()
184 | # y =y.ravel()
185 | # u =u.ravel()
186 | # v =v.ravel()
187 | # plt.quiver(x,y,u,v)
188 | # plt.imshow(u)
189 | # u = xr.DataArray(u,dims=('y','x'),coords={'x':x[0,:],'y':y[:,0]})
190 |
--------------------------------------------------------------------------------
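A minimal usage sketch for the ReadDavis function above. It assumes the ReadIM package is installed, that the two illustrative .vc7 paths exist locally, and that the leftover script at the bottom of davis_readim.py has been removed or guarded so that importing the module succeeds.

import xarray as xr
from pivpy.davis_readim import ReadDavis  # requires the ReadIM package

# Sketch only: read two DaVis .vc7 vector files and stack them along time,
# mirroring the example at the bottom of the module. Paths are illustrative.
d0 = ReadDavis("pivpy/data/urban_canopy/B00001.vc7", time=0)
d1 = ReadDavis("pivpy/data/urban_canopy/B00002.vc7", time=1)
combined = xr.concat([d0, d1], dim="t")

print(combined)                # Dataset with u, v (and chc for multi-choice formats)
print(combined.attrs["Info"])  # DaVis attribute list converted to a dict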
/pivpy/graphics.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Various plots, mostly wrapping xarray.plot
4 |
5 | """
6 | import os
7 | import numpy as np
8 | import matplotlib.colors as colors
9 | import matplotlib.pyplot as plt
10 | from matplotlib.animation import FuncAnimation, FFMpegWriter
11 | import xarray as xr
12 | from pivpy.io import POS_UNITS, VEL_UNITS
13 | from typing import List
14 | import warnings
15 |
16 |
17 | def quiver(
18 | data: xr.DataArray,
19 | arrScale: float = 25.0,
20 | threshold: float = None,
21 | nthArr: int = 1,
22 | aspectratio: str = "equal",
23 | colorbar: bool = False,
24 | colorbar_orient: str = "vertical",
25 | units: List = [],
26 | streamlines: bool = False,
27 | cmap: str = 'RdBu',
28 | **kwargs,
29 | ):
30 | """creates quiver of xr.Dataset
31 |
32 | Args:
 33 |         data (xr.DataArray): PIV dataset with u, v fields on x, y coordinates
 34 |         arrScale (float, optional): arrow scaling factor. Defaults to 25.0.
 35 |         threshold (float, optional): clips u, v values above this value. Defaults to None.
 36 |         nthArr (int, optional): plots every nth vector. Defaults to 1.
 37 |         aspectratio (str, optional): axes aspect ratio. Defaults to "equal".
 38 |         colorbar (bool, optional): adds a colorbar to the plot. Defaults to False.
 39 |         colorbar_orient (str, optional): colorbar orientation. Defaults to "vertical".
 40 |         units (List, optional): unit labels (units[0] position, units[2] velocity) overriding the dataset attributes. Defaults to [].
 41 |         streamlines (bool, optional): overlays streamlines. Defaults to False.
 42 |         cmap (str, optional): matplotlib.colormap, e.g. 'jet', 'hot', 'RdBu', 'Reds'
 43 |
 44 |     Returns:
 45 |         fig, ax: matplotlib figure and axes handles
46 | """
47 | data = dataset_to_array(data)
48 |
49 | pos_units = data.x.attrs["units"] if len(units) == 0 else units[0]
50 | vel_units = data.u.attrs["units"] if len(units) == 0 else units[2]
51 |
52 | # subsampling number of vectors
53 | data = data.sel(x=data.x[::nthArr], y=data.y[::nthArr])
54 |
55 | # clip data to the threshold
56 | if threshold is not None:
57 | data["u"] = xr.where(data["u"] > threshold, threshold, data["u"])
58 | data["v"] = xr.where(data["v"] > threshold, threshold, data["v"])
59 |
60 | data["s"] = np.sqrt(data["u"] ** 2 + data["v"] ** 2)
61 |
62 |
63 |
64 | if len(plt.get_fignums()) == 0: # if no figure is open
65 | fig, ax = plt.subplots() # open a new figure
66 | else:
67 | fig = plt.gcf()
68 | ax = fig.gca()
69 |
70 | # quiver itself
71 | Q = data.plot.quiver(
72 | x="x",
73 | y="y",
74 | u="u",
75 | v="v",
76 | hue="s",
77 | units="width",
78 | scale=np.max(data["s"].values * arrScale),
79 | headwidth=2,
80 | cmap=cmap,
81 | ax=ax,
82 | **kwargs,
83 | )
84 |
85 | if colorbar is False:
86 | # cbar = fig.colorbar(Q, shrink=0.9, orientation=colbar_orient)
87 | cb = Q.colorbar
88 | if cb:
89 | cb.remove()
90 | plt.draw()
91 | else:
92 | if colorbar_orient == "horizontal":
93 | cb = Q.colorbar
94 | cb.remove()
95 | cb = fig.colorbar(Q, orientation=colorbar_orient, ax=ax)
96 |
97 | if streamlines: # contours or streamlines
98 | strm = data.plot.streamplot(
99 | x="x",
100 | y="y",
101 | u="u",
102 | v="v",
103 | hue="s",
104 | cmap="hot",
105 | linewidth=1,
106 | ax=ax,
107 | )
108 | strm.colorbar.remove()
109 |
110 | # if colorbar:
111 | # cbar = fig.colorbar(
112 | # strm,
113 | # orientation=colorbar_orient,
114 | # fraction=0.1,
115 | # )
116 | # cbar.set_label(r"$ V \, (" + vel_units + r")$")
117 |
118 | ax.set_xlabel(f"x ({pos_units})")
119 | ax.set_ylabel(f"y ({pos_units})")
120 | ax.set_aspect(aspectratio)
121 | # ax.invert_yaxis()
122 |
123 | return fig, ax
124 |
125 |
126 | def histogram(data, normed=False):
127 | """creates two histograms of two velocity components
128 |
129 | Args:
130 |         data (xr.Dataset): PIV dataset with u, v fields
131 |         normed (bool, optional): normalizes the histograms to densities. Defaults to False.
132 |
133 |     Returns:
134 |         f, ax: matplotlib figure and axes handles
135 | """
136 |
137 | u = np.asarray(data.u).flatten()
138 | v = np.asarray(data.v).flatten()
139 |
140 | f, ax = plt.subplots(2)
141 |
142 | ax[0].hist(u, bins=np.int32(np.sqrt(len(u)) * 0.5), density=normed)
143 | ax[0].set_xlabel(f"u ({data.u.attrs['units']})")
144 |
145 | ax[1] = plt.subplot2grid((2, 1), (1, 0))
146 |     ax[1].hist(v, bins=np.int32(np.sqrt(len(v)) * 0.5), density=normed)  # same binning rule as for u
147 | ax[1].set_xlabel(f"v ({data.v.attrs['units']})")
148 | plt.tight_layout()
149 | return f, ax
150 |
151 |
152 | def contour_plot(
153 | data: xr.DataArray,
154 | threshold: float = None,
155 | contourLevels: List[float] = None,
156 | colorbar: bool = False,
157 | logscale: bool = False,
158 | aspectratio: str = "equal",
159 | units: List[str] = [],
160 | ):
161 | """creates contour plot of xr.DataArray
162 |
163 | Args:
164 |         data (xr.DataArray): PIV dataset; a scalar field "w" (kinetic energy) is computed if missing
165 |         threshold (float, optional): clips "w" values above this value. Defaults to None.
166 |         contourLevels (List[float], optional): explicit contour levels. Defaults to None.
167 |         colorbar (bool, optional): keeps the colorbar. Defaults to False.
168 |         logscale (bool, optional): plots abs(w) on a logarithmic color scale. Defaults to False.
169 |         aspectratio (str, optional): axes aspect ratio. Defaults to "equal".
170 |         units (List[str], optional): unit labels. Defaults to [].
171 |
172 |     Returns:
173 |         f, ax: matplotlib figure and axes handles
174 | """
175 | data = dataset_to_array(data)
176 |
177 | if "w" not in data.var():
178 | data = data.piv.vec2scal("ke")
179 |
180 | if threshold is not None:
181 | data["w"] = xr.where(data["w"] > threshold, threshold, data["w"])
182 |
183 | f, ax = plt.subplots()
184 | # data.plot.contourf(x='x',y='y',row='y',col='x', ax=ax)
185 |
186 | if contourLevels is None:
187 | levels = np.linspace(
188 | np.min(data["w"].values),
189 | np.max(data["w"].values),
190 | 10,
191 | )
192 | else:
193 | levels = contourLevels # vector of levels to set
194 |
195 | if logscale:
196 | data["w"] = np.abs(data["w"])
197 |
198 | c = data["w"].plot.contourf(
199 | x="x",
200 | y="y",
201 | levels=levels,
202 | cmap=plt.get_cmap("RdYlBu"),
203 | norm=colors.LogNorm(),
204 | ax=ax,
205 | )
206 | else:
207 | c = data["w"].plot.contourf(
208 | x="x",
209 | y="y",
210 | levels=levels,
211 | cmap=plt.get_cmap("RdYlBu"),
212 | ax=ax,
213 | )
214 |
215 | if not colorbar:
216 | # cbar = c.colorbar(c, orientation=colbar)
217 | c.colorbar.remove()
218 | # cbar.set_label(propUnits)
219 |
220 | ax.set_aspect(aspectratio)
221 |
222 | return f, ax
223 |
224 |
225 | def showf(data, flow_property="ke", **kwargs):
226 | """shows data as quiver over a scalar background
227 |
228 | Args:
229 |         data (xr.Dataset): PIV dataset with u, v fields
230 |         flow_property (str, optional): scalar flow property for the background. Defaults to "ke".
231 | """
232 | fig, ax = showscal(data, flow_property=flow_property, **kwargs)
233 | fig, ax = quiver(data, **kwargs)
234 |
235 |
236 | def showscal(data, flow_property="ke", **kwargs):
237 | """creates contour plot of some scalar field of a flow property
238 |
239 | Args:
240 |         data (xr.Dataset): PIV dataset with u, v fields
241 |         flow_property (str, optional): scalar flow property to plot. Defaults to "ke".
242 |
243 |     Returns:
244 |         fig, ax: matplotlib figure and axes handles
245 | """
246 | data = data.piv.vec2scal(flow_property=flow_property)
247 | fig, ax = contour_plot(data, **kwargs)
248 | return fig, ax
249 |
250 |
251 | def animate(data: xr.Dataset,
252 | arrowscale: int = 1,
253 | savepath: str = None,
254 | units: str = "pix/dt"):
255 | """animates flow fields in the data and saves to MP4 format
256 |
257 | Args:
258 |         data (xr.Dataset): PIV dataset with a time dimension t
259 |         arrowscale (int, optional): quiver arrow scale. Defaults to 1.
260 |         savepath (str, optional): directory in which im.mp4 is saved. Defaults to None.
261 |
262 |     Returns:
263 |         None, the animation is saved as im.mp4
264 | """
265 | X, Y = np.meshgrid(data.x, data.y)
266 | X = X.T
267 | Y = Y.T
268 | U, V = data.u[:, :, 0], data.v[:, :, 0] # first frame
269 | fig, ax = plt.subplots(1, 1)
270 | M = np.sqrt(U**2 + V**2)
271 |
272 | Q = ax.quiver(
273 | X[::3, ::3],
274 | Y[::3, ::3],
275 | U[::3, ::3],
276 | V[::3, ::3],
277 | M[::3, ::3],
278 | units="inches",
279 | scale=arrowscale,
280 | )
281 |
282 | cb = plt.colorbar(Q)
283 |
284 | # units = data.attrs["units"]
285 |
286 | cb.ax.set_ylabel(f"velocity ({units})")
287 |
288 | text = ax.text(
289 | 0.2,
290 | 1.05,
291 | "1/" + str(len(data.t)),
292 | ha="center",
293 | va="center",
294 | transform=ax.transAxes,
295 | )
296 |
297 | def update_quiver(num, Q, data, text):
298 | """_summary_
299 |
300 | Args:
301 | num (_type_): _description_
302 | Q (_type_): _description_
303 | data (_type_): _description_
304 | text (_type_): _description_
305 |
306 | Returns:
307 | _type_: _description_
308 | """
309 | U, V = data.u[:, :, num], data.v[:, :, num]
310 |
311 | M = np.sqrt(U[::3, ::3] ** 2 + V[::3, ::3] ** 2)
312 | Q.set_UVC(U[::3, ::3], V[::3, ::3], M[::3, ::3])
313 | text.set_text(str(num + 1) + "/" + str(len(data.t)))
314 | return Q
315 |
316 | anim = FuncAnimation(
317 | fig,
318 | update_quiver,
319 | fargs=(Q, data, text),
320 | frames=len(data.t),
321 | blit=False,
322 | )
323 | mywriter = FFMpegWriter()
324 | if savepath:
325 | p = os.getcwd()
326 | os.chdir(savepath)
327 | anim.save("im.mp4", writer=mywriter)
328 | os.chdir(p)
329 | else:
330 | anim.save("im.mp4", writer=mywriter)
331 |
332 |
333 | def dataset_to_array(data: xr.Dataset, t_index: int = 0):
334 |     """selects a single time frame of the Dataset (the first frame by default)"""
335 | if "t" in data.dims:
336 | warnings.warn(
337 | "Warning: this function uses the first \
338 | frame, otherwise use: data.isel(t=N)"
339 | )
340 | return data.isel(t=t_index)
341 |
342 |
343 | return data
344 |
--------------------------------------------------------------------------------
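A minimal usage sketch for the plotting helpers above (`contour_plot`/`showscal`, `showf`, `animate`). The sample-data path is an assumption, and `animate` additionally needs FFmpeg on the system; any dataset loaded with `pivpy.io` should work the same way:

```python
# Usage sketch for pivpy.graphics (paths are illustrative, not part of the module)
import pathlib
from pivpy import io, graphics

path = pathlib.Path("pivpy/data/Insight")                 # sample data shipped with the package
data = io.load_directory(path, basename="Run*", ext=".vec")
frame0 = data.isel(t=0)                                   # single vector field

fig, ax = graphics.showscal(frame0, flow_property="ke")   # contour of kinetic energy
graphics.showf(frame0)                                    # quiver over a scalar background

# writes "im.mp4" to savepath (or the current directory); requires FFmpeg
graphics.animate(data, arrowscale=1, units="pix/dt")
```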
/pivpy/inter.py:
--------------------------------------------------------------------------------
1 | """
2 | "inter" stands for "intefacing".
3 | This module provides a function that allows to go convert PIVPY datasets
4 | to the VortexFitting datasets.
5 | Here is the link to the VortexFitting article:
6 | https://www.sciencedirect.com/science/article/pii/S2352711020303174?via%3Dihub .
7 | """
8 |
9 | import warnings
10 | import numpy as np
11 |
12 | try:
13 | import vortexfitting.classes as vf
14 | except ImportError:
15 | warnings.warn("VortexFitting is not installed, use pip install vortexfitting")
16 |
17 |
18 | def pivpyTOvf(field, ncFilePath):
19 | """
20 | "vf" in the name of the function stands for VortexFitting package.
21 | The function takes PIVPY data base object and converts it to an object of class
22 | VelocityField from VortexFitting package. We are interested only in the velocity
23 | field portion of the PIVPY database because this is what VortexFitting's
24 | VelocityField class is concerned about.
25 |     There is no way to convert a PIVPY database to vortexfitting.VelocityField
26 |     without creating a file because the latter wants a file to read from.
27 | I wanted to create a hidden file, but that's a pain if we try doing it
28 | cross-platform. For now, I'm just going to create a normal file and leave
29 | it to the user to decide what to do with it when the analysis is done. A user
30 | must supply the name of the file.
31 |     Parameters: field (xarray.DataSet) - PIVPY object - an Xarray data set - that contains
32 | velocity field; it may contain other para-
33 | meters (e.g. vorticity), but it doesn't
34 | matter; it must contain only one time frame.
35 | ncFilePath (pathlib.Path) - a path to the .nc file that will store
36 | the velocity field that will be fed to
37 | VortexFitting; an example of the file name
38 | is PIVpair1VelocityField.nc; note the file
39 | doesn't have to exist - the function just
40 | needs a name for the file
41 | Returns: vfield (vortexfitting.VelocityField) - object of the VortexFitting
42 | package class VelocityField
43 | """
44 |
45 |
46 |
47 |     # VortexFitting expects the physical system of coordinates, but field - being obtained
48 |     # from an OpenPIV .txt file - is in the image system of coordinates. So, we have to invert
49 |     # the y axis. The procedure that (after a lot of trial and error) ended up working is
50 | # copied from here https://stackoverflow.com/a/70695479/10073233 and is given by:
51 | field = field.reindex(y = field.y[::-1])
52 |
53 |     # VortexFitting expects the time coordinate to go first. Moreover, comparing lines 93, 100,
54 |     # 103, 104 in classes.py of the VortexFitting package, it looks like it
55 | # expects 'x' to go after 'y'.
56 | fieldReordered = field.transpose('t','y','x')
57 | fieldReordered = fieldReordered.fillna(0.0)
58 |
59 | # VortexFitting expects very specific names of the data arrays. And there must be
60 | # the third component of velocity vector.
61 | fieldReordered['velocity_z'] = fieldReordered['u'].copy(
62 | data=np.zeros(fieldReordered['u'].values.shape))
63 | fieldRenamed = fieldReordered.rename_vars(
64 | {'u':'velocity_n', 'v':'velocity_s', 'y':'grid_z', 'x':'grid_n', })
65 |
66 | fieldRenamed.to_netcdf(path=ncFilePath, mode='w')
67 |
68 | vfield = vf.VelocityField(str(ncFilePath), file_type = 'piv_netcdf', time_step=0)
69 |
70 | return vfield
--------------------------------------------------------------------------------
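A short usage sketch of `pivpyTOvf`, mirroring `tests/test_inter.py`; the file names are illustrative and the `vortexfitting` package must be installed:

```python
# Convert a single-frame PIVPy dataset into a VortexFitting VelocityField
import pathlib
from pivpy import io, inter

txt_file = pathlib.Path("pivpy/data/openpiv_txt/interTest.txt")   # sample OpenPIV field
nc_file = pathlib.Path("PIVpair1VelocityField.nc")                # will be (over)written

ds = io.load_openpiv_txt(str(txt_file))      # PIVPy dataset with one time frame
vfield = inter.pivpyTOvf(ds, nc_file)        # VortexFitting VelocityField object

# u/v are transposed relative to ds["u"]/ds["v"] because of the image-to-physical
# coordinate conversion performed in pivpyTOvf
print(vfield.u_velocity_matrix.shape)
```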
/pivpy/io.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | """
4 | Contains functions for reading flow fields in various formats
5 | """
6 |
7 | import pathlib
8 | import re
9 | import warnings
10 | from typing import List, Tuple
11 | import numpy as np
12 | import xarray as xr
13 | import pandas as pd
14 | from numpy.typing import ArrayLike
15 |
16 | try:
17 | from lvpyio import read_buffer
18 | except ImportError:
19 | warnings.warn("lvreader is not installed, use pip install lvpyio")
20 |
21 |
22 | # Defaults
23 | POS_UNITS: str = "pix" # or mm, m, after scaling
24 | TIME_UNITS: str = "frame" # "frame" if not scaled, can become 'sec' or 'msec', 'usec'
25 | # after scaling can be m/s, mm/s
26 | VEL_UNITS: str = POS_UNITS # default is displacement in pix
27 | DELTA_T: np.float64 = 0.0  # default is 0. i.e. unknown, can be any float value
28 |
29 |
30 | def unsorted_unique(arr: ArrayLike) -> ArrayLike:
31 |     """returns unique values in their order of appearance in arr, with the first-occurrence indices from np.unique"""
32 | arr1, c = np.unique(arr, return_index=True)
33 | out = arr1[c.argsort()]
34 | return out, c
35 |
36 |
37 | def set_default_attrs(dataset: xr.Dataset) -> xr.Dataset:
38 | """Defines default attributes:
39 |
40 | # xr.DataSet.x.attrs["units"] = POS_UNITS
41 | POS_UNITS: str = "pix" # or mm, m, after scaling
42 |
43 |
44 |     # "frame" if not scaled, can become 'sec' or 'msec', 'usec'
45 | # the time units are for the sequence of PIV realizations
46 | # useful for animations of the DataSet and averaging
47 | # xr.DataSet.t.attrs["units"] = TIME_UNITS
48 |     TIME_UNITS: str = "frame"
49 |
50 | # after scaling can be m/s, mm/s, default is POS_UNITS
51 | # xr.DataSet.u.attrs["units"] = VEL_UNITS
52 | VEL_UNITS: str = POS_UNITS
53 |
54 | # attribute of the xr.DataSet, defines things for the
55 | # single flow realization, frame A -> DELTA_T -> frame B
56 | # xr.DataSet.attrs["delta_t"] = DELTA_T
57 |     DELTA_T: float = 0.0  # default is 0.0, i.e. unknown, can be any float value
58 |
59 | """
60 |
61 | dataset.x.attrs["units"] = POS_UNITS
62 | dataset.y.attrs["units"] = POS_UNITS
63 | dataset.u.attrs["units"] = VEL_UNITS
64 | dataset.v.attrs["units"] = VEL_UNITS
65 | dataset.t.attrs["units"] = TIME_UNITS
66 | dataset.attrs["delta_t"] = DELTA_T
67 | dataset.attrs["files"] = []
68 |
69 | return dataset
70 |
71 |
72 | def create_sample_field(
73 | rows: int = 5,
74 | cols: int = 8,
75 | grid: List = None,
76 | frame: int = 0,
77 | noise_sigma: float = 1.0,
78 | ) -> xr.Dataset:
79 | """Creates a sample Dataset for the tests.
80 |
81 | Args:
82 | rows (int) : number of points along vertical coordinate,
83 | corresponds to 'y'
84 | cols (int) : number of grid points along horizontal coordinate, 'x'
85 | grid (int, int) : spacing between vectors in two directions (x,y)
86 | frame (int): frame number
87 | noise_sigma (float): strength of Gaussian noise to add
88 |
89 | Returns:
90 | xarray.Dataset(): dataset
91 | """
92 | if grid is None:
93 | grid = [16, 8]
94 |
95 | x = np.arange(grid[0], (cols + 1) * grid[0], grid[0])
96 | y = np.arange(grid[1], (rows + 1) * grid[1], grid[1])
97 |
98 | xm, ym = np.meshgrid(x, y)
99 | u = (
100 | np.ones_like(xm)
101 | + np.linspace(0.0, 10.0, cols)
102 | + noise_sigma * np.random.randn(1, cols)
103 | )
104 | v = (
105 | np.zeros_like(ym)
106 | + np.linspace(-1.0, 1.0, rows).reshape(rows, 1)
107 | + noise_sigma * np.random.randn(rows, 1)
108 | )
109 |
110 | u = u[:, :, np.newaxis]
111 | v = v[:, :, np.newaxis]
112 | chc = np.ones_like(u)
113 |
114 | u = xr.DataArray(u, dims=("y", "x", "t"), coords={"x": x, "y": y, "t": [frame]})
115 | v = xr.DataArray(v, dims=("y", "x", "t"), coords={"x": x, "y": y, "t": [frame]})
116 | chc = xr.DataArray(chc, dims=("y", "x", "t"), coords={"x": x, "y": y, "t": [frame]})
117 |
118 | dataset = xr.Dataset({"u": u, "v": v, "chc": chc})
119 | dataset = set_default_attrs(dataset)
120 |
121 | return dataset
122 |
123 |
124 | def create_sample_Dataset(
125 | n_frames: int = 5, rows: int = 5, cols: int = 3, noise_sigma: float = 0.0
126 | ) -> xr.Dataset:
127 |     """Creates a synthetic dataset
128 |
129 | Args:
130 | n_frames : number of frames
131 | rows (int) : number of points along vertical coordinate,
132 | corresponds to 'y'
133 | cols : number of grid points along horizontal coordinate, 'x'
134 |         noise_sigma : strength of Gaussian noise added to each frame
135 |                       (the individual fields are generated by
136 |                       create_sample_field, one per frame)
137 |
138 | Returns:
139 | dataset: PIVPy dataset
140 |
141 | Example:
142 |         ds = create_sample_Dataset(n_frames=3)
143 | """
144 |
145 | dataset = []
146 | for i in range(n_frames):
147 | dataset.append(
148 | create_sample_field(rows=rows, cols=cols, frame=i, noise_sigma=noise_sigma)
149 | )
150 |
151 | combined = xr.concat(dataset, dim="t")
152 | combined = set_default_attrs(combined)
153 |
154 | return combined
155 |
156 |
157 | def create_uniform_strain():
158 | """creates constant strain field"""
159 | return create_sample_field(noise_sigma=0.0)
160 |
161 |
162 | def from_arrays(
163 | x: ArrayLike,
164 | y: ArrayLike,
165 | u: ArrayLike,
166 | v: ArrayLike,
167 |     mask: ArrayLike,
168 | frame: int = 0,
169 | ) -> xr.Dataset:
170 | """creates a dataset from two-dimensional Numpy arrays
171 | of x,y,u,v and mask
172 |
173 | Args:
174 | x (array): x
175 | y (array): y
176 | u (array): u
177 | v (array): v
178 | mask (array): mask, all numpy arrays of the same shape
179 | frame (int): frame number, default is 0
180 | Returns:
181 | ds (xarray.Dataset): xarray dataset with default attributes
182 | Example:
183 | ds = io.from_arrays(x,y,u,v,mask,frame=0)
184 | """
185 | # create dataset structure of appropriate size
186 | dataset = create_sample_field(rows=x.shape[0], cols=x.shape[1], frame=frame)
187 | # assign arrays
188 | dataset["x"] = x[0, :]
189 | dataset["y"] = y[:, 0]
190 | dataset["u"] = xr.DataArray(u[:, :, np.newaxis], dims=("y", "x", "t"))
191 | dataset["v"] = xr.DataArray(v[:, :, np.newaxis], dims=("y", "x", "t"))
192 | dataset["chc"] = xr.DataArray(mask[:, :, np.newaxis], dims=("y", "x", "t"))
193 | dataset = set_default_attrs(dataset)
194 |
195 | return dataset
196 |
197 |
198 | def from_df(
199 | df: pd.DataFrame,
200 | frame: int = 0,
201 | filename: str = None,
202 | ) -> xr.Dataset:
203 | """creates pivpy.Dataset from pandas DataFrame
204 |
205 | Args:
206 | df (pd.DataFrame): DataFrame with columns of x,y,u,v
207 | frame (int, optional): frame number. Defaults to 0.
208 | filename (str, optional): filename to add to the attributes. Defaults to None.
209 |
210 | Returns:
211 | xr.Dataset: pivpy.Dataset
212 | """
213 | d = df.to_numpy()
214 |
215 | x, ix = unsorted_unique(d[:, 0])
216 | y, iy = unsorted_unique(d[:, 1])
217 |
218 | if d.shape[1] < 5: # davis8 does not have mask or chc
219 | d = np.column_stack((d,np.zeros_like(d[:,-1])))
220 |
221 | if ix[1] == 1: #x grows first
222 | d = d.reshape(len(y), len(x), 5).transpose(1, 0, 2)
223 | elif iy[1] == 1: # y grows first
224 | d = d.reshape(len(x), len(y), 5) # .transpose(1,0,2)
225 | else:
226 | raise ValueError('not sorted x or y')
227 |
228 | u = d[:, :, 2]
229 | v = d[:, :, 3]
230 | chc = d[:, :, 4]
231 |
232 | # extend dimensions
233 | u = u[:, :, np.newaxis]
234 | v = v[:, :, np.newaxis]
235 | chc = chc[:, :, np.newaxis]
236 |
237 | u = xr.DataArray(u, dims=("x", "y", "t"), coords={"x": x, "y": y, "t": [frame]})
238 | v = xr.DataArray(v, dims=("x", "y", "t"), coords={"x": x, "y": y, "t": [frame]})
239 | chc = xr.DataArray(chc, dims=("x", "y", "t"), coords={"x": x, "y": y, "t": [frame]})
240 |
241 | dataset = xr.Dataset({"u": u, "v": v, "chc": chc})
242 | dataset = set_default_attrs(dataset)
243 | if filename is not None:
244 | dataset.attrs["files"].append(str(filename))
245 |
246 | return dataset
247 |
248 |
249 | def load_vec(
250 | filename: pathlib.Path,
251 | rows: int = None,
252 | cols: int = None,
253 | delta_t: float = None,
254 | frame: int = 0,
255 | ) -> xr.Dataset:
256 | """
257 | load_vec(filename,rows=rows,cols=cols)
258 | Loads the VEC file (TECPLOT format by TSI Inc.),
259 | OpenPIV VEC or TXT formats
260 | Arguments:
261 | filename : file name, expected to have a header and 5 columns
262 | rows, cols : number of rows and columns of a vector field,
263 | if None, None, then parse_header is called to infer the number
264 | written in the header
265 |         delta_t : time interval (default is None)
266 |         frame : frame or time marker (default is 0)
267 |     Output:
268 |         dataset is an xarray Dataset, see xarray for help
269 | """
270 | if rows is None or cols is None:
271 | _, _, rows, cols, dt, frame, _ = parse_header(filename)
272 | # print(f'rows = {rows}, cols = {cols}')
273 |
274 | if rows is None: # means no headers, openpiv vec file
275 | # d = np.genfromtxt(filename, usecols=(0, 1, 2, 3, 4))
276 | d = np.genfromtxt(
277 | filename,
278 | usecols=(0, 1, 2, 3, 4)
279 | )
280 | x, ix = unsorted_unique(d[:, 0])
281 | y, iy = unsorted_unique(d[:, 1])
282 |
283 | # print(f'rows = {len(y)}, cols = {len(x)}')
284 |
285 | if ix[1] == 1: #x grows first
286 | d = d.reshape(len(y), len(x), 5).transpose(1, 0, 2)
287 | elif iy[1] == 1: # y grows first
288 | d = d.reshape(len(y), len(x), 5) # .transpose(1,0,2)
289 | else:
290 | raise ValueError('not sorted x or y')
291 | else: # Insight VEC file
292 | d = np.genfromtxt(
293 | filename, skip_header=1, delimiter=",", usecols=(0, 1, 2, 3, 4)
294 | ).reshape(cols, rows, 5).transpose(1, 0, 2)
295 |
296 | x = d[:, :, 0][:, 0]
297 | y = d[:, :, 1][0, :]
298 |
299 |
300 | u = d[:, :, 2]
301 | v = d[:, :, 3]
302 | chc = d[:, :, 4]
303 |
304 | # extend dimensions
305 | u = u[:, :, np.newaxis]
306 | v = v[:, :, np.newaxis]
307 | chc = chc[:, :, np.newaxis]
308 |
309 | u = xr.DataArray(u, dims=("x", "y", "t"), coords={"x": x, "y": y, "t": [frame]})
310 | v = xr.DataArray(v, dims=("x", "y", "t"), coords={"x": x, "y": y, "t": [frame]})
311 | chc = xr.DataArray(chc, dims=("x", "y", "t"), coords={"x": x, "y": y, "t": [frame]})
312 |
313 | dataset = xr.Dataset({"u": u, "v": v, "chc": chc})
314 |
315 | dataset = set_default_attrs(dataset)
316 | if filename is not None:
317 | dataset.attrs["files"].append(str(filename))
318 | if delta_t is not None:
319 | dataset.attrs["delta_t"] = delta_t
320 |
321 | return dataset
322 |
323 | def load_insight_vec_as_csv(
324 | filename: pathlib.Path,
325 | rows: int = None,
326 | cols: int = None,
327 | delta_t: float = None,
328 | frame: int = 0,
329 | ) -> xr.Dataset:
330 | """
331 | load_insight_vec_as_csv(filename,rows=rows,cols=cols)
332 | Loads the VEC file (TECPLOT format by TSI Inc.),
333 | Arguments:
334 | filename : file name, expected to have a header and 5 columns
335 | rows, cols : number of rows and columns of a vector field,
336 | if None, None, then parse_header is called to infer the number
337 | written in the header
338 |         delta_t : time interval (default is None)
339 |         frame : frame or time marker (default is 0)
340 |     Output:
341 |         dataset is an xarray Dataset, see xarray for help
342 | """
343 | df = pd.read_csv(
344 | filename,
345 | header=None,
346 | skiprows=1,
347 | usecols=[0,1,2,3,4],
348 | names=["x","y","u","v","chc"],
349 | )
350 | dataset = from_df(df,frame=frame,filename=filename)
351 |
352 | return dataset
353 |
354 |
355 |
356 |
357 | def load_vc7(
358 | filename: pathlib.Path,
359 | frame: int = 0,
360 | ) -> xr.Dataset:
361 | """
362 | load_vc7(filename) or load_vc7(filename, frame=0)
363 | Loads the vc7 file using Lavision lvreader package,
364 | Arguments:
365 | filename : file name, pathlib.Path
366 | Output:
367 | dataset : xarray.Dataset
368 | """
369 | buffer = read_buffer(str(filename))
370 | data = buffer[0] # first component is a vector frame
371 | plane = 0 # don't understand the planes issue, simple vc7 is 0
372 |
373 | u = data.components["U0"][plane]
374 | v = data.components["V0"][plane]
375 |
376 | mask = np.logical_not(data.masks[plane] & data.enabled[plane])
377 | u[mask] = 0.0
378 | v[mask] = 0.0
379 |
380 | # scale
381 | u = data.scales.i.offset + u * data.scales.i.slope
382 | v = data.scales.i.offset + v * data.scales.i.slope
383 |
384 | x = np.arange(u.shape[1])
385 | y = np.arange(u.shape[0])
386 |
387 | x = data.scales.x.offset + (x + 0.5) * data.scales.x.slope * data.grid.x
388 | y = data.scales.y.offset + (y + 0.5) * data.scales.y.slope * data.grid.y
389 |
390 | x, y = np.meshgrid(x, y)
391 | dataset = from_arrays(x, y, u, v, mask, frame=frame)
392 |
393 | dataset["t"].assign_coords({"t": dataset.t + frame})
394 |
395 | dataset.attrs["files"].append(str(filename))
396 | dataset.attrs["delta_t"] = data.attributes["FrameDt"]
397 |
398 | return dataset
399 |
400 |
401 | def load_directory(
402 | path: pathlib.Path,
403 | basename: str = "*",
404 | ext: str = ".vec",
405 | ) -> xr.Dataset:
406 | """
407 |     load_directory(path, basename='*', ext='.vec')
408 |
409 |     Loads all the files with the chosen extension in the directory into a
410 |     single xarray Dataset with variables and units added as attributes
411 |
412 |     Input:
413 |         path : path to the directory with .vec, .txt or .VC7 files
414 |         basename, ext : glob pattern pieces, e.g. basename="Run*", ext=".vec"
415 |
416 | Output:
417 | dataset : xarray Dataset with dimensions: x,y,t and
418 | dataset arrays of u,v,
419 | attributes of variables and units
420 |
421 |
422 | See more: load_vec
423 | """
424 |
425 | files = sorted(path.glob(basename + ext))
426 |
427 | if len(files) == 0:
428 | raise IOError(f"No files {basename+ext} in the directory {path} ")
429 |
430 | print(f"found {len(files)} files")
431 |
432 | dataset = []
433 | combined = []
434 |
435 | _, _, rows, cols, delta_t, _, method = parse_header(files[0])
436 |
437 | if method is load_vc7:
438 | for i, f in enumerate(files):
439 | dataset.append(load_vc7(f, frame=i))
440 | else:
441 | for i, f in enumerate(files):
442 | dataset.append(method(f, rows=rows, cols=cols, frame=i, delta_t=delta_t))
443 |
444 | if len(dataset) > 0:
445 | combined = xr.concat(dataset, dim="t")
446 | combined.attrs["delta_t"] = dataset[-1].attrs["delta_t"]
447 | combined.attrs["files"] = str(files)
448 | return combined
449 |
450 | else:
451 | raise IOError("Could not read the files")
452 |
453 |
454 | def parse_header(filename: pathlib.Path) -> Tuple[str, ...]:
455 | """ parses the header line in the file to obtain attributes
456 |
457 | Args:
458 | filename (pathlib.Path): txt, vec file name
459 |
460 | Returns:
461 | Tuple[str, ...]:
462 |             variables: list of variable names, e.g. ["x", "y", "u", "v"]
463 |             units : list of unit strings matching the variables
464 |             rows : number of rows of the vector grid (None if unknown)
465 |             cols : number of columns of the vector grid (None if unknown)
466 |             delta_t: time interval between frames (None if unknown)
467 |             frame : frame number parsed from the file name
468 |             method : loader function to use for this file type
469 |     """
470 | fname = filename.stem.split(".")[0] # str(filename.name).split(".")[0]
471 |
472 | try:
473 | frame = int(re.findall(r"\d+", fname)[-1])
474 | # print(int(re.findall(r'\d+', tmp)[-1]))
475 | # print(int(''.join(filter(str.isdigit,tmp))[-1]))
476 | # print(int(re.findall(r'[0-9]+', tmp)[-1]))
477 |     except (ValueError, IndexError):  # file name may contain no digits
478 | frame = 0
479 |
480 | # binary, no header
481 | if filename.suffix.lower() == ".vc7":
482 | return (
483 | ["x", "y", "u", "v"],
484 | 4 * [POS_UNITS],
485 | None,
486 | None,
487 | None,
488 | frame,
489 | load_vc7,
490 | )
491 |
492 | with open(filename, "r", encoding="utf-8") as fid:
493 | header = fid.readline()
494 | # print(header)
495 |
496 | # if the file does not have a header, can be from OpenPIV or elsewhere
497 | # return None
498 | if header.startswith("#DaVis"):
499 | header_list = header.split(" ")
500 | rows = header_list[4]
501 | cols = header_list[5]
502 | pos_units = header_list[7]
503 | vel_units = header_list[-1]
504 | variables = ["x", "y", "u", "v"]
505 | units = [pos_units, pos_units, vel_units, vel_units]
506 | dt = 0.0
507 | method = load_davis8_txt
508 | return variables, units, rows, cols, dt, frame, method
509 |
510 | elif header.startswith("TITLE="): # Insight
511 | header_list = (
512 | header.replace(",", " ").replace("=", " ").replace('"', " ").split()
513 | )
514 |
515 | # get variable names, typically X,Y,U,V
516 | variables = header_list[3:12][::2]
517 |
518 | # get units - this is important if it's mm or m/s
519 | units = header_list[4:12][::2]
520 |
521 | # get the size of the PIV grid in rows x cols
522 | rows = int(header_list[-5])
523 | cols = int(header_list[-3])
524 |
525 | # this is also important to know the time interval, DELTA_T
526 | ind1 = header.find("MicrosecondsPerDeltaT")
527 | dt = float(header[ind1:].split('"')[1])
528 | method = load_vec
529 |
530 | return variables, units, rows, cols, dt, frame, method
531 |
532 | else: # no header, probably OpenPIV txt
533 | method = load_openpiv_txt
534 | return (
535 | ["x", "y", "u", "v"],
536 | 4 * [POS_UNITS],
537 | None,
538 | None,
539 | None,
540 | frame,
541 | method,
542 | )
543 |
544 |
545 | def get_units(filename: pathlib.Path) -> Tuple[str, str, float]:
546 | """
547 | get_units(filename)
548 |
549 |     given a full path name to the .vec file, returns the names
550 |     of the length and velocity units; falls back to the module
551 |     defaults when the header carries no units. Uses parse_header.
552 |
553 | """
554 |
555 | _, units, _, _, _, _, _ = parse_header(filename)
556 |
557 | if units == "":
558 | return (POS_UNITS, VEL_UNITS, DELTA_T)
559 |
560 | lUnits = units[0] # either m, mm, pix
561 | velUnits = units[2] # either m/s, mm/s, pix
562 |
563 | tUnits = velUnits.split("/")[1] # make it 's' if exists
564 |
565 | return (lUnits, velUnits, tUnits)
566 |
567 |
568 | def load_openpiv_txt(
569 | filename: str,
570 | rows: int = None,
571 | cols: int = None,
572 | delta_t: float = None,
573 | frame: int = 0,
574 | ) -> xr.Dataset:
575 | """ loads OpenPIV txt file
576 |
577 | Args:
578 |         filename (str): path to the OpenPIV .txt file
579 |         rows (int, optional): number of rows of the vector grid. Defaults to None.
580 |         cols (int, optional): number of columns of the vector grid. Defaults to None.
581 |         delta_t (float, optional): time interval between frames. Defaults to None.
582 |         frame (int, optional): frame number. Defaults to 0.
583 |
584 |     Returns:
585 |         xr.Dataset: PIVPy dataset with u, v, chc
586 | """
587 | if rows is None: # means no headers
588 | d = np.genfromtxt(filename, usecols=(0, 1, 2, 3, 4))
589 | x, ix = unsorted_unique(d[:, 0])
590 | y, iy = unsorted_unique(d[:, 1])
591 |
592 | if ix[1] == 1: #x grows first
593 | d = d.reshape(len(y), len(x), 5).transpose(1, 0, 2)
594 | elif iy[1] == 1: # y grows first
595 | d = d.reshape(len(y), len(x), 5)
596 | else:
597 | raise ValueError('not sorted x or y')
598 | else:
599 | d = np.genfromtxt(
600 | filename, skip_header=1, delimiter=",", usecols=(0, 1, 2, 3, 4)
601 | )
602 | d = d.reshape((rows, cols, 5))
603 |
604 | x = d[:, :, 0][0, :]
605 | y = d[:, :, 1][:, 0]
606 |
607 | u = d[:, :, 2]
608 | v = d[:, :, 3]
609 | chc = d[:, :, 4]
610 |
611 | dataset = xr.Dataset(
612 | {
613 | "u": xr.DataArray(
614 | u[:, :, np.newaxis],
615 | dims=("x", "y", "t"),
616 | coords={"x": x, "y": y, "t": [frame]},
617 | ),
618 | "v": xr.DataArray(
619 | v[:, :, np.newaxis],
620 | dims=("x", "y", "t"),
621 | coords={"x": x, "y": y, "t": [frame]},
622 | ),
623 | "chc": xr.DataArray(
624 | chc[:, :, np.newaxis],
625 | dims=("x", "y", "t"),
626 | coords={"x": x, "y": y, "t": [frame]},
627 | ),
628 | }
629 | )
630 |
631 | dataset = set_default_attrs(dataset)
632 | if delta_t is not None:
633 | dataset.attrs["delta_t"] = delta_t
634 | dataset.attrs["files"].append(str(filename))
635 |
636 | return dataset
637 |
638 |
639 | def load_openpiv_txt_as_csv(
640 | filename: str,
641 | rows: int = None,
642 | cols: int = None,
643 | delta_t: float = None,
644 | frame: int = 0,
645 | ) -> xr.Dataset:
646 | """ loads OpenPIV txt file
647 |
648 | Args:
649 |         filename (str): path to the OpenPIV .txt file
650 |         rows (int, optional): number of rows of the vector grid. Defaults to None.
651 |         cols (int, optional): number of columns of the vector grid. Defaults to None.
652 |         delta_t (float, optional): time interval between frames. Defaults to None.
653 |         frame (int, optional): frame number. Defaults to 0.
654 |
655 |     Returns:
656 |         xr.Dataset: PIVPy dataset with u, v, chc
657 | """
658 | df = pd.read_csv(
659 | filename,
660 | header=None,
661 | names=['x','y','u','v','chc'],
662 | delim_whitespace=True,
663 | usecols = (0,1,2,3,4),
664 | )
665 |
666 | dataset = from_df(
667 | df,
668 | frame=frame,
669 | filename=filename
670 | )
671 |
672 | return dataset
673 |
674 |
675 |
676 |
677 |
678 |
679 | def load_davis8_txt(
680 | filename: pathlib.Path,
681 | rows: int = None, # pylint: disable=W0613
682 | cols: int = None, # pylint: disable=W0613
683 | delta_t: float = 0.0, # pylint: disable=W0613
684 | frame: int = 0,
685 | ) -> xr.Dataset:
686 | """loads Davis8 old ASCII tables format
687 |
688 | Args:
689 | filename (pathlib.Path): Davis8 filename.txt
690 | rows (int, optional): rows. Defaults to None.
691 | cols (int, optional): cols. Defaults to None.
692 | delta_t (float, optional): delta_t. Defaults to 0.0.
693 | frame (int, optional): frame number. Defaults to 0.
694 |
695 | Returns:
696 | xr.Dataset: pivpy.Dataset
697 | """
698 | dataframe = pd.read_csv(
699 | filename, delimiter="\t", skiprows=1, names=["x", "y", "u", "v"], decimal=","
700 | )
701 | dataset = from_df(dataframe, frame=frame,filename=filename)
702 | # print(f'{rows},{cols},{delta_t}')
703 | return dataset
704 |
705 |
706 | # def sorted_unique(array):
707 | # """Returns not sorted sorted_unique"""
708 | # uniq, index = np.unique(array, return_index=True)
709 | # return uniq[index.argsort()]
710 |
--------------------------------------------------------------------------------
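A usage sketch for the readers defined above; the paths point at the sample data bundled under `pivpy/data` and are illustrative:

```python
# Usage sketch for pivpy.io (paths are illustrative)
import pathlib
import numpy as np
from pivpy import io

data_dir = pathlib.Path("pivpy/data")
vec_file = data_dir / "Insight" / "Run000001.T000.D000.P000.H001.L.vec"

# inspect the header: variable names, units, grid size, delta_t, frame and loader
variables, units, rows, cols, delta_t, frame, method = io.parse_header(vec_file)

# load a single Insight .vec file, then a whole directory into one dataset along "t"
ds = io.load_vec(vec_file)
ds_all = io.load_directory(data_dir / "Insight", basename="Run*", ext=".vec")

# build a dataset directly from 2D numpy arrays
x, y = np.meshgrid(np.arange(32.0), np.arange(16.0))
u, v = np.ones_like(x), np.zeros_like(x)
ds_np = io.from_arrays(x, y, u, v, mask=np.ones_like(x), frame=0)
```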
/pivpy/pivpy.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | This script extends the functionality of xarray.Dataset by adding a new accessor called piv. The accessor adds several properties and methods that are useful for working with particle image velocimetry (PIV) data. The properties include average, which returns the mean flow field, and delta_t, which returns the time step used in the PIV measurement. The methods include crop, which allows the user to crop the data by a given number of rows and columns from the boundaries, vec2scal, which converts vector data to scalar data, pan, which pans the data by a given number of pixels, and rotate, which rotates the data by a given angle.
4 |
5 |
6 | @author: Ron, Alex
7 | """
8 | try:
9 | from typing_extensions import Literal
10 | except ImportError:
11 | from typing import Literal
12 | from typing import List
13 |
14 | import numpy as np
15 | import xarray as xr
16 | from scipy.interpolate import griddata
17 | from scipy.ndimage import gaussian_filter
18 |
19 | from pivpy.graphics import quiver as gquiver
20 | from pivpy.graphics import showf as gshowf
21 | from pivpy.graphics import showscal as gshowscal
22 | from pivpy.compute_funcs import Γ1_moving_window_function, Γ2_moving_window_function
23 |
24 | # """ learn from this example
25 | # import xarray as xr
26 | # @xr.register_dataset_accessor('geo')
27 | # class GeoAccessor(object):
28 | # def __init__(self, xarray_obj):
29 | # self._obj = xarray_obj
30 | # self._center = None
31 |
32 | # @property
33 | # def center(self):
34 | # " Return the geographic center point of this dataset."
35 | # if self._center is None:
36 | # # we can use a cache on our accessor objects, because accessors
37 | # # themselves are cached on instances that access them.
38 | # lon = self._obj.latitude
39 | # lat = self._obj.longitude
40 | # self._center = (float(lon.mean()), float(lat.mean()))
41 | # return self._center
42 |
43 | # def plot(self):
44 | # " Plot data on a map."
45 | # return 'plotting!'
46 |
47 |
48 | # In [1]: ds = xr.Dataset({'longitude': np.linspace(0, 10),
49 | # ...: 'latitude': np.linspace(0, 20)})
50 | # ...:
51 |
52 | # In [2]: ds.geo.center
53 | # Out[2]: (10.0, 5.0)
54 |
55 | # In [3]: ds.geo.plot()
56 | # Out[3]: 'plotting!'
57 |
58 | # """
59 |
60 |
61 | @xr.register_dataset_accessor("piv")
62 | class PIVAccessor(object):
63 | """extends xarray Dataset with PIVPy properties"""
64 |
65 | def __init__(self, xarray_obj):
66 | """
67 | Arguments:
68 | data : xarray Dataset:
69 | x,y,t are coordinates
70 |                 u,v,chc are the data arrays
71 |
72 | We add few shortcuts (properties):
73 | data.piv.average is the time average (data.mean(dim='t'))
74 | data.piv.delta_t is the shortcut to get $\\Delta t$
75 | data.piv.vorticity
76 | data.piv.tke
77 | data.piv.shear
78 |
79 | and a few methods:
80 | data.piv.vec2scal()
81 | data.piv.pan
82 | data.piv.rotate
83 |
84 | """
85 | self._obj = xarray_obj
86 | self._average = None
87 | self._delta_t = None
88 |
89 | @property
90 | def average(self):
91 | """Return the mean flow field ."""
92 | if self._average is None: # only first time
93 | self._average = self._obj.mean(dim="t")
94 | self._average.attrs = self._obj.attrs # we need units in quiver
95 |             self._average = self._average.assign_coords({"t": 0})
96 |
97 | return self._average
98 |
99 | def crop(self, crop_vector=None):
100 | """ crops xr.Dataset by some rows, cols from the boundaries
101 |
102 | Args:
103 |             crop_vector (list, optional): [xmin, xmax, ymin, ymax]; None entries keep that boundary. Defaults to None.
104 |
105 |         Returns:
106 |             xr.Dataset: the cropped dataset
107 | """
108 | if crop_vector is None:
109 | crop_vector = 4 * [None]
110 |
111 |         xmin, xmax, ymin, ymax = crop_vector
112 |
113 |         xmin = self._obj.x.min() if xmin is None else xmin
114 |         xmax = self._obj.x.max() if xmax is None else xmax
115 |         ymin = self._obj.y.min() if ymin is None else ymin
116 |         ymax = self._obj.y.max() if ymax is None else ymax
117 |
118 |         self._obj = self._obj.sel(x=slice(xmin, xmax), y=slice(ymin, ymax))
119 |
120 | return self._obj
121 |
122 | def pan(self, shift_x=0.0, shift_y=0.0):
123 | """moves the field by shift_x,shift_y in the same units as x,y"""
124 | self._obj = self._obj.assign_coords(
125 | {"x": self._obj.x + shift_x, "y": self._obj.y + shift_y}
126 | )
127 | return self._obj
128 |
129 |     def filterf(self, sigma: List[float] = None, **kwargs):
130 |         """Gaussian filtering of velocity, default sigma is [1., 1., 0] (no smoothing in time)"""
131 |         sigma = [1.0, 1.0, 0.0] if sigma is None else sigma
132 | self._obj["u"] = xr.DataArray(
133 | gaussian_filter(
134 | self._obj["u"].values, sigma, **kwargs),
135 | dims=("y", "x", "t"),
136 | attrs = self._obj["u"].attrs,
137 | )
138 | self._obj["v"] = xr.DataArray(
139 | gaussian_filter(
140 | self._obj["v"].values, sigma, **kwargs),
141 | dims=("y", "x", "t"),
142 | attrs = self._obj["v"].attrs,
143 | )
144 |
145 | return self._obj
146 |
147 | def fill_nans(self, method: Literal["linear", "nearest", "cubic"] = "nearest"):
148 | """
149 | This method uses scipy.interpolate.griddata to interpolate missing data.
150 | Parameters
151 | ----------
152 | src_data: Any
153 | Input data array.
154 | method: {'linear', 'nearest', 'cubic'}, optional
155 | The method to use for interpolation in `scipy.interpolate.griddata`.
156 | Returns
157 | -------
158 | :class:`numpy.ndarray`:
159 | An interpolated :class:`numpy.ndarray`.
160 | """
161 |
162 | def _griddata_nans(src_data, x_coords, y_coords, method=method):
163 |
164 | src_data_flat = src_data.copy().flatten()
165 | data_bool = ~np.isnan(src_data_flat)
166 |
167 | if not data_bool.any():
168 | return src_data
169 |
170 | return griddata(
171 | points=(x_coords.flatten()[data_bool], y_coords.flatten()[data_bool]),
172 | values=src_data_flat[data_bool],
173 | xi=(x_coords, y_coords),
174 | method=method,
175 | # fill_value=nodata,
176 | )
177 |
178 | x_coords, y_coords = np.meshgrid(
179 | self._obj.coords["x"].values, self._obj.coords["y"].values
180 | )
181 |
182 | for var_name in self._obj.variables:
183 | if var_name not in self._obj.coords:
184 | for t_i in self._obj["t"]:
185 | new_data = _griddata_nans(
186 | self._obj.sel(t=t_i)[var_name].data,
187 | x_coords,
188 | y_coords,
189 | method=method,
190 | )
191 | self._obj.sel(t=t_i)[var_name].data[:] = new_data
192 |
193 | return self._obj
194 |
195 | def __add__(self, other):
196 | """add two datasets means that we sum up the velocities, assume
197 | that x,y,t,delta_t are all identical
198 | """
199 | self._obj["u"] += other._obj["u"]
200 | self._obj["v"] += other._obj["v"]
201 | return self._obj
202 |
203 | def __sub__(self, other):
204 | """add two datasets means that we sum up the velocities, assume
205 | that x,y,t,delta_t are all identical
206 | """
207 | self._obj["u"] -= other._obj["u"]
208 | self._obj["v"] -= other._obj["v"]
209 | return self._obj
210 |
211 | def vorticity(self):
212 |         """calculates vorticity of the data array (at one time instance) and
213 |         adds it as the scalar variable 'w'
214 |
215 | Input:
216 | xarray with the variables u,v and dimensions x,y
217 |
218 | Output:
219 | xarray with the estimated vorticity as a scalar field with
220 | same dimensions
221 |
222 | """
223 |
224 | self._obj["w"] = self._obj["v"].differentiate("x") - self._obj[
225 | "u"
226 | ].differentiate("y")
227 |
228 | self._obj["w"].attrs["units"] = "1/delta_t"
229 | self._obj["w"].attrs["standard_name"] = "vorticity"
230 |
231 | return self._obj
232 |
233 | def strain(self):
234 | """ calculates rate of strain of a two component field
235 |
236 | Returns:
237 | _type_: adds ["w"] = du_dx^2 + dv_dy^2 + 0.5*(du_dy+dv_dx)^2
238 | """
239 | du_dx = self._obj["u"].differentiate("x")
240 | du_dy = self._obj["u"].differentiate("y")
241 | dv_dx = self._obj["v"].differentiate("x")
242 | dv_dy = self._obj["v"].differentiate("y")
243 |
244 | self._obj["w"] = du_dx**2 + dv_dy**2 + 0.5 * (du_dy + dv_dx) ** 2
245 | self._obj["w"].attrs["units"] = "1/delta_t"
246 | self._obj["w"].attrs["standard_name"] = "strain"
247 |
248 | return self._obj
249 |
250 | def divergence(self):
251 | """ calculates divergence field
252 |
253 | Returns:
254 | self._obj: xr.Dataset with the new property ["w"] = divergence
255 | """
256 | du_dx, _ = np.gradient(
257 |             self._obj["u"], self._obj["x"], self._obj["y"], axis=(0, 1)
258 | )
259 | _, dv_dy = np.gradient(
260 |             self._obj["v"], self._obj["x"], self._obj["y"], axis=(0, 1)
261 | )
262 |
263 | if "t" in self._obj.coords:
264 | self._obj["w"] = (("x", "y", "t"), dv_dy + du_dx)
265 | else:
266 | self._obj["w"] = (("x", "y"), dv_dy + du_dx)
267 |
268 | self._obj["w"].attrs["units"] = "1/delta_t"
269 | self._obj["w"].attrs["standard_name"] = "divergence"
270 |
271 | return self._obj
272 |
273 | def acceleration(self):
274 | """calculates material derivative or acceleration of the
275 |         data array (single frame)
276 |
277 | Input:
278 | xarray with the variables u,v and dimensions x,y
279 |
280 | Output:
281 | xarray with the estimated acceleration as a scalar field data['w']
282 |
283 | """
284 | du_dx = self._obj["u"].differentiate("x")
285 | du_dy = self._obj["u"].differentiate("y")
286 | dv_dx = self._obj["v"].differentiate("x")
287 | dv_dy = self._obj["v"].differentiate("y")
288 |
289 | acc_x = self._obj["u"] * du_dx + self._obj["v"] * du_dy
290 | acc_y = self._obj["u"] * dv_dx + self._obj["v"] * dv_dy
291 |
292 | self._obj["w"] = xr.DataArray(
293 | np.sqrt(acc_x**2 + acc_y**2), dims=["x", "y", "t"]
294 | )
295 |
296 | self._obj["w"].attrs["units"] = "1/delta_t"
297 | self._obj["w"].attrs["standard_name"] = "acceleration"
298 |
299 | return self._obj
300 |
301 | def kinetic_energy(self):
302 |         """estimates the kinetic energy u**2 + v**2 of each field"""
303 | self._obj["w"] = self._obj["u"] ** 2 + self._obj["v"] ** 2
304 | self._obj["w"].attrs["units"] = "(m/s)^2"
305 | self._obj["w"].attrs["standard_name"] = "kinetic_energy"
306 | return self._obj
307 |
308 | def tke(self):
309 | """estimates turbulent kinetic energy"""
310 | if len(self._obj.t) < 2:
311 | raise ValueError(
312 | "TKE is not defined for a single vector field, \
313 | use .piv.kinetic_energy()"
314 | )
315 |
316 | new_obj = self._obj.copy()
317 | new_obj -= new_obj.mean(dim="t")
318 | new_obj["w"] = new_obj["u"] ** 2 + new_obj["v"] ** 2
319 | new_obj["w"].attrs["units"] = "(m/s)^2"
320 | new_obj["w"].attrs["standard_name"] = "TKE"
321 |
322 | return new_obj
323 |
324 | def fluct(self):
325 | """returns fluctuations as a new dataset"""
326 |
327 | if len(self._obj.t) < 2:
328 | raise ValueError(
329 | "fluctuations cannot be defined for a \
330 | single vector field, use .piv.ke()"
331 | )
332 |
333 | new_obj = self._obj.copy()
334 | new_obj -= new_obj.mean(dim="t")
335 |
336 |         new_obj["u"].attrs["standard_name"] = "fluctuation"
337 |         new_obj["v"].attrs["standard_name"] = "fluctuation"
338 |
339 | return new_obj
340 |
341 | def reynolds_stress(self):
342 |         """estimates the Reynolds stress -<u'v'> and stores it in 'w'"""
343 |
344 | if len(self._obj.t) < 2:
345 | raise ValueError(
346 | "fluctuations cannot be defined for a \
347 | single vector field, use .piv.ke()"
348 | )
349 |
350 | new_obj = self._obj.copy()
351 | new_obj -= new_obj.mean(dim="t")
352 |
353 | new_obj["w"] = -1 * new_obj["u"] * new_obj["v"] # new scalar
354 | self._obj["w"] = new_obj["w"].mean(dim="t") # reynolds stress is -\rho < u' v'>
355 | self._obj["w"].attrs["standard_name"] = "Reynolds_stress"
356 |
357 | return self._obj
358 |
359 | def rms(self):
360 | """Root mean square"""
361 | self._obj = self.tke()
362 | self._obj["w"] = np.sqrt(self._obj["w"])
363 | self._obj["w"].attrs["standard_name"] = "rms"
364 |         self._obj["w"].attrs["units"] = "m/s"
365 |         return self._obj
366 | def Γ1(self, n, convCoords = True):
367 | """Makes use of Dask (kind of) to run Γ1_moving_window_function via Γ1_pad.
368 | It takes an Xarray dataset, applies rolling window to it, groups rolling windows
369 |         and applies a custom Γ1-calculating function to it in a parallel manner.
370 |
371 | Args:
372 | self._obj (xr.Dataset) - must contain, at least, u, v, x, y and t
373 | n (int) - (2*n+1) gives the rolling window size
374 | convCoords (bool) - either True or False, convCoords = convert coordinates,
375 | if True - create two new data arrays within self._obj with
376 |                                 the names "xCoordinates" and "yCoordinates" that store x and y
377 | coordinates as data arrays; always keep it "True" unless you
378 |                                 have already created "xCoordinates" and "yCoordinates" somehow
379 | (say, by running Γ1 or Γ2 functions before)
380 |
381 | Returns:
382 | self._obj (xr.Dataset) - the argument with the Γ1 data array
383 | """
384 | # Xarray rolling window (below) doesn't roll over the coordinates. We're going to convert
385 |         # them to data arrays. Xarray doesn't make the conversion procedure easy. So, instead of
386 | # Xarray, we are going to adhere to numpy for the conversion.
387 | if convCoords:
388 | PMX, PMY = np.meshgrid(self._obj.coords['x'].to_numpy(), self._obj.coords['y'].to_numpy())
389 | tTimes = self._obj.coords['t'].to_numpy().size
390 | XYshape = PMX.T.shape + (tTimes,)
391 | self._obj['xCoordinates'] = xr.DataArray(np.broadcast_to(PMX.T[:,:,np.newaxis], XYshape), dims=['x','y','t'])
392 | self._obj['yCoordinates'] = xr.DataArray(np.broadcast_to(PMY.T[:,:,np.newaxis], XYshape), dims=['x','y','t'])
393 |
394 | # Create the object of class rolling:
395 | rollingW = self._obj.rolling({"x":(2*n+1), "y":(2*n+1), "t":1}, center=True)
396 | # Construct the dataset containing a new dimension corresponding to the rolling window
397 | fieldRoll = rollingW.construct(x='rollWx', y='rollWy', t='rollWt')
398 | # Xarray requires stacked array in case of a multidimensional rolling window
399 | fieldStacked = fieldRoll.stack(gridcell=['x','y','t'])
400 |
401 | # map_blocks is an automated Dask-parallel mapping function. It requires a
402 | # special implementation. Thus, I have to create a separate function - Γ1_pad -
403 |         # which performs grouping of the stacked dataset fieldStacked. Then map_blocks
404 |         # automatically Dask-chunks what Γ1_pad returns. Every Dask-chunk can contain several groups.
405 | # The chunks are computed in parallel. See here for map_blocks() function:
406 | # https://tutorial.xarray.dev/advanced/map_blocks/simple_map_blocks.html
407 | def Γ1_pad(ds, n):
408 | dsGroup = ds.groupby("gridcell")
409 | return dsGroup.map(Γ1_moving_window_function, args=[n])
410 |
411 | newArr = fieldStacked.map_blocks(Γ1_pad, args=[n]).compute()
412 | # Now, the result must be unstacked to return to the original x, y, t coordinates.
413 | self._obj['Γ1'] = newArr.unstack("gridcell")
414 |
415 | self._obj['Γ1'].attrs["standard_name"] = "Gamma 1"
416 | self._obj['Γ1'].attrs["units"] = "dimensionless"
417 |
418 | return self._obj
419 |
420 | def Γ2(self, n, convCoords = True):
421 | """Makes use of Dask (kind of) to run Γ2_moving_window_function via Γ2_pad.
422 | It takes an Xarray dataset, applies rolling window to it, groups rolling windows
423 |         and applies a custom Γ2-calculating function to it in a parallel manner.
424 |
425 | Args:
426 | self._obj (xr.Dataset) - must contain, at least, u, v, x, y and t
427 | n (int) - (2*n+1) gives the rolling window size
428 | convCoords (bool) - either True or False, convCoords = convert coordinates,
429 | if True - create two new data arrays within self._obj with
430 |                                 the names "xCoordinates" and "yCoordinates" that store x and y
431 | coordinates as data arrays; always keep it "True" unless you
432 |                                 have already created "xCoordinates" and "yCoordinates" somehow
433 | (say, by running Γ1 or Γ2 functions before)
434 |
435 | Returns:
436 | self._obj (xr.Dataset) - the argument with the Γ2 data array
437 | """
438 | # Xarray rolling window (below) doesn't roll over the coordinates. We're going to convert
439 |         # them to data arrays. Xarray doesn't make the conversion procedure easy. So, instead of
440 | # Xarray, we are going to adhere to numpy for the conversion.
441 | if convCoords:
442 | PMX, PMY = np.meshgrid(self._obj.coords['x'].to_numpy(), self._obj.coords['y'].to_numpy())
443 | tTimes = self._obj.coords['t'].to_numpy().size
444 | XYshape = PMX.T.shape + (tTimes,)
445 | self._obj['xCoordinates'] = xr.DataArray(np.broadcast_to(PMX.T[:,:,np.newaxis], XYshape), dims=['x','y','t'])
446 | self._obj['yCoordinates'] = xr.DataArray(np.broadcast_to(PMY.T[:,:,np.newaxis], XYshape), dims=['x','y','t'])
447 |
448 | # Create the object of class rolling:
449 | rollingW = self._obj.rolling({"x":(2*n+1), "y":(2*n+1), "t":1}, center=True)
450 | # Construct the dataset containing a new dimension corresponding to the rolling window
451 | fieldRoll = rollingW.construct(x='rollWx', y='rollWy', t='rollWt')
452 | # Xarray requires stacked array in case of a multidimensional rolling window
453 | fieldStacked = fieldRoll.stack(gridcell=['x','y','t'])
454 |
455 | # map_blocks is an automated Dask-parallel mapping function. It requires a
456 | # special implementation. Thus, I have to create a separate function - Γ2_pad -
457 |         # which performs grouping of the stacked dataset fieldStacked. Then map_blocks
458 |         # automatically Dask-chunks what Γ2_pad returns. Every Dask-chunk can contain several groups.
459 | # The chunks are computed in parallel. See here for map_blocks() function:
460 | # https://tutorial.xarray.dev/advanced/map_blocks/simple_map_blocks.html
461 | def Γ2_pad(ds, n):
462 | dsGroup = ds.groupby("gridcell")
463 | return dsGroup.map(Γ2_moving_window_function, args=[n])
464 |
465 | newArr = fieldStacked.map_blocks(Γ2_pad, args=[n]).compute()
466 | # Now, the result must be unstacked to return to the original x, y, t coordinates.
467 | self._obj['Γ2'] = newArr.unstack("gridcell")
468 |
469 | self._obj['Γ2'].attrs["standard_name"] = "Gamma 2"
470 | self._obj['Γ2'].attrs["units"] = "dimensionless"
471 |
472 | return self._obj
473 |
474 | def vec2scal(self, flow_property: str = "curl"):
475 | """ creates a scalar flow property field
476 |
477 | Args:
478 | flow_property (str, optional): one of the flow properties. Defaults to "curl".
479 |
480 | Returns:
481 |             xr.Dataset: the dataset with the new scalar stored in "w"
482 | """
483 | # replace few common names
484 | flow_property = "vorticity" if flow_property == "curl" else flow_property
485 | flow_property = "kinetic_energy" if flow_property == "ken" else flow_property
486 | flow_property = "kinetic_energy" if flow_property == "ke" else flow_property
487 | flow_property = "vorticity" if flow_property == "vort" else flow_property
488 |
489 | method = getattr(self, str(flow_property))
490 |
491 | self._obj = method()
492 |
493 | return self._obj
494 |
495 | def __mul__(self, scalar):
496 | """
497 | multiplication of a velocity field by a scalar (simple scaling)
498 | """
499 | self._obj["u"] *= scalar
500 | self._obj["v"] *= scalar
501 | if "w" in self._obj.var():
502 |             self._obj["w"] *= scalar
503 |
504 | return self._obj
505 |
506 |     def __truediv__(self, scalar):
507 |         """
508 |         division of a velocity field by a scalar (simple scaling)
509 | """
510 | self._obj["u"] /= scalar
511 | self._obj["v"] /= scalar
512 |
513 | return self._obj
514 |
515 | def set_delta_t(self, delta_t: float = 0.0):
516 | """sets delta_t attribute, float, default is 0.0"""
517 | self._obj.attrs["delta_t"] = delta_t
518 | return self._obj
519 |
520 | def set_scale(self, scale: float = 1.0):
521 |         """scales all variables by a scalar"""
522 | for var in ["x", "y", "u", "v"]:
523 | self._obj[var] = self._obj[var] * scale
524 |
525 | return self._obj
526 |
527 | def rotate(self, theta: float = 0.0):
528 | """rotates the data, but only for some x,y grids
529 | Args:
530 | theta (float): degrees in the clockwise direction
531 | it can only work for the cases with equal size along
532 | x and y
533 | Returns:
534 | rotated object
535 | """
536 |
537 | theta = theta / 360.0 * 2 * np.pi
538 |
539 | x_i = self._obj.x * np.cos(theta) + self._obj.y * np.sin(theta)
540 | eta = self._obj.y * np.cos(theta) - self._obj.x * np.sin(theta)
541 | du_dx_i = self._obj.u * np.cos(theta) + self._obj.v * np.sin(theta)
542 | u_eta = self._obj.v * np.cos(theta) - self._obj.u * np.sin(theta)
543 |
544 | self._obj["x"] = x_i
545 | self._obj["y"] = eta
546 | self._obj["u"] = du_dx_i
547 | self._obj["v"] = u_eta
548 |
549 | if "theta" in self._obj:
550 | self._obj["theta"] += theta
551 | else:
552 | self._obj["theta"] = theta
553 |
554 | return self._obj
555 |
556 | @property
557 | def delta_t(self):
558 | """receives the delta_t from the set"""
559 | if self._delta_t is None:
560 | self._delta_t = self._obj.attrs["delta_t"]
561 | return self._delta_t
562 |
563 | def quiver(self, **kwargs):
564 |         """wrapper for graphics.quiver()"""
565 |         fig, ax = gquiver(self._obj, **kwargs)
566 |         return fig, ax
567 |
568 | def streamplot(self, **kwargs):
569 | """graphics.quiver(streamlines=True)"""
570 | gquiver(self._obj, streamlines=True, **kwargs)
571 |
572 | def showf(self, **kwargs):
573 | """method for graphics.showf"""
574 | gshowf(self._obj, **kwargs)
575 |
576 | def showscal(self, **kwargs):
577 | """method for graphics.showscal"""
578 | gshowscal(self._obj, **kwargs)
579 |
580 | # @property
581 | # def vel_units(self):
582 | # " Return the geographic center point of this dataset."
583 | # if self._vel_units is None:
584 | # self._vel_units = self._obj.attrs.l_units + '/' + \
585 | # self._obj.attrs.t_units
586 | # return self._vel_units
587 |
--------------------------------------------------------------------------------
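A short sketch of the `.piv` accessor in action, following the patterns in `tests/test_methods.py`; the sample-data helper and the crop limits are illustrative:

```python
# Sketch of the "piv" accessor registered above (numbers are illustrative)
from pivpy import io, pivpy  # importing pivpy.pivpy registers the accessor

ds = io.create_sample_Dataset(n_frames=5, rows=10, cols=10)

mean_flow = ds.piv.average            # time-averaged field (a property)
ds = ds.piv.vec2scal("ke")            # adds kinetic energy as ds["w"]
ds = ds.piv.vorticity()               # replaces ds["w"] with vorticity
ds = ds.piv.crop([32, 128, 16, 64])   # [xmin, xmax, ymin, ymax]
ds = ds.piv.pan(1.0, -1.0)            # shift the x, y coordinates

fig, ax = ds.isel(t=0).piv.quiver()   # quiver of a single frame via pivpy.graphics
# ds.piv.Γ1(n=2) and ds.piv.Γ2(n=2) add the Γ1 / Γ2 scalar fields in the same way
```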
/pivpy_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexlib/pivpy/aa4200c0cf19e2f49a12f799fa54b0e8cc96fab7/pivpy_logo.png
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "pivpy"
3 | version = "0.0.20"
4 | authors = [
5 | { name="Alex Liberzon and Ron Shnapp", email="alex.liberzon@gmail.com" },
6 | ]
7 | description = "Python package for post-processing PIV results"
8 | readme = "README.md"
9 | license = { file="LICENSE" }
10 | requires-python = ">=3.7"
11 | classifiers = [
12 | "Programming Language :: Python :: 3",
13 | "License :: OSI Approved :: MIT License",
14 | "Operating System :: OS Independent",
15 | ]
16 |
17 | dependencies = [
18 | "numpy",
19 | "scipy",
20 | "xarray",
21 | "matplotlib",
22 | "pytest",
23 | "vortexfitting"
24 | ]
25 |
26 | keywords=["PIV", "particle image velocimetry", "post-processing"]
27 |
28 |
29 | [project.optional-dependencies]
30 | lvpyio = ["lvpyio"]
31 | readim = ["readim"]
32 | netcdf = ["netcdf4"]
33 | vortexfitting = ["vortexfitting"]
34 | full = ["lvpyio", "readim", "netcdf4", "vortexfitting"]
35 |
36 | [project.urls]
37 | "Homepage" = "https://github.com/alexlib/pivpy"
38 | "Bug Tracker" = "https://github.com/alexlib/pivpy/issues"
39 |
40 |
41 | [tool.setuptools]
42 | include-package-data = true
43 |
--------------------------------------------------------------------------------
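The `[project.optional-dependencies]` table above corresponds to pip extras: a plain `pip install pivpy` brings in the core packages listed under `dependencies`, while `pip install "pivpy[full]"` (or a single extra such as `pivpy[lvpyio]` for the LaVision `.vc7` reader, or `pivpy[netcdf]` for writing `.nc` files) also installs the optional packages. This matches the guarded imports in `pivpy/io.py` and `pivpy/inter.py`, which only emit a warning when an optional package is missing.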
/requirements.txt:
--------------------------------------------------------------------------------
1 | numpy
2 | scipy
3 | xarray
4 | netcdf4
5 | matplotlib
6 | pytest
7 | typing-extensions
8 | vortexfitting
9 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexlib/pivpy/aa4200c0cf19e2f49a12f799fa54b0e8cc96fab7/tests/__init__.py
--------------------------------------------------------------------------------
/tests/test_graphics.py:
--------------------------------------------------------------------------------
1 | """ tests of pivpy.graphics module """
2 | import pathlib
3 | import importlib.resources
4 | from pivpy import io, graphics, pivpy
5 |
6 | # Ensure compatibility with different Python versions (3.9+ has 'files', 3.7 and 3.8 need 'path')
7 | try:
8 | from importlib.resources import files
9 | except ImportError:
10 | from importlib.resources import path as resource_path
11 |
12 | # For Python 3.9+
13 | try:
14 | path = files('pivpy') / 'data'
15 | except NameError:
16 | # For Python 3.7 and 3.8
17 | with resource_path('pivpy', 'data') as data_path:
18 | path = data_path
19 |
20 | # Convert to pathlib.Path if not already
21 | path = pathlib.Path(path)
22 |
23 | filename = path / "Insight" / "Run000001.T000.D000.P000.H001.L.vec"
24 |
25 | # load data
26 | _d = io.load_vec(filename).isel(t=0)
27 |
28 |
29 | def test_showscal():
30 | """tests showscal
31 | """
32 | graphics.showscal(_d, flow_property="curl")
33 |
34 |
35 | def test_quiver():
36 | """ tests quiver
37 | """
38 | graphics.quiver(_d)
39 | _d.piv.quiver()
40 |
41 |
42 | def test_xarray_plot():
43 | """tests xarray plot use of pcolormesh
44 | """
45 | d = _d.piv.vec2scal("curl")
46 | d["w"].plot.pcolormesh()
47 |
48 |
49 | def test_histogram():
50 | """tests histogram
51 | """
52 | graphics.histogram(_d)
53 |
54 |
55 | def test_quiver_openpiv_vec():
56 | """ tests quiver of openpiv vec file
57 | """
58 | filename = path / "openpiv_vec" / "exp1_001_b.vec"
59 | print(filename, filename.exists())
60 | _d = io.load_vec(filename)
61 | _d.isel(t=0).piv.quiver() # notice the warning
62 |
63 | def test_showf():
64 | """tests showf
65 | """
66 | graphics.showf(_d)
67 |
68 | def test_average():
69 | """tests average
70 | """
71 | d = io.create_sample_Dataset()
72 | d = d.piv.average
73 | d.piv.quiver()
--------------------------------------------------------------------------------
/tests/test_inter.py:
--------------------------------------------------------------------------------
1 | """ tests of pivpy.inter module """
2 | import pathlib
3 | import importlib.resources
4 | import numpy as np
5 | from pivpy import io, inter
6 |
7 | # Ensure compatibility with different Python versions (3.9+ has 'files', 3.7 and 3.8 need 'path')
8 | try:
9 | from importlib.resources import files
10 | except ImportError:
11 | from importlib.resources import path as resource_path
12 |
13 | # For Python 3.9+
14 | try:
15 | path = files('pivpy') / 'data'
16 | except NameError:
17 | # For Python 3.7 and 3.8
18 | with resource_path('pivpy', 'data') as data_path:
19 | path = data_path
20 |
21 | # Convert to pathlib.Path if not already
22 | path = pathlib.Path(path)
23 |
24 | openpivTxtTestFile = path / "openpiv_txt" / "interTest.txt"
25 | saveNcFile = path / "interTest" / "testInterCreates_nc.nc"
26 |
27 | def test_pivpy_to_vf():
28 | """
29 | The idea is to check if VortexFitting gets the same velocity field
30 | as PIVPY. But note that VortexFitting modifies the field a bit
31 | when reading it in.
32 | IMPORTANT: to run this test, VortexFitting package must be installed.
33 | """
34 | d = io.load_openpiv_txt(str(openpivTxtTestFile)) # PIVPY velocity field
35 | vfField = inter.pivpyTOvf(d, saveNcFile) # VortexFitting velocity field
36 | x = d.coords['x'].values
37 | y = d.coords['y'].values
38 | u = d['u'].isel(t=0).values
39 | v = d['v'].isel(t=0).values
40 | # See classes.py, below line 87 (if file_type == "piv_netcdf") in VortexFitting package.
41 | assert x.shape == vfField.x_coordinate_matrix.shape
42 | assert y.shape == vfField.y_coordinate_matrix.shape
43 | assert x.all() == vfField.x_coordinate_matrix.all()
44 | assert np.subtract(np.flip(y),np.flip(y)[0]).all() == vfField.y_coordinate_matrix.all()
45 |     # Due to image to physical coordinate system conversion, u and v must be transposed.
46 | assert u.T.shape == vfField.u_velocity_matrix.shape
47 | assert v.T.shape == vfField.v_velocity_matrix.shape
48 | assert np.subtract(u.T,np.mean(u.T,1)[:, None]).all() == vfField.u_velocity_matrix.all()
49 | assert np.subtract(v.T,np.mean(v.T,1)[:, None]).all() == vfField.v_velocity_matrix.all()
--------------------------------------------------------------------------------
/tests/test_io.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | import numpy as np
3 | import importlib.resources
4 | from pivpy import io
5 |
6 | # Ensure compatibility with different Python versions (3.9+ has 'files', 3.7 and 3.8 need 'path')
7 | try:
8 | from importlib.resources import files
9 | except ImportError:
10 | from importlib.resources import path as resource_path
11 |
12 | # For Python 3.9+
13 | try:
14 | path = files('pivpy') / 'data'
15 | except NameError:
16 | # For Python 3.7 and 3.8
17 | with resource_path('pivpy', 'data') as data_path:
18 | path = data_path
19 |
20 |
21 | vec_file = path / "Insight" / "Run000002.T000.D000.P000.H001.L.vec"
22 | openpiv_txt_file = path / "openpiv_txt" / "exp1_001_b.txt"
23 |
24 |
25 | def test_get_dt():
26 | """ test if we get correct delta t """
27 | _, _, _, _,delta_t,_,_ = io.parse_header(vec_file)
28 | assert delta_t == 2000.
29 |
30 |
31 | def test_get_frame():
32 | """ tests the correct frame number """
33 | _, _, _, _, _, frame,_ = io.parse_header(
34 | path/ "day2" / "day2a005003.T000.D000.P003.H001.L.vec"
35 | )
36 | assert frame == 5003
37 | _, _, _, _, _, frame,_ = io.parse_header(
38 | vec_file
39 | )
40 | assert frame == 2
41 | _, _, _, _, _, frame,_ = io.parse_header(
42 | path / "openpiv_vec" / "exp1_001_b.vec"
43 | )
44 | assert frame == 1
45 | _, _, _, _, _, frame,_ = io.parse_header(
46 | path / "openpiv_txt" / "exp1_001_b.txt"
47 | )
48 | assert frame == 1
49 |
50 |
51 | def test_load_vec():
52 | """tests loading vec file
53 | """
54 | data = io.load_vec(vec_file)
55 | assert data["u"].shape == (63, 63, 1)
56 | tmp = data["u"].values
57 | assert tmp[0, 0, 0] == 0.0
58 | assert np.allclose(data.coords["x"][0], 0.31248)
59 | assert "t" in data.dims
60 |
61 |
62 | # readim is deprecated, see the new Lavision Python package
63 | # def test_load_vc7():
64 | # data = io.load_vc7(os.path.join(path, "VC7/2Ca.VC7"))
65 | # assert data["u"].shape == (57, 43, 1)
66 | # assert np.allclose(data.u.values[0, 0], -0.04354814)
67 | # assert np.allclose(data.coords["x"][-1], 193.313795)
68 |
69 |
70 | def test_loadopenpivtxt():
71 | """tests loading openpivtxt file
72 | """
73 | io.load_openpiv_txt(openpiv_txt_file)
74 |
75 | def test_load_directory():
76 | """tests loading directory of Insight VEC, vc7, and Davis8 files
77 | """
78 | data = io.load_directory(
79 | path / "Insight",
80 | basename="Run*",
81 | ext=".vec"
82 | )
83 | print(data.t)
84 | assert np.allclose(data["t"], [0, 1, 2, 3, 4])
85 |
86 | data = io.load_directory(
87 | path / "urban_canopy",
88 | basename="B*",
89 | ext=".vc7"
90 | )
91 | assert np.allclose(data["t"], [0, 1, 2, 3, 4])
92 |
93 | data = io.load_directory(
94 | path / "PIV_Challenge",
95 | basename="B*",
96 | ext=".txt"
97 | )
98 | assert np.allclose(data["t"], [0, 1])
99 |
100 | def test_check_units():
101 |     """ reads units and checks their validity
102 | def set_default_attrs(dataset: xr.Dataset)-> xr.Dataset:
103 | """
104 | data = io.create_sample_Dataset()
105 | assert data.t.attrs["units"] in ["s", "sec", "frame"]
106 | assert data.x.attrs["units"] in ["pix", "m", "mm"]
107 | assert data.y.attrs["units"] in ["pix", "m", "mm"]
108 | assert data.u.attrs["units"] in ["pix", "m", "mm"]
109 | assert data.v.attrs["units"] in ["pix", "m", "mm"]
110 | assert data.attrs["delta_t"] == 0.0
111 |
112 |
113 | def test_create_sample_field():
114 | data = io.create_sample_field(frame=3)
115 | assert data["t"] == 3
116 | data = io.create_sample_field(rows=3, cols=7)
117 | assert data.x.shape[0] == 7
118 | assert data.y.shape[0] == 3
119 | assert data["t"] == 0.0
120 |
121 |
122 |
123 | def test_create_sample_dataset():
124 | data = io.create_sample_Dataset(n_frames=3)
125 | assert data.sizes["t"] == 3
126 | # assert data.dims["t"] == 3
127 | assert np.allclose(data["t"], np.arange(3))
128 |
129 |
130 | def test_to_nc():
131 | data = io.create_sample_Dataset(n_frames = 25)
132 | data.to_netcdf("tmp.nc")
133 |
134 | data = io.load_directory(path / "Insight" )
135 | data.to_netcdf("tmp.nc")
136 |
137 |
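138 | # Sketch, not part of the original suite: a possible round-trip check for the netCDF
139 | # export above. It assumes xarray.open_dataset can read back the file that to_netcdf()
140 | # just wrote and that the "u" variable survives the round trip unchanged.
141 | def test_to_nc_roundtrip_sketch():
142 |     import xarray as xr
143 | 
144 |     data = io.create_sample_Dataset(n_frames=3)
145 |     data.to_netcdf("tmp.nc")
146 |     reloaded = xr.open_dataset("tmp.nc")
147 |     assert np.allclose(reloaded["u"], data["u"])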
--------------------------------------------------------------------------------
/tests/test_methods.py:
--------------------------------------------------------------------------------
1 | """ tests pivpy.pivpy methods """
2 | import pathlib
3 | import numpy as np
4 | import importlib.resources
5 | import pytest
6 | from pivpy import io
7 |
8 |
9 | FILE1 = "Run000001.T000.D000.P000.H001.L.vec"
10 | FILE2 = "Run000002.T000.D000.P000.H001.L.vec"
11 |
12 | # Ensure compatibility with different Python versions (3.9+ has 'files', 3.7 and 3.8 need 'path')
13 | try:
14 | from importlib.resources import files
15 | except ImportError:
16 | from importlib.resources import path as resource_path
17 |
18 | # For Python 3.9+
19 | try:
20 | path = files('pivpy') / 'data'
21 | except NameError:
22 | # For Python 3.7 and 3.8
23 | with resource_path('pivpy', 'data') as data_path:
24 | path = data_path
25 |
26 | path = path / "Insight"
27 |
28 |
29 | _a = io.load_vec(path / FILE1)
30 | _b = io.load_vec(path / FILE2)
31 |
32 |
33 | def test_crop():
34 | """tests crop"""
35 | _c = _a.piv.crop([5, 15, -5, -15])
36 | assert _c.u.shape == (32, 32, 1)
37 |
38 |
39 | def test_select_roi():
40 | """tests xarray selection option on our dataset"""
41 | _c = io.create_sample_Dataset(n_frames=5, rows=10, cols=10)
42 | _c = _c.sel(x=slice(35, 70), y=slice(30, 90))
43 | assert _c.u.shape == (7, 2, 5) # note the last dimension is preserved
44 |
45 |
46 | def test_pan():
47 | """test a shift by dx,dy using pan method"""
48 | _c = _a.copy()
49 | _c = _c.piv.pan(1.0, -1.0) # note the use of .piv.
50 | assert np.allclose(_c.coords["x"][0], 1.312480)
51 | assert np.allclose(_c.coords["y"][0], -1.31248)
52 |
53 |
54 | def test_mean():
55 | """tests mean or average property"""
56 | data = io.create_sample_Dataset(10)
57 | print(data.piv.average.u.median())
58 | assert np.allclose(data.piv.average.u.median(), 6.0)
59 |
60 |
61 | def test_vec2scal():
62 | """tests vec2scal"""
63 | data = io.create_sample_Dataset()
64 | data = data.piv.vec2scal() # default is curl
65 | assert data["w"].attrs["standard_name"] == "vorticity"
66 |
67 | data = data.piv.vec2scal(flow_property="strain")
68 | assert data["w"].attrs["standard_name"] == "strain"
69 |
70 |
71 | def test_add():
72 | """tests addition of two datasets"""
73 | data = io.create_sample_Dataset()
74 | tmp = data + data
75 | assert tmp["u"][0, 0, 0] == 2.0
76 |
77 |
78 | def test_subtract():
79 | """tests subtraction"""
80 | data = io.create_sample_Dataset()
81 | tmp = data - data
82 | assert tmp["u"][0, 0, 0] == 0.0
83 |
84 |
85 | def test_multiply():
86 |     """tests multiplication"""
87 | data = io.create_sample_Dataset()
88 | tmp = data * 3.5
89 | assert tmp["u"][0, 0, 0] == 3.5
90 |
91 |
92 | def test_set_get_dt():
93 | """tests setting the new dt"""
94 | data = io.create_sample_Dataset()
95 | assert data.attrs["delta_t"] == 0.0
96 |
97 | data.piv.set_delta_t(2.0)
98 | assert data.attrs["delta_t"] == 2.0
99 |
100 |
101 | # def test_rotate():
102 | # """ tests rotation """
103 | # data = io.create_sample_Dataset()
104 | # data.piv.rotate(90) # rotate by 90 deg
105 | # assert data['u'][0,0,0] == 2.1 # shall fail
106 |
107 |
108 | def test_fluctuations():
109 | """tests fluctuations, velocity fields are replaced"""
110 | data = io.create_sample_field()
111 | with pytest.raises(ValueError):
112 | data.piv.fluct()
113 |
114 | data = io.create_sample_Dataset(100) # enough for random
115 | fluct = data.piv.fluct()
116 | assert np.allclose(fluct["u"].mean(dim="t"), 0.0)
117 | assert np.allclose(fluct["v"].mean(dim="t"), 0.0)
118 |
119 |
120 | def test_reynolds_stress():
121 | """tests Reynolds stress"""
122 | data = io.create_sample_Dataset(2, noise_sigma=0.0)
123 | data.isel(t=1)["u"] += 0.1
124 | data.isel(t=1)["v"] -= 0.1
125 | tmp = data.piv.reynolds_stress()
126 | assert np.allclose(tmp["w"], 0.0025)
127 | assert tmp["w"].attrs["standard_name"] == "Reynolds_stress"
128 |
129 |
130 | def test_set_scale():
131 | """tests scaling the dataset by a scalar"""
132 | data = io.create_sample_Dataset()
133 | tmp = data.piv.set_scale(1.0)
134 | assert np.allclose(tmp["x"], data["x"])
135 |
136 | tmp = data.copy()
137 | tmp.piv.set_scale(2.0)
138 | tmp_mean = tmp["u"].mean(dim=("t", "x", "y")).values
139 | data_mean = data["u"].mean(dim=("t", "x", "y")).values
140 | assert np.allclose(tmp_mean / data_mean, 2.0)
141 |
142 |
143 | def test_vorticity():
144 | """tests vorticity estimate"""
145 | data = io.create_sample_field() # we need another flow field
146 | data.piv.vorticity()
147 | assert np.allclose(data["w"], 0.0)
148 |
149 |
150 | def test_strain():
151 | """tests shear estimate"""
152 | data = io.create_sample_field(rows=3, cols=3, noise_sigma=0.0)
153 | data = data.piv.strain()
154 | assert np.allclose(data["w"].values, 0.11328125, 1e-6)
155 | # also after scaling
156 | data.piv.set_scale(1/16) # 16 pixels is the grid
157 | data = data.piv.strain()
158 | assert np.allclose(data["w"].values, 0.11328125, 1e-6)
159 |
160 |
161 | def test_tke():
162 | """tests TKE"""
163 | data = io.create_sample_Dataset()
164 | data = data.piv.tke() # now defined
165 | assert data["w"].attrs["standard_name"] == "TKE"
166 |
167 | def test_Γ1():
168 | """tests Γ1"""
169 | data = io.create_sample_Dataset(n_frames=4, rows=3, cols=2)
170 | data = data.piv.Γ1(n=1)
171 | assert data["Γ1"].to_numpy().shape == (2,3,4)
172 | assert data["Γ1"].attrs["standard_name"] == "Gamma 1"
173 | assert data["Γ1"].attrs["units"] == "dimensionless"
174 |
175 | def test_Γ2():
176 | """tests Γ2"""
177 | data = io.create_sample_Dataset(n_frames=2, rows=3, cols=4)
178 | data = data.piv.Γ2(n=1)
179 | assert data["Γ2"].to_numpy().shape == (4,3,2)
180 | assert data["Γ2"].attrs["standard_name"] == "Gamma 2"
181 | assert data["Γ2"].attrs["units"] == "dimensionless"
182 |
183 | def test_curl():
184 |     """tests curl, which is the same as vorticity"""
185 | _c = _a.copy()
186 | _c.piv.vec2scal(flow_property="curl")
187 |
188 | assert _c["w"].attrs["standard_name"] == "vorticity"
189 |
190 | def test_fill_nans():
191 |     """ tests fill_nans function """
192 | ds = io.create_sample_Dataset(n_frames=1,rows=7,cols=11,noise_sigma=0.5)
193 | ds["u"][1:4,1:4] = np.nan
194 | # ds.sel(t=0)["u"].plot()
195 | new = ds.copy(deep=True) # prepare memory for the result
196 | new.piv.fill_nans() # fill nans
197 | assert ds.dropna(dim='x')["v"].shape == (7, 8, 1)
198 | assert new.dropna(dim='x')["v"].shape == (7, 11, 1)
199 |
200 | def test_filterf():
201 | """ tests filterf
202 | """
203 | dataset = io.create_sample_Dataset(n_frames=3,rows=5,cols=10)
204 | dataset = dataset.piv.filterf() # no inputs
205 | dataset = dataset.piv.filterf([.5, .5, 0.]) # with sigma
206 | # ds["mag"] = np.hypot(ds["u"], ds["v"])
207 | # ds.plot.quiver(x='x',y='y',u='u',v='v',hue='mag',col='t',scale=150,cmap='RdBu')
208 |
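209 | 
210 | # Sketch, not part of the original suite: restates what test_vec2scal and test_curl
211 | # above imply -- each vec2scal() call writes its scalar into "w", so a later call with
212 | # a different flow_property overwrites the earlier result.
213 | def test_vec2scal_overwrites_w_sketch():
214 |     data = io.create_sample_Dataset()
215 |     data = data.piv.vec2scal(flow_property="curl")
216 |     assert data["w"].attrs["standard_name"] == "vorticity"
217 |     data = data.piv.vec2scal(flow_property="strain")
218 |     assert data["w"].attrs["standard_name"] == "strain"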
--------------------------------------------------------------------------------
/try_filter.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "from pivpy import io\n",
10 | "from pivpy import pivpy\n",
11 | "from scipy.ndimage.filters import gaussian_filter"
12 | ]
13 | },
14 | {
15 | "cell_type": "code",
16 | "execution_count": 2,
17 | "metadata": {},
18 | "outputs": [],
19 | "source": [
20 | "ds = io.create_sample_dataset()\n",
21 | "ds2 = ds.copy(deep=True)"
22 | ]
23 | },
24 | {
25 | "cell_type": "code",
26 | "execution_count": 3,
27 | "metadata": {},
28 | "outputs": [
29 | {
30 | "data": {
31 | "text/plain": [
32 | "\n",
33 | "Dimensions: (t: 5, x: 3, y: 7)\n",
34 | "Coordinates:\n",
35 | " * x (x) float64 32.0 64.0 96.0\n",
36 | " * y (y) float64 16.0 32.0 48.0 64.0 80.0 96.0 112.0\n",
37 | " * t (t) int64 0 1 2 3 4\n",
38 | "Data variables:\n",
39 | " u (x, y, t) float64 1.0 1.0 1.0 1.0 1.0 2.0 ... 7.0 7.0 7.0 7.0 7.0\n",
40 | " v (x, y, t) float64 0.02514 -0.499 0.07393 ... -0.3746 0.4407 0.03312\n",
41 | " chc (x, y, t) float64 1.0 1.0 1.0 1.0 1.0 1.0 ... 1.0 1.0 1.0 1.0 1.0\n",
42 | "Attributes:\n",
43 | " variables: ['x', 'y', 'u', 'v']\n",
44 | " units: ['pix', 'pix', 'pix/dt', 'pix/dt']\n",
45 | " dt: 1.0\n",
46 | " files: "
47 | ]
48 | },
49 | "execution_count": 3,
50 | "metadata": {},
51 | "output_type": "execute_result"
52 | }
53 | ],
54 | "source": [
55 | "ds"
56 | ]
57 | },
58 | {
59 | "cell_type": "code",
60 | "execution_count": 4,
61 | "metadata": {},
62 | "outputs": [
63 | {
64 | "data": {
65 | "text/plain": [
66 | "\n",
67 | "Dimensions: (t: 5, x: 3, y: 7)\n",
68 | "Coordinates:\n",
69 | " * x (x) float64 32.0 64.0 96.0\n",
70 | " * y (y) float64 16.0 32.0 48.0 64.0 80.0 96.0 112.0\n",
71 | " * t (t) int64 0 1 2 3 4\n",
72 | "Data variables:\n",
73 | " u (x, y, t) float64 1.0 1.0 1.0 1.0 1.0 2.0 ... 7.0 7.0 7.0 7.0 7.0\n",
74 | " v (x, y, t) float64 0.02514 -0.499 0.07393 ... -0.3746 0.4407 0.03312\n",
75 | " chc (x, y, t) float64 1.0 1.0 1.0 1.0 1.0 1.0 ... 1.0 1.0 1.0 1.0 1.0\n",
76 | "Attributes:\n",
77 | " variables: ['x', 'y', 'u', 'v']\n",
78 | " units: ['pix', 'pix', 'pix/dt', 'pix/dt']\n",
79 | " dt: 1.0\n",
80 | " files: "
81 | ]
82 | },
83 | "execution_count": 4,
84 | "metadata": {},
85 | "output_type": "execute_result"
86 | }
87 | ],
88 | "source": [
89 | "ds2"
90 | ]
91 | },
92 | {
93 | "cell_type": "code",
94 | "execution_count": 14,
95 | "metadata": {},
96 | "outputs": [
97 | {
98 | "data": {
99 | "text/plain": [
100 | "\n",
101 | "array([[1., 2., 3., 4., 5., 6., 7.],\n",
102 | " [1., 2., 3., 4., 5., 6., 7.],\n",
103 | " [1., 2., 3., 4., 5., 6., 7.]])\n",
104 | "Coordinates:\n",
105 | " * x (x) float64 32.0 64.0 96.0\n",
106 | " * y (y) float64 16.0 32.0 48.0 64.0 80.0 96.0 112.0\n",
107 | " t int64 0"
108 | ]
109 | },
110 | "execution_count": 14,
111 | "metadata": {},
112 | "output_type": "execute_result"
113 | }
114 | ],
115 | "source": [
116 | "ds.u.isel(t=0)"
117 | ]
118 | },
119 | {
120 | "cell_type": "code",
121 | "execution_count": 16,
122 | "metadata": {},
123 | "outputs": [
124 | {
125 | "ename": "SyntaxError",
126 | "evalue": "can't assign to function call (, line 2)",
127 | "output_type": "error",
128 | "traceback": [
129 | "\u001b[0;36m File \u001b[0;32m\"\"\u001b[0;36m, line \u001b[0;32m2\u001b[0m\n\u001b[0;31m ds.u.isel(t=0) = gaussian_filter(ds.u.isel(t=0),1)\u001b[0m\n\u001b[0m ^\u001b[0m\n\u001b[0;31mSyntaxError\u001b[0m\u001b[0;31m:\u001b[0m can't assign to function call\n"
130 | ]
131 | }
132 | ],
133 | "source": [
134 | "ds.u.isel(t=0)\n",
135 | "ds.u.isel(t=0) = gaussian_filter(ds.u.isel(t=0),1)"
136 | ]
137 | },
138 | {
139 | "cell_type": "code",
140 | "execution_count": 25,
141 | "metadata": {},
142 | "outputs": [
143 | {
144 | "name": "stdout",
145 | "output_type": "stream",
146 | "text": [
147 | "\n",
148 | "array([[1., 2., 3., 4., 5., 6., 7.],\n",
149 | " [1., 2., 3., 4., 5., 6., 7.],\n",
150 | " [1., 2., 3., 4., 5., 6., 7.]])\n",
151 | "Coordinates:\n",
152 | " * x (x) float64 32.0 64.0 96.0\n",
153 | " * y (y) float64 16.0 32.0 48.0 64.0 80.0 96.0 112.0\n",
154 | " t int64 0\n",
155 | "\n",
156 | "array(1.427041)\n",
157 | "Coordinates:\n",
158 | " x float64 32.0\n",
159 | " y float64 16.0\n",
160 | " t int64 0\n",
161 | "\n",
162 | "array(1.)\n",
163 | "Coordinates:\n",
164 | " x float64 32.0\n",
165 | " y float64 16.0\n",
166 | " t int64 0\n",
167 | "\n",
168 | "array([[1., 2., 3., 4., 5., 6., 7.],\n",
169 | " [1., 2., 3., 4., 5., 6., 7.],\n",
170 | " [1., 2., 3., 4., 5., 6., 7.]])\n",
171 | "Coordinates:\n",
172 | " * x (x) float64 32.0 64.0 96.0\n",
173 | " * y (y) float64 16.0 32.0 48.0 64.0 80.0 96.0 112.0\n",
174 | " t int64 1\n",
175 | "\n",
176 | "array(1.427041)\n",
177 | "Coordinates:\n",
178 | " x float64 32.0\n",
179 | " y float64 16.0\n",
180 | " t int64 1\n",
181 | "\n",
182 | "array(1.)\n",
183 | "Coordinates:\n",
184 | " x float64 32.0\n",
185 | " y float64 16.0\n",
186 | " t int64 1\n",
187 | "\n",
188 | "array([[1., 2., 3., 4., 5., 6., 7.],\n",
189 | " [1., 2., 3., 4., 5., 6., 7.],\n",
190 | " [1., 2., 3., 4., 5., 6., 7.]])\n",
191 | "Coordinates:\n",
192 | " * x (x) float64 32.0 64.0 96.0\n",
193 | " * y (y) float64 16.0 32.0 48.0 64.0 80.0 96.0 112.0\n",
194 | " t int64 2\n",
195 | "\n",
196 | "array(1.427041)\n",
197 | "Coordinates:\n",
198 | " x float64 32.0\n",
199 | " y float64 16.0\n",
200 | " t int64 2\n",
201 | "\n",
202 | "array(1.)\n",
203 | "Coordinates:\n",
204 | " x float64 32.0\n",
205 | " y float64 16.0\n",
206 | " t int64 2\n",
207 | "\n",
208 | "array([[1., 2., 3., 4., 5., 6., 7.],\n",
209 | " [1., 2., 3., 4., 5., 6., 7.],\n",
210 | " [1., 2., 3., 4., 5., 6., 7.]])\n",
211 | "Coordinates:\n",
212 | " * x (x) float64 32.0 64.0 96.0\n",
213 | " * y (y) float64 16.0 32.0 48.0 64.0 80.0 96.0 112.0\n",
214 | " t int64 3\n",
215 | "\n",
216 | "array(1.427041)\n",
217 | "Coordinates:\n",
218 | " x float64 32.0\n",
219 | " y float64 16.0\n",
220 | " t int64 3\n",
221 | "\n",
222 | "array(1.)\n",
223 | "Coordinates:\n",
224 | " x float64 32.0\n",
225 | " y float64 16.0\n",
226 | " t int64 3\n",
227 | "\n",
228 | "array([[1., 2., 3., 4., 5., 6., 7.],\n",
229 | " [1., 2., 3., 4., 5., 6., 7.],\n",
230 | " [1., 2., 3., 4., 5., 6., 7.]])\n",
231 | "Coordinates:\n",
232 | " * x (x) float64 32.0 64.0 96.0\n",
233 | " * y (y) float64 16.0 32.0 48.0 64.0 80.0 96.0 112.0\n",
234 | " t int64 4\n",
235 | "\n",
236 | "array(1.427041)\n",
237 | "Coordinates:\n",
238 | " x float64 32.0\n",
239 | " y float64 16.0\n",
240 | " t int64 4\n",
241 | "\n",
242 | "array(1.)\n",
243 | "Coordinates:\n",
244 | " x float64 32.0\n",
245 | " y float64 16.0\n",
246 | " t int64 4\n"
247 | ]
248 | }
249 | ],
250 | "source": [
251 | "import xarray as xr\n",
252 | "\n",
253 | "for t in range(len(ds.t)):\n",
254 | " tmp = ds.isel(t=t)\n",
255 | " print(tmp.u)\n",
256 | " tmp['u'] = xr.DataArray(gaussian_filter(tmp.u,1), dims=['x','y'])\n",
257 | " print(tmp.u[0,0])\n",
258 | " print(ds.isel(t=t).u[0,0])\n",
259 | " tmp['v'] = xr.DataArray(gaussian_filter(tmp.u,1), dims=['x','y'])\n",
260 | " "
261 | ]
262 | },
263 | {
264 | "cell_type": "code",
265 | "execution_count": 26,
266 | "metadata": {},
267 | "outputs": [
268 | {
269 | "data": {
270 | "text/plain": [
271 | "\n",
272 | "array(1.)\n",
273 | "Coordinates:\n",
274 | " x float64 32.0\n",
275 | " y float64 16.0\n",
276 | " t int64 0"
277 | ]
278 | },
279 | "execution_count": 26,
280 | "metadata": {},
281 | "output_type": "execute_result"
282 | }
283 | ],
284 | "source": [
285 | "ds.u[0,0,0]"
286 | ]
287 | },
288 | {
289 | "cell_type": "code",
290 | "execution_count": 44,
291 | "metadata": {},
292 | "outputs": [
293 | {
294 | "data": {
295 | "text/plain": [
296 | "\n",
297 | "array([[1.427041, 2.067956, 3.004833, 4. , 4.995167, 5.932044, 6.572959],\n",
298 | " [1.427041, 2.067956, 3.004833, 4. , 4.995167, 5.932044, 6.572959],\n",
299 | " [1.427041, 2.067956, 3.004833, 4. , 4.995167, 5.932044, 6.572959]])\n",
300 | "Coordinates:\n",
301 | " * x (x) float64 32.0 64.0 96.0\n",
302 | " * y (y) float64 16.0 32.0 48.0 64.0 80.0 96.0 112.0\n",
303 | " t int64 4"
304 | ]
305 | },
306 | "execution_count": 44,
307 | "metadata": {},
308 | "output_type": "execute_result"
309 | }
310 | ],
311 | "source": [
312 | "tmp['u']= (['x','y'],gaussian_filter(tmp.u,1))\n",
313 | "tmp.u"
314 | ]
315 | },
316 | {
317 | "cell_type": "code",
318 | "execution_count": 45,
319 | "metadata": {},
320 | "outputs": [
321 | {
322 | "data": {
323 | "text/plain": [
324 | "\n",
325 | "array([[1., 2., 3., 4., 5., 6., 7.],\n",
326 | " [1., 2., 3., 4., 5., 6., 7.],\n",
327 | " [1., 2., 3., 4., 5., 6., 7.]])\n",
328 | "Coordinates:\n",
329 | " * x (x) float64 32.0 64.0 96.0\n",
330 | " * y (y) float64 16.0 32.0 48.0 64.0 80.0 96.0 112.0\n",
331 | " t int64 4"
332 | ]
333 | },
334 | "execution_count": 45,
335 | "metadata": {},
336 | "output_type": "execute_result"
337 | }
338 | ],
339 | "source": [
340 | "tmp = ds.isel(t=-1)\n",
341 | "tmp.u"
342 | ]
343 | },
344 | {
345 | "cell_type": "code",
346 | "execution_count": 8,
347 | "metadata": {},
348 | "outputs": [
349 | {
350 | "data": {
351 | "text/plain": [
352 | "\n",
353 | "Dimensions: (t: 5, x: 3, y: 7)\n",
354 | "Coordinates:\n",
355 | " * x (x) float64 32.0 64.0 96.0\n",
356 | " * y (y) float64 16.0 32.0 48.0 64.0 80.0 96.0 112.0\n",
357 | " * t (t) int64 0 1 2 3 4\n",
358 | "Data variables:\n",
359 | " u (x, y, t) float64 1.0 1.0 1.0 1.0 1.0 2.0 ... 7.0 7.0 7.0 7.0 7.0\n",
360 | " v (x, y, t) float64 0.02514 -0.499 0.07393 ... -0.3746 0.4407 0.03312\n",
361 | " chc (x, y, t) float64 1.0 1.0 1.0 1.0 1.0 1.0 ... 1.0 1.0 1.0 1.0 1.0\n",
362 | "Attributes:\n",
363 | " variables: ['x', 'y', 'u', 'v']\n",
364 | " units: ['pix', 'pix', 'pix/dt', 'pix/dt']\n",
365 | " dt: 1.0\n",
366 | " files: "
367 | ]
368 | },
369 | "execution_count": 8,
370 | "metadata": {},
371 | "output_type": "execute_result"
372 | }
373 | ],
374 | "source": [
375 | "ds2.piv.spatial_filter(filter = 'gaussian',sigma=(1,1))"
376 | ]
377 | },
378 | {
379 | "cell_type": "code",
380 | "execution_count": null,
381 | "metadata": {},
382 | "outputs": [],
383 | "source": []
384 | },
385 | {
386 | "cell_type": "code",
387 | "execution_count": 9,
388 | "metadata": {},
389 | "outputs": [],
390 | "source": [
391 | "from scipy.ndimage.filters import median_filter"
392 | ]
393 | },
394 | {
395 | "cell_type": "code",
396 | "execution_count": 10,
397 | "metadata": {},
398 | "outputs": [
399 | {
400 | "data": {
401 | "text/plain": [
402 | "\n",
403 | "Dimensions: (t: 5, x: 3, y: 7)\n",
404 | "Coordinates:\n",
405 | " * x (x) float64 32.0 64.0 96.0\n",
406 | " * y (y) float64 16.0 32.0 48.0 64.0 80.0 96.0 112.0\n",
407 | " * t (t) int64 0 1 2 3 4\n",
408 | "Data variables:\n",
409 | " u (x, y, t) float64 1.0 1.0 1.0 1.0 1.0 2.0 ... 7.0 7.0 7.0 7.0 7.0\n",
410 | " v (x, y, t) float64 0.02514 -0.499 0.07393 ... -0.3746 0.4407 0.03312\n",
411 | " chc (x, y, t) float64 1.0 1.0 1.0 1.0 1.0 1.0 ... 1.0 1.0 1.0 1.0 1.0\n",
412 | "Attributes:\n",
413 | " variables: ['x', 'y', 'u', 'v']\n",
414 | " units: ['pix', 'pix', 'pix/dt', 'pix/dt']\n",
415 | " dt: 1.0\n",
416 | " files: "
417 | ]
418 | },
419 | "execution_count": 10,
420 | "metadata": {},
421 | "output_type": "execute_result"
422 | }
423 | ],
424 | "source": [
425 | "\n",
426 | "for t in ds['t']:\n",
427 | " tmp = ds.sel(t=t)\n",
428 | " tmp['u'] = xr.DataArray(median_filter(tmp['u'],size=(3,3)), dims=['x','y'])\n",
429 | " tmp['v'] = xr.DataArray(median_filter(tmp['u'],size=(3,3)), dims=['x','y'])\n",
430 | "\n",
431 | "ds"
432 | ]
433 | },
434 | {
435 | "cell_type": "code",
436 | "execution_count": 11,
437 | "metadata": {},
438 | "outputs": [
439 | {
440 | "data": {
441 | "text/plain": [
442 | "\n",
443 | "Dimensions: (t: 5, x: 3, y: 7)\n",
444 | "Coordinates:\n",
445 | " * x (x) float64 32.0 64.0 96.0\n",
446 | " * y (y) float64 16.0 32.0 48.0 64.0 80.0 96.0 112.0\n",
447 | " * t (t) int64 0 1 2 3 4\n",
448 | "Data variables:\n",
449 | " u (x, y, t) float64 1.0 1.0 1.0 1.0 1.0 2.0 ... 7.0 7.0 7.0 7.0 7.0\n",
450 | " v (x, y, t) float64 0.02514 -0.499 0.07393 ... -0.3746 0.4407 0.03312\n",
451 | " chc (x, y, t) float64 1.0 1.0 1.0 1.0 1.0 1.0 ... 1.0 1.0 1.0 1.0 1.0\n",
452 | "Attributes:\n",
453 | " variables: ['x', 'y', 'u', 'v']\n",
454 | " units: ['pix', 'pix', 'pix/dt', 'pix/dt']\n",
455 | " dt: 1.0\n",
456 | " files: "
457 | ]
458 | },
459 | "execution_count": 11,
460 | "metadata": {},
461 | "output_type": "execute_result"
462 | }
463 | ],
464 | "source": [
465 | "ds2.piv.spatial_filter(filter='median',size=(3,3))"
466 | ]
467 | },
468 | {
469 | "cell_type": "code",
470 | "execution_count": 46,
471 | "metadata": {},
472 | "outputs": [
473 | {
474 | "data": {
475 | "text/plain": [
476 | "\n",
477 | "Dimensions: (t: 5, x: 3, y: 7)\n",
478 | "Coordinates:\n",
479 | " * x (x) float64 32.0 64.0 96.0\n",
480 | " * y (y) float64 16.0 32.0 48.0 64.0 80.0 96.0 112.0\n",
481 | " * t (t) int64 0 1 2 3 4\n",
482 | "Data variables:\n",
483 | " u (x, y, t) float64 1.0 1.0 1.0 1.0 1.0 2.0 ... 7.0 7.0 7.0 7.0 7.0\n",
484 | " v (x, y, t) float64 0.02514 -0.499 0.07393 ... -0.3746 0.4407 0.03312\n",
485 | " chc (x, y, t) float64 1.0 1.0 1.0 1.0 1.0 1.0 ... 1.0 1.0 1.0 1.0 1.0\n",
486 | "Attributes:\n",
487 | " variables: ['x', 'y', 'u', 'v']\n",
488 | " units: ['pix', 'pix', 'pix/dt', 'pix/dt']\n",
489 | " dt: 1.0\n",
490 | " files: "
491 | ]
492 | },
493 | "execution_count": 46,
494 | "metadata": {},
495 | "output_type": "execute_result"
496 | }
497 | ],
498 | "source": [
499 | "ds"
500 | ]
501 | },
502 | {
503 | "cell_type": "code",
504 | "execution_count": 47,
505 | "metadata": {},
506 | "outputs": [],
507 | "source": [
508 | "tmp = ds.isel(t=0)"
509 | ]
510 | },
511 | {
512 | "cell_type": "code",
513 | "execution_count": 48,
514 | "metadata": {},
515 | "outputs": [
516 | {
517 | "data": {
518 | "text/plain": [
519 | "\n",
520 | "Dimensions: (x: 3, y: 7)\n",
521 | "Coordinates:\n",
522 | " * x (x) float64 32.0 64.0 96.0\n",
523 | " * y (y) float64 16.0 32.0 48.0 64.0 80.0 96.0 112.0\n",
524 | " t int64 0\n",
525 | "Data variables:\n",
526 | " u (x, y) float64 1.0 2.0 3.0 4.0 5.0 6.0 ... 2.0 3.0 4.0 5.0 6.0 7.0\n",
527 | " v (x, y) float64 0.02514 0.02514 0.02514 ... 0.2371 0.2371 0.2371\n",
528 | " chc (x, y) float64 1.0 1.0 1.0 1.0 1.0 1.0 ... 1.0 1.0 1.0 1.0 1.0 1.0\n",
529 | "Attributes:\n",
530 | " variables: ['x', 'y', 'u', 'v']\n",
531 | " units: ['pix', 'pix', 'pix/dt', 'pix/dt']\n",
532 | " dt: 1.0\n",
533 | " files: "
534 | ]
535 | },
536 | "execution_count": 48,
537 | "metadata": {},
538 | "output_type": "execute_result"
539 | }
540 | ],
541 | "source": [
542 | "tmp"
543 | ]
544 | },
545 | {
546 | "cell_type": "code",
547 | "execution_count": 52,
548 | "metadata": {},
549 | "outputs": [],
550 | "source": [
551 | "tmp.u[0,0]=2"
552 | ]
553 | },
554 | {
555 | "cell_type": "code",
556 | "execution_count": 54,
557 | "metadata": {},
558 | "outputs": [
559 | {
560 | "data": {
561 | "text/plain": [
562 | "\n",
563 | "Dimensions: (x: 3, y: 7)\n",
564 | "Coordinates:\n",
565 | " * x (x) float64 32.0 64.0 96.0\n",
566 | " * y (y) float64 16.0 32.0 48.0 64.0 80.0 96.0 112.0\n",
567 | " t int64 0\n",
568 | "Data variables:\n",
569 | " u (x, y) float64 2.0 2.0 3.0 4.0 5.0 6.0 ... 2.0 3.0 4.0 5.0 6.0 7.0\n",
570 | " v (x, y) float64 0.02514 0.02514 0.02514 ... 0.2371 0.2371 0.2371\n",
571 | " chc (x, y) float64 1.0 1.0 1.0 1.0 1.0 1.0 ... 1.0 1.0 1.0 1.0 1.0 1.0\n",
572 | "Attributes:\n",
573 | " variables: ['x', 'y', 'u', 'v']\n",
574 | " units: ['pix', 'pix', 'pix/dt', 'pix/dt']\n",
575 | " dt: 1.0\n",
576 | " files: "
577 | ]
578 | },
579 | "execution_count": 54,
580 | "metadata": {},
581 | "output_type": "execute_result"
582 | }
583 | ],
584 | "source": [
585 | "tmp"
586 | ]
587 | },
588 | {
589 | "cell_type": "code",
590 | "execution_count": 56,
591 | "metadata": {},
592 | "outputs": [
593 | {
594 | "data": {
595 | "text/plain": [
596 | "\n",
597 | "Dimensions: (x: 3, y: 7)\n",
598 | "Coordinates:\n",
599 | " * x (x) float64 32.0 64.0 96.0\n",
600 | " * y (y) float64 16.0 32.0 48.0 64.0 80.0 96.0 112.0\n",
601 | " t int64 0\n",
602 | "Data variables:\n",
603 | " u (x, y) float64 2.0 2.0 3.0 4.0 5.0 6.0 ... 2.0 3.0 4.0 5.0 6.0 7.0\n",
604 | " v (x, y) float64 0.02514 0.02514 0.02514 ... 0.2371 0.2371 0.2371\n",
605 | " chc (x, y) float64 1.0 1.0 1.0 1.0 1.0 1.0 ... 1.0 1.0 1.0 1.0 1.0 1.0\n",
606 | "Attributes:\n",
607 | " variables: ['x', 'y', 'u', 'v']\n",
608 | " units: ['pix', 'pix', 'pix/dt', 'pix/dt']\n",
609 | " dt: 1.0\n",
610 | " files: "
611 | ]
612 | },
613 | "execution_count": 56,
614 | "metadata": {},
615 | "output_type": "execute_result"
616 | }
617 | ],
618 | "source": [
619 | "ds.isel(t=0)"
620 | ]
621 | },
622 | {
623 | "cell_type": "code",
624 | "execution_count": 57,
625 | "metadata": {},
626 | "outputs": [
627 | {
628 | "data": {
629 | "text/plain": [
630 | "\n",
631 | "Dimensions: (t: 5, x: 3, y: 7)\n",
632 | "Coordinates:\n",
633 | " * x (x) float64 32.0 64.0 96.0\n",
634 | " * y (y) float64 16.0 32.0 48.0 64.0 80.0 96.0 112.0\n",
635 | " * t (t) int64 0 1 2 3 4\n",
636 | "Data variables:\n",
637 | " u (x, y, t) float64 2.0 1.0 1.0 1.0 1.0 2.0 ... 7.0 7.0 7.0 7.0 7.0\n",
638 | " v (x, y, t) float64 0.02514 -0.499 0.07393 ... -0.3746 0.4407 0.03312\n",
639 | " chc (x, y, t) float64 1.0 1.0 1.0 1.0 1.0 1.0 ... 1.0 1.0 1.0 1.0 1.0\n",
640 | "Attributes:\n",
641 | " variables: ['x', 'y', 'u', 'v']\n",
642 | " units: ['pix', 'pix', 'pix/dt', 'pix/dt']\n",
643 | " dt: 1.0\n",
644 | " files: "
645 | ]
646 | },
647 | "execution_count": 57,
648 | "metadata": {},
649 | "output_type": "execute_result"
650 | }
651 | ],
652 | "source": [
653 | "ds"
654 | ]
655 | },
656 | {
657 | "cell_type": "code",
658 | "execution_count": null,
659 | "metadata": {},
660 | "outputs": [],
661 | "source": []
662 | }
663 | ],
664 | "metadata": {
665 | "kernelspec": {
666 | "display_name": "pivpy",
667 | "language": "python",
668 | "name": "pivpy"
669 | },
670 | "language_info": {
671 | "codemirror_mode": {
672 | "name": "ipython",
673 | "version": 3
674 | },
675 | "file_extension": ".py",
676 | "mimetype": "text/x-python",
677 | "name": "python",
678 | "nbconvert_exporter": "python",
679 | "pygments_lexer": "ipython3",
680 | "version": "3.6.8"
681 | }
682 | },
683 | "nbformat": 4,
684 | "nbformat_minor": 2
685 | }
686 |
--------------------------------------------------------------------------------