├── .gitattributes ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md ├── dependabot.yml └── workflows │ ├── pyntcloud_cd.yml │ └── tests.yaml ├── .gitignore ├── .pre-commit-config.yaml ├── CODE_OF_CONDUCT.md ├── LICENSE.md ├── README.md ├── binder ├── environment.yml └── postBuild ├── docs ├── Makefile ├── PyntCloud.rst ├── _config.yml ├── conf.py ├── contributing.rst ├── filters.rst ├── filters_dev.rst ├── images │ ├── boxplot.png │ ├── plot1.gif │ ├── plot2.gif │ ├── plot3.gif │ ├── pyntcloud_logo.png │ ├── pyntcloud_logo_small.png │ ├── pyntcloud_logo_square.png │ └── scatter_matrix.png ├── index.rst ├── installation.rst ├── introduction.rst ├── io.rst ├── io_dev.rst ├── make.bat ├── points.rst ├── requirements.txt ├── samplers.rst ├── samplers_dev.rst ├── scalar_fields.rst ├── scalar_fields_dev.rst ├── structures.rst └── structures_dev.rst ├── examples ├── [scalar_fields] eigenvalues.ipynb ├── [scalar_fields] normals.ipynb ├── [structures] VoxelGrid.ipynb ├── [visualization] Polylines.ipynb ├── [visualization] PyntCloud.ipynb └── data │ ├── ankylosaurus_mesh.ply │ └── images │ ├── scalar_fields-eigen_values-1.png │ ├── scalar_fields-eigen_values-2.png │ ├── scalar_fields-eigen_values-3.png │ ├── scalar_fields-eigen_values-4.png │ ├── scalar_fields-eigen_values-5.png │ ├── scalar_fields-eigen_values-6.png │ ├── scalar_fields-eigen_values-7.png │ ├── scalar_fields-eigen_values-8.png │ ├── scalar_fields-normals-1.png │ ├── scalar_fields-normals-2.png │ ├── scalar_fields-normals-3.png │ ├── scalar_fields-normals-4.png │ ├── structures-voxelgrid-1.png │ ├── structures-voxelgrid-2.png │ ├── structures-voxelgrid-3.png │ ├── structures-voxelgrid-4.png │ ├── structures-voxelgrid-5.png │ ├── structures-voxelgrid-6.png │ ├── structures-voxelgrid-7.png │ ├── structures-voxelgrid-8.png │ ├── structures-voxelgrid-9.png │ ├── visualization-Polylines-1.png │ ├── visualization-Polylines-2.png │ ├── visualization-Polylines-3.png │ └── visualization-PyntCloud-1.png ├── pyproject.toml ├── src └── pyntcloud │ ├── __init__.py │ ├── core_class.py │ ├── filters │ ├── __init__.py │ ├── base.py │ ├── kdtree.py │ └── xyz.py │ ├── geometry │ ├── __init__.py │ ├── areas.py │ ├── coord_systems.py │ ├── models │ │ ├── __init__.py │ │ ├── base.py │ │ ├── plane.py │ │ └── sphere.py │ └── rotations.py │ ├── io │ ├── __init__.py │ ├── ascii.py │ ├── bin.py │ ├── las.py │ ├── npz.py │ ├── obj.py │ ├── off.py │ ├── open3d.py │ ├── pcd.py │ ├── ply.py │ └── pyvista.py │ ├── neighbors │ ├── __init__.py │ ├── k_neighbors.py │ └── r_neighbors.py │ ├── plot │ ├── __init__.py │ ├── common.py │ ├── matplotlib_backend.py │ ├── pythreejs_backend.py │ ├── pyvista_backend.py │ └── voxelgrid.py │ ├── ransac │ ├── __init__.py │ ├── fitters.py │ ├── models.py │ └── samplers.py │ ├── samplers │ ├── __init__.py │ ├── base.py │ ├── mesh.py │ ├── points.py │ └── voxelgrid.py │ ├── scalar_fields │ ├── __init__.py │ ├── base.py │ ├── eigenvalues.py │ ├── k_neighbors.py │ ├── normals.py │ ├── rgb.py │ ├── voxelgrid.py │ └── xyz.py │ ├── structures │ ├── __init__.py │ ├── base.py │ ├── convex_hull.py │ ├── delaunay.py │ ├── kdtree.py │ └── voxelgrid.py │ └── utils │ ├── __init__.py │ ├── array.py │ ├── dataframe.py │ ├── mesh.py │ └── numba.py └── tests ├── conftest.py ├── data ├── diamond.bin ├── diamond.dae ├── diamond.json ├── diamond.las ├── diamond.laz ├── diamond.npy ├── diamond.npz ├── diamond.obj ├── diamond.off ├── diamond.ply ├── diamond.vtk ├── diamond.x3d ├── diamond.xyz ├── diamond_ascii.ply ├── 
diamond_ascii.stl ├── diamond_ascii_vertex_index.ply ├── diamond_color.obj ├── diamond_color.off ├── diamond_with_bool.ply ├── has_offsets.las ├── mnist.npz ├── obj_issue_221.obj ├── obj_issue_226.obj ├── obj_issue_vn.obj ├── plane.npz ├── simple.las ├── simple.laz ├── sphere.ply └── voxelgrid.ply ├── integration ├── filters │ ├── test_kdtree_filters.py │ └── test_xyz_filters.py ├── io │ ├── test_from_file.py │ ├── test_from_instance.py │ ├── test_to_file.py │ └── test_to_instance.py ├── samplers │ ├── test_mesh_samplers.py │ ├── test_points_samplers.py │ └── test_voxelgrid_samplers.py ├── scalar_fields │ ├── test_eigenvalues_scalar_fields.py │ ├── test_k_neighbors_scalar_fields.py │ ├── test_normals_scalar_fields.py │ ├── test_rgb_scalar_fields.py │ ├── test_voxlegrid_scalar_fields.py │ └── test_xyz_scalar_fields.py ├── structures │ └── test_voxelgrid_structures.py └── test_core_class.py └── unit ├── filters ├── test_kdtree_filters.py └── test_xyz_filters.py ├── ransac └── test_samplers_ransac.py ├── samplers ├── test_mesh_samplers.py ├── test_points_samplers.py └── test_voxelgrid_samplers.py ├── scalar_fields ├── test_eigenvalues_scalar_fields.py ├── test_k_neighbors_scalar_fields.py ├── test_normals_scalar_fields.py ├── test_rgb_scalar_fields.py ├── test_voxlegrid_scalar_fields.py └── test_xyz_scalar_fields.py ├── structures └── test_voxelgrid_structures.py └── test_geometry.py /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | 4 | # Custom for Visual Studio 5 | *.cs diff=csharp 6 | 7 | # Standard to msysgit 8 | *.doc diff=astextplain 9 | *.DOC diff=astextplain 10 | *.docx diff=astextplain 11 | *.DOCX diff=astextplain 12 | *.dot diff=astextplain 13 | *.DOT diff=astextplain 14 | *.pdf diff=astextplain 15 | *.PDF diff=astextplain 16 | *.rtf diff=astextplain 17 | *.RTF diff=astextplain 18 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | 5 | --- 6 | 7 | **Describe the bug** 8 | A clear and concise description of what the bug is. 9 | 10 | **To Reproduce** 11 | Steps to reproduce the behavior: 12 | 1. Go to '...' 13 | 2. Click on '....' 14 | 3. Scroll down to '....' 15 | 4. See error 16 | 17 | **Expected behavior** 18 | A clear and concise description of what you expected to happen. 19 | 20 | **Screenshots** 21 | If applicable, add screenshots to help explain your problem. 22 | 23 | **Desktop (please complete the following information):** 24 | - OS: [e.g. iOS] 25 | - Browser (if necessary) [e.g. chrome, safari] 26 | - Version (master branch commit) 27 | 28 | **Additional context** 29 | Add any other context about the problem here. 30 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | 5 | --- 6 | 7 | **Is your feature request related to a problem? Please describe.** 8 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 9 | 10 | **Describe the solution you'd like** 11 | A clear and concise description of what you want to happen. 
12 | 13 | **Describe alternatives you've considered** 14 | A clear and concise description of any alternative solutions or features you've considered. 15 | 16 | **Additional context** 17 | Add any other context or screenshots about the feature request here. 18 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: pip 4 | directory: "/" 5 | schedule: 6 | interval: daily 7 | time: "04:00" 8 | open-pull-requests-limit: 10 9 | -------------------------------------------------------------------------------- /.github/workflows/pyntcloud_cd.yml: -------------------------------------------------------------------------------- 1 | name: pyncloud C.D. 2 | 3 | on: 4 | release: 5 | types: [created] 6 | 7 | jobs: 8 | deploy: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v1 12 | - name: Set up Python 13 | uses: actions/setup-python@v1 14 | with: 15 | python-version: '3.7' 16 | - name: Install dependencies 17 | run: | 18 | python -m pip install --upgrade pip 19 | pip install -r requirements.txt 20 | - name: Build pyntcloud 21 | run: | 22 | python setup.py sdist 23 | python setup.py bdist_wheel --universal 24 | - name: Publish packages to PyPI 25 | uses: pypa/gh-action-pypi-publish@master 26 | with: 27 | user: __token__ 28 | password: ${{ secrets.PYPI_TOKEN }} 29 | -------------------------------------------------------------------------------- /.github/workflows/tests.yaml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | push: 8 | branches: 9 | - main 10 | 11 | jobs: 12 | build: 13 | 14 | strategy: 15 | max-parallel: 4 16 | matrix: 17 | python-version: [3.11, 3.12, 3.13] 18 | platform: [ubuntu-latest, macos-latest, windows-latest] 19 | runs-on: ${{ matrix.platform }} 20 | 21 | steps: 22 | - uses: actions/checkout@v4 23 | - name: Set up Python ${{ matrix.python-version }} 24 | uses: actions/setup-python@v5 25 | with: 26 | python-version: ${{ matrix.python-version }} 27 | cache: "pip" 28 | - name: Install pyntcloud 29 | run: | 30 | pip install ".[all]" 31 | - name: Test and coverage with pytest 32 | run: | 33 | pip install pytest 34 | pip install pytest-cov 35 | 36 | pytest --cov=pyntcloud --cov-report=xml --cov-append tests/unit 37 | pytest --cov=pyntcloud --cov-report=xml --cov-append tests/integration 38 | 39 | - name: Upload coverage to Codecov 40 | if: matrix.platform == 'ubuntu-latest' && matrix.python-version == '3.11' 41 | run: bash <(curl -s https://codecov.io/bash) -t $TOKEN -B $REF 42 | env: 43 | TOKEN: "${{ secrets.CODECOV_TOKEN }}" 44 | REF: "${{ github.ref }}" 45 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Windows image file caches 2 | Thumbs.db 3 | ehthumbs.db 4 | 5 | # Folder config file 6 | Desktop.ini 7 | 8 | # Recycle Bin used on file shares 9 | $RECYCLE.BIN/ 10 | 11 | # Windows Installer files 12 | *.cab 13 | *.msi 14 | *.msm 15 | *.msp 16 | 17 | # Windows shortcuts 18 | *.lnk 19 | 20 | # ========================= 21 | # Operating System Files 22 | # ========================= 23 | 24 | # OSX 25 | # ========================= 26 | 27 | .DS_Store 28 | .AppleDouble 29 | .LSOverride 30 | 31 | # Thumbnails 32 | ._* 33 | 34 | # Files that might 
appear in the root of a volume 35 | .DocumentRevisions-V100 36 | .fseventsd 37 | .Spotlight-V100 38 | .TemporaryItems 39 | .Trashes 40 | .VolumeIcon.icns 41 | 42 | # Directories potentially created on remote AFP share 43 | .AppleDB 44 | .AppleDesktop 45 | Network Trash Folder 46 | Temporary Items 47 | .apdisk 48 | .vscode/launch.json 49 | .vscode/settings.json 50 | *.pyc 51 | .cache/v/cache/lastfailed 52 | 53 | pyntcloud\.egg-info/ 54 | 55 | \.ipynb_checkpoints/ 56 | 57 | build/ 58 | dist/ 59 | docs/_build 60 | 61 | \.idea/ 62 | 63 | \.pytest_cache/ 64 | pythonenv* 65 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v5.0.0 4 | hooks: 5 | - id: check-added-large-files 6 | - id: check-case-conflict 7 | - id: check-json 8 | - id: check-merge-conflict 9 | args: ['--assume-in-merge'] 10 | - id: check-toml 11 | - id: check-yaml 12 | - id: end-of-file-fixer 13 | - id: mixed-line-ending 14 | args: ['--fix=lf'] 15 | - id: sort-simple-yaml 16 | - id: trailing-whitespace 17 | - repo: https://github.com/astral-sh/ruff-pre-commit 18 | rev: 'v0.9.10' 19 | hooks: 20 | - id: ruff 21 | args: [--fix, --exit-non-zero-on-fix] 22 | - id: ruff-format 23 | - repo: https://github.com/codespell-project/codespell 24 | rev: v2.4.1 25 | hooks: 26 | - id: codespell 27 | exclude: CODE_OF_CONDUCT.md 28 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. 6 | 7 | ## Our Standards 8 | 9 | Examples of behavior that contributes to creating a positive environment include: 10 | 11 | * Using welcoming and inclusive language 12 | * Being respectful of differing viewpoints and experiences 13 | * Gracefully accepting constructive criticism 14 | * Focusing on what is best for the community 15 | * Showing empathy towards other community members 16 | 17 | Examples of unacceptable behavior by participants include: 18 | 19 | * The use of sexualized language or imagery and unwelcome sexual attention or advances 20 | * Trolling, insulting/derogatory comments, and personal or political attacks 21 | * Public or private harassment 22 | * Publishing others' private information, such as a physical or electronic address, without explicit permission 23 | * Other conduct which could reasonably be considered inappropriate in a professional setting 24 | 25 | ## Our Responsibilities 26 | 27 | Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. 
28 | 29 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 30 | 31 | ## Scope 32 | 33 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. 34 | 35 | ## Enforcement 36 | 37 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at daviddelaiglesiacastro@gmail.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. 38 | 39 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. 40 | 41 | ## Attribution 42 | 43 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] 44 | 45 | [homepage]: http://contributor-covenant.org 46 | [version]: http://contributor-covenant.org/version/1/4/ 47 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | The MIT License 2 | 3 | Copyright (c) 2017-2019 The pyntcloud Developers 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Making point clouds fun again 2 | 3 | ![pyntcloud logo](https://raw.githubusercontent.com/daavoo/pyntcloud/master/docs/images/pyntcloud_logo.png) 4 | 5 | 6 | **pyntcloud** is a Python library for working with 3D point clouds leveraging the power of the Python scientific stack. 7 | 8 | - [Examples](https://github.com/daavoo/pyntcloud/tree/master/examples). 9 | - [Documentation](http://pyntcloud.readthedocs.io/en/latest/) 10 | 11 | ## Installation 12 | 13 | ```bash 14 | pip install pyntcloud 15 | ``` 16 | 17 | ## Quick Overview 18 | 19 | You can access most of pyntcloud's functionality from its core class: PyntCloud. 20 | 21 | With PyntCloud you can perform complex 3D processing operations with minimum lines of 22 | code. For example you can: 23 | 24 | - Load a PLY point cloud from disk. 25 | - Add 3 new scalar fields by converting RGB to HSV. 26 | - Build a grid of voxels from the point cloud. 27 | - Build a new point cloud keeping only the nearest point to each occupied voxel center. 28 | - Save the new point cloud in numpy's NPZ format. 29 | 30 | With the following concise code: 31 | 32 | ```python 33 | from pyntcloud import PyntCloud 34 | 35 | cloud = PyntCloud.from_file("some_file.ply") 36 | 37 | cloud.add_scalar_field("hsv") 38 | 39 | voxelgrid_id = cloud.add_structure("voxelgrid", n_x=32, n_y=32, n_z=32) 40 | 41 | new_cloud = cloud.get_sample("voxelgrid_nearest", voxelgrid_id=voxelgrid_id, as_PyntCloud=True) 42 | 43 | new_cloud.to_file("out_file.npz") 44 | ``` 45 | 46 | ## Integration with other libraries 47 | 48 | pyntcloud offers seamless integration with other 3D processing libraries. 
49 | 50 | You can create / convert PyntCloud instances from / to many 3D processing libraries using the `from_instance` / `to_instance` methods: 51 | 52 | - [Open3D](https://www.open3d.org) 53 | 54 | ```python 55 | import open3d as o3d 56 | from pyntcloud import PyntCloud 57 | 58 | # FROM Open3D 59 | original_triangle_mesh = o3d.io.read_triangle_mesh("diamond.ply") 60 | cloud = PyntCloud.from_instance("open3d", original_triangle_mesh) 61 | 62 | # TO Open3D 63 | cloud = PyntCloud.from_file("diamond.ply") 64 | converted_triangle_mesh = cloud.to_instance("open3d", mesh=True) # mesh=True by default 65 | ``` 66 | 67 | - [PyVista](https://docs.pyvista.org) 68 | 69 | ```python 70 | import pyvista as pv 71 | from pyntcloud import PyntCloud 72 | 73 | # FROM PyVista 74 | original_point_cloud = pv.read("diamond.ply") 75 | cloud = PyntCloud.from_instance("pyvista", original_point_cloud) 76 | 77 | # TO PyVista 78 | cloud = PyntCloud.from_file("diamond.ply") 79 | converted_triangle_mesh = cloud.to_instance("pyvista", mesh=True) 80 | ``` 81 | -------------------------------------------------------------------------------- /binder/environment.yml: -------------------------------------------------------------------------------- 1 | name: pyntcloud 2 | channels: 3 | - conda-forge 4 | - defaults 5 | dependencies: 6 | - numpy 7 | - scipy 8 | - matplotlib 9 | - pandas 10 | - notebook>=5.2.2 11 | - ipywidgets>=6.0.1 12 | - ipython>=6.2.1 13 | - h5py 14 | - numba 15 | - pythreejs 16 | - pip: 17 | - pylas 18 | - lazrs 19 | - appmode 20 | -------------------------------------------------------------------------------- /binder/postBuild: -------------------------------------------------------------------------------- 1 | # make sure jupyter has enough data for complex meshes 2 | mkdir $HOME/.jupyter 3 | echo "c.NotebookApp.iopub_data_rate_limit=1e22" >> $HOME/.jupyter/jupyter_notebook_config.py 4 | 5 | 6 | # enable appmode for nice demos 7 | jupyter nbextension enable --py --sys-prefix widgetsnbextension 8 | jupyter nbextension enable --py --sys-prefix appmode 9 | jupyter serverextension enable --py --sys-prefix appmode 10 | 11 | # install pyntcloud 12 | pip install . 13 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = pyntcloud 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | 12 | # Put it first so that "make" without argument is like "make help". 13 | help: 14 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 15 | 16 | .PHONY: help Makefile 17 | 18 | # Catch-all target: route all unknown targets to Sphinx using the new 19 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 20 | %: Makefile 21 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 22 | -------------------------------------------------------------------------------- /docs/PyntCloud.rst: -------------------------------------------------------------------------------- 1 | ========= 2 | PyntCloud 3 | ========= 4 | .. currentmodule:: pyntcloud 5 | 6 | **PyntCloud** is the core class that englobes almost all the functionality available 7 | in **pyntcloud**. 
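For orientation, here is a minimal sketch of the two most common ways to obtain a PyntCloud instance: directly from a pandas DataFrame with ``x``, ``y`` and ``z`` columns, or from a file (the file path below is purely illustrative):

.. code-block:: python

    import pandas as pd

    from pyntcloud import PyntCloud

    # Build a PyntCloud directly from a points DataFrame.
    # At minimum, "x", "y" and "z" columns are expected.
    points = pd.DataFrame({
        "x": [0.0, 1.0, 0.0],
        "y": [0.0, 0.0, 1.0],
        "z": [0.0, 0.0, 0.0],
    })
    cloud = PyntCloud(points)

    # Alternatively, load a cloud from one of the supported file formats:
    # cloud = PyntCloud.from_file("some_file.ply")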
8 | 9 | Whereas, in its classical form, a point cloud is understood as a simple set of points, a 10 | PyntCloud is a Python class with several **attributes** and **methods** that enable a more 11 | fluent way of manipulating this entity. 12 | 13 | Attributes 14 | ========== 15 | 16 | PyntCloud's attributes serve to store information about the point cloud and the 17 | structures associated with it. 18 | 19 | Each PyntCloud instance has some predefined attributes: 20 | 21 | - centroid 22 | - mesh 23 | - :ref:`points` 24 | - structures 25 | - xyz 26 | 27 | You may also add other attributes to your own PyntCloud instance. 28 | 29 | The information about these attributes is reported by the __repr__ method: 30 | 31 | 32 | .. code-block:: python 33 | 34 | from pyntcloud import PyntCloud 35 | 36 | cloud = PyntCloud.from_file("test/data/filters/filters.ply") 37 | 38 | print(cloud) 39 | 40 | .. parsed-literal:: 41 | 42 | PyntCloud 43 | 6 points with 0 scalar fields 44 | 0 faces in mesh 45 | 0 kdtrees 46 | 0 voxelgrids 47 | Centroid: 0.45000001788139343, 0.45000001788139343, 0.45000001788139343 48 | Other attributes: 49 | 50 | 51 | Methods 52 | ======= 53 | 54 | Available methods are very intuitive. 55 | 56 | Methods starting with the word **add** incorporate new information into some existing PyntCloud attribute. 57 | 58 | Methods starting with the word **get** return some information extracted from the PyntCloud. 59 | 60 | **I/O** methods read/write information from/to different 3D formats. 61 | 62 | **Other** methods are useful operations that don't fit into any of the above categories. 63 | 64 | *ADD* METHODS 65 | ------------- 66 | 67 | .. function:: PyntCloud.add_scalar_field 68 | :noindex: 69 | 70 | :ref:`scalar_fields` 71 | 72 | :ref:`scalar_fields_dev` 73 | 74 | .. function:: PyntCloud.add_structure 75 | :noindex: 76 | 77 | *GET* METHODS 78 | ------------- 79 | 80 | .. function:: PyntCloud.get_filter 81 | :noindex: 82 | 83 | :ref:`filters` 84 | 85 | :ref:`filters_dev` 86 | 87 | .. function:: PyntCloud.get_sample 88 | :noindex: 89 | 90 | :ref:`samplers` 91 | 92 | :ref:`samplers_dev` 93 | 94 | .. function:: PyntCloud.get_neighbors 95 | :noindex: 96 | 97 | .. function:: PyntCloud.get_mesh_vertices 98 | :noindex: 99 | 100 | *I/O* METHODS 101 | ------------- 102 | 103 | :ref:`io` 104 | 105 | .. function:: PyntCloud.from_file 106 | :noindex: 107 | 108 | .. function:: PyntCloud.to_file 109 | :noindex: 110 | 111 | *OTHER* METHODS 112 | --------------- 113 | 114 | .. function:: PyntCloud.apply_filter 115 | :noindex: 116 | 117 | .. function:: PyntCloud.split_on 118 | :noindex: 119 | 120 | .. function:: PyntCloud.plot 121 | :noindex: 122 | -------------------------------------------------------------------------------- /docs/_config.yml: -------------------------------------------------------------------------------- 1 | theme: jekyll-theme-cayman 2 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | .. _contributing: 2 | 3 | ============ 4 | Contributing 5 | ============ 6 | 7 | At the very least, you need a Python (>3.5) installation (an isolated environment, e.g. conda, is recommended) and the following requirements: 8 | 9 | - numpy 10 | - numba 11 | - scipy 12 | - pandas 13 | - flake8 14 | - pytest 15 | 16 | Then you can clone the repo and install it in editable mode: 17 | 18 | ..
code-block:: bash 19 | 20 | git clone https://github.com/daavoo/pyntcloud.git 21 | pip install -e pyntcloud 22 | 23 | From the root of the repo, you can run: 24 | 25 | .. code-block:: bash 26 | 27 | # for getting warnings about syntax and other kinds of errors 28 | flake8 29 | 30 | # for running all the tests 31 | pytest -v 32 | -------------------------------------------------------------------------------- /docs/filters.rst: -------------------------------------------------------------------------------- 1 | .. _filters: 2 | 3 | ======= 4 | Filters 5 | ======= 6 | 7 | .. currentmodule:: pyntcloud 8 | 9 | As the name suggest, filters are used to discard points from the point cloud based on a condition that is evaluated against all the points in the point cloud. 10 | 11 | All filters take a PyntCloud (and extra information in some cases) as input and produce a boolean array as output. 12 | 13 | This boolean array separates the points that passed the filter and will thus be retained (True) from those which did not and will thus be removed (False). 14 | 15 | Filters are accessible through: 16 | 17 | .. function:: PyntCloud.get_filter 18 | :noindex: 19 | 20 | We group the available filters based on what the requirements for computing them are. 21 | 22 | .. currentmodule:: pyntcloud.filters 23 | 24 | Only require XYZ 25 | ================= 26 | 27 | "BBOX" 28 | ------ 29 | 30 | .. autoclass:: BoundingBox 31 | 32 | Require KDTree 33 | ============== 34 | 35 | Required args: 36 | 37 | kdtree: KDTree.id 38 | 39 | .. code-block:: python 40 | 41 | kdtree = pointcloud.add_structure("kdtree", ...) 42 | 43 | "ROR" 44 | ----- 45 | 46 | .. autoclass:: RadiusOutlierRemoval 47 | 48 | "SOR" 49 | ----- 50 | 51 | .. autoclass:: StatisticalOutlierRemoval 52 | -------------------------------------------------------------------------------- /docs/filters_dev.rst: -------------------------------------------------------------------------------- 1 | .. _filters_dev: 2 | 3 | ============= 4 | Filters - Dev 5 | ============= 6 | 7 | This page contains useful information for developers who want to modify / add content to the filters module. 8 | 9 | First of all, two points of advice: 10 | 11 | - Use the existing filters as guideline. 12 | 13 | - Follow the general CONTRIBUTING GUIDELINES. 14 | 15 | The Big Picture 16 | =============== 17 | 18 | .. currentmodule:: pyntcloud 19 | 20 | Filters are used by the method: 21 | 22 | .. function:: PyntCloud.get_filter 23 | :noindex: 24 | 25 | Take a look at the source code in order to get a general overview of how filters are being used. All filters are classes and all have the same 26 | methods. 27 | 28 | The sections below will guide you through the filters module explaining how you can create your own filters or where you need 29 | to look in order to modify existing ones. 30 | 31 | Base Class 32 | ========== 33 | 34 | .. currentmodule:: pyntcloud.filters.base 35 | 36 | All filters must inherit from the base class `Filter` and implement its abstract methods. 37 | 38 | The base class is located at pyntcloud/filters/base.py 39 | 40 | .. autoclass:: Filter 41 | 42 | At the very least, all filters receive a PyntCloud when they are instantiated. 43 | 44 | The `Filter.extract_info` method must be overridden in order to extract and save the information required to compute the filter in a attribute. 45 | 46 | See SUBMODULE BASE CLASS below for more information. 47 | 48 | `Filter.compute` is where the boolean array is generated and returned. 
It should use the information extracted by the above method in order to decide which 49 | points should be filtered. 50 | 51 | See SPECIFIC FILTER CLASS below. 52 | 53 | Submodule Base Class 54 | ==================== 55 | 56 | .. currentmodule:: pyntcloud.filters.f_kdtree 57 | 58 | Filters are grouped into submodules according to which kind of information they require to be computed. 59 | 60 | For example, filters that require a KDTree to be computed are in pyntcloud/filters/f_kdtree.py 61 | 62 | As the information required by all the filters in each submodule is the same, we create a submodule base class overriding the __init__ and extract_info 63 | methods of the Filter base class. 64 | 65 | For example, in the f_kdtree submodule there is a Filter_KDTree class from which all the filters that require a KDTree inherit. 66 | 67 | .. autoclass:: Filter_KDTree 68 | 69 | If you don't find a submodule that extracts the information that your new filter needs, create a new one using as guideline one of the existing ones. 70 | 71 | Specific Filter Class 72 | ===================== 73 | 74 | Once you have a submodule base class that extracts the right information, you have to actually create the specific class 75 | for your filter, inheriting from the submodule base class and overriding the `Filter.compute` method. 76 | 77 | If the computation of your filter requires some parameters from the user, you should override the `__init__` method in order to accept those 78 | parameters. 79 | 80 | For example, the RadiusOutlierRemoval filter requires the user to specify a radius "r" and a number of neighbors "k": 81 | 82 | .. autoclass:: RadiusOutlierRemoval 83 | 84 | Let PyntCloud know about your filter 85 | ==================================== 86 | 87 | In order to do so, you have to do some things: 88 | 89 | - Add tests at `test/test_filters.py`. 90 | - Import your new filter(s) and/or submodule(s) at `pyntcloud/filters/__init__.py`. 91 | - Include them in the ALL_FILTERS dictionary, giving them a **string alias** at `pyntcloud/filters/__init__.py`. 92 | - Document them in the `PyntCloud.get_filter` docstring at `pyntcloud/core_class.py`. 93 | - Document them at `docs/filters.rst`. 
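Putting the sections above together, a minimal, hypothetical filter could look like the sketch below (the class name, the ``max_dist`` parameter and the decision logic are illustrative only; the base-class signature follows ``src/pyntcloud/filters/base.py``):

.. code-block:: python

    import numpy as np

    from pyntcloud.filters.base import Filter


    class MaxDistanceFromOrigin(Filter):
        """Hypothetical filter: keep points closer to the origin than max_dist."""

        def __init__(self, *, pyntcloud, max_dist=1.0):
            super().__init__(pyntcloud=pyntcloud)
            self.max_dist = max_dist

        def extract_info(self):
            # Save the information needed by compute() as an attribute.
            self.points = self.pyntcloud.xyz

        def compute(self):
            # Return a boolean mask: True for points that pass the filter.
            return np.linalg.norm(self.points, axis=1) < self.max_dist

Registering such a class in the ALL_FILTERS dictionary under a string alias, as described in the list above, is what makes it reachable through ``PyntCloud.get_filter``.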
94 | -------------------------------------------------------------------------------- /docs/images/boxplot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/docs/images/boxplot.png -------------------------------------------------------------------------------- /docs/images/plot1.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/docs/images/plot1.gif -------------------------------------------------------------------------------- /docs/images/plot2.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/docs/images/plot2.gif -------------------------------------------------------------------------------- /docs/images/plot3.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/docs/images/plot3.gif -------------------------------------------------------------------------------- /docs/images/pyntcloud_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/docs/images/pyntcloud_logo.png -------------------------------------------------------------------------------- /docs/images/pyntcloud_logo_small.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/docs/images/pyntcloud_logo_small.png -------------------------------------------------------------------------------- /docs/images/pyntcloud_logo_square.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/docs/images/pyntcloud_logo_square.png -------------------------------------------------------------------------------- /docs/images/scatter_matrix.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/docs/images/scatter_matrix.png -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | ===================== 2 | Welcome to pyntcloud! 3 | ===================== 4 | 5 | pyntcloud is a Python library for working with 3D point clouds. 6 | 7 | .. image:: /images/pyntcloud_logo.png 8 | 9 | This documentation is under construction. 10 | 11 | .. toctree:: 12 | :maxdepth: 1 13 | 14 | introduction 15 | installation 16 | contributing 17 | PyntCloud 18 | points 19 | filters 20 | filters_dev 21 | io 22 | io_dev 23 | samplers 24 | samplers_dev 25 | structures 26 | structures_dev 27 | scalar_fields 28 | scalar_fields_dev 29 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | .. 
_installation: 2 | 3 | ============ 4 | Installation 5 | ============ 6 | 7 | Basic Installation 8 | ================== 9 | 10 | With Python 3.5 or greater you can install pyntcloud using pip: 11 | 12 | .. code-block:: bash 13 | 14 | pip install pyntcloud 15 | 16 | 17 | You can also install with miniconda: 18 | 19 | https://conda.io/miniconda.html 20 | 21 | And running: 22 | 23 | .. code-block:: bash 24 | 25 | conda install pyntcloud -c conda-forge 26 | 27 | 28 | 29 | Installation for developers 30 | =========================== 31 | 32 | Check :ref:`contributing` 33 | -------------------------------------------------------------------------------- /docs/introduction.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Introduction 3 | ============ 4 | 5 | This page will introduce the general concept of *point clouds* and illustrate 6 | the capabilities of pyntcloud as a point cloud processing tool. 7 | 8 | 9 | Point clouds 10 | ============ 11 | Point clouds are one of the most relevant entities for representing three dimensional 12 | data these days, along with polygonal meshes (which are just a special case of point clouds with 13 | connectivity graph attached). 14 | 15 | In its simplest form, a point cloud is a set of points in a cartesian coordinate 16 | system. 17 | 18 | Accurate `3D point clouds `__ can nowadays be (easily and cheaply) 19 | acquired from different sources. For example: 20 | 21 | - RGB-D devices: `Google Tango `__, `Microsoft Kinect `__, etc. 22 | 23 | - `Lidar `__. 24 | 25 | - Camera + Photogrammetry software (`Open source Colmap `__, `Agisoft Photoscan `__, . . . ) 26 | 27 | pyntcloud 28 | ========= 29 | pyntcloud enables simple and interactive exploration of point cloud data, regardless of which sensor was used to generate it or what the use case is. 30 | 31 | Although it was built for being used on `Jupyter Notebooks `__, the library is suitable for other kinds of uses. 32 | 33 | pyntcloud is composed of several modules (as independent as possible) that englobe 34 | common point cloud processing operations: 35 | 36 | - :ref:`filters` / :ref:`filters_dev` 37 | 38 | - geometry 39 | 40 | - :ref:`io` / :ref:`io_dev` 41 | 42 | - learn 43 | 44 | - neighbors 45 | 46 | - plot 47 | 48 | - ransac 49 | 50 | - sampling 51 | 52 | - :ref:`scalar_fields` / :ref:`scalar_fields_dev` 53 | 54 | - :ref:`structures` / :ref:`structures_dev` 55 | 56 | - utils 57 | 58 | Most of the functionality of this modules can be accessed by the core class of 59 | the library, **PyntCloud**, and its corresponding methods: 60 | 61 | .. code-block:: python 62 | 63 | from pyntcloud import PyntCloud 64 | # io 65 | cloud = PyntCloud.from_file("some_file.ply") 66 | # structures 67 | kdtree_id = cloud.add_structure("kdtree") 68 | # neighbors 69 | k_neighbors = cloud.get_neighbors(k=5, kdtree=kdtree_id) 70 | # scalar_fields 71 | ev = cloud.add_scalar_field("eigen_values", k_neighbors=k_neighbors) 72 | # filters 73 | f = cloud.get_filter("BBOX", min_x=0.1, max_x=0.8) 74 | # ... 75 | 76 | Although most of the functionality in the modules can be used without constructing 77 | a PyntCloud instance, the recommended workflow for the average user is the one showcased above. 78 | -------------------------------------------------------------------------------- /docs/io_dev.rst: -------------------------------------------------------------------------------- 1 | .. 
_io_dev: 2 | 3 | ========= 4 | I/O - Dev 5 | ========= 6 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | set SPHINXPROJ=pyntcloud 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 20 | echo.installed, then set the SPHINXBUILD environment variable to point 21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 22 | echo.may add the Sphinx directory to PATH. 23 | echo. 24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | scipy 3 | pandas 4 | numba 5 | sphinx_bootstrap_theme 6 | -------------------------------------------------------------------------------- /docs/samplers.rst: -------------------------------------------------------------------------------- 1 | .. _samplers: 2 | 3 | ======== 4 | Samplers 5 | ======== 6 | 7 | .. currentmodule:: pyntcloud 8 | 9 | Samplers use PyntCloud information to generate a sample of points. These points might or might not have been present in the original point cloud. 10 | 11 | For example, `RandomPoints` generates a sample by randomly selecting points from the original point cloud. 12 | In this case, all of the sample's points were present in the original point cloud. 13 | 14 | On the other hand, `VoxelgridCentroids` generates a sample by computing the centroid of each group of points inside each occupied VoxelGrid cell. 15 | In this case, none of the sample's points need to have been present in the original point cloud. 16 | 17 | All samplers take a point cloud as input and return a pandas DataFrame. 18 | 19 | This pandas DataFrame can be used to generate a new PyntCloud. 20 | 21 | All samplers are accessible through: 22 | 23 | .. function:: PyntCloud.get_sample 24 | 25 | We group the available samplers based on what information is used for their computation. 26 | 27 | .. currentmodule:: pyntcloud.samplers 28 | 29 | 30 | Require points 31 | ============== 32 | 33 | 34 | "points_random_sampling" 35 | ------------------------ 36 | 37 | .. autoclass:: RandomPoints 38 | 39 | Require mesh 40 | ============ 41 | 42 | `pointcloud.mesh` must exist. 43 | 44 | "mesh_random_sampling" 45 | ---------------------- 46 | 47 | .. autoclass:: RandomMesh 48 | 49 | Require VoxelGrid 50 | ================= 51 | 52 | Required args: 53 | 54 | voxelgrid: VoxelGrid.id 55 | 56 | .. code-block:: python 57 | 58 | 59 | voxelgrid = pointcloud.add_structure("voxelgrid", ...) 60 | 61 | "voxelgrid_centers" 62 | ------------------- 63 | 64 | .. autoclass:: VoxelgridCenters 65 | 66 | "voxelgrid_centroids" 67 | --------------------- 68 | 69 | .. autoclass:: VoxelgridCentroids 70 | 71 | "voxelgrid_nearest" 72 | ------------------- 73 | 74 | ..
autoclass:: VoxelgridNearest 75 | -------------------------------------------------------------------------------- /docs/samplers_dev.rst: -------------------------------------------------------------------------------- 1 | .. _samplers_dev: 2 | 3 | ============== 4 | Samplers - Dev 5 | ============== 6 | 7 | This page contains useful information for developers who want to modify / add content to the samplers module. 8 | 9 | First of all, two points of advice: 10 | 11 | - Use the existing samplers as a guideline. 12 | 13 | - Follow the general CONTRIBUTING GUIDELINES. 14 | 15 | The Big Picture 16 | =============== 17 | 18 | .. currentmodule:: pyntcloud 19 | 20 | Samplers are used by the method: 21 | 22 | .. function:: PyntCloud.get_sample 23 | :noindex: 24 | 25 | Take a look at the source code in order to get a general overview of how samplers are being used. All samplers are classes and all have the same 26 | methods. 27 | 28 | The sections below will guide you through the samplers module, explaining how you can create your own samplers or where you need 29 | to look in order to modify existing ones. 30 | 31 | Base Class 32 | ========== 33 | 34 | .. currentmodule:: pyntcloud.samplers.base 35 | 36 | All samplers must inherit from the base class `Sampler` and implement its abstract methods. 37 | 38 | The base class is located at pyntcloud/samplers/base.py 39 | 40 | .. autoclass:: Sampler 41 | 42 | At the very least, all samplers receive a PyntCloud when they are instantiated. 43 | 44 | The `Sampler.extract_info` method must be overridden in order to extract and save the information required to generate the sample in an attribute. 45 | 46 | See SUBMODULE BASE CLASS below for more information. 47 | 48 | `Sampler.compute` is where the sample is generated and returned. It should use the information extracted by the above method in order to generate the sample. 49 | 50 | See SPECIFIC SAMPLER CLASS below. 51 | 52 | Submodule Base Class 53 | ==================== 54 | 55 | .. currentmodule:: pyntcloud.samplers.s_voxelgrid 56 | 57 | Samplers are grouped into submodules according to which kind of information they require to be computed. 58 | 59 | For example, samplers that require a VoxelGrid to be computed are in pyntcloud/samplers/s_voxelgrid.py 60 | 61 | As the information required by all the samplers in each submodule is the same, we create a submodule base class overriding the __init__ and extract_info 62 | methods of the Sampler base class. 63 | 64 | For example, in the s_voxelgrid submodule there is a Sampler_Voxelgrid class from which all the samplers that require a Voxelgrid inherit. 65 | 66 | .. autoclass:: Sampler_Voxelgrid 67 | 68 | If you don't find a submodule that extracts the information that your new sampler needs, create a new one using one of the existing ones as a guideline. 69 | 70 | Specific Sampler Class 71 | ====================== 72 | 73 | .. currentmodule:: pyntcloud.samplers.s_mesh 74 | 75 | Once you have a submodule base class that extracts the right information, you have to actually create the specific class 76 | for your sampler, inheriting from the submodule base class and overriding the `Sampler.compute` method. 77 | 78 | If the computation of your sample requires some parameters from the user, you should override the `__init__` method in order to accept those 79 | parameters. 80 | 81 | For example, the `RandomMesh` sampler requires the user to specify whether the sample will use RGB and/or normal information: 82 | 83 | ..
autoclass:: RandomMesh 84 | 85 | Let PyntCloud know about your sampler 86 | ===================================== 87 | 88 | In order to do so, you have to do some things: 89 | 90 | - Add tests at `test/test_samplers.py`. 91 | - Import your new sampler(s) and/or submodule(s) at `pyntcloud/samplers/__init__.py`. 92 | - Include them in the ALL_SAMPLERS dictionary, giving them a **string alias**, at `pyntcloud/samplers/__init__.py`. 93 | - Document them in the `PyntCloud.get_sample` docstring at `pyntcloud/core_class.py`. 94 | - Document them at `docs/samplers.rst`. 95 | -------------------------------------------------------------------------------- /docs/scalar_fields_dev.rst: -------------------------------------------------------------------------------- 1 | .. _scalar_fields_dev: 2 | 3 | =================== 4 | Scalar Fields - Dev 5 | =================== 6 | 7 | 8 | This page contains useful information for developers who want to modify / add content to the scalar_fields module. 9 | 10 | First of all, two points of advice: 11 | 12 | - Use the existing scalar_fields as a guideline. 13 | 14 | - Follow the general CONTRIBUTING GUIDELINES. 15 | 16 | The Big Picture 17 | =============== 18 | 19 | .. currentmodule:: pyntcloud 20 | 21 | Scalar fields are added by the method: 22 | 23 | .. function:: PyntCloud.add_scalar_field 24 | :noindex: 25 | 26 | Take a look at the source code in order to get a general overview of how scalar fields are being used. All scalar fields are classes and all have the same 27 | methods. 28 | 29 | The sections below will guide you through the scalar fields module, explaining how you can create your own scalar fields or where you need 30 | to look in order to modify existing ones. 31 | 32 | Base Class 33 | ========== 34 | 35 | .. currentmodule:: pyntcloud.scalar_fields.base 36 | 37 | All scalar fields must inherit from the base class `ScalarField` and implement its abstract methods. 38 | 39 | The base class is located at pyntcloud/scalar_fields/base.py 40 | 41 | .. autoclass:: ScalarField 42 | 43 | At the very least, all scalar fields receive a PyntCloud when they are instantiated. 44 | 45 | The `ScalarField.extract_info` method must be overridden in order to extract and save in an attribute the information required to compute the scalar field. 46 | 47 | See SUBMODULE BASE CLASS below for more information. 48 | 49 | `ScalarField.compute` is where the new DataFrame columns are generated. 50 | 51 | See SPECIFIC SCALAR FIELD CLASS below. 52 | 53 | Submodule Base Class 54 | ==================== 55 | 56 | .. currentmodule:: pyntcloud.scalar_fields.sf_voxelgrid 57 | 58 | Scalar fields are grouped into submodules according to which kind of information they require to be computed. 59 | 60 | For example, scalar fields that require a VoxelGrid to be computed are in pyntcloud/scalar_fields/sf_voxelgrid.py 61 | 62 | As the information required by all the scalar fields in each submodule is the same, we create a submodule base class overriding the __init__ and extract_info 63 | methods of the ScalarField base class. 64 | 65 | For example, in the sf_voxelgrid submodule there is a ScalarField_Voxelgrid class from which all the scalar fields that require a VoxelGrid inherit. 66 | 67 | .. autoclass:: ScalarField_Voxelgrid 68 | 69 | If you don't find a submodule that extracts the information that your new scalar field needs, create a new one using one of the existing ones as a guideline.
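As a rough illustration, a new submodule base class could look like the sketch below (the ``ScalarField_Centroid`` name and the attributes it extracts are hypothetical; check the real base class in ``src/pyntcloud/scalar_fields/base.py`` for the exact signature to follow):

.. code-block:: python

    from pyntcloud.scalar_fields.base import ScalarField


    class ScalarField_Centroid(ScalarField):
        """Hypothetical submodule base class for scalar fields that need the cloud centroid."""

        def extract_info(self):
            # Save the information shared by every scalar field in this submodule;
            # concrete scalar fields then only have to override compute().
            self.points = self.pyntcloud.xyz
            self.centroid = self.pyntcloud.centroid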
70 | 71 | Specific Scalar Field Class 72 | =========================== 73 | 74 | Once you have a submodule base class that extracts the right information, you have to actually create the specific class 75 | for your scalar field, inheriting from the submodule base class and overriding the `ScalarField.compute` method. 76 | 77 | If the computation of your scalar field requires some parameters from the user, you should override the `__init__` method in order to accept those 78 | parameters. 79 | 80 | .. currentmodule:: pyntcloud.scalar_fields.sf_xyz 81 | 82 | For example, the SphericalCoordinates scalar field (in pyntcloud/scalar_fields/sf_xyz.py) requires the user to specify whether the output should be in degrees or not: 83 | 84 | .. autoclass:: SphericalCoordinates 85 | 86 | Let PyntCloud know about your scalar field 87 | ========================================== 88 | 89 | In order to do so, you have to do some things: 90 | 91 | - Add tests at `test/test_sf.py`. 92 | - Import your new scalar field(s) and/or submodule(s) at `pyntcloud/scalar_fields/__init__.py`. 93 | - Include them in the ALL_SF dictionary, giving them a **string alias** at `pyntcloud/scalar_fields/__init__.py`. 94 | - Document them in the `PyntCloud.add_scalar_field` docstring at `pyntcloud/core_class.py`. 95 | - Document them at `docs/scalar_fields.rst`. 96 | -------------------------------------------------------------------------------- /docs/structures.rst: -------------------------------------------------------------------------------- 1 | .. _structures: 2 | 3 | ========== 4 | Structures 5 | ========== 6 | 7 | .. currentmodule:: pyntcloud 8 | 9 | Structures are used for adding superpowers to PyntCloud instances. 10 | 11 | For example, a `VoxelGrid` can be used for: 12 | 13 | - Converting a point cloud into a valid input for a convolutional neural network. 14 | 15 | - Finding nearest neighbors. 16 | 17 | - Finding unconnected clusters of points in the point cloud. 18 | 19 | - Many other cool things. 20 | 21 | All structures are built on top of a point cloud, mesh or another structure. 22 | 23 | You can create structures using: 24 | 25 | .. function:: PyntCloud.add_structure 26 | 27 | .. currentmodule:: pyntcloud.structures 28 | 29 | Convex Hull 30 | =========== 31 | 32 | .. autoclass:: ConvexHull 33 | 34 | Delaunay3D 35 | ========== 36 | 37 | .. autoclass:: Delaunay3D 38 | 39 | KDTree 40 | ====== 41 | 42 | .. autoclass:: KDTree 43 | 44 | VoxelGrid 45 | ========= 46 | 47 | .. autoclass:: VoxelGrid 48 | -------------------------------------------------------------------------------- /docs/structures_dev.rst: -------------------------------------------------------------------------------- 1 | ..
_structures_dev: 2 | 3 | ================ 4 | Structures - Dev 5 | ================ 6 | -------------------------------------------------------------------------------- /examples/data/images/scalar_fields-eigen_values-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/scalar_fields-eigen_values-1.png -------------------------------------------------------------------------------- /examples/data/images/scalar_fields-eigen_values-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/scalar_fields-eigen_values-2.png -------------------------------------------------------------------------------- /examples/data/images/scalar_fields-eigen_values-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/scalar_fields-eigen_values-3.png -------------------------------------------------------------------------------- /examples/data/images/scalar_fields-eigen_values-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/scalar_fields-eigen_values-4.png -------------------------------------------------------------------------------- /examples/data/images/scalar_fields-eigen_values-5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/scalar_fields-eigen_values-5.png -------------------------------------------------------------------------------- /examples/data/images/scalar_fields-eigen_values-6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/scalar_fields-eigen_values-6.png -------------------------------------------------------------------------------- /examples/data/images/scalar_fields-eigen_values-7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/scalar_fields-eigen_values-7.png -------------------------------------------------------------------------------- /examples/data/images/scalar_fields-eigen_values-8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/scalar_fields-eigen_values-8.png -------------------------------------------------------------------------------- /examples/data/images/scalar_fields-normals-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/scalar_fields-normals-1.png -------------------------------------------------------------------------------- /examples/data/images/scalar_fields-normals-2.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/scalar_fields-normals-2.png -------------------------------------------------------------------------------- /examples/data/images/scalar_fields-normals-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/scalar_fields-normals-3.png -------------------------------------------------------------------------------- /examples/data/images/scalar_fields-normals-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/scalar_fields-normals-4.png -------------------------------------------------------------------------------- /examples/data/images/structures-voxelgrid-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/structures-voxelgrid-1.png -------------------------------------------------------------------------------- /examples/data/images/structures-voxelgrid-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/structures-voxelgrid-2.png -------------------------------------------------------------------------------- /examples/data/images/structures-voxelgrid-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/structures-voxelgrid-3.png -------------------------------------------------------------------------------- /examples/data/images/structures-voxelgrid-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/structures-voxelgrid-4.png -------------------------------------------------------------------------------- /examples/data/images/structures-voxelgrid-5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/structures-voxelgrid-5.png -------------------------------------------------------------------------------- /examples/data/images/structures-voxelgrid-6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/structures-voxelgrid-6.png -------------------------------------------------------------------------------- /examples/data/images/structures-voxelgrid-7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/structures-voxelgrid-7.png -------------------------------------------------------------------------------- /examples/data/images/structures-voxelgrid-8.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/structures-voxelgrid-8.png -------------------------------------------------------------------------------- /examples/data/images/structures-voxelgrid-9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/structures-voxelgrid-9.png -------------------------------------------------------------------------------- /examples/data/images/visualization-Polylines-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/visualization-Polylines-1.png -------------------------------------------------------------------------------- /examples/data/images/visualization-Polylines-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/visualization-Polylines-2.png -------------------------------------------------------------------------------- /examples/data/images/visualization-Polylines-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/visualization-Polylines-3.png -------------------------------------------------------------------------------- /examples/data/images/visualization-PyntCloud-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/examples/data/images/visualization-PyntCloud-1.png -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=48", "setuptools_scm[toml]>=6.3.1"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "pyntcloud" 7 | dynamic = ["version"] 8 | description = "Python library for working with 3D point clouds." 
9 | readme = "README.md" 10 | requires-python = ">=3.11" 11 | license = { text = "The MIT License" } 12 | authors = [ 13 | { name = "David de la Iglesia Castro", email = "daviddelaiglesiacastro@gmail.com" } 14 | ] 15 | classifiers = [ 16 | "Programming Language :: Python :: 3", 17 | "License :: OSI Approved :: MIT License", 18 | "Operating System :: OS Independent", 19 | ] 20 | dependencies = [ 21 | "numpy", 22 | "scipy>=1.6.0", 23 | "pandas", 24 | ] 25 | 26 | [project.optional-dependencies] 27 | las = ["laspy", "lazrs"] 28 | plot = ["ipython", "matplotlib", "pyvista>=0.32.0"] 29 | numba = ["numba"] 30 | all = [ 31 | "laspy", 32 | "lazrs", 33 | "ipython", 34 | "matplotlib", 35 | "pyvista>=0.32.0", 36 | "numba", 37 | "open3d" 38 | ] 39 | 40 | [project.urls] 41 | repository = "https://github.com/daavoo/pyntcloud" 42 | 43 | [tool.setuptools.packages.find] 44 | exclude = ["tests", "tests.*"] 45 | where = ["src"] 46 | namespaces = false 47 | 48 | [tool.setuptools_scm] 49 | -------------------------------------------------------------------------------- /src/pyntcloud/__init__.py: -------------------------------------------------------------------------------- 1 | from .core_class import PyntCloud 2 | 3 | __all__ = [PyntCloud] 4 | -------------------------------------------------------------------------------- /src/pyntcloud/filters/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | HAKUNA MATATA 3 | """ 4 | 5 | from .kdtree import ( 6 | RadiusOutlierRemovalFilter, 7 | StatisticalOutlierRemovalFilter, 8 | ) 9 | from .xyz import BoundingBoxFilter 10 | 11 | ALL_FILTERS = { 12 | # XYZ 13 | "BBOX": BoundingBoxFilter, 14 | # KDTree 15 | "ROR": RadiusOutlierRemovalFilter, 16 | "SOR": StatisticalOutlierRemovalFilter, 17 | } 18 | -------------------------------------------------------------------------------- /src/pyntcloud/filters/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | 4 | class Filter(ABC): 5 | """Base class for filters.""" 6 | 7 | def __init__(self, *, pyntcloud): 8 | self.pyntcloud = pyntcloud 9 | 10 | @abstractmethod 11 | def extract_info(self): 12 | pass 13 | 14 | @abstractmethod 15 | def compute(self): 16 | pass 17 | -------------------------------------------------------------------------------- /src/pyntcloud/filters/kdtree.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from scipy.stats import zscore 4 | 5 | from .base import Filter 6 | 7 | 8 | class KDTreeFilter(Filter): 9 | def __init__(self, *, pyntcloud, kdtree_id): 10 | """ 11 | Parameters 12 | ---------- 13 | pyntcloud: pyntcloud.PyntCloud 14 | kdtree_id: pyntcloud.structures.KDTree.id 15 | Usually returned from PyntCloud.add_structure("kdtree"): 16 | kdtree_id = my_cloud.add_structure("kdtree") 17 | """ 18 | super().__init__(pyntcloud=pyntcloud) 19 | self.kdtree_id = kdtree_id 20 | 21 | def extract_info(self): 22 | self.points = self.pyntcloud.xyz 23 | self.kdtree = self.pyntcloud.structures[self.kdtree_id] 24 | 25 | def compute(self): 26 | pass 27 | 28 | 29 | class RadiusOutlierRemovalFilter(KDTreeFilter): 30 | """Compute a Radius Outlier Removal filter using the given KDTree. 31 | 32 | Parameters 33 | ---------- 34 | kdtree_id: pyntcloud.structures.KDTree.id 35 | k : int 36 | Number of neighbors that will be used to compute the filter. 37 | r : float 38 | The radius of the sphere with center on each point. 
The filter 39 | will look for the required number of neighbors inside that sphere. 40 | 41 | Notes 42 | ----- 43 | > The distances between each point and its 'k' nearest neighbors that 44 | exceed the given 'r' are marked as False. 45 | 46 | > The points having any distance marked as False will be trimmed. 47 | 48 | The parameter r should be used in order to adjust the filter to 49 | the desired result. 50 | 51 | A LOWER 'r' value will result in a HIGHER number of points trimmed. 52 | """ 53 | 54 | def __init__(self, *, pyntcloud, kdtree_id, k, r): 55 | super().__init__(pyntcloud=pyntcloud, kdtree_id=kdtree_id) 56 | self.k = k 57 | self.r = r 58 | 59 | def compute(self): 60 | distances = self.kdtree.query(self.points, k=self.k, workers=-1)[0] 61 | ror_filter = np.all(distances < self.r, axis=1) 62 | 63 | return ror_filter 64 | 65 | 66 | class StatisticalOutlierRemovalFilter(KDTreeFilter): 67 | """Compute a Statistical Outlier Removal filter using the given KDTree. 68 | 69 | Parameters 70 | ---------- 71 | kdtree_id: pyntcloud.structures.KDTree.id 72 | k : int 73 | Number of neighbors that will be used to compute the filter. 74 | z_max: float 75 | The maximum Z score which determines if the point is an outlier. 76 | 77 | Notes 78 | ----- 79 | > For each point, the mean of the distances between it and its 'k' nearest 80 | neighbors is computed. 81 | 82 | > The Z score of those means is computed. 83 | 84 | > Points with a Z score outside the range [-z_max, z_max] are marked 85 | as False, in order to be trimmed. 86 | 87 | The optional parameter z_max should be used in order to adjust 88 | the filter to the desired result. 89 | 90 | A LOWER 'z_max' value will result in a HIGHER number of points trimmed. 91 | """ 92 | 93 | def __init__(self, *, pyntcloud, kdtree_id, k, z_max): 94 | super().__init__(pyntcloud=pyntcloud, kdtree_id=kdtree_id) 95 | self.k = k 96 | self.z_max = z_max 97 | 98 | def compute(self): 99 | distances = self.kdtree.query(self.points, k=self.k, workers=-1)[0] 100 | z_distances = zscore(np.mean(distances, axis=1), ddof=1) 101 | sor_filter = abs(z_distances) < self.z_max 102 | 103 | return sor_filter 104 | -------------------------------------------------------------------------------- /src/pyntcloud/filters/xyz.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from .base import Filter 3 | 4 | 5 | class XYZFilter(Filter): 6 | def extract_info(self): 7 | self.points = self.pyntcloud.xyz 8 | 9 | 10 | class BoundingBoxFilter(XYZFilter): 11 | """ 12 | Compute a bounding box filter for the given points. 13 | 14 | Parameters 15 | ---------- 16 | 17 | min_x, max_x, min_y, max_y, min_z, max_z: float 18 | The bounding box limits for each coordinate. 19 | If some limits are missing, the default values are -infinite 20 | for the min_i and infinite for the max_i. 
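A minimal usage sketch for the filters in this subpackage. It assumes the public entry points PyntCloud.from_file, PyntCloud.add_structure, PyntCloud.get_filter and PyntCloud.apply_filter (defined in core_class.py, not shown here); the string keys come from ALL_FILTERS above and the data file is only illustrative:

    from pyntcloud import PyntCloud

    cloud = PyntCloud.from_file("diamond.ply")

    # Boolean mask of the points that fall inside the bounding box.
    in_box = cloud.get_filter("BBOX", and_apply=False, min_x=0.0, max_x=1.0)

    # The KDTree-based filters need a KDTree structure first.
    kdtree_id = cloud.add_structure("kdtree")
    keep = cloud.get_filter("ROR", and_apply=False, kdtree_id=kdtree_id, k=8, r=0.2)
    cloud.apply_filter(keep)
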
21 | 22 | """ 23 | 24 | def __init__( 25 | self, 26 | *, 27 | pyntcloud, 28 | min_x=-np.inf, 29 | max_x=np.inf, 30 | min_y=-np.inf, 31 | max_y=np.inf, 32 | min_z=-np.inf, 33 | max_z=np.inf, 34 | ): 35 | super().__init__(pyntcloud=pyntcloud) 36 | self.min_x, self.max_x = min_x, max_x 37 | self.min_y, self.max_y = min_y, max_y 38 | self.min_z, self.max_z = min_z, max_z 39 | 40 | def compute(self): 41 | bound_x = np.logical_and( 42 | self.points[:, 0] > self.min_x, self.points[:, 0] < self.max_x 43 | ) 44 | bound_y = np.logical_and( 45 | self.points[:, 1] > self.min_y, self.points[:, 1] < self.max_y 46 | ) 47 | bound_z = np.logical_and( 48 | self.points[:, 2] > self.min_z, self.points[:, 2] < self.max_z 49 | ) 50 | 51 | bb_filter = np.logical_and(np.logical_and(bound_x, bound_y), bound_z) 52 | 53 | return bb_filter 54 | -------------------------------------------------------------------------------- /src/pyntcloud/geometry/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | HAKUNA MATATA 3 | """ 4 | 5 | from .models.plane import Plane 6 | from .models.sphere import Sphere 7 | 8 | __all__ = [Plane, Sphere] 9 | -------------------------------------------------------------------------------- /src/pyntcloud/geometry/areas.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def triangle_area(triangle): 5 | """triangle is a (3,3) array such as triangle[i] is the ith vertex of the triangle""" 6 | return 0.5 * np.linalg.norm( 7 | np.cross(triangle[1] - triangle[0], triangle[2] - triangle[0]) 8 | ) 9 | 10 | 11 | def triangle_area_multi(v1, v2, v3): 12 | """v1, v2, v3 are (N,3) arrays. Each one represents the vertices 13 | such as v1[i], v2[i], v3[i] represent the ith triangle 14 | """ 15 | return 0.5 * np.linalg.norm(np.cross(v2 - v1, v3 - v1), axis=1) 16 | 17 | 18 | def coplanar_area(points, plane_normal=None): 19 | """Area of the coplanar polygon formed by the given points. 20 | 21 | Parameters 22 | ---------- 23 | points: array 24 | The vertices of the selected area, the points are expected to be coplanar. 25 | Expected format: 26 | array([ 27 | [x1,y1,z1], 28 | ..., 29 | [xn,yn,zn]]) 30 | 31 | Returns 32 | ------- 33 | area : float 34 | The area of the polygon formed by the given coplanar points. 35 | 36 | """ 37 | 38 | if not plane_normal: 39 | # p, normal = plane_def_by(points[:3]) 40 | pass 41 | 42 | else: 43 | # normal = normalize(plane_normal) 44 | pass 45 | 46 | #: get an array with the first point positioned as last 47 | points_rolled = np.roll(points, len(points) - 1, axis=0) 48 | 49 | cross_product = np.cross(points, points_rolled) 50 | 51 | summed = np.sum(cross_product, axis=0) 52 | 53 | total = np.dot(summed, plane_normal) 54 | 55 | area = 0.5 * abs(total) 56 | 57 | return area 58 | 59 | 60 | def projected_area(points, plane_point, plane_normal): 61 | """Area of the polygon formed by the points projected on the given plane. 62 | 63 | Parameters 64 | ---------- 65 | points: array 66 | The vertices of the selected area.Expected format: 67 | array([ 68 | [x1,y1,z1], 69 | ..., 70 | [xn,yn,zn]]) 71 | 72 | 73 | Returns 74 | ------- 75 | area : float 76 | The area of the polygon formed by the given coplanar points. 
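A quick, self-contained check of the triangle helpers defined at the top of this module (only triangle_area and triangle_area_multi from this file are used; the input arrays are illustrative):

    import numpy as np
    from pyntcloud.geometry.areas import triangle_area, triangle_area_multi

    # A unit right triangle in the XY plane has area 0.5.
    tri = np.array([[0.0, 0.0, 0.0],
                    [1.0, 0.0, 0.0],
                    [0.0, 1.0, 0.0]])
    print(triangle_area(tri))  # 0.5

    # Vectorized variant: one area per triangle, all triangles at once.
    v1 = np.zeros((4, 3))
    v2 = np.tile([1.0, 0.0, 0.0], (4, 1))
    v3 = np.tile([0.0, 1.0, 0.0], (4, 1))
    print(triangle_area_multi(v1, v2, v3))  # [0.5 0.5 0.5 0.5]
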
77 | 78 | """ 79 | 80 | # points = project_on_plane(points, plane_point, plane_normal) 81 | 82 | area = coplanar_area(points, plane_normal=plane_normal) 83 | 84 | return area 85 | -------------------------------------------------------------------------------- /src/pyntcloud/geometry/models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/src/pyntcloud/geometry/models/__init__.py -------------------------------------------------------------------------------- /src/pyntcloud/geometry/models/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | 4 | class GeometryModel(ABC): 5 | @abstractmethod 6 | def from_k_points(self, points): 7 | pass 8 | 9 | @abstractmethod 10 | def get_projections(self, points, only_distances=False): 11 | pass 12 | -------------------------------------------------------------------------------- /src/pyntcloud/geometry/models/plane.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from .base import GeometryModel 3 | from ...utils.array import PCA 4 | 5 | 6 | class Plane(GeometryModel): 7 | def __init__(self, point=None, normal=None): 8 | self.point = point 9 | self.normal = normal 10 | if normal is not None: 11 | self.normal /= np.linalg.norm(normal) 12 | 13 | def from_k_points(self, points): 14 | normal = np.cross(points[1] - points[0], points[2] - points[0]) 15 | self.point = points[0] 16 | self.normal = normal / np.linalg.norm(normal) 17 | 18 | def from_equation(self, a, b, c, d): 19 | normal = np.array([a, b, c]) 20 | point = np.array([-d / a, -d / b, -d / c]) 21 | self.point = point 22 | self.normal = normal / np.linalg.norm(normal) 23 | 24 | def from_point_cloud(self, points): 25 | """ 26 | Least Squares fit. 
27 | 28 | Parameters 29 | ---------- 30 | points: (N, 3) ndarray 31 | """ 32 | w, v = PCA(points) 33 | #: the normal of the plane is the last eigenvector 34 | self.normal = v[:, 2] 35 | self.point = np.mean(points, axis=0) 36 | 37 | def get_equation(self): 38 | a, b, c = self.normal 39 | d = -np.dot(self.normal, self.point) 40 | return a, b, c, d 41 | 42 | def get_projections(self, points, only_distances=False): 43 | vectors = points - self.point 44 | distances = np.abs(np.dot(vectors, self.normal)) 45 | if only_distances: 46 | return distances 47 | projections = points - (distances[:, None] * self.normal) 48 | return distances, projections 49 | -------------------------------------------------------------------------------- /src/pyntcloud/geometry/rotations.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def Rx(angle, degrees=True): 5 | if degrees: 6 | cx = np.cos(np.deg2rad(angle)) 7 | sx = np.sin(np.deg2rad(angle)) 8 | else: 9 | cx = np.cos(angle) 10 | sx = np.sin(angle) 11 | return np.array([[1, 0, 0], [0, cx, sx], [0, -sx, cx]]) 12 | 13 | 14 | def Ry(angle, degrees=True): 15 | if degrees: 16 | cy = np.cos(np.deg2rad(angle)) 17 | sy = np.sin(np.deg2rad(angle)) 18 | else: 19 | cy = np.cos(angle) 20 | sy = np.sin(angle) 21 | return np.array([[cy, 0, -sy], [0, 1, 0], [sy, 0, cy]]) 22 | 23 | 24 | def Rz(angle, degrees=True): 25 | if degrees: 26 | cz = np.cos(np.deg2rad(angle)) 27 | sz = np.sin(np.deg2rad(angle)) 28 | else: 29 | cz = np.cos(angle) 30 | sz = np.sin(angle) 31 | return np.array([[cz, sz, 0], [-sz, cz, 0], [0, 0, 1]]) 32 | -------------------------------------------------------------------------------- /src/pyntcloud/io/__init__.py: -------------------------------------------------------------------------------- 1 | from pyntcloud.io.open3d import from_open3d, to_open3d 2 | from pyntcloud.io.pyvista import from_pyvista, to_pyvista 3 | from .ascii import read_ascii, write_ascii 4 | from .bin import read_bin, write_bin 5 | from .las import read_las 6 | from .npz import read_npz, write_npz 7 | from .obj import read_obj, write_obj 8 | from .ply import read_ply, write_ply 9 | from .off import read_off 10 | from .pcd import read_pcd 11 | 12 | FROM_FILE = { 13 | "ASC": read_ascii, 14 | "BIN": read_bin, 15 | "CSV": read_ascii, 16 | "LAS": read_las, 17 | "LAZ": read_las, 18 | "NPZ": read_npz, 19 | "OBJ": read_obj, 20 | "OFF": read_off, 21 | "PCD": read_pcd, 22 | "PLY": read_ply, 23 | "PTS": read_ascii, 24 | "TXT": read_ascii, 25 | "XYZ": read_ascii, 26 | } 27 | FROM_INSTANCE = {"PYVISTA": from_pyvista, "OPEN3D": from_open3d} 28 | 29 | TO_FILE = { 30 | "ASC": write_ascii, 31 | "BIN": write_bin, 32 | "CSV": write_ascii, 33 | "NPZ": write_npz, 34 | "OBJ": write_obj, 35 | "PLY": write_ply, 36 | "PTS": write_ascii, 37 | "TXT": write_ascii, 38 | "XYZ": write_ascii, 39 | } 40 | TO_INSTANCE = {"PYVISTA": to_pyvista, "OPEN3D": to_open3d} 41 | -------------------------------------------------------------------------------- /src/pyntcloud/io/ascii.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | 4 | def read_ascii(filename, **kwargs): 5 | """Read an ascii file and store elements in pandas DataFrame. 6 | 7 | Parameters 8 | ---------- 9 | filename: str 10 | Path to the filename 11 | kwargs: pandas.read_csv supported kwargs 12 | Check pandas documentation for all possibilities. 13 | Returns 14 | ------- 15 | data: dict 16 | Elements as pandas DataFrames. 
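A usage sketch for this reader. It assumes dispatch through PyntCloud.from_file (core_class.py, not shown here), which selects read_ascii for the .xyz/.txt/.pts/.csv/.asc extensions via the FROM_FILE table; extra keyword arguments are forwarded to pandas.read_csv, so headerless files need explicit column names:

    from pyntcloud import PyntCloud

    cloud = PyntCloud.from_file(
        "diamond.xyz",   # plain whitespace-separated table, no header row
        sep=" ",
        header=None,
        names=["x", "y", "z"],
    )
    print(cloud.points.head())
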
17 | """ 18 | 19 | data = {"points": pd.read_csv(filename, **kwargs)} 20 | 21 | return data 22 | 23 | 24 | def write_ascii(filename, points, **kwargs): 25 | """Write points content to filename. 26 | 27 | Parameters 28 | ---------- 29 | filename: str 30 | Path to output filename 31 | points: pd.DataFrame 32 | Points data 33 | 34 | kwargs: see pd.DataFrame.to_csv 35 | 36 | Returns 37 | ------- 38 | bool 39 | """ 40 | points[["x", "y", "z"]].to_csv(filename, **kwargs) 41 | 42 | return True 43 | -------------------------------------------------------------------------------- /src/pyntcloud/io/bin.py: -------------------------------------------------------------------------------- 1 | # HAKUNA MATATA 2 | 3 | # Contributed by: Nicholas Mitchell 4 | 5 | import numpy as np 6 | import pandas as pd 7 | 8 | 9 | def read_bin(filename, shape=None, **kwargs): 10 | """Read a _raw binary_ file and store all possible elements in pandas DataFrame. 11 | 12 | If the shape of the array is known, it can be specified using `shape`. The 13 | first three columns are used for x, y and z. Otherwise the binary file is 14 | assumed have row-major format, three columns are formed and used as x, y and 15 | z , respectively. 16 | 17 | NOTE: binary files that are not `raw` will not behave as expected. If they 18 | contain a header/footer with meta data, or were generated e.g. via Protobuf, 19 | then bahviour is also undefined. 20 | 21 | Parameters 22 | ---------- 23 | filename: str 24 | Path to the filename 25 | shape: (n_rows, n_cols) - shape to be formed from the loaded binary array, optional. 26 | **kwargs: 27 | kwargs: numpy.fromfile supported kwargs 28 | Check NumPy documentation for all possibilities. 29 | 30 | Returns 31 | ------- 32 | data: dict 33 | If possible, elements as pandas DataFrames else a NumPy ndarray 34 | """ 35 | data = {} 36 | 37 | kwargs["dtype"] = kwargs.get("dtype", np.float32) 38 | arr = np.fromfile(filename, **kwargs) 39 | 40 | if shape is not None: 41 | try: 42 | arr = arr.reshape(shape) 43 | except ValueError: 44 | raise ValueError( 45 | ( 46 | "The array cannot be reshaped to {0} as " 47 | "it has {1} elements, which is not " 48 | "divisible by three".format(shape, arr.size) 49 | ) 50 | ) 51 | else: 52 | arr = arr.reshape((-1, 3)) 53 | 54 | data["points"] = pd.DataFrame(arr[:, 0:3], columns=["x", "y", "z"]) 55 | 56 | return data 57 | 58 | 59 | def write_bin(filename, **kwargs): 60 | """Write the raw point data in `PyntCloud.xyz` to a binary file. 61 | 62 | Parameters 63 | ---------- 64 | filename: str 65 | The created file will be named with this 66 | kwargs: numpy.ndarray.tofile supported kwargs 67 | Check NumPy documentation on raw binary files for all possibilities. 68 | 69 | Returns 70 | ------- 71 | boolean 72 | True if no problems 73 | """ 74 | # Extract just the x, y, z coordinates from the points dataframe 75 | point_array = kwargs["points"][["x", "y", "z"]].values 76 | 77 | # Remove the points from kwargs now. 78 | # Any remaining kwargs are meant for np.ndarray.tofile() 79 | del kwargs["points"] 80 | 81 | # Test that any remaining kwargs are only those allowed 82 | # It should be the empty set 83 | remaining_kwargs = set(kwargs.keys()) - set(["sep", "format"]) 84 | if not len(remaining_kwargs) == 0: 85 | raise ValueError( 86 | ( 87 | "Only keyword arguments meant for numpy.ndarray.tofile " 88 | "are accepted. 
Please see the numpy documentation" 89 | ) 90 | ) 91 | 92 | point_array.tofile(filename, **kwargs) 93 | 94 | return True 95 | -------------------------------------------------------------------------------- /src/pyntcloud/io/npz.py: -------------------------------------------------------------------------------- 1 | # HAKUNA MATATA 2 | 3 | import numpy as np 4 | import pandas as pd 5 | 6 | 7 | def read_npz(filename, points_name="points", mesh_name="mesh"): 8 | """Read a .npz file and store all possible elements in pandas DataFrame 9 | Parameters 10 | ---------- 11 | filename: str 12 | Path to the filename 13 | Returns 14 | ------- 15 | data: dict 16 | If possible, elements as pandas DataFrames else input format 17 | """ 18 | 19 | data = {} 20 | with np.load(filename) as npz: 21 | data["points"] = pd.DataFrame(npz[points_name]) 22 | if mesh_name in npz: 23 | data["mesh"] = pd.DataFrame(npz[mesh_name]) 24 | return data 25 | 26 | 27 | def write_npz(filename, **kwargs): 28 | """ 29 | Parameters 30 | ---------- 31 | filename: str 32 | The created file will be named with this 33 | 34 | kwargs: Elements of the pyntcloud to be saved 35 | 36 | Returns 37 | ------- 38 | boolean 39 | True if no problems 40 | """ 41 | 42 | for k in kwargs: 43 | if isinstance(kwargs[k], pd.DataFrame): 44 | kwargs[k] = kwargs[k].to_records(index=False) 45 | np.savez_compressed(filename, **kwargs) 46 | return True 47 | -------------------------------------------------------------------------------- /src/pyntcloud/io/off.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | 4 | 5 | def read_off(filename): 6 | with open(filename) as off: 7 | first_line = off.readline() 8 | if "OFF" not in first_line: 9 | raise ValueError("The file does not start with the word OFF") 10 | color = True if "C" in first_line else False 11 | 12 | n_points = 0 13 | n_faces = 0 14 | 15 | count = 1 16 | for line in off: 17 | count += 1 18 | if line.startswith("#"): 19 | continue 20 | line = line.strip().split() 21 | if len(line) > 1: 22 | n_points = int(line[0]) 23 | n_faces = int(line[1]) 24 | break 25 | 26 | if n_points == 0: 27 | raise ValueError("The file has no points") 28 | 29 | data = {} 30 | point_names = ["x", "y", "z"] 31 | point_types = {"x": np.float32, "y": np.float32, "z": np.float32} 32 | 33 | if color: 34 | point_names.extend(["red", "green", "blue"]) 35 | point_types = dict( 36 | point_types, **{"red": np.uint8, "green": np.uint8, "blue": np.uint8} 37 | ) 38 | 39 | data["points"] = pd.read_csv( 40 | off, 41 | sep=" ", 42 | header=None, 43 | engine="c", 44 | nrows=n_points, 45 | names=point_names, 46 | dtype=point_types, 47 | index_col=False, 48 | comment="#", 49 | ) 50 | 51 | data["mesh"] = pd.read_csv( 52 | filename, 53 | sep=" ", 54 | header=None, 55 | engine="c", 56 | skiprows=(count + n_points), 57 | nrows=n_faces, 58 | usecols=[1, 2, 3], 59 | names=["v1", "v2", "v3"], 60 | comment="#", 61 | ) 62 | return data 63 | -------------------------------------------------------------------------------- /src/pyntcloud/io/open3d.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | 4 | 5 | def from_open3d(o3d_data, **kwargs): 6 | """Create a PyntCloud instance from Open3D's PointCloud/TriangleMesh instance""" 7 | try: 8 | import open3d as o3d 9 | except ImportError: 10 | raise ImportError("Open3D must be installed. 
Try `pip install open3d`") 11 | 12 | if not isinstance(o3d_data, (o3d.geometry.PointCloud, o3d.geometry.TriangleMesh)): 13 | raise TypeError( 14 | f"Type {type(o3d_data)} not supported for conversion." 15 | f"Expected {o3d.geometry.PointCloud} or {o3d.geometry.TriangleMesh}" 16 | ) 17 | 18 | mesh = None 19 | if isinstance(o3d_data, o3d.geometry.TriangleMesh): 20 | mesh = pd.DataFrame( 21 | data=np.asarray(o3d_data.triangles), columns=["v1", "v2", "v3"] 22 | ) 23 | 24 | points = pd.DataFrame( 25 | data=np.asarray(o3d_data.vertices), columns=["x", "y", "z"] 26 | ) 27 | 28 | if o3d_data.vertex_colors: 29 | colors = (np.asarray(o3d_data.vertex_colors) * 255).astype(np.uint8) 30 | points["red"] = colors[:, 0] 31 | points["green"] = colors[:, 1] 32 | points["blue"] = colors[:, 2] 33 | 34 | if o3d_data.vertex_normals: 35 | normals = np.asarray(o3d_data.vertex_normals) 36 | points["nx"] = normals[:, 0] 37 | points["ny"] = normals[:, 1] 38 | points["nz"] = normals[:, 2] 39 | 40 | elif isinstance(o3d_data, o3d.geometry.PointCloud): 41 | points = pd.DataFrame(data=np.asarray(o3d_data.points), columns=["x", "y", "z"]) 42 | 43 | if o3d_data.colors: 44 | colors = (np.asarray(o3d_data.colors) * 255).astype(np.uint8) 45 | points["red"] = colors[:, 0] 46 | points["green"] = colors[:, 1] 47 | points["blue"] = colors[:, 2] 48 | 49 | if o3d_data.normals: 50 | normals = np.asarray(o3d_data.normals) 51 | points["nx"] = normals[:, 0] 52 | points["ny"] = normals[:, 1] 53 | points["nz"] = normals[:, 2] 54 | 55 | return {"points": points, "mesh": mesh} 56 | 57 | 58 | def to_open3d(cloud, mesh=True, colors=True, normals=True, **kwargs): 59 | """Convert PyntCloud's instance `cloud` to Open3D's PointCloud/TriangleMesh instance""" 60 | try: 61 | import open3d as o3d 62 | except ImportError: 63 | raise ImportError("Open3D must be installed. 
Try `pip install open3d`") 64 | 65 | if mesh and cloud.mesh is not None: 66 | triangle_mesh = o3d.geometry.TriangleMesh() 67 | triangle_mesh.triangles = o3d.utility.Vector3iVector( 68 | cloud.mesh[["v1", "v2", "v3"]].values 69 | ) 70 | triangle_mesh.vertices = o3d.utility.Vector3dVector(cloud.xyz) 71 | if colors and {"red", "green", "blue"}.issubset(cloud.points.columns): 72 | triangle_mesh.vertex_colors = o3d.utility.Vector3dVector( 73 | cloud.points[["red", "green", "blue"]].values 74 | ) 75 | if normals and {"nx", "ny", "nz"}.issubset(cloud.points.columns): 76 | triangle_mesh.vertex_normals = o3d.utility.Vector3dVector( 77 | cloud.points[["nx", "ny", "nz"]].values 78 | ) 79 | return triangle_mesh 80 | else: 81 | point_cloud = o3d.geometry.PointCloud() 82 | point_cloud.points = o3d.utility.Vector3dVector(cloud.xyz) 83 | if colors and {"red", "green", "blue"}.issubset(cloud.points.columns): 84 | point_cloud.colors = o3d.utility.Vector3dVector( 85 | cloud.points[["red", "green", "blue"]].values 86 | ) 87 | if normals and {"nx", "ny", "nz"}.issubset(cloud.points.columns): 88 | point_cloud.normals = o3d.utility.Vector3dVector( 89 | cloud.points[["nx", "ny", "nz"]].values 90 | ) 91 | return point_cloud 92 | -------------------------------------------------------------------------------- /src/pyntcloud/io/pyvista.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | 3 | import numpy as np 4 | import pandas as pd 5 | 6 | 7 | def from_pyvista(poly_data, **kwargs): 8 | """Load a PyntCloud mesh from PyVista's PolyData instance""" 9 | try: 10 | import pyvista as pv 11 | except ImportError: 12 | raise ImportError("PyVista must be installed. Try `pip install pyvista`") 13 | 14 | if not isinstance(poly_data, pv.PolyData): 15 | raise TypeError( 16 | "Type {} not yet supported for conversion.".format(type(poly_data)) 17 | ) 18 | 19 | mesh = None 20 | if poly_data.faces is not None: 21 | mesh = poly_data.faces.reshape(-1, 4) 22 | if not np.all(3 == mesh[:, 0]): 23 | raise ValueError( 24 | "This mesh is not triangulated. Try triangulating the mesh before passing to PyntCloud." 25 | ) 26 | mesh = pd.DataFrame(data=mesh[:, 1:], columns=["v1", "v2", "v3"]) 27 | 28 | points = pd.DataFrame(data=poly_data.points, columns=["x", "y", "z"]) 29 | 30 | scalars = poly_data.point_data 31 | for name, array in scalars.items(): 32 | if array.ndim == 1: 33 | points[name] = array 34 | elif array.ndim == 2: 35 | if name == "RGB": 36 | points["red"] = array[:, 0] 37 | points["green"] = array[:, 1] 38 | points["blue"] = array[:, 2] 39 | elif name == "Normals": 40 | points["nx"] = array[:, 0] 41 | points["ny"] = array[:, 1] 42 | points["nz"] = array[:, 2] 43 | else: 44 | for n in range(array.shape[1]): 45 | points["{}_{}".format(name, n)] = array[:, n] 46 | else: 47 | warnings.warn( 48 | "Ignoring scalar field {} with ndim > 2 ({})".format(name, array.ndim) 49 | ) 50 | 51 | return {"points": points, "mesh": mesh} 52 | 53 | 54 | def to_pyvista(cloud, mesh=False, use_as_color=("red", "green", "blue"), **kwargs): 55 | """Convert PyntCloud's instance `cloud` to PyVista's PolyData instance""" 56 | try: 57 | import pyvista as pv 58 | except ImportError: 59 | raise ImportError("PyVista must be installed. 
Try `pip install pyvista`") 60 | if mesh and cloud.mesh is not None: 61 | mesh = cloud.mesh[["v1", "v2", "v3"]].values 62 | else: 63 | mesh = None 64 | # Either make point cloud or triangulated mesh 65 | if mesh is not None: 66 | # Update cells of PolyData 67 | types = np.full(len(mesh), 3, dtype=int) 68 | faces = np.insert(mesh, 0, types, axis=1) 69 | poly = pv.PolyData(cloud.xyz, faces) 70 | else: 71 | poly = pv.PolyData(cloud.xyz) 72 | 73 | avoid = ["x", "y", "z"] 74 | # add scalar arrays 75 | if all(c in cloud.points.columns for c in use_as_color): 76 | colors = cloud.points[list(use_as_color)].values 77 | poly.point_data["RGB"] = colors 78 | avoid += list(use_as_color) 79 | # Add other arrays 80 | for name in cloud.points.columns: 81 | if name not in avoid: 82 | poly.point_data[name] = cloud.points[name] 83 | 84 | return poly 85 | -------------------------------------------------------------------------------- /src/pyntcloud/neighbors/__init__.py: -------------------------------------------------------------------------------- 1 | from .k_neighbors import k_neighbors 2 | from .r_neighbors import r_neighbors 3 | 4 | 5 | __all__ = [k_neighbors, r_neighbors] 6 | -------------------------------------------------------------------------------- /src/pyntcloud/neighbors/k_neighbors.py: -------------------------------------------------------------------------------- 1 | def k_neighbors(kdtree, k): 2 | """Get indices of K neartest neighbors for each point 3 | 4 | Parameters 5 | ---------- 6 | kdtree: pyntcloud.structrues.KDTree 7 | The KDTree built on top of the points in point cloud 8 | 9 | k: int 10 | Number of neighbors to find 11 | 12 | Returns 13 | ------- 14 | k_neighbors: (N, k) array 15 | Where N = kdtree.data.shape[0] 16 | """ 17 | # [1] to select indices and ignore distances 18 | # [:,1:] to discard self-neighbor 19 | return kdtree.query(kdtree.data, k=k + 1, workers=-1)[1][:, 1:] 20 | -------------------------------------------------------------------------------- /src/pyntcloud/neighbors/r_neighbors.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def r_neighbors(kdtree, r): 5 | """Get indices of all neartest neighbors with a distance < r for each point 6 | 7 | Parameters 8 | ---------- 9 | kdtree: pyntcloud.structrues.KDTree 10 | The KDTree built on top of the points in point cloud 11 | 12 | r: float 13 | Maximum distance to consider a neighbor 14 | 15 | Returns 16 | ------- 17 | r_neighbors: (N, X) ndarray of lists 18 | Where N = kdtree.data.shape[0] 19 | len(X) varies for each point 20 | """ 21 | return np.array(kdtree.query_ball_tree(kdtree, r)) 22 | -------------------------------------------------------------------------------- /src/pyntcloud/plot/__init__.py: -------------------------------------------------------------------------------- 1 | DESCRIPTION = """\ 2 | PyntCloud 3 | {} points with {} scalar fields 4 | {} faces in mesh 5 | {} kdtrees 6 | {} voxelgrids 7 | Centroid: {}, {}, {} 8 | Other attributes:{} 9 | """ 10 | 11 | AVAILABLE_BACKENDS = [] 12 | # Add each backend in order of preference 13 | # Add pythreejs 14 | try: 15 | import pythreejs # noqa: F401 16 | 17 | AVAILABLE_BACKENDS.append("pythreejs") 18 | except ImportError: 19 | pass 20 | try: 21 | import pyvista # noqa: F401 22 | 23 | AVAILABLE_BACKENDS.append("pyvista") 24 | except ImportError: 25 | pass 26 | try: 27 | import matplotlib # noqa: F401 28 | 29 | AVAILABLE_BACKENDS.append("matplotlib") 30 | except ImportError: 31 | pass 32 | 
-------------------------------------------------------------------------------- /src/pyntcloud/plot/common.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def get_colors(cloud, use_as_color, cmap): 5 | try: 6 | colors = cloud.points[use_as_color].values 7 | except KeyError: 8 | colors = None 9 | if use_as_color != ["red", "green", "blue"] and colors is not None: 10 | import matplotlib.pyplot as plt 11 | 12 | s_m = plt.cm.ScalarMappable(cmap=cmap) 13 | colors = s_m.to_rgba(colors)[:, :-1] * 255 14 | elif colors is None: 15 | # default color orange 16 | colors = np.repeat([[255, 125, 0]], cloud.xyz.shape[0], axis=0) 17 | return colors.astype(np.uint8) 18 | -------------------------------------------------------------------------------- /src/pyntcloud/plot/matplotlib_backend.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | try: 4 | import matplotlib.pyplot as plt 5 | except ImportError: 6 | plt = None 7 | 8 | from .common import get_colors 9 | 10 | 11 | def set_proper_aspect_ratio(ax): 12 | extents = np.array([getattr(ax, "get_{}lim".format(dim))() for dim in "xyz"]) 13 | sz = extents[:, 1] - extents[:, 0] 14 | centers = np.mean(extents, axis=1) 15 | maxsize = max(abs(sz)) 16 | r = maxsize / 2 17 | for ctr, dim in zip(centers, "xyz"): 18 | getattr(ax, "set_{}lim".format(dim))(ctr - r, ctr + r) 19 | 20 | 21 | def plot_with_matplotlib(cloud, **kwargs): 22 | colors = get_colors(cloud, kwargs["use_as_color"], kwargs["cmap"]) 23 | 24 | ptp = np.ptp(cloud.xyz) 25 | 26 | plt.figure(figsize=(10, 10)) 27 | ax = plt.axes(projection="3d") 28 | ax.view_init(elev=kwargs["elev"], azim=kwargs["azim"]) 29 | 30 | ax.scatter( 31 | cloud.xyz[:, 0], 32 | cloud.xyz[:, 1], 33 | cloud.xyz[:, 2], 34 | marker="D", 35 | facecolors=colors / 255, 36 | zdir="z", 37 | depthshade=True, 38 | s=kwargs["initial_point_size"] or ptp / 10, 39 | ) 40 | 41 | set_proper_aspect_ratio(ax) 42 | 43 | return plt.show() 44 | -------------------------------------------------------------------------------- /src/pyntcloud/plot/pyvista_backend.py: -------------------------------------------------------------------------------- 1 | try: 2 | import pyvista as pv 3 | except ImportError: 4 | pv = None 5 | 6 | from .common import get_colors 7 | 8 | 9 | def plot_with_pyvista(cloud, **kwargs): 10 | """Plot using PyVista. Additional kwargs for controoling PyVista scene are 11 | listed here. 12 | 13 | 14 | Parameters 15 | ---------- 16 | off_screen : bool, optional 17 | Renders off screen when False. Useful for automated screenshots. 18 | 19 | notebook : bool, optional 20 | When True, the resulting plot is placed inline a jupyter notebook. 21 | Assumes a jupyter console is active. Automatically enables off_screen. 22 | 23 | render_points_as_spheres : bool, optional 24 | Render the points as spheres 25 | 26 | eye_dome_lighting : bool, optional 27 | Leverage PyVista's Eyd Dome Lighting (EDL) shading for improved 28 | depth perception. 29 | 30 | use_panel : bool, optional 31 | If False, the interactive rendering from panel will not be used in 32 | notebooks 33 | 34 | cpos : list(tuple(floats)) 35 | The camera position to use 36 | 37 | title : string, optional 38 | Title of plotting window. 
39 | 40 | screenshot : string, optional 41 | The path to the PNG file to save a screenshot 42 | 43 | point_size : float, optional 44 | Alias for ``initial_point_size`` 45 | """ 46 | if pv is None: 47 | raise ImportError("PyVista must be installed to use it for plotting.") 48 | # Get point size 49 | point_size = kwargs["initial_point_size"] 50 | if point_size is None: 51 | point_size = kwargs.pop("point_size", 5.0) 52 | 53 | # Get an RGB array using PyntCloud 54 | colors = get_colors(cloud, kwargs["use_as_color"], kwargs["cmap"]) 55 | 56 | poly_data = cloud.to_instance("pyvista", mesh=kwargs.pop("mesh", False)) 57 | 58 | plotter = pv.Plotter( 59 | window_size=[kwargs.pop("width"), kwargs.pop("height")], 60 | off_screen=kwargs.pop("off_screen", None), 61 | notebook=kwargs.pop("notebook", None), 62 | ) 63 | 64 | # Add the poly data to the scene 65 | plotter.add_mesh( 66 | poly_data, 67 | point_size=point_size, 68 | scalars=colors, 69 | rgb=True, 70 | render_points_as_spheres=kwargs.pop("render_points_as_spheres", False), 71 | ) 72 | 73 | if kwargs.pop("eye_dome_lighting", None): 74 | plotter.enable_eye_dome_lighting() 75 | 76 | # TODO: Leverage `kwargs["elev"]` and `kwargs["azim"]` 77 | 78 | return plotter.show( 79 | use_panel=kwargs.pop("use_panel", None), 80 | title=kwargs.pop("title", None), 81 | screenshot=kwargs.pop("screenshot", False), 82 | cpos=kwargs.pop("cpos", None), 83 | ) 84 | -------------------------------------------------------------------------------- /src/pyntcloud/ransac/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | HAKUNA MATATA 3 | """ 4 | 5 | from .fitters import single_fit 6 | from .models import RansacPlane, RansacSphere 7 | from .samplers import RandomRansacSampler, VoxelgridRansacSampler 8 | 9 | RANSAC_MODELS = {"plane": RansacPlane, "sphere": RansacSphere} 10 | RANSAC_SAMPLERS = {"random": RandomRansacSampler, "voxelgrid": VoxelgridRansacSampler} 11 | 12 | __all__ = ["single_fit", "RANSAC_MODELS", "RANSAC_SAMPLERS"] 13 | -------------------------------------------------------------------------------- /src/pyntcloud/ransac/fitters.py: -------------------------------------------------------------------------------- 1 | # HAKUNA MATATA 2 | 3 | """ 4 | Ransac Implementation 5 | """ 6 | 7 | import numpy as np 8 | from .samplers import RandomRansacSampler 9 | 10 | 11 | def single_fit( 12 | points, 13 | model, 14 | sampler=RandomRansacSampler, 15 | model_kwargs={}, 16 | sampler_kwargs={}, 17 | max_iterations=100, 18 | return_model=False, 19 | n_inliers_to_stop=None, 20 | ): 21 | """RANdom SAmple Consensus for fitting model a single model to points. 22 | 23 | points: ndarray 24 | (N, M) ndarray where N is the number of points and M is the number 25 | scalar fields associated to each of those points. 26 | M is usually 3 for representing the x, y, and z coordinates of each point. 27 | 28 | model: Ransac_Model 29 | Class (NOT INSTANCE!) representing the model that will be fitted to points. 30 | Check ransac/models for reference. 31 | 32 | sampler: Ransac_Sampler 33 | Class (NOT INSTANCE!) used to sample points on each iteration. 34 | Check ransac/samplers for reference. 35 | 36 | model_kwargs: dict, optional 37 | Default: {} 38 | Arguments that will be used on model's instantiation. 39 | Variable according to passed model. 40 | 41 | sampler_kwargs: dict, optional 42 | Default: {} 43 | Arguments that will be used on sampler's instantiation. 44 | Variable according to passed sampler. 
45 | 46 | max_iterations: int, optional 47 | Default: 100 48 | Maximum number of iterations. 49 | 50 | return_model: bool, optional (default False) 51 | Whether the best fitted model will be returned or not. 52 | 53 | n_inliers_to_stop: int, optional 54 | Default None 55 | If the model fits a number of inliers > n_inliers_to_stop the loop will end. 56 | 57 | """ 58 | 59 | model = model(**model_kwargs) 60 | sampler = sampler(points, model.k, **sampler_kwargs) 61 | 62 | n_best_inliers = 0 63 | if n_inliers_to_stop is None: 64 | n_inliers_to_stop = len(points) 65 | 66 | for i in range(max_iterations): 67 | k_points = sampler.get_sample() 68 | 69 | if not model.are_valid(k_points): 70 | print(k_points) 71 | continue 72 | 73 | model.fit(k_points) 74 | 75 | all_distances = model.get_distances(points) 76 | 77 | inliers = all_distances <= model.max_dist 78 | 79 | n_inliers = np.sum(inliers) 80 | 81 | if n_inliers > n_best_inliers: 82 | n_best_inliers = n_inliers 83 | best_inliers = inliers 84 | 85 | if n_best_inliers > n_inliers_to_stop: 86 | break 87 | 88 | if return_model: 89 | model.least_squares_fit(points[best_inliers]) 90 | return best_inliers, model 91 | 92 | else: 93 | return best_inliers 94 | -------------------------------------------------------------------------------- /src/pyntcloud/ransac/models.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from abc import ABC, abstractmethod 3 | from ..geometry import Plane, Sphere 4 | 5 | 6 | class RansacModel(ABC): 7 | """ 8 | Base class for ransac models. 9 | 10 | Parameters 11 | ---------- 12 | max_dist : float 13 | Treshold distance to consider a point as an inlier. 14 | """ 15 | 16 | def __init__(self, max_dist=1e-4): 17 | self.max_dist = max_dist 18 | 19 | def fit(self, k_points): 20 | return self.from_k_points(k_points) 21 | 22 | def get_distances(self, points): 23 | return self.get_projections(points, only_distances=True) 24 | 25 | def least_squares_fit(self, points): 26 | return self.from_point_cloud(points) 27 | 28 | @abstractmethod 29 | def are_valid(self, k_points): 30 | pass 31 | 32 | 33 | class RansacPlane(RansacModel, Plane): 34 | def __init__(self, max_dist=1e-4): 35 | super().__init__(max_dist=max_dist) 36 | self.k = 3 37 | 38 | def are_valid(self, k_points): 39 | return True 40 | 41 | 42 | class RansacSphere(RansacModel, Sphere): 43 | def __init__(self, max_dist=1e-4): 44 | super().__init__(max_dist=max_dist) 45 | self.k = 4 46 | 47 | def are_valid(self, k_points): 48 | # check if points are coplanar 49 | x = np.ones((4, 4)) 50 | x[:-1, :] = k_points.T 51 | if np.linalg.det(x) == 0: 52 | return False 53 | else: 54 | return True 55 | -------------------------------------------------------------------------------- /src/pyntcloud/ransac/samplers.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from abc import ABC, abstractmethod 4 | from ..structures import VoxelGrid 5 | 6 | 7 | class RansacSampler(ABC): 8 | """Base class for ransac samplers. 9 | 10 | Parameters 11 | ---------- 12 | points : ndarray 13 | (N, M) ndarray where N is the number of points and M is the number 14 | scalar fields associated to each of those points. 15 | M is usually 3 for representing the x, y, and z coordinates of each point. 16 | 17 | k : int 18 | The number of points that will be sampled in each call of get_sample(). 19 | This number depends on the model used. See ransac/models.py. 
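Putting the pieces of this subpackage together, a minimal sketch that uses only the names exported from pyntcloud.ransac above (the synthetic data is illustrative):

    import numpy as np
    from pyntcloud.ransac import single_fit, RANSAC_MODELS, RANSAC_SAMPLERS

    # Noisy points lying roughly on the z = 0 plane.
    points = np.random.rand(1000, 3)
    points[:, 2] *= 0.01

    inliers = single_fit(
        points,
        RANSAC_MODELS["plane"],
        sampler=RANSAC_SAMPLERS["random"],
        model_kwargs={"max_dist": 0.005},
        max_iterations=50,
    )
    print(int(inliers.sum()), "inliers out of", len(points))
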
20 | """ 21 | 22 | def __init__(self, points, k): 23 | self.points = points 24 | self.k = k 25 | 26 | @abstractmethod 27 | def get_sample(self): 28 | pass 29 | 30 | 31 | class RandomRansacSampler(RansacSampler): 32 | """Sample random points. 33 | 34 | Inherits from RansacSampler. 35 | 36 | """ 37 | 38 | def __init__(self, points, k): 39 | super().__init__(points, k) 40 | 41 | def get_sample(self): 42 | """Get k unique random points.""" 43 | sample = np.random.choice(len(self.points), self.k, replace=False) 44 | return self.points[sample] 45 | 46 | 47 | class VoxelgridRansacSampler(RansacSampler): 48 | """Sample random points inside the same random voxel. 49 | 50 | Inherits from RansacSampler. 51 | 52 | Parameters 53 | ---------- 54 | points: (N, 3) numpy.array 55 | k: int 56 | Numbber of points to sample. 57 | n_x, n_y, n_z : int, optional 58 | Default: 1 59 | The number of segments in which each axis will be divided. 60 | Ignored if corresponding size_x, size_y or size_z is not None. 61 | size_x, size_y, size_z : float, optional 62 | Default: None 63 | The desired voxel size along each axis. 64 | If not None, the corresponding n_x, n_y or n_z will be ignored. 65 | regular_bounding_box : bool, optional 66 | Default: True 67 | If True, the bounding box of the point cloud will be adjusted 68 | in order to have all the dimensions of equal length. 69 | """ 70 | 71 | def __init__( 72 | self, 73 | points, 74 | k, 75 | n_x=1, 76 | n_y=1, 77 | n_z=1, 78 | size_x=None, 79 | size_y=None, 80 | size_z=None, 81 | regular_bounding_box=True, 82 | ): 83 | super().__init__(points, k) 84 | self.voxelgrid = VoxelGrid( 85 | points=self.points, 86 | n_x=n_x, 87 | n_y=n_y, 88 | n_z=n_z, 89 | size_x=size_x, 90 | size_y=size_y, 91 | size_z=size_z, 92 | regular_bounding_box=regular_bounding_box, 93 | ) 94 | self.voxelgrid.compute() 95 | 96 | def get_sample(self): 97 | """Get k unique random points from the same voxel of one randomly picked point.""" 98 | # pick one point and get its voxel index 99 | idx = np.random.randint(0, len(self.points)) 100 | voxel = self.voxelgrid.voxel_n[idx] 101 | 102 | # get index of points inside that voxel and convert to probabilities 103 | points_in_voxel = (self.voxelgrid.voxel_n == voxel).astype(int) 104 | points_in_voxel = points_in_voxel / points_in_voxel.sum() 105 | 106 | sample = np.random.choice( 107 | len(self.points), self.k, replace=False, p=points_in_voxel 108 | ) 109 | 110 | return self.points[sample] 111 | -------------------------------------------------------------------------------- /src/pyntcloud/samplers/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | HAKUNA MATATA 3 | """ 4 | 5 | from .points import RandomPointsSampler, FarthestPointsSampler 6 | from .mesh import RandomMeshSampler 7 | from .voxelgrid import ( 8 | VoxelgridCentersSampler, 9 | VoxelgridCentroidsSampler, 10 | VoxelgridNearestSampler, 11 | VoxelgridHighestSampler, 12 | ) 13 | 14 | ALL_SAMPLERS = { 15 | # Mesh 16 | "mesh_random": RandomMeshSampler, 17 | # Points 18 | "points_random": RandomPointsSampler, 19 | "points_farthest": FarthestPointsSampler, 20 | # Voxelgrid 21 | "voxelgrid_centers": VoxelgridCentersSampler, 22 | "voxelgrid_centroids": VoxelgridCentroidsSampler, 23 | "voxelgrid_nearest": VoxelgridNearestSampler, 24 | "voxelgrid_highest": VoxelgridHighestSampler, 25 | } 26 | -------------------------------------------------------------------------------- /src/pyntcloud/samplers/base.py: 
-------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | 4 | class Sampler(ABC): 5 | """Base class for sampling methods.""" 6 | 7 | def __init__(self, *, pyntcloud): 8 | self.pyntcloud = pyntcloud 9 | 10 | @abstractmethod 11 | def extract_info(self): 12 | pass 13 | 14 | @abstractmethod 15 | def compute(self): 16 | pass 17 | -------------------------------------------------------------------------------- /src/pyntcloud/samplers/mesh.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | 4 | from .base import Sampler 5 | from ..geometry.areas import triangle_area_multi 6 | 7 | 8 | class MeshSampler(Sampler): 9 | """ """ 10 | 11 | def __init__(self, *, pyntcloud, rgb=False, normals=False): 12 | super().__init__(pyntcloud=pyntcloud) 13 | self.rgb = rgb 14 | self.normals = normals 15 | 16 | def extract_info(self): 17 | v1, v2, v3 = self.pyntcloud.get_mesh_vertices( 18 | rgb=self.rgb, normals=self.normals 19 | ) 20 | 21 | self.v1_xyz = v1[:, :3] 22 | self.v2_xyz = v2[:, :3] 23 | self.v3_xyz = v3[:, :3] 24 | 25 | if self.rgb: 26 | self.v1_rgb = v1[:, 3:6] 27 | self.v2_rgb = v2[:, 3:6] 28 | self.v3_rgb = v3[:, 3:6] 29 | 30 | if self.normals: 31 | self.v1_normals = v1[:, 6:] 32 | self.v2_normals = v2[:, 6:] 33 | self.v3_normals = v3[:, 6:] 34 | 35 | elif self.normals: 36 | self.v1_normals = v1[:, 3:6] 37 | self.v2_normals = v2[:, 3:6] 38 | self.v3_normals = v3[:, 3:6] 39 | 40 | 41 | class RandomMeshSampler(MeshSampler): 42 | """Sample points adjusting probabilities according to triangle area. 43 | 44 | Parameters 45 | ---------- 46 | n: int 47 | Number of points to be sampled. 48 | 49 | rgb: bool, optional 50 | Default: False 51 | Indicates if RGB values will also be sampled. 52 | 53 | normals: bool, optional 54 | Default: False 55 | Indicates if normals will also be sampled. 
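A usage sketch for this sampler. It assumes the public entry points PyntCloud.from_file and PyntCloud.get_sample (core_class.py, not shown here, including the as_PyntCloud flag); the key "mesh_random" comes from ALL_SAMPLERS above and the mesh file is the one shipped under examples/data:

    from pyntcloud import PyntCloud

    mesh_cloud = PyntCloud.from_file("examples/data/ankylosaurus_mesh.ply")

    # Draw 10,000 surface points; triangles are picked with probability
    # proportional to their area.
    sampled = mesh_cloud.get_sample(
        "mesh_random", n=10_000, rgb=False, normals=False, as_PyntCloud=True
    )
    print(len(sampled.points))
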
56 | 57 | """ 58 | 59 | def __init__(self, *, pyntcloud, n, rgb=False, normals=False): 60 | super().__init__(pyntcloud=pyntcloud, rgb=rgb, normals=normals) 61 | self.n = n 62 | 63 | def compute(self): 64 | areas = triangle_area_multi(self.v1_xyz, self.v2_xyz, self.v3_xyz) 65 | probabilities = areas / np.sum(areas) 66 | random_idx = np.random.choice( 67 | np.arange(len(areas)), size=self.n, p=probabilities 68 | ) 69 | 70 | v1_xyz = self.v1_xyz[random_idx] 71 | v2_xyz = self.v2_xyz[random_idx] 72 | v3_xyz = self.v3_xyz[random_idx] 73 | 74 | # (n, 1) the 1 is for broadcasting 75 | u = np.random.uniform(low=0.0, high=1.0, size=(self.n, 1)) 76 | v = np.random.uniform(low=0.0, high=1 - u, size=(self.n, 1)) 77 | 78 | result = pd.DataFrame() 79 | 80 | result_xyz = (v1_xyz * u) + (v2_xyz * v) + ((1 - (u + v)) * v3_xyz) 81 | result_xyz = result_xyz.astype(np.float32) 82 | 83 | result["x"] = result_xyz[:, 0] 84 | result["y"] = result_xyz[:, 1] 85 | result["z"] = result_xyz[:, 2] 86 | 87 | if self.rgb: 88 | v1_rgb = self.v1_rgb[random_idx] 89 | v2_rgb = self.v2_rgb[random_idx] 90 | v3_rgb = self.v3_rgb[random_idx] 91 | 92 | result_rgb = (v1_rgb * u) + (v2_rgb * v) + ((1 - (u + v)) * v3_rgb) 93 | result_rgb = result_rgb.astype(np.uint8) 94 | 95 | result["red"] = result_rgb[:, 0] 96 | result["green"] = result_rgb[:, 1] 97 | result["blue"] = result_rgb[:, 2] 98 | 99 | if self.normals: 100 | v1_normals = self.v1_normals[random_idx] 101 | v2_normals = self.v2_normals[random_idx] 102 | v3_normals = self.v3_normals[random_idx] 103 | 104 | sum_normals = v1_normals + v2_normals + v3_normals 105 | 106 | result_normals = ( 107 | sum_normals / np.linalg.norm(sum_normals, axis=1)[..., None] 108 | ) 109 | result_normals = result_normals.astype(np.float32) 110 | 111 | result["nx"] = result_normals[:, 0] 112 | result["ny"] = result_normals[:, 1] 113 | result["nz"] = result_normals[:, 2] 114 | 115 | return result 116 | -------------------------------------------------------------------------------- /src/pyntcloud/samplers/points.py: -------------------------------------------------------------------------------- 1 | from .base import Sampler 2 | 3 | import numpy as np 4 | import pandas as pd 5 | 6 | 7 | class PointsSampler(Sampler): 8 | """ """ 9 | 10 | def extract_info(self): 11 | self.points = self.pyntcloud.points 12 | 13 | 14 | class RandomPointsSampler(PointsSampler): 15 | """ 16 | Parameters 17 | ---------- 18 | n: int 19 | Number of unique points that will be chosen. 20 | """ 21 | 22 | def __init__(self, *, pyntcloud, n): 23 | super().__init__(pyntcloud=pyntcloud) 24 | self.n = n 25 | 26 | def compute(self): 27 | if self.n > len(self.points): 28 | raise ValueError( 29 | "n can't be higher than the number of points in the PyntCloud." 30 | ) 31 | return self.points.sample(self.n).reset_index(drop=True) 32 | 33 | 34 | class FarthestPointsSampler(PointsSampler): 35 | """ 36 | Parameters 37 | ---------- 38 | n: int 39 | Number of unique points that will be chosen. 
40 | d_metric: 3*3 numpy array 41 | a positive semi-definite matrix which defines a distance metric 42 | """ 43 | 44 | def __init__(self, *, pyntcloud, n, d_metric=np.eye(3)): 45 | """d_metric -> Euclidean distance space by default, can be modified to other Mahalanobis distance as well""" 46 | super().__init__(pyntcloud=pyntcloud) 47 | self.n = n 48 | if not np.all(np.linalg.eigvals(d_metric) >= 0): 49 | raise ValueError("the distance metric must be positive semi-definite") 50 | self.d_metric = d_metric 51 | 52 | def cal_distance(self, point, solution_set): 53 | """ 54 | :param point: points which is not sampled yet, N*10 or N*3 numpy array 55 | :param solution_set: the points which has been selected, M*3 or M*10 array 56 | :return: a (N, ) array, where each element is equal to the sum of distance 57 | of all points in 'solution_set' w.r.t the unselected point in the 'point' 58 | """ 59 | distance_sum = np.zeros(len(point)) 60 | 61 | for pt in solution_set: 62 | distance_sum += np.diag( 63 | np.dot( 64 | (point[:, :3] - pt[:3]), self.d_metric @ (point[:, :3] - pt[:3]).T 65 | ) 66 | ) 67 | return distance_sum 68 | 69 | def compute(self): 70 | "incremental farthest search" 71 | if self.n > len(self.points): 72 | raise ValueError("sampled points can't be more than the original input") 73 | remaining_points = self.points.values 74 | 75 | # the sampled points set as the return 76 | select_idx = np.random.randint(low=0, high=len(self.points)) 77 | # to remain the shape as (1, n) instead of (n, ) 78 | solution_set = remaining_points[select_idx : select_idx + 1] 79 | remaining_points = np.delete(remaining_points, select_idx, 0) 80 | 81 | for _ in range(self.n - 1): 82 | distance_sum = self.cal_distance(remaining_points, solution_set) 83 | select_idx = np.argmax(distance_sum) 84 | solution_set = np.concatenate( 85 | [solution_set, remaining_points[select_idx : select_idx + 1]], axis=0 86 | ) 87 | remaining_points = np.delete(remaining_points, select_idx, 0) 88 | 89 | return pd.DataFrame(solution_set, columns=self.points.columns) 90 | -------------------------------------------------------------------------------- /src/pyntcloud/samplers/voxelgrid.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | 4 | from scipy.spatial.distance import cdist 5 | 6 | from .base import Sampler 7 | 8 | 9 | class VoxelgridSampler(Sampler): 10 | def __init__(self, *, pyntcloud, voxelgrid_id): 11 | super().__init__(pyntcloud=pyntcloud) 12 | self.voxelgrid_id = voxelgrid_id 13 | 14 | def extract_info(self): 15 | self.voxelgrid = self.pyntcloud.structures[self.voxelgrid_id] 16 | 17 | 18 | class VoxelgridCentersSampler(VoxelgridSampler): 19 | """Returns the points that represent each occupied voxel's center.""" 20 | 21 | def compute(self): 22 | return pd.DataFrame( 23 | self.voxelgrid.voxel_centers[np.unique(self.voxelgrid.voxel_n)], 24 | columns=["x", "y", "z"], 25 | ) 26 | 27 | 28 | class VoxelgridCentroidsSampler(VoxelgridSampler): 29 | """Returns the centroid of each group of points inside each occupied voxel.""" 30 | 31 | def compute(self): 32 | df = pd.DataFrame(self.pyntcloud.xyz, columns=["x", "y", "z"]) 33 | df["voxel_n"] = self.voxelgrid.voxel_n 34 | return df.groupby("voxel_n").mean() 35 | 36 | 37 | class VoxelgridNearestSampler(VoxelgridSampler): 38 | """Returns the N closest points to each occupied voxel's center.""" 39 | 40 | def __init__(self, *, pyntcloud, voxelgrid_id, n=1): 41 | super().__init__(pyntcloud=pyntcloud, 
voxelgrid_id=voxelgrid_id) 42 | self.n = n 43 | 44 | def compute(self): 45 | voxel_n_id = "voxel_n({})".format(self.voxelgrid_id) 46 | if voxel_n_id not in self.pyntcloud.points: 47 | self.pyntcloud.points[voxel_n_id] = self.voxelgrid.voxel_n 48 | nearests = [] 49 | for voxel_n, x in self.pyntcloud.points.groupby(voxel_n_id, sort=False): 50 | xyz = x.loc[:, ["x", "y", "z"]].values 51 | center = self.voxelgrid.voxel_centers[voxel_n] 52 | voxel_nearest = cdist([center], xyz)[0].argsort()[: self.n] 53 | nearests.extend(x.index.values[voxel_nearest]) 54 | return self.pyntcloud.points.iloc[nearests].reset_index(drop=True) 55 | 56 | 57 | class VoxelgridHighestSampler(VoxelgridSampler): 58 | """Returns the highest points of each voxel.""" 59 | 60 | def compute(self): 61 | voxel_n_id = "voxel_n({})".format(self.voxelgrid_id) 62 | if voxel_n_id not in self.pyntcloud.points: 63 | self.pyntcloud.points[voxel_n_id] = self.voxelgrid.voxel_n 64 | return self.pyntcloud.points.iloc[ 65 | self.pyntcloud.points.groupby(voxel_n_id)["z"].idxmax() 66 | ].reset_index(drop=True) 67 | -------------------------------------------------------------------------------- /src/pyntcloud/scalar_fields/__init__.py: -------------------------------------------------------------------------------- 1 | from .eigenvalues import ( 2 | Anisotropy, 3 | Curvature, 4 | Eigenentropy, 5 | EigenSum, 6 | Linearity, 7 | Omnivariance, 8 | Planarity, 9 | Sphericity, 10 | ) 11 | from .k_neighbors import ( 12 | EigenDecomposition, 13 | EigenValues, 14 | UnorientedNormals, 15 | ) 16 | from .normals import ( 17 | InclinationDegrees, 18 | InclinationRadians, 19 | OrientationDegrees, 20 | OrientationRadians, 21 | ) 22 | from .rgb import HueSaturationValue, RelativeLuminance, RGBIntensity 23 | from .voxelgrid import VoxelN, VoxelX, VoxelY, VoxelZ, EuclideanClusters 24 | from .xyz import ( 25 | PlaneFit, 26 | SphereFit, 27 | CustomFit, 28 | SphericalCoordinates, 29 | CylindricalCoordinates, 30 | ) 31 | 32 | ALL_SF = { 33 | # Eigenvalues 34 | "anisotropy": Anisotropy, 35 | "curvature": Curvature, 36 | "eigenentropy": Eigenentropy, 37 | "eigen_sum": EigenSum, 38 | "linearity": Linearity, 39 | "omnivariance": Omnivariance, 40 | "planarity": Planarity, 41 | "sphericity": Sphericity, 42 | # Kneighbors 43 | "eigen_decomposition": EigenDecomposition, 44 | "eigen_values": EigenValues, 45 | "normals": UnorientedNormals, 46 | # Normals 47 | "inclination_degrees": InclinationDegrees, 48 | "inclination_radians": InclinationRadians, 49 | "orientation_degrees": OrientationDegrees, 50 | "orientation_radians": OrientationRadians, 51 | # RGB 52 | "hsv": HueSaturationValue, 53 | "relative_luminance": RelativeLuminance, 54 | "rgb_intensity": RGBIntensity, 55 | # Voxelgrid 56 | "voxel_n": VoxelN, 57 | "voxel_x": VoxelX, 58 | "voxel_y": VoxelY, 59 | "voxel_z": VoxelZ, 60 | "euclidean_clusters": EuclideanClusters, 61 | # XYZ 62 | "custom_fit": CustomFit, 63 | "plane_fit": PlaneFit, 64 | "sphere_fit": SphereFit, 65 | "spherical_coords": SphericalCoordinates, 66 | "cylindrical_coords": CylindricalCoordinates, 67 | } 68 | -------------------------------------------------------------------------------- /src/pyntcloud/scalar_fields/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from collections import OrderedDict 3 | 4 | 5 | class ScalarField(ABC): 6 | """Base class for scalar fields.""" 7 | 8 | def __init__(self, *, pyntcloud): 9 | self.pyntcloud = pyntcloud 10 | self.to_be_added = 
OrderedDict() 11 | 12 | def get_and_set(self): 13 | sf_added = [] 14 | for k, v in self.to_be_added.items(): 15 | sf_added.append(k) 16 | self.pyntcloud.points[k] = v 17 | 18 | if len(sf_added) == 1: 19 | return sf_added[0] 20 | else: 21 | return sf_added 22 | 23 | @abstractmethod 24 | def extract_info(self): 25 | pass 26 | 27 | @abstractmethod 28 | def compute(self): 29 | pass 30 | -------------------------------------------------------------------------------- /src/pyntcloud/scalar_fields/eigenvalues.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from .base import ScalarField 3 | 4 | 5 | class EigenValuesScalarField(ScalarField): 6 | """ 7 | Parameters 8 | ---------- 9 | ev : list of str 10 | Column names of the eigen values. 11 | Tip: 12 | ev = self.add_scalar_field("eigen_values", ...) 13 | """ 14 | 15 | def __init__(self, *, pyntcloud, ev): 16 | super().__init__(pyntcloud=pyntcloud) 17 | self.k = ev[0].split("e1")[1] 18 | self.ev = ev 19 | 20 | def extract_info(self): 21 | self.ev = self.pyntcloud.points[self.ev].values 22 | 23 | 24 | class Anisotropy(EigenValuesScalarField): 25 | """ """ 26 | 27 | def compute(self): 28 | name = "anisotropy{}".format(self.k) 29 | ev = self.ev 30 | self.to_be_added[name] = np.nan_to_num((ev[:, 0] - ev[:, 2]) / ev[:, 0]) 31 | 32 | 33 | class Curvature(EigenValuesScalarField): 34 | """ """ 35 | 36 | def compute(self): 37 | name = "curvature{}".format(self.k) 38 | ev = self.ev 39 | self.to_be_added[name] = np.nan_to_num( 40 | ev[:, 2] / (ev[:, 0] + ev[:, 1] + ev[:, 2]) 41 | ) 42 | 43 | 44 | class Eigenentropy(EigenValuesScalarField): 45 | """ """ 46 | 47 | def compute(self): 48 | name = "eigenentropy{}".format(self.k) 49 | ev = self.ev 50 | result = np.zeros(ev.shape[0]) 51 | for i in range(3): 52 | result += ev[:, i] * np.log(ev[:, i]) 53 | self.to_be_added[name] = np.nan_to_num(-result) 54 | 55 | 56 | class EigenSum(EigenValuesScalarField): 57 | """ """ 58 | 59 | def compute(self): 60 | name = "eigen_sum{}".format(self.k) 61 | self.to_be_added[name] = self.ev[:, 0] + self.ev[:, 1] + self.ev[:, 2] 62 | 63 | 64 | class Linearity(EigenValuesScalarField): 65 | """ """ 66 | 67 | def compute(self): 68 | name = "linearity{}".format(self.k) 69 | ev = self.ev 70 | self.to_be_added[name] = np.nan_to_num((ev[:, 0] - ev[:, 1]) / ev[:, 0]) 71 | 72 | 73 | class Omnivariance(EigenValuesScalarField): 74 | """ """ 75 | 76 | def compute(self): 77 | name = "omnivariance{}".format(self.k) 78 | ev = self.ev 79 | self.to_be_added[name] = np.nan_to_num( 80 | (ev[:, 0] * ev[:, 1] * ev[:, 2]) ** (1 / 3) 81 | ) 82 | 83 | 84 | class Planarity(EigenValuesScalarField): 85 | """ """ 86 | 87 | def compute(self): 88 | name = "planarity{}".format(self.k) 89 | ev = self.ev 90 | self.to_be_added[name] = np.nan_to_num((ev[:, 1] - ev[:, 2]) / ev[:, 0]) 91 | 92 | 93 | class Sphericity(EigenValuesScalarField): 94 | """ """ 95 | 96 | def compute(self): 97 | name = "sphericity{}".format(self.k) 98 | ev = self.ev 99 | self.to_be_added[name] = np.nan_to_num(ev[:, 2] / ev[:, 0]) 100 | -------------------------------------------------------------------------------- /src/pyntcloud/scalar_fields/k_neighbors.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from .base import ScalarField 4 | from ..utils.array import cov3D 5 | 6 | 7 | class KNeighborsScalarField(ScalarField): 8 | """ 9 | Parameters 10 | ---------- 11 | k_neighbors: ndarray 12 | (N, k, 3) The k neighbours 
associated to each of the N points. 13 | """ 14 | 15 | def __init__(self, *, pyntcloud, k_neighbors): 16 | super().__init__(pyntcloud=pyntcloud) 17 | # add each point to its neighborhood 18 | self.k_neighbors_idx = np.c_[range(len(k_neighbors)), k_neighbors] 19 | 20 | def extract_info(self): 21 | self.k_neighbors = self.pyntcloud.xyz[self.k_neighbors_idx] 22 | 23 | 24 | class EigenValues(KNeighborsScalarField): 25 | """Compute the eigen values of each point's neighbourhood.""" 26 | 27 | def compute(self): 28 | cov = cov3D(self.k_neighbors) 29 | eigenvalues = np.linalg.eigvals(cov) 30 | sort = eigenvalues.argsort() 31 | 32 | # range from 0-shape[0] to allow indexing along axis 1 and 2 33 | idx_trick = range(eigenvalues.shape[0]) 34 | 35 | e1 = eigenvalues[idx_trick, sort[:, 2]] 36 | e2 = eigenvalues[idx_trick, sort[:, 1]] 37 | e3 = eigenvalues[idx_trick, sort[:, 0]] 38 | 39 | k = self.k_neighbors.shape[1] 40 | self.to_be_added["e1({})".format(k)] = e1 41 | self.to_be_added["e2({})".format(k)] = e2 42 | self.to_be_added["e3({})".format(k)] = e3 43 | 44 | 45 | class EigenDecomposition(KNeighborsScalarField): 46 | """Compute the eigen decomposition of each point's neighbourhood.""" 47 | 48 | def compute(self): 49 | cov = cov3D(self.k_neighbors) 50 | eigenvalues, eigenvectors = np.linalg.eig(cov) 51 | sort = eigenvalues.argsort() 52 | 53 | # range from 0-shape[0] to allow indexing along axis 1 and 2 54 | idx_trick = range(eigenvalues.shape[0]) 55 | 56 | e1 = eigenvalues[idx_trick, sort[:, 2]] 57 | e2 = eigenvalues[idx_trick, sort[:, 1]] 58 | e3 = eigenvalues[idx_trick, sort[:, 0]] 59 | 60 | k = self.k_neighbors.shape[1] 61 | self.to_be_added["e1({})".format(k)] = e1 62 | self.to_be_added["e2({})".format(k)] = e2 63 | self.to_be_added["e3({})".format(k)] = e3 64 | 65 | ev1 = eigenvectors[idx_trick, :, sort[:, 2]] 66 | ev2 = eigenvectors[idx_trick, :, sort[:, 1]] 67 | ev3 = eigenvectors[idx_trick, :, sort[:, 0]] 68 | 69 | self.to_be_added["ev1_x({})".format(k)] = ev1[:, 0] 70 | self.to_be_added["ev1_y({})".format(k)] = ev1[:, 1] 71 | self.to_be_added["ev1_z({})".format(k)] = ev1[:, 2] 72 | 73 | self.to_be_added["ev2_x({})".format(k)] = ev2[:, 0] 74 | self.to_be_added["ev2_y({})".format(k)] = ev2[:, 1] 75 | self.to_be_added["ev2_z({})".format(k)] = ev2[:, 2] 76 | 77 | self.to_be_added["ev3_x({})".format(k)] = ev3[:, 0] 78 | self.to_be_added["ev3_y({})".format(k)] = ev3[:, 1] 79 | self.to_be_added["ev3_z({})".format(k)] = ev3[:, 2] 80 | 81 | 82 | class UnorientedNormals(KNeighborsScalarField): 83 | """Compute normals using SVD.""" 84 | 85 | def compute(self): 86 | cov = cov3D(self.k_neighbors) 87 | u, s, v = np.linalg.svd(cov) 88 | 89 | normals = u[:, :, -1] 90 | 91 | k = self.k_neighbors.shape[1] 92 | self.to_be_added["nx({})".format(k)] = normals[:, 0] 93 | self.to_be_added["ny({})".format(k)] = normals[:, 1] 94 | self.to_be_added["nz({})".format(k)] = normals[:, 2] 95 | -------------------------------------------------------------------------------- /src/pyntcloud/scalar_fields/normals.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from .base import ScalarField 3 | 4 | 5 | class NormalsScalarField(ScalarField): 6 | def extract_info(self): 7 | self.normals = self.pyntcloud.points[["nx", "ny", "nz"]].values 8 | 9 | 10 | class InclinationDegrees(NormalsScalarField): 11 | """Vertical inclination with respect to Z axis in degrees.""" 12 | 13 | def compute(self): 14 | inclination = np.arccos(self.normals[:, -1]) 15 | 
self.to_be_added["inclination_deg"] = np.rad2deg(inclination) 16 | 17 | 18 | class InclinationRadians(NormalsScalarField): 19 | """Vertical inclination with respect to Z axis in radians.""" 20 | 21 | def compute(self): 22 | inclination = np.arccos(self.normals[:, -1]) 23 | self.to_be_added["inclination_rad"] = inclination 24 | 25 | 26 | class OrientationDegrees(NormalsScalarField): 27 | """Horizontal orientation with respect to the XY plane in degrees.""" 28 | 29 | def compute(self): 30 | angle = np.arctan2(self.normals[:, 0], self.normals[:, 1]) 31 | # convert (-180 , 180) to (0 , 360) 32 | angle = np.where(angle < 0, angle + (2 * np.pi), angle) 33 | self.to_be_added["orientation_deg"] = np.rad2deg(angle) 34 | 35 | 36 | class OrientationRadians(NormalsScalarField): 37 | """Horizontal orientation with respect to the XY plane in radians.""" 38 | 39 | def compute(self): 40 | angle = np.arctan2(self.normals[:, 0], self.normals[:, 1]) 41 | # convert (-180 , 180) to (0 , 360) 42 | angle = np.where(angle < 0, angle + (2 * np.pi), angle) 43 | self.to_be_added["orientation_rad"] = angle 44 | -------------------------------------------------------------------------------- /src/pyntcloud/scalar_fields/rgb.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from .base import ScalarField 3 | 4 | 5 | class RGBScalarField(ScalarField): 6 | def extract_info(self): 7 | self.rgb = self.pyntcloud.points[["red", "green", "blue"]].values.astype("f") 8 | 9 | 10 | class RGBIntensity(RGBScalarField): 11 | """Red, green and blue intensity.""" 12 | 13 | def compute(self): 14 | rgb_i = np.nan_to_num(self.rgb / np.sum(self.rgb, axis=1, keepdims=True)) 15 | self.to_be_added["Ri"] = rgb_i[:, 0] 16 | self.to_be_added["Gi"] = rgb_i[:, 1] 17 | self.to_be_added["Bi"] = rgb_i[:, 2] 18 | 19 | 20 | class RelativeLuminance(RGBScalarField): 21 | """Similar to grayscale. 
Computed using the relative luminance coefficients given on Wikipedia.""" 22 | 23 | def compute(self): 24 | self.rgb /= 255.0 25 | coefficients = np.array([0.2125, 0.7154, 0.0721]) 26 | self.to_be_added["relative_luminance"] = np.einsum( 27 | "ij, j", self.rgb, coefficients 28 | ) 29 | 30 | 31 | class HueSaturationValue(RGBScalarField): 32 | """Hue, Saturation, Value colorspace.""" 33 | 34 | def compute(self): 35 | rgb = self.rgb 36 | MAX = np.max(rgb, -1) 37 | MIN = np.min(rgb, -1) 38 | MAX_MIN = np.ptp(rgb, -1) 39 | 40 | H = np.empty_like(MAX) 41 | 42 | idx = rgb[:, 0] == MAX 43 | H[idx] = 60 * (rgb[idx, 1] - rgb[idx, 2]) / MAX_MIN[idx] 44 | H[np.logical_and(idx, rgb[:, 1] < rgb[:, 2])] += 360 45 | 46 | idx = rgb[:, 1] == MAX 47 | H[idx] = (60 * (rgb[idx, 2] - rgb[idx, 0]) / MAX_MIN[idx]) + 120 48 | 49 | idx = rgb[:, 2] == MAX 50 | H[idx] = (60 * (rgb[idx, 0] - rgb[idx, 1]) / MAX_MIN[idx]) + 240 51 | 52 | self.to_be_added["H"] = np.nan_to_num(H) 53 | self.to_be_added["S"] = np.nan_to_num(np.where(MAX == 0, 0, 1 - (MIN / MAX))) 54 | self.to_be_added["V"] = np.nan_to_num(MAX / 255 * 100) 55 | -------------------------------------------------------------------------------- /src/pyntcloud/scalar_fields/voxelgrid.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from .base import ScalarField 3 | 4 | 5 | class VoxelgridScalarField(ScalarField): 6 | def __init__(self, *, pyntcloud, voxelgrid_id): 7 | super().__init__(pyntcloud=pyntcloud) 8 | self.voxelgrid_id = voxelgrid_id 9 | 10 | def extract_info(self): 11 | self.voxelgrid = self.pyntcloud.structures[self.voxelgrid_id] 12 | 13 | def compute(self): 14 | pass 15 | 16 | 17 | class VoxelX(VoxelgridScalarField): 18 | """Voxel index along x axis.""" 19 | 20 | def compute(self): 21 | name = "{}({})".format("voxel_x", self.voxelgrid_id) 22 | self.to_be_added[name] = self.voxelgrid.voxel_x 23 | 24 | 25 | class VoxelY(VoxelgridScalarField): 26 | """Voxel index along y axis.""" 27 | 28 | def compute(self): 29 | name = "{}({})".format("voxel_y", self.voxelgrid_id) 30 | self.to_be_added[name] = self.voxelgrid.voxel_y 31 | 32 | 33 | class VoxelZ(VoxelgridScalarField): 34 | """Voxel index along z axis.""" 35 | 36 | def compute(self): 37 | name = "{}({})".format("voxel_z", self.voxelgrid_id) 38 | self.to_be_added[name] = self.voxelgrid.voxel_z 39 | 40 | 41 | class VoxelN(VoxelgridScalarField): 42 | """Voxel index in 3D array using 'C' order.""" 43 | 44 | def compute(self): 45 | name = "{}({})".format("voxel_n", self.voxelgrid_id) 46 | self.to_be_added[name] = self.voxelgrid.voxel_n 47 | 48 | 49 | class EuclideanClusters(VoxelgridScalarField): 50 | """Assign the corresponding cluster label to each point inside each voxel.""" 51 | 52 | def compute(self): 53 | name = "{}({})".format("clusters", self.voxelgrid_id) 54 | 55 | to_be_processed = np.zeros(self.voxelgrid.n_voxels, dtype=bool) 56 | to_be_processed[np.unique(self.voxelgrid.voxel_n)] = True 57 | 58 | clusters = np.zeros(self.voxelgrid.voxel_n.shape[0]) 59 | 60 | C = 0 61 | while np.any(to_be_processed): 62 | Q = [] 63 | Q.append(np.random.choice(np.where(to_be_processed)[0])) 64 | 65 | for voxel in Q: 66 | clusters[np.where(self.voxelgrid.voxel_n == voxel)[0]] = C 67 | to_be_processed[voxel] = False 68 | neighbors = self.voxelgrid.get_voxel_neighbors(voxel) 69 | for neighbor in neighbors: 70 | if to_be_processed[neighbor]: 71 | Q.append(neighbor) 72 | to_be_processed[neighbor] = False 73 | C += 1 74 | 75 | self.to_be_added[name] = clusters 76 |
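A minimal usage sketch for the voxelgrid-based scalar fields above, combining them with the add_structure / add_scalar_field calls used in the tests further down. The file name cloud.ply and the n_x/n_y/n_z values are illustrative assumptions, not part of the library:

from pyntcloud import PyntCloud

cloud = PyntCloud.from_file("cloud.ply")  # hypothetical input file
# Build a voxelgrid structure; the returned id is used to reference it later.
voxelgrid_id = cloud.add_structure("voxelgrid", n_x=32, n_y=32, n_z=32)
# Per-point voxel index along the x axis, added as "voxel_x(<voxelgrid_id>)".
cloud.add_scalar_field("voxel_x", voxelgrid_id=voxelgrid_id)
# Flood-fill clustering over occupied voxels, added as "clusters(<voxelgrid_id>)".
cloud.add_scalar_field("euclidean_clusters", voxelgrid_id=voxelgrid_id)
print(cloud.points.columns)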
-------------------------------------------------------------------------------- /src/pyntcloud/structures/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | HAKUNA MATATA 3 | """ 4 | 5 | from .convex_hull import ConvexHull 6 | from .delaunay import Delaunay3D 7 | from .kdtree import KDTree 8 | from .voxelgrid import VoxelGrid 9 | 10 | ALL_STRUCTURES = { 11 | "convex_hull": ConvexHull, 12 | "delaunay3D": Delaunay3D, 13 | "kdtree": KDTree, 14 | "voxelgrid": VoxelGrid, 15 | } 16 | -------------------------------------------------------------------------------- /src/pyntcloud/structures/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | 4 | class Structure(ABC): 5 | """Base class for structures.""" 6 | 7 | def __init__(self, *, points): 8 | self._points = points 9 | 10 | def get_and_set(self, pyntcloud): 11 | pyntcloud.structures[self.id] = self 12 | return self.id 13 | 14 | @classmethod 15 | def extract_info(cls, pyntcloud): 16 | """ABC API""" 17 | info = { 18 | "points": pyntcloud.xyz, 19 | } 20 | return info 21 | 22 | @abstractmethod 23 | def compute(self): 24 | pass 25 | 26 | 27 | class StructuresDict(dict): 28 | """Custom class to restrict PyntCloud.structures assigment.""" 29 | 30 | def __init__(self, *args): 31 | self.n_voxelgrids = 0 32 | self.n_kdtrees = 0 33 | self.n_delaunays = 0 34 | self.n_convex_hulls = 0 35 | super().__init__(*args) 36 | 37 | def __setitem__(self, key, val): 38 | if not issubclass(val.__class__, Structure): 39 | raise TypeError("{} must be base.Structure subclass".format(key)) 40 | 41 | # TODO better structure.id check 42 | if key.startswith("V"): 43 | self.n_voxelgrids += 1 44 | elif key.startswith("K"): 45 | self.n_kdtrees += 1 46 | elif key.startswith("D"): 47 | self.n_delaunays += 1 48 | elif key.startswith("CH"): 49 | self.n_convex_hulls += 1 50 | else: 51 | raise ValueError("{} is not a valid structure.id".format(key)) 52 | super().__setitem__(key, val) 53 | -------------------------------------------------------------------------------- /src/pyntcloud/structures/convex_hull.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from scipy.spatial import ConvexHull as scipy_ConvexHull 3 | 4 | 5 | from .base import Structure 6 | 7 | 8 | class ConvexHull(scipy_ConvexHull, Structure): 9 | def __init__(self, points, incremental=False, qhull_options=None): 10 | Structure.__init__(self, points=points) 11 | self._incremental = incremental 12 | self._qhull_options = qhull_options 13 | 14 | def compute(self): 15 | """ABC API""" 16 | self.id = "CH({})".format(self._qhull_options) 17 | scipy_ConvexHull.__init__( 18 | self, self._points, self._incremental, self._qhull_options 19 | ) 20 | 21 | def get_mesh(self): 22 | """ 23 | Use convex hull simplices to build mesh. 24 | 25 | The returned mesh is in mesh-vertex format, suitable for 26 | been assigned to PyntCloud.mesh. 
27 | """ 28 | mesh = pd.DataFrame(self.simplices, columns=["v1", "v2", "v3"]) 29 | 30 | return mesh 31 | -------------------------------------------------------------------------------- /src/pyntcloud/structures/delaunay.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from scipy.spatial import Delaunay 3 | from itertools import combinations 4 | 5 | from .base import Structure 6 | 7 | 8 | class Delaunay3D(Delaunay, Structure): 9 | def __init__( 10 | self, points, furthest_site=False, incremental=False, qhull_options=None 11 | ): 12 | Structure.__init__(self, points=points) 13 | self._furthest_site = furthest_site 14 | self._incremental = incremental 15 | self._qhull_options = qhull_options 16 | 17 | def compute(self): 18 | """ABC API""" 19 | self.id = "D({},{})".format(self._furthest_site, self._qhull_options) 20 | Delaunay.__init__( 21 | self, 22 | self._points, 23 | self._furthest_site, 24 | self._incremental, 25 | self._qhull_options, 26 | ) 27 | 28 | def get_mesh(self): 29 | """ 30 | Decompose the tetrahedrons into triangles to build mesh. 31 | 32 | The returned mesh is in mesh-vertex format, suitable for 33 | been assigned to PyntCloud.mesh. 34 | """ 35 | triangles = [] 36 | for tetra in self.simplices: 37 | for tri in combinations(tetra, 3): 38 | triangles.append([tri[0], tri[1], tri[2]]) 39 | mesh = pd.DataFrame(triangles, columns=["v1", "v2", "v3"]) 40 | 41 | return mesh 42 | -------------------------------------------------------------------------------- /src/pyntcloud/structures/kdtree.py: -------------------------------------------------------------------------------- 1 | from scipy.spatial import KDTree as sKDTree 2 | 3 | from .base import Structure 4 | 5 | 6 | class KDTree(sKDTree, Structure): 7 | def __init__( 8 | self, *, points, leafsize=16, compact_nodes=False, balanced_tree=False 9 | ): 10 | Structure.__init__(self, points=points) 11 | self._leafsize = leafsize 12 | self._compact_nodes = compact_nodes 13 | self._balanced_tree = balanced_tree 14 | 15 | def compute(self): 16 | self.id = "K({},{},{})".format( 17 | self._leafsize, self._compact_nodes, self._balanced_tree 18 | ) 19 | super(sKDTree, self).__init__( 20 | self._points, 21 | leafsize=self._leafsize, 22 | compact_nodes=self._compact_nodes, 23 | balanced_tree=self._balanced_tree, 24 | ) 25 | -------------------------------------------------------------------------------- /src/pyntcloud/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/src/pyntcloud/utils/__init__.py -------------------------------------------------------------------------------- /src/pyntcloud/utils/array.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def cartesian(arrays, out=None): 5 | """Generate a cartesian product of input arrays. 6 | 7 | Parameters 8 | ---------- 9 | arrays : list of array-like 10 | 1-D arrays to form the cartesian product of. 11 | out : ndarray 12 | Array to place the cartesian product in. 13 | 14 | Returns 15 | ------- 16 | out : ndarray 17 | 2-D array of shape (M, len(arrays)) containing cartesian products 18 | formed of input arrays. 
19 | 20 | Examples 21 | -------- 22 | >>> cartesian(([1, 2, 3], [4, 5], [6, 7])) 23 | array([[1, 4, 6], 24 | [1, 4, 7], 25 | [1, 5, 6], 26 | [1, 5, 7], 27 | [2, 4, 6], 28 | [2, 4, 7], 29 | [2, 5, 6], 30 | [2, 5, 7], 31 | [3, 4, 6], 32 | [3, 4, 7], 33 | [3, 5, 6], 34 | [3, 5, 7]]) 35 | 36 | """ 37 | arrays = [np.asarray(x) for x in arrays] 38 | shape = (len(x) for x in arrays) 39 | dtype = arrays[0].dtype 40 | 41 | ix = np.indices(shape) 42 | ix = ix.reshape(len(arrays), -1).T 43 | 44 | if out is None: 45 | out = np.empty_like(ix, dtype=dtype) 46 | 47 | for n, arr in enumerate(arrays): 48 | out[:, n] = arrays[n][ix[:, n]] 49 | 50 | return out 51 | 52 | 53 | def PCA(data, correlation=False, sort=True): 54 | """Applies Principal Component Analysis to the data 55 | 56 | Parameters 57 | ---------- 58 | data: array 59 | The array containing the data. The array must have NxM dimensions, where each 60 | of the N rows represents a different individual record and each of the M columns 61 | represents a different variable recorded for that individual record. 62 | array([ 63 | [V11, ... , V1m], 64 | ..., 65 | [Vn1, ... , Vnm]]) 66 | 67 | correlation(Optional) : bool 68 | Set the type of matrix to be computed (see Notes): 69 | If True compute the correlation matrix. 70 | If False(Default) compute the covariance matrix. 71 | 72 | sort(Optional) : bool 73 | Set the order that the eigenvalues/vectors will have 74 | If True(Default) they will be sorted (from higher value to less). 75 | If False they won't. 76 | Returns 77 | ------- 78 | eigenvalues: (1,M) array 79 | The eigenvalues of the corresponding matrix. 80 | 81 | eigenvector: (M,M) array 82 | The eigenvectors of the corresponding matrix. 83 | 84 | Notes 85 | ----- 86 | The correlation matrix is a better choice when there are different magnitudes 87 | representing the M variables. Use covariance matrix in other cases. 
88 | 89 | """ 90 | 91 | mean = np.mean(data, axis=0) 92 | 93 | data_adjust = data - mean 94 | 95 | #: the data is transposed due to np.cov/corrcoef syntax 96 | if correlation: 97 | matrix = np.corrcoef(data_adjust.T) 98 | 99 | else: 100 | matrix = np.cov(data_adjust.T) 101 | 102 | eigenvalues, eigenvectors = np.linalg.eig(matrix) 103 | 104 | if sort: 105 | #: sort eigenvalues and eigenvectors 106 | sort = eigenvalues.argsort()[::-1] 107 | eigenvalues = eigenvalues[sort] 108 | eigenvectors = eigenvectors[:, sort] 109 | 110 | return eigenvalues, eigenvectors 111 | 112 | 113 | def point_in_array_2D(point, array_2D): 114 | point = np.array(point, dtype=array_2D.dtype) 115 | for other_point in array_2D: 116 | if np.all(point == other_point): 117 | return True 118 | 119 | 120 | def cov3D(k_neighbors): 121 | """(N,K,3)""" 122 | diffs = k_neighbors - k_neighbors.mean(1, keepdims=True) 123 | return np.einsum("ijk,ijl->ikl", diffs, diffs) / k_neighbors.shape[1] 124 | -------------------------------------------------------------------------------- /src/pyntcloud/utils/dataframe.py: -------------------------------------------------------------------------------- 1 | def convert_columns_dtype(df, old_dtype, new_dtype): 2 | """ 3 | Parameters 4 | ---------- 5 | df: pandas.DataFrame 6 | 7 | old_dtype: numpy dtype 8 | 9 | new_dtype: numpy dtype 10 | """ 11 | changed = [] 12 | for column in df.columns: 13 | if df[column].dtype == old_dtype: 14 | df[column] = df[column].astype(new_dtype) 15 | changed.append(column) 16 | 17 | return changed 18 | -------------------------------------------------------------------------------- /src/pyntcloud/utils/mesh.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | 4 | 5 | def quadrilateral_to_triangular(mesh): 6 | new_mesh = pd.DataFrame() 7 | 8 | quadrilateral_vertex = mesh[["v1", "v2", "v3", "v4"]].values 9 | triangular_vertex = np.vstack( 10 | (quadrilateral_vertex[:, [0, 1, 2]], quadrilateral_vertex[:, [2, 3, 0]]) 11 | ) 12 | 13 | new_mesh["v1"] = triangular_vertex[:, 0] 14 | new_mesh["v2"] = triangular_vertex[:, 1] 15 | new_mesh["v3"] = triangular_vertex[:, 2] 16 | 17 | if "vn1" in mesh.columns: 18 | quadrilateral_vertex_normals = mesh[["vn1", "vn2", "vn3", "vn4"]].values 19 | triangular_vertex_normals = np.vstack( 20 | ( 21 | quadrilateral_vertex_normals[:, [0, 1, 2]], 22 | quadrilateral_vertex_normals[:, [2, 3, 0]], 23 | ) 24 | ) 25 | 26 | new_mesh["vn1"] = triangular_vertex_normals[:, 0] 27 | new_mesh["vn2"] = triangular_vertex_normals[:, 1] 28 | new_mesh["vn3"] = triangular_vertex_normals[:, 2] 29 | 30 | if "vt1" in mesh.columns: 31 | quadrilateral_vertex_texture = mesh[["vt1", "vt2", "vt3", "vt4"]].values 32 | 33 | triangular_vertex_texture = np.vstack( 34 | ( 35 | quadrilateral_vertex_texture[:, [0, 1, 2]], 36 | quadrilateral_vertex_texture[:, [2, 3, 0]], 37 | ) 38 | ) 39 | 40 | new_mesh["vt1"] = triangular_vertex_texture[:, 0] 41 | new_mesh["vt2"] = triangular_vertex_texture[:, 1] 42 | new_mesh["vt3"] = triangular_vertex_texture[:, 2] 43 | 44 | return new_mesh 45 | -------------------------------------------------------------------------------- /src/pyntcloud/utils/numba.py: -------------------------------------------------------------------------------- 1 | from numba import njit 2 | 3 | 4 | @njit 5 | def groupby_count(xyz, indices, out): 6 | for i in range(xyz.shape[0]): 7 | out[indices[i]] += 1 8 | return out 9 | 10 | 11 | @njit 12 | def groupby_sum(xyz, indices, N, out): 
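    # Sums the N-th column of xyz per group: out[g] += xyz[i][N] for every i with indices[i] == g.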
13 | for i in range(xyz.shape[0]): 14 | out[indices[i]] += xyz[i][N] 15 | return out 16 | 17 | 18 | @njit 19 | def groupby_max(xyz, indices, N, out): 20 | for i in range(xyz.shape[0]): 21 | if xyz[i][N] > out[indices[i]]: 22 | out[indices[i]] = xyz[i][N] 23 | return out 24 | -------------------------------------------------------------------------------- /tests/data/diamond.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/tests/data/diamond.bin -------------------------------------------------------------------------------- /tests/data/diamond.dae: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | VCGLab 6 | VCGLib | MeshLab 7 | 8 | Y_UP 9 | mar may 2 16:13:47 2017 10 | mar may 2 16:13:47 2017 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 0.5 0 0.5 0 0.5 0.5 0.5 0.5 0 1 0.5 0.5 0.5 1 0.5 0.5 0.5 1 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | -0.57735 -0.57735 -0.57735 -0.57735 0.57735 0.57735 -0.57735 -0.57735 0.57735 0.57735 -0.57735 0.57735 0.57735 0.57735 -0.57735 0.57735 -0.57735 0.57735 0.57735 0.57735 0.57735 -0.57735 0.57735 0.57735 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 1 0 0 1 0 0 0 1 0 1 0 0 1 0 0 0 0 1 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 |

0 0 0 1 1 0 2 2 0 0 0 1 3 3 1 2 2 1 3 3 2 4 4 2 2 2 2 4 4 3 1 1 3 2 2 3 0 0 4 1 1 4 5 5 4 0 0 5 3 3 5 5 5 5 3 3 6 4 4 6 5 5 6 4 4 7 1 1 7 5 5 7

56 |
57 |
58 |
59 |
60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 |
75 | -------------------------------------------------------------------------------- /tests/data/diamond.json: -------------------------------------------------------------------------------- 1 | { 2 | "version" : "0.1.0", 3 | 4 | "comment" : "Generated by MeshLab JSON Exporter", 5 | 6 | "id" : 1, 7 | "name" : "mesh", 8 | 9 | "vertices" : 10 | [ 11 | { 12 | "name" : "position_buffer", 13 | "size" : 3, 14 | "type" : "float32", 15 | "normalized" : false, 16 | "values" : 17 | [ 18 | 0.5, 0, 0.5, 0, 0.5, 0.5, 0.5, 0.5, 0, 1, 0.5, 0.5, 0.5, 1, 0.5, 0.5, 0.5, 1 19 | ] 20 | }, 21 | 22 | { 23 | "name" : "normal_buffer", 24 | "size" : 3, 25 | "type" : "float32", 26 | "normalized" : false, 27 | "values" : 28 | [ 29 | 0, -1, 0, -1, 0, 0, 0, 0, -1, 1, 0, 0, 0, 1, 0, 0, 0, 1 30 | ] 31 | }, 32 | 33 | { 34 | "name" : "color_buffer", 35 | "size" : 4, 36 | "type" : "uint8", 37 | "normalized" : true, 38 | "values" : 39 | [ 40 | 255, 0, 0, 255, 255, 0, 0, 255, 0, 255, 0, 255, 255, 0, 0, 255, 255, 0, 0, 255, 0, 0, 255, 255 41 | ] 42 | } 43 | ], 44 | 45 | "connectivity" : 46 | [ 47 | { 48 | "name" : "triangles", 49 | "mode" : "triangles_list", 50 | "indexed" : true, 51 | "indexType" : "uint32", 52 | "indices" : 53 | [ 54 | 0, 1, 2, 0, 3, 2, 3, 4, 2, 4, 1, 2, 0, 1, 5, 0, 3, 5, 3, 4, 5, 4, 1, 5 55 | ] 56 | } 57 | ], 58 | 59 | "mapping" : 60 | [ 61 | { 62 | "name" : "standard", 63 | "primitives" : "triangles", 64 | "attributes" : 65 | [ 66 | { 67 | "source" : "position_buffer", 68 | "semantic" : "position", 69 | "set" : 0 70 | }, 71 | { 72 | "source" : "normal_buffer", 73 | "semantic" : "normal", 74 | "set" : 0 75 | }, 76 | { 77 | "source" : "color_buffer", 78 | "semantic" : "color", 79 | "set" : 0 80 | } 81 | ] 82 | } 83 | ], 84 | 85 | "custom" : null 86 | } 87 | -------------------------------------------------------------------------------- /tests/data/diamond.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/tests/data/diamond.las -------------------------------------------------------------------------------- /tests/data/diamond.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/tests/data/diamond.laz -------------------------------------------------------------------------------- /tests/data/diamond.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/tests/data/diamond.npy -------------------------------------------------------------------------------- /tests/data/diamond.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/tests/data/diamond.npz -------------------------------------------------------------------------------- /tests/data/diamond.obj: -------------------------------------------------------------------------------- 1 | #### 2 | # 3 | # OBJ File Generated by Meshlab 4 | # 5 | #### 6 | # Object diamond.obj 7 | # 8 | # Vertices: 6 9 | # Faces: 8 10 | # 11 | #### 12 | vn 0.000000 -1.000000 0.000000 13 | v 0.500000 0.000000 0.500000 14 | vn -1.000000 0.000000 0.000000 15 | v 0.000000 0.500000 0.500000 16 | vn 0.000000 0.000000 -1.000000 17 | v 0.500000 0.500000 0.000000 18 | vn 1.000000 0.000000 
0.000000 19 | v 1.000000 0.500000 0.500000 20 | vn 0.000000 1.000000 0.000000 21 | v 0.500000 1.000000 0.500000 22 | vn 0.000000 0.000000 1.000000 23 | v 0.500000 0.500000 1.000000 24 | # 6 vertices, 0 vertices normals 25 | 26 | f 1//1 2//2 3//3 27 | f 1//1 4//4 3//3 28 | f 4//4 5//5 3//3 29 | f 5//5 2//2 3//3 30 | f 1//1 2//2 6//6 31 | f 1//1 4//4 6//6 32 | f 4//4 5//5 6//6 33 | f 5//5 2//2 6//6 34 | # 8 faces, 0 coords texture 35 | 36 | # End of File 37 | -------------------------------------------------------------------------------- /tests/data/diamond.off: -------------------------------------------------------------------------------- 1 | OFF 2 | 6 8 0 3 | 0.5 0 0.5 4 | 0 0.5 0.5 5 | 0.5 0.5 0 6 | 1 0.5 0.5 7 | 0.5 1 0.5 8 | 0.5 0.5 1 9 | 3 0 1 2 10 | 3 0 3 2 11 | 3 3 4 2 12 | 3 4 1 2 13 | 3 0 1 5 14 | 3 0 3 5 15 | 3 3 4 5 16 | 3 4 1 5 17 | -------------------------------------------------------------------------------- /tests/data/diamond.ply: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/tests/data/diamond.ply -------------------------------------------------------------------------------- /tests/data/diamond.vtk: -------------------------------------------------------------------------------- 1 | # vtk DataFile Version 3.0 2 | vtk output 3 | ASCII 4 | DATASET POLYDATA 5 | POINTS 6 float 6 | 0.50000000 0.00000000 0.50000000 7 | 0.00000000 0.50000000 0.50000000 8 | 0.50000000 0.50000000 0.00000000 9 | 1.00000000 0.50000000 0.50000000 10 | 0.50000000 1.00000000 0.50000000 11 | 0.50000000 0.50000000 1.00000000 12 | POLYGONS 8 32 13 | 3 0 1 2 14 | 3 0 3 2 15 | 3 3 4 2 16 | 3 4 1 2 17 | 3 0 1 5 18 | 3 0 3 5 19 | 3 3 4 5 20 | 3 4 1 5 21 | POINT_DATA 6 22 | NORMALS Normals float 23 | 0.00097752 -0.99999905 0.00097752 24 | -0.99999905 0.00097752 0.00097752 25 | 0.00097752 0.00097752 -0.99999905 26 | 0.99999905 0.00097752 0.00097752 27 | 0.00097752 0.99999905 0.00097752 28 | 0.00097752 0.00097752 0.99999905 29 | COLOR_SCALARS RGB 3 30 | 1.00000000 0.00000000 0.00000000 31 | 1.00000000 0.00000000 0.00000000 32 | 0.00000000 1.00000000 0.00000000 33 | 1.00000000 0.00000000 0.00000000 34 | 1.00000000 0.00000000 0.00000000 35 | 0.00000000 0.00000000 1.00000000 36 | -------------------------------------------------------------------------------- /tests/data/diamond.x3d: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /tests/data/diamond.xyz: -------------------------------------------------------------------------------- 1 | 0.500000 0.000000 0.500000 0.000000 -1.000000 0.000000 2 | 0.000000 0.500000 0.500000 -1.000000 0.000000 0.000000 3 | 0.500000 0.500000 0.000000 0.000000 0.000000 -1.000000 4 | 1.000000 0.500000 0.500000 1.000000 0.000000 0.000000 5 | 0.500000 1.000000 0.500000 0.000000 1.000000 0.000000 6 | 0.500000 0.500000 1.000000 0.000000 0.000000 1.000000 7 | -------------------------------------------------------------------------------- /tests/data/diamond_ascii.ply: -------------------------------------------------------------------------------- 1 | ply 2 | format ascii 1.0 3 | comment PyntCloud is cool 4 | element vertex 6 5 | property float x 6 | property float y 7 | property float z 8 | property float nx 9 | property float ny 10 | property float nz 11 | 
property uchar red 12 | property uchar green 13 | property uchar blue 14 | property uchar alpha 15 | element face 8 16 | property list uchar int vertex_indices 17 | end_header 18 | 0.5 0 0.5 0 -1 0 255 0 0 255 19 | 0 0.5 0.5 -1 0 0 255 0 0 255 20 | 0.5 0.5 0 0 0 -1 0 255 0 255 21 | 1 0.5 0.5 1 0 0 255 0 0 255 22 | 0.5 1 0.5 0 1 0 255 0 0 255 23 | 0.5 0.5 1 0 0 1 0 0 255 255 24 | 3 0 1 2 25 | 3 0 3 2 26 | 3 3 4 2 27 | 3 4 1 2 28 | 3 0 1 5 29 | 3 0 3 5 30 | 3 3 4 5 31 | 3 4 1 5 32 | -------------------------------------------------------------------------------- /tests/data/diamond_ascii.stl: -------------------------------------------------------------------------------- 1 | solid vcg 2 | facet normal -5.773503e-01 -5.773503e-01 -5.773503e-01 3 | outer loop 4 | vertex 5.000000e-01 0.000000e+00 5.000000e-01 5 | vertex 0.000000e+00 5.000000e-01 5.000000e-01 6 | vertex 5.000000e-01 5.000000e-01 0.000000e+00 7 | endloop 8 | endfacet 9 | facet normal -5.773503e-01 5.773503e-01 5.773503e-01 10 | outer loop 11 | vertex 5.000000e-01 0.000000e+00 5.000000e-01 12 | vertex 1.000000e+00 5.000000e-01 5.000000e-01 13 | vertex 5.000000e-01 5.000000e-01 0.000000e+00 14 | endloop 15 | endfacet 16 | facet normal -5.773503e-01 -5.773503e-01 5.773503e-01 17 | outer loop 18 | vertex 1.000000e+00 5.000000e-01 5.000000e-01 19 | vertex 5.000000e-01 1.000000e+00 5.000000e-01 20 | vertex 5.000000e-01 5.000000e-01 0.000000e+00 21 | endloop 22 | endfacet 23 | facet normal 5.773503e-01 -5.773503e-01 5.773503e-01 24 | outer loop 25 | vertex 5.000000e-01 1.000000e+00 5.000000e-01 26 | vertex 0.000000e+00 5.000000e-01 5.000000e-01 27 | vertex 5.000000e-01 5.000000e-01 0.000000e+00 28 | endloop 29 | endfacet 30 | facet normal 5.773503e-01 5.773503e-01 -5.773503e-01 31 | outer loop 32 | vertex 5.000000e-01 0.000000e+00 5.000000e-01 33 | vertex 0.000000e+00 5.000000e-01 5.000000e-01 34 | vertex 5.000000e-01 5.000000e-01 1.000000e+00 35 | endloop 36 | endfacet 37 | facet normal 5.773503e-01 -5.773503e-01 5.773503e-01 38 | outer loop 39 | vertex 5.000000e-01 0.000000e+00 5.000000e-01 40 | vertex 1.000000e+00 5.000000e-01 5.000000e-01 41 | vertex 5.000000e-01 5.000000e-01 1.000000e+00 42 | endloop 43 | endfacet 44 | facet normal 5.773503e-01 5.773503e-01 5.773503e-01 45 | outer loop 46 | vertex 1.000000e+00 5.000000e-01 5.000000e-01 47 | vertex 5.000000e-01 1.000000e+00 5.000000e-01 48 | vertex 5.000000e-01 5.000000e-01 1.000000e+00 49 | endloop 50 | endfacet 51 | facet normal -5.773503e-01 5.773503e-01 5.773503e-01 52 | outer loop 53 | vertex 5.000000e-01 1.000000e+00 5.000000e-01 54 | vertex 0.000000e+00 5.000000e-01 5.000000e-01 55 | vertex 5.000000e-01 5.000000e-01 1.000000e+00 56 | endloop 57 | endfacet 58 | endsolid vcg 59 | -------------------------------------------------------------------------------- /tests/data/diamond_ascii_vertex_index.ply: -------------------------------------------------------------------------------- 1 | ply 2 | format ascii 1.0 3 | comment PyntCloud is cool 4 | element vertex 6 5 | property float x 6 | property float y 7 | property float z 8 | property float nx 9 | property float ny 10 | property float nz 11 | property uchar red 12 | property uchar green 13 | property uchar blue 14 | property uchar alpha 15 | element face 8 16 | property list uchar int vertex_index 17 | end_header 18 | 0.5 0 0.5 0 -1 0 255 0 0 255 19 | 0 0.5 0.5 -1 0 0 255 0 0 255 20 | 0.5 0.5 0 0 0 -1 0 255 0 255 21 | 1 0.5 0.5 1 0 0 255 0 0 255 22 | 0.5 1 0.5 0 1 0 255 0 0 255 23 | 0.5 0.5 1 0 0 1 0 0 255 255 24 | 3 0 1 2 25 
| 3 0 3 2 26 | 3 3 4 2 27 | 3 4 1 2 28 | 3 0 1 5 29 | 3 0 3 5 30 | 3 3 4 5 31 | 3 4 1 5 32 | -------------------------------------------------------------------------------- /tests/data/diamond_color.obj: -------------------------------------------------------------------------------- 1 | #### 2 | # 3 | # OBJ File Generated by Meshlab 4 | # 5 | #### 6 | # Object diamond.obj 7 | # 8 | # Vertices: 6 9 | # Faces: 8 10 | # 11 | #### 12 | vn 0.000000 -1.000000 0.000000 13 | v 0.500000 0.000000 0.500000 1.000000 0.000000 0.000000 14 | vn -1.000000 0.000000 0.000000 15 | v 0.000000 0.500000 0.500000 1.000000 0.000000 0.000000 16 | vn 0.000000 0.000000 -1.000000 17 | v 0.500000 0.500000 0.000000 0.000000 1.000000 0.000000 18 | vn 1.000000 0.000000 0.000000 19 | v 1.000000 0.500000 0.500000 1.000000 0.000000 0.000000 20 | vn 0.000000 1.000000 0.000000 21 | v 0.500000 1.000000 0.500000 1.000000 0.000000 0.000000 22 | vn 0.000000 0.000000 1.000000 23 | v 0.500000 0.500000 1.000000 0.000000 0.000000 1.000000 24 | # 6 vertices, 0 vertices normals 25 | 26 | f 1//1 2//2 3//3 27 | f 1//1 4//4 3//3 28 | f 4//4 5//5 3//3 29 | f 5//5 2//2 3//3 30 | f 1//1 2//2 6//6 31 | f 1//1 4//4 6//6 32 | f 4//4 5//5 6//6 33 | f 5//5 2//2 6//6 34 | # 8 faces, 0 coords texture 35 | 36 | # End of File 37 | -------------------------------------------------------------------------------- /tests/data/diamond_color.off: -------------------------------------------------------------------------------- 1 | COFF 2 | 6 8 0 3 | 0.5 0 0.5 255 0 0 255 4 | 0 0.5 0.5 255 0 0 255 5 | 0.5 0.5 0 0 255 0 255 6 | 1 0.5 0.5 255 0 0 255 7 | 0.5 1 0.5 255 0 0 255 8 | 0.5 0.5 1 0 0 255 255 9 | 3 0 1 2 10 | 3 0 3 2 11 | 3 3 4 2 12 | 3 4 1 2 13 | 3 0 1 5 14 | 3 0 3 5 15 | 3 3 4 5 16 | 3 4 1 5 17 | -------------------------------------------------------------------------------- /tests/data/diamond_with_bool.ply: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/tests/data/diamond_with_bool.ply -------------------------------------------------------------------------------- /tests/data/has_offsets.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/tests/data/has_offsets.las -------------------------------------------------------------------------------- /tests/data/mnist.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/tests/data/mnist.npz -------------------------------------------------------------------------------- /tests/data/obj_issue_221.obj: -------------------------------------------------------------------------------- 1 | # Blender v2.79 (sub 0) OBJ File: 'untitled.blend' 2 | # www.blender.org 3 | v -1.000000 1.000000 0.500000 4 | v -1.000000 1.000000 -0.500000 5 | v 1.000000 1.000000 0.500000 6 | v 1.000000 1.000000 -0.500000 7 | v 0.000000 1.000000 -0.500000 8 | v 0.000000 1.000000 0.500000 9 | v 0.000000 0.600000 -0.500000 10 | v 0.600000 0.600000 -0.500000 11 | v 0.600000 0.600000 0.500000 12 | v 0.600000 0.200000 -0.500000 13 | v 0.600000 0.200000 0.500000 14 | v 0.000000 0.200000 0.500000 15 | v 0.000000 0.200000 -0.500000 16 | v -0.600000 0.200000 -0.500000 17 | v -0.600000 0.200000 0.500000 18 | v -0.600000 0.600000 -0.500000 19 | v 0.000000 0.600000 0.500000 
20 | v -0.600000 0.600000 0.500000 21 | v -1.000000 -1.000000 0.500000 22 | v -1.000000 -1.000000 -0.500000 23 | v 1.000000 -1.000000 0.500000 24 | v 1.000000 -1.000000 -0.500000 25 | v 0.000000 -1.000000 -0.500000 26 | v 0.000000 -1.000000 0.500000 27 | v -1.000000 0.000000 0.500000 28 | v -1.000000 0.000000 -0.500000 29 | v 1.000000 0.000000 -0.500000 30 | v 1.000000 0.000000 0.500000 31 | v 0.000000 0.000000 0.500000 32 | v 0.000000 0.000000 -0.500000 33 | v 0.000000 -0.600000 -0.500000 34 | v 0.600000 -0.600000 -0.500000 35 | v 0.600000 -0.600000 0.500000 36 | v 0.600000 -0.200000 -0.500000 37 | v 0.600000 -0.200000 0.500000 38 | v 0.000000 -0.200000 0.500000 39 | v 0.000000 -0.200000 -0.500000 40 | v -0.600000 -0.200000 -0.500000 41 | v -0.600000 -0.200000 0.500000 42 | v -0.600000 -0.600000 -0.500000 43 | v 0.000000 -0.600000 0.500000 44 | v -0.600000 -0.600000 0.500000 45 | f 1 26 25 46 | f 8 4 27 47 | f 4 28 27 48 | f 18 1 25 49 | f 2 6 5 50 | f 5 3 4 51 | f 9 11 28 52 | f 16 14 26 53 | f 11 8 10 54 | f 14 18 15 55 | f 12 10 13 56 | f 17 16 7 57 | f 26 19 25 58 | f 32 34 27 59 | f 28 22 27 60 | f 42 39 25 61 | f 24 20 23 62 | f 21 23 22 63 | f 28 33 21 64 | f 26 40 20 65 | f 32 35 34 66 | f 42 38 39 67 | f 37 35 36 68 | f 31 42 41 69 | f 1 2 26 70 | f 27 30 10 71 | f 30 13 10 72 | f 7 5 8 73 | f 5 4 8 74 | f 27 10 8 75 | f 4 3 28 76 | f 25 29 15 77 | f 29 12 15 78 | f 17 6 18 79 | f 6 1 18 80 | f 25 15 18 81 | f 2 1 6 82 | f 5 6 3 83 | f 3 6 9 84 | f 6 17 9 85 | f 12 29 11 86 | f 29 28 11 87 | f 3 9 28 88 | f 2 5 16 89 | f 5 7 16 90 | f 13 30 14 91 | f 30 26 14 92 | f 2 16 26 93 | f 11 9 8 94 | f 14 16 18 95 | f 13 14 15 96 | f 12 11 10 97 | f 13 15 12 98 | f 7 8 9 99 | f 17 18 16 100 | f 7 9 17 101 | f 26 20 19 102 | f 37 30 34 103 | f 30 27 34 104 | f 22 23 32 105 | f 23 31 32 106 | f 22 32 27 107 | f 28 21 22 108 | f 36 29 39 109 | f 29 25 39 110 | f 19 24 42 111 | f 24 41 42 112 | f 19 42 25 113 | f 24 19 20 114 | f 21 24 23 115 | f 28 29 35 116 | f 29 36 35 117 | f 41 24 33 118 | f 24 21 33 119 | f 28 35 33 120 | f 26 30 38 121 | f 30 37 38 122 | f 31 23 40 123 | f 23 20 40 124 | f 26 38 40 125 | f 32 33 35 126 | f 42 40 38 127 | f 39 38 37 128 | f 37 34 35 129 | f 36 39 37 130 | f 33 32 31 131 | f 31 40 42 132 | f 41 33 31 133 | -------------------------------------------------------------------------------- /tests/data/obj_issue_226.obj: -------------------------------------------------------------------------------- 1 | # mycube.obj 2 | # created by OBJ_WRITE. 
3 | 4 | g Group001 5 | 6 | v 0.000000 0.000000 0.000000 1.000000 7 | v 1.000000 0.000000 0.000000 1.000000 8 | v 0.000000 1.000000 0.000000 1.000000 9 | v 1.000000 1.000000 0.000000 1.000000 10 | v 0.000000 0.000000 1.000000 1.000000 11 | v 1.000000 0.000000 1.000000 1.000000 12 | v 0.000000 1.000000 1.000000 1.000000 13 | v 1.000000 1.000000 1.000000 1.000000 14 | 15 | f 1 3 2 16 | f 2 3 4 17 | f 1 6 5 18 | f 1 2 6 19 | f 3 7 4 20 | f 4 7 8 21 | f 5 6 8 22 | f 5 8 7 23 | f 1 5 7 24 | f 1 7 3 25 | f 2 4 6 26 | f 6 4 8 27 | -------------------------------------------------------------------------------- /tests/data/obj_issue_vn.obj: -------------------------------------------------------------------------------- 1 | # Blender v2.80 (sub 44) OBJ File: '' 2 | # www.blender.org 3 | v 0.078327 0.314393 0.471797 4 | v 0.072773 0.314464 0.473065 5 | v 0.075795 0.318658 0.469309 6 | vt 0.479015 0.605341 7 | vt 0.476496 0.604942 8 | vt 0.479101 0.603193 9 | vn -0.1889 -0.5761 -0.7953 10 | vn -0.1791 -0.5816 -0.7935 11 | vn -0.1873 -0.5788 -0.7936 12 | f 1/1/1 2/2/2 3/3/3 13 | -------------------------------------------------------------------------------- /tests/data/plane.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/tests/data/plane.npz -------------------------------------------------------------------------------- /tests/data/simple.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/tests/data/simple.las -------------------------------------------------------------------------------- /tests/data/simple.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/tests/data/simple.laz -------------------------------------------------------------------------------- /tests/data/sphere.ply: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/tests/data/sphere.ply -------------------------------------------------------------------------------- /tests/data/voxelgrid.ply: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/daavoo/pyntcloud/8368c6a22f8060aeafacf2964276e8704d732145/tests/data/voxelgrid.ply -------------------------------------------------------------------------------- /tests/integration/filters/test_kdtree_filters.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from numpy.testing import assert_array_equal 4 | 5 | 6 | @pytest.mark.parametrize("kdtree_id", ["FOO", "K", "K(10)", "K(16", "K16)", "K16"]) 7 | @pytest.mark.usefixtures("pyntcloud_with_kdtree_and_kdtree_id") 8 | def test_ROR_raises_KeyError_if_id_is_not_valid( 9 | pyntcloud_with_kdtree_and_kdtree_id, kdtree_id 10 | ): 11 | cloud, true_id = pyntcloud_with_kdtree_and_kdtree_id 12 | with pytest.raises(KeyError): 13 | cloud.get_filter("ROR", kdtree_id=kdtree_id, k=2, r=0.2) 14 | 15 | 16 | @pytest.mark.parametrize("kdtree_id", ["FOO", "K", "K(10)", "K(16", "K16)", "K16"]) 17 | @pytest.mark.usefixtures("pyntcloud_with_kdtree_and_kdtree_id") 18 | def test_SOR_raises_KeyError_if_id_is_not_valid( 19 | 
pyntcloud_with_kdtree_and_kdtree_id, kdtree_id 20 | ): 21 | cloud, true_id = pyntcloud_with_kdtree_and_kdtree_id 22 | with pytest.raises(KeyError): 23 | cloud.get_filter("SOR", kdtree_id=kdtree_id, k=2, z_max=0.5) 24 | 25 | 26 | @pytest.mark.parametrize( 27 | "k,r,expected_result", 28 | [ 29 | (2, 0.2, [True, True, True, False, True, True]), 30 | (3, 0.2, [False, True, False, False, False, False]), 31 | (3, 0.35, [True, True, True, False, False, False]), 32 | ], 33 | ) 34 | @pytest.mark.usefixtures("pyntcloud_with_kdtree_and_kdtree_id") 35 | def test_ROR_expected_results( 36 | pyntcloud_with_kdtree_and_kdtree_id, k, r, expected_result 37 | ): 38 | cloud, kdtree_id = pyntcloud_with_kdtree_and_kdtree_id 39 | result = cloud.get_filter("ROR", kdtree_id=kdtree_id, k=k, r=r) 40 | assert_array_equal(result, expected_result) 41 | 42 | 43 | @pytest.mark.parametrize( 44 | "k,z_max,expected_result", [(2, 0.5, [True, True, True, False, True, True])] 45 | ) 46 | @pytest.mark.usefixtures("pyntcloud_with_kdtree_and_kdtree_id") 47 | def test_SOR_expected_results( 48 | pyntcloud_with_kdtree_and_kdtree_id, k, z_max, expected_result 49 | ): 50 | cloud, kdtree_id = pyntcloud_with_kdtree_and_kdtree_id 51 | result = cloud.get_filter("SOR", kdtree_id=kdtree_id, k=k, z_max=z_max) 52 | assert_array_equal(result, expected_result) 53 | -------------------------------------------------------------------------------- /tests/integration/filters/test_xyz_filters.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from numpy.testing import assert_array_equal 4 | 5 | 6 | @pytest.mark.usefixtures("simple_pyntcloud") 7 | def test_BBOX_default_values(simple_pyntcloud): 8 | """ 9 | Default bounding box values are infinite so all points 10 | should pass the filter. 
11 | """ 12 | result = simple_pyntcloud.get_filter("BBOX") 13 | assert all(result) 14 | 15 | 16 | @pytest.mark.parametrize( 17 | "bounding_box,expected_result", 18 | [ 19 | ( 20 | {"min_x": 0.4, "max_x": 0.6, "min_y": 0.4, "max_y": 0.6}, 21 | [False, False, False, True, False, False], 22 | ), 23 | ( 24 | { 25 | "min_x": 0.4, 26 | }, 27 | [False, False, False, True, True, True], 28 | ), 29 | ( 30 | { 31 | "max_x": 1.0, 32 | }, 33 | [True, True, True, True, True, False], 34 | ), 35 | ], 36 | ) 37 | @pytest.mark.usefixtures("simple_pyntcloud") 38 | def test_BBOX_expected_results(simple_pyntcloud, bounding_box, expected_result): 39 | result = simple_pyntcloud.get_filter("BBOX", **bounding_box) 40 | assert_array_equal(result, expected_result) 41 | -------------------------------------------------------------------------------- /tests/integration/io/test_from_instance.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import numpy as np 3 | from pyntcloud import PyntCloud 4 | 5 | try: 6 | import pyvista as pv 7 | 8 | SKIP_PYVISTA = False 9 | except ImportError: 10 | pv = None 11 | SKIP_PYVISTA = True 12 | 13 | try: 14 | import open3d as o3d 15 | 16 | SKIP_OPEN3D = False 17 | except ImportError: 18 | o3d = None 19 | SKIP_OPEN3D = True 20 | 21 | 22 | @pytest.mark.skipif(SKIP_PYVISTA, reason="Requires PyVista") 23 | def test_pyvista_conversion(data_path): 24 | original_point_cloud = pv.read(str(data_path / "diamond.ply")) 25 | cloud = PyntCloud.from_instance("pyvista", original_point_cloud) 26 | assert np.allclose(cloud.xyz, original_point_cloud.points) 27 | assert {"red", "green", "blue"}.issubset(cloud.points.columns) 28 | assert np.allclose( 29 | cloud.points[["red", "green", "blue"]].values, 30 | original_point_cloud.point_data["RGB"], 31 | ) 32 | assert {"nx", "ny", "nz"}.issubset(cloud.points.columns) 33 | assert np.allclose( 34 | cloud.points[["nx", "ny", "nz"]].values, 35 | original_point_cloud.point_data["Normals"], 36 | ) 37 | assert cloud.mesh is not None 38 | 39 | 40 | @pytest.mark.skipif(SKIP_PYVISTA, reason="Requires PyVista") 41 | def test_pyvista_normals_are_handled(): 42 | poly = pv.Sphere() 43 | pc = PyntCloud.from_instance("pyvista", poly) 44 | assert all(x in pc.points.columns for x in ["nx", "ny", "nz"]) 45 | 46 | 47 | @pytest.mark.skipif(SKIP_PYVISTA, reason="Requires PyVista") 48 | def test_pyvista_multicomponent_scalars_are_splitted(): 49 | poly = pv.Sphere() 50 | poly.point_data["foo"] = np.zeros_like(poly.points) 51 | pc = PyntCloud.from_instance("pyvista", poly) 52 | assert all(x in pc.points.columns for x in ["foo_0", "foo_1", "foo_2"]) 53 | 54 | 55 | @pytest.mark.skipif(SKIP_PYVISTA, reason="Requires PyVista") 56 | def test_pyvista_rgb_is_handled(): 57 | """Serves as regression test for old `in` behaviour that could cause a subtle bug 58 | if poin_arrays contain a field with `name in "RGB"` 59 | """ 60 | poly = pv.Sphere() 61 | poly.point_data["RG"] = np.zeros_like(poly.points)[:, :2] 62 | pc = PyntCloud.from_instance("pyvista", poly) 63 | assert all(x in pc.points.columns for x in ["RG_0", "RG_1"]) 64 | 65 | 66 | @pytest.mark.skipif(SKIP_OPEN3D, reason="Requires Open3D") 67 | def test_open3d_point_cloud(data_path): 68 | point_cloud = o3d.io.read_point_cloud(str(data_path.joinpath("diamond.ply"))) 69 | cloud = PyntCloud.from_instance("open3d", point_cloud) 70 | assert np.allclose(cloud.xyz, np.asarray(point_cloud.points)) 71 | assert {"red", "green", "blue"}.issubset(cloud.points.columns) 72 | assert np.allclose( 
73 | cloud.points[["red", "green", "blue"]].values / 255.0, 74 | np.asarray(point_cloud.colors), 75 | ) 76 | 77 | assert {"nx", "ny", "nz"}.issubset(cloud.points.columns) 78 | assert np.allclose( 79 | cloud.points[["nx", "ny", "nz"]].values, np.asarray(point_cloud.normals) 80 | ) 81 | 82 | 83 | @pytest.mark.skipif(SKIP_OPEN3D, reason="Requires Open3D") 84 | def test_open3d_triangle_mesh(data_path): 85 | triangle_mesh = o3d.io.read_triangle_mesh(str(data_path.joinpath("diamond.ply"))) 86 | cloud = PyntCloud.from_instance("open3d", triangle_mesh) 87 | assert cloud.mesh is not None 88 | assert np.allclose(cloud.mesh.values, triangle_mesh.triangles) 89 | 90 | assert np.allclose(cloud.xyz, triangle_mesh.vertices) 91 | 92 | assert {"red", "green", "blue"}.issubset(cloud.points.columns) 93 | assert np.allclose( 94 | cloud.points[["red", "green", "blue"]].values / 255.0, 95 | triangle_mesh.vertex_colors, 96 | ) 97 | 98 | assert {"nx", "ny", "nz"}.issubset(cloud.points.columns) 99 | assert np.allclose( 100 | cloud.points[["nx", "ny", "nz"]].values, triangle_mesh.vertex_normals 101 | ) 102 | -------------------------------------------------------------------------------- /tests/integration/io/test_to_file.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from pyntcloud import PyntCloud 4 | 5 | from test_from_file import assert_points_xyz, assert_points_color, assert_mesh 6 | 7 | 8 | @pytest.mark.parametrize( 9 | "extension,color,mesh,comments", 10 | [ 11 | (".ply", True, True, False), 12 | ("_ascii.ply", True, True, True), 13 | (".npz", True, True, False), 14 | (".obj", False, True, False), 15 | (".bin", False, False, False), 16 | ], 17 | ) 18 | def test_to_file(tmpdir, diamond, extension, color, mesh, comments): 19 | extra_write_args = {} 20 | if mesh: 21 | extra_write_args["also_save"] = ["mesh"] 22 | if comments: 23 | extra_write_args["comments"] = ["PyntCloud is cool"] 24 | if extension == ".ply": 25 | extra_write_args["as_text"] = False 26 | if extension == "_ascii.ply": 27 | extra_write_args["as_text"] = True 28 | 29 | diamond.to_file(str(tmpdir.join("written{}".format(extension))), **extra_write_args) 30 | 31 | written_file = PyntCloud.from_file(str(tmpdir.join("written{}".format(extension)))) 32 | 33 | assert_points_xyz(written_file) 34 | if color: 35 | assert_points_color(written_file) 36 | if mesh: 37 | assert_mesh(written_file) 38 | if comments: 39 | assert written_file.comments == ["PyntCloud is cool"] 40 | 41 | 42 | def test_to_bin_raises_ValueError_if_invalid_kwargs(tmpdir, diamond): 43 | with pytest.raises(ValueError): 44 | diamond.to_file(str(tmpdir.join("written.bin")), also_save=["mesh"]) 45 | 46 | 47 | def test_write_ply_with_bool(plane_pyntcloud, tmp_path): 48 | """Expectation: a PyntCloud class holding Boolean column within `points` can be written and re-read as a PLY file. 49 | 50 | After adding the new column, we have the following DataFrame under plane_pyntcloud.points: 51 | 52 | x y z bool_col 53 | 0 0.0 0.0 0.0 True 54 | 1 1.0 1.0 0.0 True 55 | 2 2.0 2.0 0.0 False 56 | 3 1.0 2.0 0.0 True 57 | 4 0.1 0.2 0.3 True 58 | """ 59 | # Insert the additional column of dtype: bool. 60 | plane_pyntcloud.points["bool_col"] = plane_pyntcloud.points.x < 2 61 | 62 | # Write the DataFrame containing Boolean data. 63 | ply_out = (tmp_path / "test_file.ply").as_posix() 64 | plane_pyntcloud.to_file(ply_out) 65 | 66 | # Reload the test file and compare it is exactly as it was before writing. 
67 | new_pyntcloud = PyntCloud.from_file(ply_out, allow_bool=True) 68 | assert new_pyntcloud.points.equals(plane_pyntcloud.points), ( 69 | "Re-read pyntcloud is not identical to before writing" 70 | ) 71 | -------------------------------------------------------------------------------- /tests/integration/io/test_to_instance.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import numpy as np 3 | from pyntcloud import PyntCloud 4 | 5 | try: 6 | import pyvista as pv 7 | 8 | SKIP_PYVISTA = False 9 | except ImportError: 10 | pv = None 11 | SKIP_PYVISTA = True 12 | 13 | try: 14 | import open3d as o3d 15 | 16 | SKIP_OPEN3D = False 17 | except ImportError: 18 | o3d = None 19 | SKIP_OPEN3D = True 20 | 21 | 22 | @pytest.mark.skipif(SKIP_PYVISTA, reason="Requires PyVista") 23 | def test_pyvista_conversion(data_path): 24 | cloud = PyntCloud.from_file(str(data_path.joinpath("diamond.ply"))) 25 | poly = cloud.to_instance("pyvista", mesh=True) 26 | assert np.allclose(cloud.xyz, poly.points) 27 | faces = poly.faces.reshape((-1, 4))[:, 1:] 28 | assert np.allclose(cloud.mesh.values, faces) 29 | 30 | 31 | @pytest.mark.skipif(SKIP_OPEN3D, reason="Requires Open3D") 32 | def test_open3d_point_cloud_conversion(data_path): 33 | cloud = PyntCloud.from_file(str(data_path.joinpath("diamond.ply"))) 34 | point_cloud = cloud.to_instance("open3d", mesh=False) 35 | assert isinstance(point_cloud, o3d.geometry.PointCloud) 36 | assert np.allclose(cloud.xyz, point_cloud.points) 37 | 38 | 39 | @pytest.mark.skipif(SKIP_OPEN3D, reason="Requires Open3D") 40 | def test_open3d_triangle_mesh_conversion(data_path): 41 | cloud = PyntCloud.from_file(str(data_path.joinpath("diamond.ply"))) 42 | # mesh=True by default 43 | triangle_mesh = cloud.to_instance("open3d") 44 | assert isinstance(triangle_mesh, o3d.geometry.TriangleMesh) 45 | assert np.allclose(cloud.xyz, triangle_mesh.vertices) 46 | assert np.allclose(cloud.mesh.values, triangle_mesh.triangles) 47 | -------------------------------------------------------------------------------- /tests/integration/samplers/test_mesh_samplers.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from pandas import DataFrame 4 | 5 | from pyntcloud import PyntCloud 6 | 7 | 8 | def test_mesh_random_sampling_return_type(diamond): 9 | sample = diamond.get_sample("mesh_random", n=10, rgb=True, normals=True) 10 | assert sample is DataFrame 11 | 12 | sample = diamond.get_sample( 13 | "mesh_random", n=10, rgb=True, normals=True, as_PyntCloud=True 14 | ) 15 | assert sample is PyntCloud 16 | 17 | 18 | @pytest.mark.parametrize("n", [1, 5, 10, 50, 100]) 19 | def test_mesh_random_sampling_n_argument(diamond, n): 20 | sample = diamond.get_sample("mesh_random", n=n, rgb=True, normals=True) 21 | assert len(sample) == n 22 | 23 | 24 | @pytest.mark.parametrize( 25 | "rgb,normals", [(False, False), (True, False), (True, True), (False, True)] 26 | ) 27 | def test_mesh_random_sampling_rgb_normals_optional_arguments(diamond, rgb, normals): 28 | sample = diamond.get_sample("mesh_random", n=10, rgb=rgb, normals=normals) 29 | 30 | for x in ["red", "green", "blue"]: 31 | assert (x in sample) == rgb 32 | 33 | for x in ["nx", "ny", "nz"]: 34 | assert (x in sample) == normals 35 | 36 | 37 | @pytest.mark.parametrize("n", [1, 5, 10, 50, 100]) 38 | @pytest.mark.usefixtures("diamond") 39 | def test_mesh_random_sampling_sampled_points_bounds(diamond, n): 40 | sample = diamond.get_sample("mesh_random", n=n, rgb=True, 
normals=True) 41 | 42 | assert all(sample[["x", "y", "z"]].values.max(0) <= diamond.xyz.max(0)) 43 | assert all(sample[["x", "y", "z"]].values.min(0) >= diamond.xyz.min(0)) 44 | -------------------------------------------------------------------------------- /tests/integration/samplers/test_points_samplers.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from pandas import DataFrame 4 | 5 | from pyntcloud import PyntCloud 6 | from pyntcloud.utils.array import point_in_array_2D 7 | 8 | 9 | def test_points_random_sampling_return_type(simple_pyntcloud): 10 | sample = simple_pyntcloud.get_sample("points_random", n=5) 11 | assert isinstance(sample, DataFrame) 12 | 13 | sample = simple_pyntcloud.get_sample("points_random", n=5, as_PyntCloud=True) 14 | assert isinstance(sample, PyntCloud) 15 | 16 | 17 | @pytest.mark.parametrize("n", [1, 5, 6]) 18 | def test_RandomPointsSampler_n_argument(simple_pyntcloud, n): 19 | sample = simple_pyntcloud.get_sample("points_random", n=n) 20 | assert len(sample) == n 21 | 22 | 23 | @pytest.mark.usefixtures("simple_pyntcloud") 24 | def test_RandomPointsSampler_raises_ValueError_on_invalid_n(simple_pyntcloud): 25 | with pytest.raises(ValueError): 26 | simple_pyntcloud.get_sample("points_random", n=10) 27 | 28 | 29 | @pytest.mark.usefixtures("simple_pyntcloud") 30 | def test_RandomPointsSampler_sampled_points_are_from_original(simple_pyntcloud): 31 | for i in range(10): 32 | sample = simple_pyntcloud.get_sample("points_random", n=1) 33 | assert point_in_array_2D(sample, simple_pyntcloud.xyz) 34 | 35 | 36 | @pytest.mark.parametrize("n", [1, 5, 6]) 37 | @pytest.mark.usefixtures("simple_pyntcloud") 38 | def test_FarthestPointsSampler_n_argument(simple_pyntcloud, n): 39 | sample = simple_pyntcloud.get_sample("points_farthest", n=n) 40 | assert len(sample) == n 41 | 42 | 43 | @pytest.mark.usefixtures("simple_pyntcloud") 44 | def test_FarthestPointsSampler_raises_ValueError_on_invalid_n(simple_pyntcloud): 45 | with pytest.raises(ValueError): 46 | simple_pyntcloud.get_sample("points_farthest", n=10) 47 | 48 | 49 | @pytest.mark.usefixtures("simple_pyntcloud") 50 | def test_FarthestPointsSampler_sampled_points_are_from_original(simple_pyntcloud): 51 | for i in range(10): 52 | sample = simple_pyntcloud.get_sample("points_farthest", n=1) 53 | assert point_in_array_2D(sample, simple_pyntcloud.xyz) 54 | -------------------------------------------------------------------------------- /tests/integration/samplers/test_voxelgrid_samplers.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from pandas import DataFrame 4 | 5 | from pyntcloud import PyntCloud 6 | from pyntcloud.utils.array import point_in_array_2D 7 | 8 | 9 | @pytest.mark.parametrize( 10 | "sampling_method", ["voxelgrid_centers", "voxelgrid_centroids", "voxelgrid_nearest"] 11 | ) 12 | @pytest.mark.usefixtures("simple_pyntcloud") 13 | def test_voxelgrid_sampling_return_type(simple_pyntcloud, sampling_method): 14 | voxelgrid_id = simple_pyntcloud.add_structure("voxelgrid") 15 | 16 | sample = simple_pyntcloud.get_sample(sampling_method, voxelgrid_id=voxelgrid_id) 17 | assert isinstance(sample, DataFrame) 18 | 19 | sample = simple_pyntcloud.get_sample( 20 | sampling_method, voxelgrid_id=voxelgrid_id, as_PyntCloud=True 21 | ) 22 | assert isinstance(sample, PyntCloud) 23 | 24 | 25 | @pytest.mark.parametrize( 26 | "n_x,n_y,n_z,expected_n,expected_point", 27 | [(2, 2, 2, 2, [0.25, 0.25, 0.25]), (2, 1, 1, 2, [0.25, 0.5, 0.5])], 28 | ) 29 | 
@pytest.mark.usefixtures("simple_pyntcloud") 30 | def test_voxelgrid_centers_expected_values( 31 | simple_pyntcloud, n_x, n_y, n_z, expected_n, expected_point 32 | ): 33 | voxelgrid_id = simple_pyntcloud.add_structure( 34 | "voxelgrid", n_x=n_x, n_y=n_y, n_z=n_z 35 | ) 36 | sample = simple_pyntcloud.get_sample("voxelgrid_centers", voxelgrid_id=voxelgrid_id) 37 | assert len(sample) == expected_n 38 | assert point_in_array_2D(expected_point, sample.values) 39 | 40 | 41 | @pytest.mark.parametrize( 42 | "n_x,n_y,n_z,expected_n,expected_point", 43 | [(2, 2, 2, 2, [0.2, 0.2, 0.2]), (2, 1, 1, 2, [0.2, 0.2, 0.2])], 44 | ) 45 | @pytest.mark.usefixtures("simple_pyntcloud") 46 | def test_voxelgrid_centroids_expected_values( 47 | simple_pyntcloud, n_x, n_y, n_z, expected_n, expected_point 48 | ): 49 | voxelgrid_id = simple_pyntcloud.add_structure( 50 | "voxelgrid", n_x=n_x, n_y=n_y, n_z=n_z 51 | ) 52 | sample = simple_pyntcloud.get_sample( 53 | "voxelgrid_centroids", voxelgrid_id=voxelgrid_id 54 | ) 55 | assert len(sample) == expected_n 56 | assert point_in_array_2D(expected_point, sample.values) 57 | 58 | 59 | @pytest.mark.parametrize( 60 | "n_x,n_y,n_z,n_points,expected_n,expected_point", 61 | [ 62 | (2, 2, 2, 1, 2, [0.2, 0.2, 0.2]), 63 | (2, 2, 2, 2, 4, [0.1, 0.1, 0.1]), 64 | (2, 1, 1, 1, 2, [0.5, 0.5, 0.5]), 65 | ], 66 | ) 67 | @pytest.mark.usefixtures("simple_pyntcloud") 68 | def test_voxelgrid_nearest_expected_values( 69 | simple_pyntcloud, n_x, n_y, n_z, n_points, expected_n, expected_point 70 | ): 71 | voxelgrid_id = simple_pyntcloud.add_structure( 72 | "voxelgrid", n_x=n_x, n_y=n_y, n_z=n_z 73 | ) 74 | sample = simple_pyntcloud.get_sample( 75 | "voxelgrid_nearest", voxelgrid_id=voxelgrid_id, n=n_points 76 | ) 77 | assert len(sample) == expected_n 78 | assert point_in_array_2D(expected_point, sample.loc[:, ["x", "y", "z"]].values) 79 | 80 | 81 | @pytest.mark.parametrize( 82 | "size_x,expected_n,expected_in,expected_not_in", 83 | [ 84 | (0.1, 6, [0.0, 0.0, 0.0], [1.2, 1.2, 1.2]), 85 | (0.2, 4, [0.1, 0.1, 0.1], [0.9, 0.9, 0.9]), 86 | ], 87 | ) 88 | @pytest.mark.usefixtures("simple_pyntcloud") 89 | def test_voxelgrid_highest_expected_values( 90 | simple_pyntcloud, size_x, expected_n, expected_in, expected_not_in 91 | ): 92 | voxelgrid_id = simple_pyntcloud.add_structure("voxelgrid", size_x=size_x) 93 | sample = simple_pyntcloud.get_sample("voxelgrid_highest", voxelgrid_id=voxelgrid_id) 94 | 95 | assert len(sample) == expected_n 96 | assert point_in_array_2D(expected_in, sample.loc[:, ["x", "y", "z"]].values) 97 | assert not point_in_array_2D(expected_not_in, sample.loc[:, ["x", "y", "z"]].values) 98 | -------------------------------------------------------------------------------- /tests/integration/scalar_fields/test_eigenvalues_scalar_fields.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import numpy as np 4 | 5 | 6 | @pytest.mark.parametrize("scalar_field_name", ["anisotropy", "planarity"]) 7 | @pytest.mark.usefixtures("pyntcloud_and_eigenvalues") 8 | def test_eigen_values_scalar_fields_where_coplanar_points_have_value_of_1( 9 | pyntcloud_and_eigenvalues, scalar_field_name 10 | ): 11 | cloud, ev = pyntcloud_and_eigenvalues 12 | with np.errstate(divide="ignore", invalid="ignore"): 13 | scalar_field = cloud.add_scalar_field(scalar_field_name, ev=ev) 14 | scalar_field_values = cloud.points[scalar_field].values 15 | assert all(scalar_field_values[:5] == 1) 16 | assert scalar_field_values[5] < 1 17 | 18 | 19 | @pytest.mark.parametrize( 
20 | "scalar_field_name", 21 | ["curvature", "eigenentropy", "linearity", "omnivariance", "sphericity"], 22 | ) 23 | @pytest.mark.usefixtures("pyntcloud_and_eigenvalues") 24 | def test_eigen_values_scalar_fields_where_coplanar_points_have_value_of_0( 25 | pyntcloud_and_eigenvalues, scalar_field_name 26 | ): 27 | cloud, ev = pyntcloud_and_eigenvalues 28 | with np.errstate(divide="ignore", invalid="ignore"): 29 | scalar_field = cloud.add_scalar_field(scalar_field_name, ev=ev) 30 | scalar_field_values = cloud.points[scalar_field].values 31 | assert all(scalar_field_values[:5] == 0) 32 | assert scalar_field_values[5] > 0 33 | 34 | 35 | @pytest.mark.usefixtures("pyntcloud_and_eigenvalues") 36 | def test_eigen_sum_values(pyntcloud_and_eigenvalues): 37 | cloud, ev = pyntcloud_and_eigenvalues 38 | with np.errstate(divide="ignore", invalid="ignore"): 39 | scalar_field = cloud.add_scalar_field("eigen_sum", ev=ev) 40 | scalar_field_values = cloud.points[scalar_field].values 41 | assert all(scalar_field_values > 0) 42 | -------------------------------------------------------------------------------- /tests/integration/scalar_fields/test_k_neighbors_scalar_fields.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | @pytest.mark.usefixtures("plane_pyntcloud", "plane_k_neighbors") 5 | def test_eigen_values_coplanar_points_e3_is_0(plane_pyntcloud, plane_k_neighbors): 6 | plane_pyntcloud.add_scalar_field("eigen_values", k_neighbors=plane_k_neighbors) 7 | assert plane_pyntcloud.points["e3(3)"][2] == 0 8 | assert plane_pyntcloud.points["e3(3)"][3] == 0 9 | 10 | 11 | @pytest.mark.usefixtures( 12 | "pyntcloud_with_rgb_and_normals", "pyntcloud_with_rgb_and_normals_k_neighbors" 13 | ) 14 | def test_eigen_values_bounds( 15 | pyntcloud_with_rgb_and_normals, pyntcloud_with_rgb_and_normals_k_neighbors 16 | ): 17 | scalar_fields = pyntcloud_with_rgb_and_normals.add_scalar_field( 18 | "eigen_values", k_neighbors=pyntcloud_with_rgb_and_normals_k_neighbors 19 | ) 20 | 21 | for x in scalar_fields: 22 | assert all(pyntcloud_with_rgb_and_normals.points[x] >= -1) 23 | assert all(pyntcloud_with_rgb_and_normals.points[x] <= 1) 24 | 25 | 26 | @pytest.mark.usefixtures("plane_pyntcloud", "plane_k_neighbors") 27 | def test_eigen_decomposition_coplanar_points_some_fields_are_0( 28 | plane_pyntcloud, plane_k_neighbors 29 | ): 30 | plane_pyntcloud.add_scalar_field( 31 | "eigen_decomposition", k_neighbors=plane_k_neighbors 32 | ) 33 | 34 | for x in ["e3(3)", "ev1_z(3)", "ev2_z(3)", "ev3_x(3)", "ev3_y(3)"]: 35 | assert plane_pyntcloud.points[x][2] == 0 36 | assert plane_pyntcloud.points[x][3] == 0 37 | 38 | 39 | @pytest.mark.usefixtures( 40 | "pyntcloud_with_rgb_and_normals", "pyntcloud_with_rgb_and_normals_k_neighbors" 41 | ) 42 | def test_eigen_decomposition_bounds( 43 | pyntcloud_with_rgb_and_normals, pyntcloud_with_rgb_and_normals_k_neighbors 44 | ): 45 | scalar_fields = pyntcloud_with_rgb_and_normals.add_scalar_field( 46 | "eigen_decomposition", k_neighbors=pyntcloud_with_rgb_and_normals_k_neighbors 47 | ) 48 | 49 | for x in scalar_fields: 50 | assert all(pyntcloud_with_rgb_and_normals.points[x] >= -1) 51 | assert all(pyntcloud_with_rgb_and_normals.points[x] <= 1) 52 | 53 | 54 | @pytest.mark.usefixtures("plane_pyntcloud", "plane_k_neighbors") 55 | def test_normals_coplanar_points_nx_ny_are_0(plane_pyntcloud, plane_k_neighbors): 56 | plane_pyntcloud.add_scalar_field("normals", k_neighbors=plane_k_neighbors) 57 | for x in ["nx(3)", "ny(3)"]: 58 | assert 
plane_pyntcloud.points[x][2] == 0 59 | assert plane_pyntcloud.points[x][3] == 0 60 | 61 | 62 | @pytest.mark.usefixtures( 63 | "pyntcloud_with_rgb_and_normals", "pyntcloud_with_rgb_and_normals_k_neighbors" 64 | ) 65 | def test_normals_bounds( 66 | pyntcloud_with_rgb_and_normals, pyntcloud_with_rgb_and_normals_k_neighbors 67 | ): 68 | scalar_fields = pyntcloud_with_rgb_and_normals.add_scalar_field( 69 | "normals", k_neighbors=pyntcloud_with_rgb_and_normals_k_neighbors 70 | ) 71 | 72 | for x in scalar_fields: 73 | assert all(pyntcloud_with_rgb_and_normals.points[x] >= -1) 74 | assert all(pyntcloud_with_rgb_and_normals.points[x] <= 1) 75 | -------------------------------------------------------------------------------- /tests/integration/scalar_fields/test_normals_scalar_fields.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import numpy as np 4 | 5 | 6 | @pytest.mark.parametrize( 7 | "scalar_field_name, min_val, max_val", 8 | [ 9 | ("inclination_degrees", 0, 180), 10 | ("inclination_radians", 0, np.pi), 11 | ("orientation_degrees", 0, 360), 12 | ("orientation_radians", 0, np.pi * 2), 13 | ], 14 | ) 15 | @pytest.mark.usefixtures("pyntcloud_with_rgb_and_normals") 16 | def test_normal_scalar_fields_bounds( 17 | pyntcloud_with_rgb_and_normals, scalar_field_name, min_val, max_val 18 | ): 19 | with np.errstate(divide="ignore", invalid="ignore"): 20 | scalar_field = pyntcloud_with_rgb_and_normals.add_scalar_field( 21 | scalar_field_name 22 | ) 23 | 24 | scalar_field_values = pyntcloud_with_rgb_and_normals.points[scalar_field] 25 | assert all(scalar_field_values >= min_val) 26 | assert all(scalar_field_values <= max_val) 27 | -------------------------------------------------------------------------------- /tests/integration/scalar_fields/test_rgb_scalar_fields.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | @pytest.mark.parametrize("scalar_field", ["hsv", "relative_luminance", "rgb_intensity"]) 5 | @pytest.mark.usefixtures("simple_pyntcloud") 6 | def test_KeyError_is_raised_when_rgb_is_missing(simple_pyntcloud, scalar_field): 7 | with pytest.raises(KeyError): 8 | simple_pyntcloud.add_scalar_field(scalar_field) 9 | 10 | 11 | @pytest.mark.usefixtures("pyntcloud_with_rgb_and_normals") 12 | def test_hsv_bounds(pyntcloud_with_rgb_and_normals): 13 | pyntcloud_with_rgb_and_normals.add_scalar_field("hsv") 14 | assert min(pyntcloud_with_rgb_and_normals.points["H"]) >= 0 15 | assert max(pyntcloud_with_rgb_and_normals.points["H"]) <= 360 16 | assert min(pyntcloud_with_rgb_and_normals.points["S"]) >= 0 17 | assert max(pyntcloud_with_rgb_and_normals.points["S"]) <= 1 18 | assert min(pyntcloud_with_rgb_and_normals.points["V"]) >= 0 19 | assert max(pyntcloud_with_rgb_and_normals.points["V"]) <= 100 20 | 21 | 22 | @pytest.mark.usefixtures("pyntcloud_with_rgb_and_normals") 23 | def test_relative_luminance_bounds(pyntcloud_with_rgb_and_normals): 24 | pyntcloud_with_rgb_and_normals.add_scalar_field("relative_luminance") 25 | assert min(pyntcloud_with_rgb_and_normals.points["relative_luminance"]) >= 0 26 | assert max(pyntcloud_with_rgb_and_normals.points["relative_luminance"]) <= 255 27 | 28 | 29 | @pytest.mark.usefixtures("pyntcloud_with_rgb_and_normals") 30 | def test_rgb_intensity_bounds(pyntcloud_with_rgb_and_normals): 31 | pyntcloud_with_rgb_and_normals.add_scalar_field("rgb_intensity") 32 | assert min(pyntcloud_with_rgb_and_normals.points["Ri"]) >= 0 33 | assert 
max(pyntcloud_with_rgb_and_normals.points["Ri"]) <= 1 34 | assert min(pyntcloud_with_rgb_and_normals.points["Gi"]) >= 0 35 | assert max(pyntcloud_with_rgb_and_normals.points["Gi"]) <= 1 36 | assert min(pyntcloud_with_rgb_and_normals.points["Bi"]) >= 0 37 | assert max(pyntcloud_with_rgb_and_normals.points["Bi"]) <= 1 38 | -------------------------------------------------------------------------------- /tests/integration/scalar_fields/test_voxlegrid_scalar_fields.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | @pytest.mark.parametrize( 5 | "scalar_field_name, min_val, max_val", 6 | [ 7 | ("voxel_n", 0, 4 * 4 * 4), 8 | ("voxel_x", 0, 4), 9 | ("voxel_y", 0, 4), 10 | ("voxel_z", 0, 4), 11 | ], 12 | ) 13 | @pytest.mark.usefixtures("pyntcloud_with_voxelgrid_and_voxelgrid_id") 14 | def test_voxelgrid_scalar_fields_bounds( 15 | pyntcloud_with_voxelgrid_and_voxelgrid_id, scalar_field_name, min_val, max_val 16 | ): 17 | cloud, voxelgrid_id = pyntcloud_with_voxelgrid_and_voxelgrid_id 18 | scalar_field = cloud.add_scalar_field(scalar_field_name, voxelgrid_id=voxelgrid_id) 19 | scalar_field_values = cloud.points[scalar_field].values 20 | assert all(scalar_field_values >= min_val) 21 | assert all(scalar_field_values <= max_val) 22 | 23 | 24 | @pytest.mark.usefixtures("pyntcloud_with_clusters_and_voxelgrid_id") 25 | def test_euclidean_clusters_values(pyntcloud_with_clusters_and_voxelgrid_id): 26 | cloud, voxelgrid_id = pyntcloud_with_clusters_and_voxelgrid_id 27 | scalar_field = cloud.add_scalar_field( 28 | "euclidean_clusters", voxelgrid_id=voxelgrid_id 29 | ) 30 | scalar_field_values = cloud.points[scalar_field].values 31 | assert all(scalar_field_values[:5] != scalar_field_values[5:]) 32 | -------------------------------------------------------------------------------- /tests/integration/scalar_fields/test_xyz_scalar_fields.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import numpy as np 4 | 5 | 6 | @pytest.mark.usefixtures("plane_pyntcloud") 7 | def test_plane_fit_max_dist(plane_pyntcloud): 8 | with np.errstate(divide="ignore", invalid="ignore"): 9 | plane_pyntcloud.add_scalar_field("plane_fit") 10 | assert sum(plane_pyntcloud.points["is_plane"]) == 4 11 | 12 | with np.errstate(divide="ignore", invalid="ignore"): 13 | plane_pyntcloud.add_scalar_field("plane_fit", max_dist=0.4) 14 | assert sum(plane_pyntcloud.points["is_plane"]) == 5 15 | 16 | 17 | @pytest.mark.usefixtures("sphere_pyntcloud") 18 | def test_sphere_fit_max_dist(sphere_pyntcloud): 19 | with np.errstate(divide="ignore", invalid="ignore"): 20 | sphere_pyntcloud.add_scalar_field("sphere_fit") 21 | assert sum(sphere_pyntcloud.points["is_sphere"]) == 4 22 | 23 | with np.errstate(divide="ignore", invalid="ignore"): 24 | sphere_pyntcloud.add_scalar_field("sphere_fit", max_dist=0.5) 25 | assert sum(sphere_pyntcloud.points["is_sphere"]) == 5 26 | 27 | 28 | @pytest.mark.usefixtures("pyntcloud_with_rgb_and_normals") 29 | def test_spherical_coords_bounds(pyntcloud_with_rgb_and_normals): 30 | with np.errstate(divide="ignore", invalid="ignore"): 31 | pyntcloud_with_rgb_and_normals.add_scalar_field("spherical_coords") 32 | assert all(pyntcloud_with_rgb_and_normals.points["polar"] >= 0) 33 | assert all(pyntcloud_with_rgb_and_normals.points["polar"] <= 180) 34 | 35 | assert all(pyntcloud_with_rgb_and_normals.points["azimuthal"] >= -180) 36 | assert all(pyntcloud_with_rgb_and_normals.points["azimuthal"] <= 180) 37 | 38 | with 
np.errstate(divide="ignore", invalid="ignore"): 39 | pyntcloud_with_rgb_and_normals.add_scalar_field( 40 | "spherical_coords", degrees=False 41 | ) 42 | 43 | assert all(pyntcloud_with_rgb_and_normals.points["polar"] >= 0) 44 | assert all(pyntcloud_with_rgb_and_normals.points["polar"] <= np.pi) 45 | 46 | assert all(pyntcloud_with_rgb_and_normals.points["azimuthal"] >= -np.pi) 47 | assert all(pyntcloud_with_rgb_and_normals.points["azimuthal"] <= np.pi) 48 | 49 | 50 | @pytest.mark.usefixtures("pyntcloud_with_rgb_and_normals") 51 | def test_cylindrical_coords_bounds(pyntcloud_with_rgb_and_normals): 52 | with np.errstate(divide="ignore", invalid="ignore"): 53 | pyntcloud_with_rgb_and_normals.add_scalar_field("cylindrical_coords") 54 | assert all(pyntcloud_with_rgb_and_normals.points["angular_cylindrical"] >= -90) 55 | assert all(pyntcloud_with_rgb_and_normals.points["angular_cylindrical"] <= 270) 56 | 57 | with np.errstate(divide="ignore", invalid="ignore"): 58 | pyntcloud_with_rgb_and_normals.add_scalar_field( 59 | "cylindrical_coords", degrees=False 60 | ) 61 | assert all( 62 | pyntcloud_with_rgb_and_normals.points["angular_cylindrical"] >= -(np.pi / 2) 63 | ) 64 | assert all( 65 | pyntcloud_with_rgb_and_normals.points["angular_cylindrical"] <= (np.pi * 1.5) 66 | ) 67 | -------------------------------------------------------------------------------- /tests/integration/structures/test_voxelgrid_structures.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import numpy as np 4 | import pandas as pd 5 | 6 | from pyntcloud import PyntCloud 7 | 8 | 9 | def test_default_number_of_voxels_per_axis(simple_pyntcloud): 10 | voxelgrid_id = simple_pyntcloud.add_structure("voxelgrid") 11 | voxelgrid = simple_pyntcloud.structures[voxelgrid_id] 12 | assert voxelgrid.n_voxels == 1 13 | assert np.all(voxelgrid.voxel_x == 0) 14 | assert np.all(voxelgrid.voxel_y == 0) 15 | assert np.all(voxelgrid.voxel_z == 0) 16 | assert np.all(voxelgrid.voxel_n == 0) 17 | feature_vector = voxelgrid.get_feature_vector() 18 | np.testing.assert_array_equal(feature_vector, np.array([[[1.0]]])) 19 | neighbors = voxelgrid.get_voxel_neighbors(0) 20 | assert neighbors == [0] 21 | 22 | 23 | @pytest.mark.parametrize( 24 | "n_x, n_y, n_z, expected_voxel_n", 25 | [ 26 | (2, 1, 1, [0, 0, 0, 0, 1, 1]), 27 | (1, 2, 1, [0, 0, 0, 0, 1, 1]), 28 | (1, 1, 2, [0, 0, 0, 0, 1, 1]), 29 | (2, 2, 2, [0, 0, 0, 0, 7, 7]), 30 | (5, 1, 1, [0, 0, 1, 2, 4, 4]), 31 | (1, 5, 1, [0, 0, 1, 2, 4, 4]), 32 | (1, 1, 5, [0, 0, 1, 2, 4, 4]), 33 | (5, 5, 5, [0, 0, 31, 62, 124, 124]), 34 | (2, 5, 5, [0, 0, 6, 12, 49, 49]), 35 | (2, 5, 2, [0, 0, 2, 4, 19, 19]), 36 | (5, 2, 5, [0, 0, 11, 22, 49, 49]), 37 | (5, 5, 2, [0, 0, 12, 24, 49, 49]), 38 | ], 39 | ) 40 | def test_expected_voxel_n_for_different_number_of_voxels_per_axis( 41 | simple_pyntcloud, n_x, n_y, n_z, expected_voxel_n 42 | ): 43 | voxelgrid_id = simple_pyntcloud.add_structure( 44 | "voxelgrid", n_x=n_x, n_y=n_y, n_z=n_z 45 | ) 46 | voxelgrid = simple_pyntcloud.structures[voxelgrid_id] 47 | assert np.all(voxelgrid.voxel_n == expected_voxel_n) 48 | 49 | 50 | def test_sizes_override_number_of_voxels_per_axis(simple_pyntcloud): 51 | voxelgrid_id = simple_pyntcloud.add_structure( 52 | "voxelgrid", size_x=0.2, size_y=0.2, size_z=0.2 53 | ) 54 | voxelgrid = simple_pyntcloud.structures[voxelgrid_id] 55 | assert np.all(voxelgrid.x_y_z == [5, 5, 5]) 56 | assert voxelgrid.n_voxels == 125 57 | assert np.all(voxelgrid.voxel_n == [0, 0, 31, 62, 124, 124]) 58 | 59 | 
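# Note on the two voxelgrid tests above: n_x/n_y/n_z fix the number of voxels per axis, while
# size_x/size_y/size_z take precedence and derive the per-axis counts from the cloud's bounding
# box (0.2-sized voxels over this fixture give the 5 x 5 x 5 grid asserted above). Illustrative
# sketch only; `cloud` is an assumed PyntCloud instance, not a fixture defined in this file:
#
#     voxelgrid_id = cloud.add_structure("voxelgrid", size_x=0.2, size_y=0.2, size_z=0.2)
#     voxelgrid = cloud.structures[voxelgrid_id]
#     feature_vector = voxelgrid.get_feature_vector()  # default (binary) occupancy grid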
60 | @pytest.mark.parametrize( 61 | "x,y,z", 62 | [ 63 | ([0, 1.0], [0, 0.5], [0, 0.5]), 64 | ([0, 0.5], [0, 1.0], [0, 0.5]), 65 | ([0, 0.5], [0, 0.5], [0, 1.0]), 66 | ], 67 | ) 68 | def test_regular_bounding_box_changes_the_shape_of_the_bounding_box(x, y, z): 69 | cloud = PyntCloud( 70 | pd.DataFrame( 71 | data={ 72 | "x": np.array(x, dtype=np.float32), 73 | "y": np.array(y, dtype=np.float32), 74 | "z": np.array(z, dtype=np.float32), 75 | } 76 | ) 77 | ) 78 | 79 | voxelgrid_id = cloud.add_structure( 80 | "voxelgrid", n_x=2, n_y=2, n_z=2, regular_bounding_box=False 81 | ) 82 | voxelgrid = cloud.structures[voxelgrid_id] 83 | 84 | irregular_last_centroid = voxelgrid.voxel_centers[-1] 85 | 86 | voxelgrid_id = cloud.add_structure("voxelgrid", n_x=2, n_y=2, n_z=2) 87 | voxelgrid = cloud.structures[voxelgrid_id] 88 | 89 | regular_last_centroid = voxelgrid.voxel_centers[-1] 90 | 91 | assert np.all(irregular_last_centroid <= regular_last_centroid) 92 | -------------------------------------------------------------------------------- /tests/integration/test_core_class.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import numpy as np 3 | import pandas as pd 4 | from shutil import rmtree 5 | from pyntcloud import PyntCloud 6 | 7 | 8 | def test_points(): 9 | """PyntCloud.points. 10 | 11 | - Points must be a pandas DataFrame 12 | - DataFrame must have at least "x", "y" and "z" named columns 13 | - When PyntCloud.points is re-assigned all structures must be removed 14 | 15 | """ 16 | points = np.random.rand(10, 3) 17 | 18 | # not dataframe 19 | with pytest.raises(TypeError): 20 | PyntCloud(points) 21 | 22 | points = pd.DataFrame(points) 23 | 24 | # not x, y, z 25 | with pytest.raises(ValueError): 26 | PyntCloud(points) 27 | 28 | points = pd.DataFrame(points.values, columns=["x", "y", "z"]) 29 | 30 | assert PyntCloud(points) 31 | 32 | cloud = PyntCloud(points) 33 | 34 | cloud.add_structure("voxelgrid") 35 | 36 | assert len(cloud.structures) == 1 37 | 38 | # dummy filter 39 | x_above_05 = cloud.points["x"] > 0.5 40 | cloud.points = cloud.points[x_above_05] 41 | 42 | assert len(cloud.structures) == 0 43 | 44 | 45 | def test_repr(): 46 | """PyntCloud.__repr__. 47 | 48 | - When custom attributes are added, __repr__ must show their name and type 49 | 50 | """ 51 | points = np.random.rand(10, 3) 52 | points = pd.DataFrame(points, columns=["x", "y", "z"]) 53 | cloud = PyntCloud(points) 54 | 55 | # some dummy attribute 56 | important_dict = {"black": "Carl", "white": "Lenny"} 57 | cloud.important_information = important_dict 58 | 59 | reprstring = cloud.__repr__() 60 | reprstring = reprstring.split("\n") 61 | 62 | assert reprstring[-2].strip() == "important_information: <class 'dict'>" 63 | 64 | 65 | def test_split_on(data_path): 66 | """PyntCloud.split_on. 
67 | 68 | - Raise KeyError on invalid scalar field 69 | - Raise ValueError on invalid save_format 70 | - and_return should return list of PyntClouds 71 | - Implicitly check save_path is working 72 | 73 | """ 74 | cloud = PyntCloud.from_file(str(data_path / "mnist.npz")) 75 | vg_id = cloud.add_structure("voxelgrid", n_x=2, n_y=2, n_z=2) 76 | 77 | voxel_n = cloud.add_scalar_field("voxel_n", voxelgrid_id=vg_id) 78 | 79 | with pytest.raises(KeyError): 80 | cloud.split_on("bad_sf") 81 | 82 | with pytest.raises(ValueError): 83 | cloud.split_on(voxel_n, save_format="bad_format") 84 | 85 | output = cloud.split_on(voxel_n, save_path="tmp_out") 86 | 87 | assert output is None 88 | 89 | output = cloud.split_on(voxel_n, and_return=True, save_path="tmp_out") 90 | 91 | assert len(output) == 8 92 | 93 | rmtree("tmp_out") 94 | -------------------------------------------------------------------------------- /tests/unit/filters/test_kdtree_filters.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from numpy.testing import assert_array_equal 4 | 5 | from pyntcloud.filters.kdtree import ( 6 | KDTreeFilter, 7 | RadiusOutlierRemovalFilter, 8 | StatisticalOutlierRemovalFilter, 9 | ) 10 | 11 | 12 | @pytest.mark.parametrize("kdtree_id", ["FOO", "K", "K(10)", "K(16", "K16)", "K16"]) 13 | @pytest.mark.usefixtures("pyntcloud_with_kdtree_and_kdtree_id") 14 | def test_KDTreeFilter_raises_KeyError_if_id_is_not_valid( 15 | pyntcloud_with_kdtree_and_kdtree_id, kdtree_id 16 | ): 17 | cloud, true_id = pyntcloud_with_kdtree_and_kdtree_id 18 | filter = KDTreeFilter(pyntcloud=cloud, kdtree_id=kdtree_id) 19 | with pytest.raises(KeyError): 20 | filter.extract_info() 21 | 22 | 23 | @pytest.mark.parametrize( 24 | "k,r,expected_result", 25 | [ 26 | (2, 0.2, [True, True, True, False, True, True]), 27 | (3, 0.2, [False, True, False, False, False, False]), 28 | (3, 0.35, [True, True, True, False, False, False]), 29 | ], 30 | ) 31 | @pytest.mark.usefixtures("pyntcloud_with_kdtree_and_kdtree_id") 32 | def test_RORFilter_expected_results( 33 | pyntcloud_with_kdtree_and_kdtree_id, k, r, expected_result 34 | ): 35 | cloud, kdtree_id = pyntcloud_with_kdtree_and_kdtree_id 36 | filter = RadiusOutlierRemovalFilter(pyntcloud=cloud, kdtree_id=kdtree_id, k=k, r=r) 37 | filter.extract_info() 38 | result = filter.compute() 39 | 40 | assert_array_equal(result, expected_result) 41 | 42 | 43 | @pytest.mark.parametrize( 44 | "k,z_max,expected_result", [(2, 0.5, [True, True, True, False, True, True])] 45 | ) 46 | @pytest.mark.usefixtures("pyntcloud_with_kdtree_and_kdtree_id") 47 | def test_SORFilter_expected_results( 48 | pyntcloud_with_kdtree_and_kdtree_id, k, z_max, expected_result 49 | ): 50 | cloud, kdtree_id = pyntcloud_with_kdtree_and_kdtree_id 51 | filter = StatisticalOutlierRemovalFilter( 52 | pyntcloud=cloud, kdtree_id=kdtree_id, k=k, z_max=z_max 53 | ) 54 | filter.extract_info() 55 | result = filter.compute() 56 | 57 | assert_array_equal(result, expected_result) 58 | -------------------------------------------------------------------------------- /tests/unit/filters/test_xyz_filters.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from numpy.testing import assert_array_equal 4 | 5 | from pyntcloud.filters.xyz import BoundingBoxFilter 6 | 7 | 8 | @pytest.mark.usefixtures("simple_pyntcloud") 9 | def test_BoundingBoxFilter_default_values(simple_pyntcloud): 10 | """ 11 | Default bounding box values are infinite so all points 
12 | should pass the filter. 13 | """ 14 | bbox_filter = BoundingBoxFilter(pyntcloud=simple_pyntcloud) 15 | bbox_filter.extract_info() 16 | result = bbox_filter.compute() 17 | assert all(result) 18 | 19 | 20 | @pytest.mark.parametrize( 21 | "bounding_box,expected_result", 22 | [ 23 | ( 24 | {"min_x": 0.4, "max_x": 0.6, "min_y": 0.4, "max_y": 0.6}, 25 | [False, False, False, True, False, False], 26 | ), 27 | ( 28 | { 29 | "min_x": 0.4, 30 | }, 31 | [False, False, False, True, True, True], 32 | ), 33 | ( 34 | { 35 | "max_x": 1.0, 36 | }, 37 | [True, True, True, True, True, False], 38 | ), 39 | ], 40 | ) 41 | @pytest.mark.usefixtures("simple_pyntcloud") 42 | def test_BoundingBoxFilter_expected_results( 43 | simple_pyntcloud, bounding_box, expected_result 44 | ): 45 | bbox_filter = BoundingBoxFilter(pyntcloud=simple_pyntcloud, **bounding_box) 46 | bbox_filter.extract_info() 47 | result = bbox_filter.compute() 48 | 49 | assert_array_equal(result, expected_result) 50 | -------------------------------------------------------------------------------- /tests/unit/ransac/test_samplers_ransac.py: -------------------------------------------------------------------------------- 1 | from pyntcloud.ransac.samplers import VoxelgridRansacSampler 2 | 3 | 4 | def test_voxelgrid_sampler_return_points_in_the_same_voxel( 5 | pyntcloud_with_rgb_and_normals, 6 | ): 7 | points = pyntcloud_with_rgb_and_normals.xyz 8 | 9 | sampler = VoxelgridRansacSampler(points=points, k=2, size_x=0.5) 10 | 11 | sample = sampler.get_sample() 12 | 13 | assert sample[0][0] - sample[1][0] < 0.5 14 | -------------------------------------------------------------------------------- /tests/unit/samplers/test_mesh_samplers.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from pyntcloud.samplers import RandomMeshSampler 4 | 5 | 6 | @pytest.mark.parametrize("n", [1, 5, 10, 50, 100]) 7 | @pytest.mark.usefixtures("diamond") 8 | def test_RandomMeshSampler_n_argument(diamond, n): 9 | sampler = RandomMeshSampler(pyntcloud=diamond, n=n, rgb=True, normals=True) 10 | sampler.extract_info() 11 | 12 | sample = sampler.compute() 13 | assert len(sample) == n 14 | 15 | 16 | @pytest.mark.parametrize( 17 | "rgb,normals", [(False, False), (True, False), (True, True), (False, True)] 18 | ) 19 | @pytest.mark.usefixtures("diamond") 20 | def test_RandomMeshSampler_rgb_normals_optional_arguments(diamond, rgb, normals): 21 | sampler = RandomMeshSampler(pyntcloud=diamond, n=10, rgb=rgb, normals=normals) 22 | sampler.extract_info() 23 | 24 | sample = sampler.compute() 25 | for x in ["red", "green", "blue"]: 26 | assert (x in sample) == rgb 27 | 28 | for x in ["nx", "ny", "nz"]: 29 | assert (x in sample) == normals 30 | 31 | 32 | @pytest.mark.parametrize("n", [1, 5, 10, 50, 100]) 33 | @pytest.mark.usefixtures("diamond") 34 | def test_RandomMeshSampler_sampled_points_bounds(diamond, n): 35 | sampler = RandomMeshSampler(pyntcloud=diamond, n=n, rgb=True, normals=True) 36 | sampler.extract_info() 37 | 38 | sample = sampler.compute() 39 | assert all(sample[["x", "y", "z"]].values.max(0) <= diamond.xyz.max(0)) 40 | assert all(sample[["x", "y", "z"]].values.min(0) >= diamond.xyz.min(0)) 41 | -------------------------------------------------------------------------------- /tests/unit/samplers/test_points_samplers.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from pyntcloud.samplers import RandomPointsSampler 4 | from pyntcloud.utils.array import 
point_in_array_2D 5 | 6 | 7 | @pytest.mark.parametrize("n", [1, 5, 6]) 8 | @pytest.mark.usefixtures("simple_pyntcloud") 9 | def test_RandomPointsSampler_n_argument(simple_pyntcloud, n): 10 | sampler = RandomPointsSampler(pyntcloud=simple_pyntcloud, n=n) 11 | sampler.extract_info() 12 | 13 | sample = sampler.compute() 14 | assert len(sample) == n 15 | 16 | 17 | @pytest.mark.usefixtures("simple_pyntcloud") 18 | def test_RandomPointsSampler_raises_ValueError_on_invalid_n(simple_pyntcloud): 19 | sampler = RandomPointsSampler(pyntcloud=simple_pyntcloud, n=10) 20 | sampler.extract_info() 21 | with pytest.raises(ValueError): 22 | sampler.compute() 23 | 24 | 25 | @pytest.mark.usefixtures("simple_pyntcloud") 26 | def test_RandomPointsSampler_sampled_points_are_from_original(simple_pyntcloud): 27 | for i in range(10): 28 | sampler = RandomPointsSampler(pyntcloud=simple_pyntcloud, n=1) 29 | sampler.extract_info() 30 | 31 | sample = sampler.compute() 32 | assert point_in_array_2D(sample, simple_pyntcloud.xyz) 33 | -------------------------------------------------------------------------------- /tests/unit/scalar_fields/test_eigenvalues_scalar_fields.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import numpy as np 4 | 5 | from pyntcloud.scalar_fields.eigenvalues import ( 6 | Anisotropy, 7 | Curvature, 8 | Eigenentropy, 9 | EigenSum, 10 | Linearity, 11 | Omnivariance, 12 | Planarity, 13 | Sphericity, 14 | ) 15 | 16 | 17 | @pytest.mark.parametrize("scalar_field_class", [Anisotropy, Planarity]) 18 | @pytest.mark.usefixtures("pyntcloud_and_eigenvalues") 19 | def test_EigenValuesScalarFields_where_coplanar_points_have_value_of_1( 20 | pyntcloud_and_eigenvalues, scalar_field_class 21 | ): 22 | cloud, ev = pyntcloud_and_eigenvalues 23 | scalar_field = scalar_field_class(pyntcloud=cloud, ev=ev) 24 | scalar_field.extract_info() 25 | with np.errstate(divide="ignore", invalid="ignore"): 26 | scalar_field.compute() 27 | scalar_field_values = next(iter(scalar_field.to_be_added.values())) 28 | assert all(scalar_field_values[:5] == 1) 29 | assert scalar_field_values[5] < 1 30 | 31 | 32 | @pytest.mark.parametrize( 33 | "scalar_field_class", [Curvature, Eigenentropy, Linearity, Omnivariance, Sphericity] 34 | ) 35 | @pytest.mark.usefixtures("pyntcloud_and_eigenvalues") 36 | def test_EigenValuesScalarFields_where_coplanar_points_have_value_of_0( 37 | pyntcloud_and_eigenvalues, scalar_field_class 38 | ): 39 | cloud, ev = pyntcloud_and_eigenvalues 40 | scalar_field = scalar_field_class(pyntcloud=cloud, ev=ev) 41 | scalar_field.extract_info() 42 | with np.errstate(divide="ignore", invalid="ignore"): 43 | scalar_field.compute() 44 | scalar_field_values = next(iter(scalar_field.to_be_added.values())) 45 | assert all(scalar_field_values[:5] == 0) 46 | assert scalar_field_values[5] > 0 47 | 48 | 49 | @pytest.mark.usefixtures("pyntcloud_and_eigenvalues") 50 | def test_EigenSum_values(pyntcloud_and_eigenvalues): 51 | cloud, ev = pyntcloud_and_eigenvalues 52 | scalar_field = EigenSum(pyntcloud=cloud, ev=ev) 53 | scalar_field.extract_info() 54 | with np.errstate(divide="ignore", invalid="ignore"): 55 | scalar_field.compute() 56 | scalar_field_values = next(iter(scalar_field.to_be_added.values())) 57 | assert all(scalar_field_values > 0) 58 | -------------------------------------------------------------------------------- /tests/unit/scalar_fields/test_normals_scalar_fields.py: -------------------------------------------------------------------------------- 1 | import 
pytest 2 | 3 | import numpy as np 4 | 5 | from pyntcloud.scalar_fields.normals import ( 6 | InclinationDegrees, 7 | InclinationRadians, 8 | OrientationDegrees, 9 | OrientationRadians, 10 | ) 11 | 12 | 13 | @pytest.mark.parametrize( 14 | "scalar_field_class, min_val, max_val", 15 | [ 16 | (InclinationDegrees, 0, 180), 17 | (InclinationRadians, 0, np.pi), 18 | (OrientationDegrees, 0, 360), 19 | (OrientationRadians, 0, np.pi * 2), 20 | ], 21 | ) 22 | @pytest.mark.usefixtures("pyntcloud_with_rgb_and_normals") 23 | def test_NormalsScalarFields_bounds( 24 | pyntcloud_with_rgb_and_normals, scalar_field_class, min_val, max_val 25 | ): 26 | cloud = pyntcloud_with_rgb_and_normals 27 | scalar_field = scalar_field_class(pyntcloud=cloud) 28 | scalar_field.extract_info() 29 | with np.errstate(divide="ignore", invalid="ignore"): 30 | scalar_field.compute() 31 | scalar_field_values = next(iter(scalar_field.to_be_added.values())) 32 | assert all(scalar_field_values >= min_val) 33 | assert all(scalar_field_values <= max_val) 34 | -------------------------------------------------------------------------------- /tests/unit/scalar_fields/test_rgb_scalar_fields.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from pyntcloud.scalar_fields.rgb import ( 4 | HueSaturationValue, 5 | RelativeLuminance, 6 | RGBIntensity, 7 | ) 8 | 9 | 10 | @pytest.mark.usefixtures("pyntcloud_with_rgb_and_normals") 11 | def test_HueSaturationValue_bounds(pyntcloud_with_rgb_and_normals): 12 | scalar_field = HueSaturationValue(pyntcloud=pyntcloud_with_rgb_and_normals) 13 | scalar_field.extract_info() 14 | scalar_field.compute() 15 | assert min(scalar_field.to_be_added["H"]) >= 0 16 | assert max(scalar_field.to_be_added["H"]) <= 360 17 | assert min(scalar_field.to_be_added["S"]) >= 0 18 | assert max(scalar_field.to_be_added["S"]) <= 1 19 | assert min(scalar_field.to_be_added["V"]) >= 0 20 | assert max(scalar_field.to_be_added["V"]) <= 100 21 | 22 | 23 | @pytest.mark.usefixtures("pyntcloud_with_rgb_and_normals") 24 | def test_RelativeLuminance_bounds(pyntcloud_with_rgb_and_normals): 25 | scalar_field = RelativeLuminance(pyntcloud=pyntcloud_with_rgb_and_normals) 26 | scalar_field.extract_info() 27 | scalar_field.compute() 28 | assert min(scalar_field.to_be_added["relative_luminance"]) >= 0 29 | assert max(scalar_field.to_be_added["relative_luminance"]) <= 255 30 | 31 | 32 | @pytest.mark.usefixtures("pyntcloud_with_rgb_and_normals") 33 | def test_RGBIntensity_bounds(pyntcloud_with_rgb_and_normals): 34 | scalar_field = RGBIntensity(pyntcloud=pyntcloud_with_rgb_and_normals) 35 | scalar_field.extract_info() 36 | scalar_field.compute() 37 | assert min(scalar_field.to_be_added["Ri"]) >= 0 38 | assert max(scalar_field.to_be_added["Ri"]) <= 1 39 | assert min(scalar_field.to_be_added["Gi"]) >= 0 40 | assert max(scalar_field.to_be_added["Gi"]) <= 1 41 | assert min(scalar_field.to_be_added["Bi"]) >= 0 42 | assert max(scalar_field.to_be_added["Bi"]) <= 1 43 | -------------------------------------------------------------------------------- /tests/unit/scalar_fields/test_voxlegrid_scalar_fields.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import numpy as np 4 | 5 | from pyntcloud.scalar_fields.voxelgrid import ( 6 | EuclideanClusters, 7 | VoxelgridScalarField, 8 | VoxelN, 9 | VoxelX, 10 | VoxelY, 11 | VoxelZ, 12 | ) 13 | 14 | 15 | @pytest.mark.parametrize( 16 | "voxelgrid_id", 17 | [ 18 | "FOO", 19 | "V([2, 2, 3],None,True)", 20 
| "V([2, 2, 2],True,True))", 21 | "V([2, 2, 2],None,False)", 22 | "V([2, 2, 2])", 23 | "V(None,True)", 24 | ], 25 | ) 26 | @pytest.mark.usefixtures("pyntcloud_with_voxelgrid_and_voxelgrid_id") 27 | def test_VoxelgridScalarField_raises_KeyError_if_id_is_not_valid( 28 | pyntcloud_with_voxelgrid_and_voxelgrid_id, voxelgrid_id 29 | ): 30 | cloud, true_id = pyntcloud_with_voxelgrid_and_voxelgrid_id 31 | scalar_field = VoxelgridScalarField(pyntcloud=cloud, voxelgrid_id=voxelgrid_id) 32 | with pytest.raises(KeyError): 33 | scalar_field.extract_info() 34 | 35 | 36 | @pytest.mark.parametrize( 37 | "scalar_field_class, min_val, max_val", 38 | [ 39 | (VoxelN, 0, 4 * 4 * 4), 40 | (VoxelX, 0, 4), 41 | (VoxelY, 0, 4), 42 | (VoxelZ, 0, 4), 43 | ], 44 | ) 45 | @pytest.mark.usefixtures("pyntcloud_with_voxelgrid_and_voxelgrid_id") 46 | def test_VoxelgridScalarField_bounds( 47 | pyntcloud_with_voxelgrid_and_voxelgrid_id, scalar_field_class, min_val, max_val 48 | ): 49 | cloud, voxelgrid_id = pyntcloud_with_voxelgrid_and_voxelgrid_id 50 | scalar_field = scalar_field_class(pyntcloud=cloud, voxelgrid_id=voxelgrid_id) 51 | scalar_field.extract_info() 52 | with np.errstate(divide="ignore", invalid="ignore"): 53 | scalar_field.compute() 54 | scalar_field_values = next(iter(scalar_field.to_be_added.values())) 55 | assert all(scalar_field_values >= min_val) 56 | assert all(scalar_field_values <= max_val) 57 | 58 | 59 | @pytest.mark.usefixtures("pyntcloud_with_clusters_and_voxelgrid_id") 60 | def test_EuclideanClusters_values(pyntcloud_with_clusters_and_voxelgrid_id): 61 | cloud, voxelgrid_id = pyntcloud_with_clusters_and_voxelgrid_id 62 | scalar_field = EuclideanClusters(pyntcloud=cloud, voxelgrid_id=voxelgrid_id) 63 | scalar_field.extract_info() 64 | with np.errstate(divide="ignore", invalid="ignore"): 65 | scalar_field.compute() 66 | scalar_field_values = next(iter(scalar_field.to_be_added.values())) 67 | assert all(scalar_field_values[:5] != scalar_field_values[5:]) 68 | -------------------------------------------------------------------------------- /tests/unit/scalar_fields/test_xyz_scalar_fields.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import numpy as np 4 | 5 | from pyntcloud.scalar_fields.xyz import ( 6 | PlaneFit, 7 | SphereFit, 8 | SphericalCoordinates, 9 | CylindricalCoordinates, 10 | ) 11 | 12 | 13 | @pytest.mark.usefixtures("plane_pyntcloud") 14 | def test_PlaneFit_max_dist(plane_pyntcloud): 15 | scalar_field = PlaneFit(pyntcloud=plane_pyntcloud) 16 | scalar_field.extract_info() 17 | with np.errstate(divide="ignore", invalid="ignore"): 18 | scalar_field.compute() 19 | assert sum(scalar_field.to_be_added["is_plane"]) == 4 20 | 21 | scalar_field = PlaneFit(pyntcloud=plane_pyntcloud, max_dist=0.4) 22 | scalar_field.extract_info() 23 | with np.errstate(divide="ignore", invalid="ignore"): 24 | scalar_field.compute() 25 | assert sum(scalar_field.to_be_added["is_plane"]) == 5 26 | 27 | 28 | @pytest.mark.usefixtures("sphere_pyntcloud") 29 | def test_SphereFit_max_dist(sphere_pyntcloud): 30 | scalar_field = SphereFit(pyntcloud=sphere_pyntcloud) 31 | scalar_field.extract_info() 32 | with np.errstate(divide="ignore", invalid="ignore"): 33 | scalar_field.compute() 34 | assert sum(scalar_field.to_be_added["is_sphere"]) == 4 35 | 36 | scalar_field = SphereFit(pyntcloud=sphere_pyntcloud, max_dist=0.25) 37 | scalar_field.extract_info() 38 | with np.errstate(divide="ignore", invalid="ignore"): 39 | scalar_field.compute() 40 | assert 
sum(scalar_field.to_be_added["is_sphere"]) == 5 41 | 42 | 43 | @pytest.mark.usefixtures("pyntcloud_with_rgb_and_normals") 44 | def test_SphericalCoordinates_bounds(pyntcloud_with_rgb_and_normals): 45 | scalar_field = SphericalCoordinates(pyntcloud=pyntcloud_with_rgb_and_normals) 46 | scalar_field.extract_info() 47 | 48 | with np.errstate(divide="ignore", invalid="ignore"): 49 | scalar_field.compute() 50 | 51 | assert all(scalar_field.to_be_added["polar"] >= 0) 52 | assert all(scalar_field.to_be_added["polar"] <= 180) 53 | 54 | assert all(scalar_field.to_be_added["azimuthal"] >= -180) 55 | assert all(scalar_field.to_be_added["azimuthal"] <= 180) 56 | 57 | scalar_field = SphericalCoordinates( 58 | pyntcloud=pyntcloud_with_rgb_and_normals, degrees=False 59 | ) 60 | scalar_field.extract_info() 61 | 62 | with np.errstate(divide="ignore", invalid="ignore"): 63 | scalar_field.compute() 64 | 65 | assert all(scalar_field.to_be_added["polar"] >= 0) 66 | assert all(scalar_field.to_be_added["polar"] <= np.pi) 67 | 68 | assert all(scalar_field.to_be_added["azimuthal"] >= -np.pi) 69 | assert all(scalar_field.to_be_added["azimuthal"] <= np.pi) 70 | 71 | 72 | @pytest.mark.usefixtures("pyntcloud_with_rgb_and_normals") 73 | def test_CylindricalCoordinates_bounds(pyntcloud_with_rgb_and_normals): 74 | scalar_field = CylindricalCoordinates(pyntcloud=pyntcloud_with_rgb_and_normals) 75 | scalar_field.extract_info() 76 | with np.errstate(divide="ignore", invalid="ignore"): 77 | scalar_field.compute() 78 | 79 | assert all(scalar_field.to_be_added["angular_cylindrical"] >= -90) 80 | assert all(scalar_field.to_be_added["angular_cylindrical"] <= 270) 81 | 82 | scalar_field = CylindricalCoordinates( 83 | pyntcloud=pyntcloud_with_rgb_and_normals, degrees=False 84 | ) 85 | scalar_field.extract_info() 86 | with np.errstate(divide="ignore", invalid="ignore"): 87 | scalar_field.compute() 88 | 89 | assert all(scalar_field.to_be_added["angular_cylindrical"] >= -(np.pi / 2)) 90 | assert all(scalar_field.to_be_added["angular_cylindrical"] <= (np.pi * 1.5)) 91 | -------------------------------------------------------------------------------- /tests/unit/structures/test_voxelgrid_structures.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import numpy as np 4 | import pandas as pd 5 | 6 | from pyntcloud import PyntCloud 7 | from pyntcloud.structures import VoxelGrid 8 | 9 | 10 | def test_default_number_of_voxels_per_axis(simple_pyntcloud): 11 | voxelgrid = VoxelGrid(points=simple_pyntcloud.xyz) 12 | voxelgrid.compute() 13 | assert voxelgrid.n_voxels == 1 14 | assert np.all(voxelgrid.voxel_x == 0) 15 | assert np.all(voxelgrid.voxel_y == 0) 16 | assert np.all(voxelgrid.voxel_z == 0) 17 | assert np.all(voxelgrid.voxel_n == 0) 18 | feature_vector = voxelgrid.get_feature_vector() 19 | np.testing.assert_array_equal(feature_vector, np.array([[[1.0]]])) 20 | neighbors = voxelgrid.get_voxel_neighbors(0) 21 | assert neighbors == [0] 22 | 23 | 24 | @pytest.mark.parametrize( 25 | "n_x, n_y, n_z, expected_voxel_n", 26 | [ 27 | (2, 1, 1, [0, 0, 0, 0, 1, 1]), 28 | (1, 2, 1, [0, 0, 0, 0, 1, 1]), 29 | (1, 1, 2, [0, 0, 0, 0, 1, 1]), 30 | (2, 2, 2, [0, 0, 0, 0, 7, 7]), 31 | (5, 1, 1, [0, 0, 1, 2, 4, 4]), 32 | (1, 5, 1, [0, 0, 1, 2, 4, 4]), 33 | (1, 1, 5, [0, 0, 1, 2, 4, 4]), 34 | (5, 5, 5, [0, 0, 31, 62, 124, 124]), 35 | (2, 5, 5, [0, 0, 6, 12, 49, 49]), 36 | (2, 5, 2, [0, 0, 2, 4, 19, 19]), 37 | (5, 2, 5, [0, 0, 11, 22, 49, 49]), 38 | (5, 5, 2, [0, 0, 12, 24, 49, 49]), 39 | ], 40 | ) 
41 | def test_expected_voxel_n_for_different_number_of_voxels_per_axis( 42 | simple_pyntcloud, n_x, n_y, n_z, expected_voxel_n 43 | ): 44 | voxelgrid = VoxelGrid(points=simple_pyntcloud.xyz, n_x=n_x, n_y=n_y, n_z=n_z) 45 | voxelgrid.compute() 46 | assert np.all(voxelgrid.voxel_n == expected_voxel_n) 47 | 48 | 49 | def test_sizes_override_number_of_voxels_per_axis(simple_pyntcloud): 50 | voxelgrid = VoxelGrid( 51 | points=simple_pyntcloud.xyz, size_x=0.2, size_y=0.2, size_z=0.2 52 | ) 53 | voxelgrid.compute() 54 | assert np.all(voxelgrid.x_y_z == [5, 5, 5]) 55 | assert voxelgrid.n_voxels == 125 56 | assert np.all(voxelgrid.voxel_n == [0, 0, 31, 62, 124, 124]) 57 | 58 | 59 | @pytest.mark.parametrize( 60 | "x,y,z", 61 | [ 62 | ([0, 1.0], [0, 0.5], [0, 0.5]), 63 | ([0, 0.5], [0, 1.0], [0, 0.5]), 64 | ([0, 0.5], [0, 0.5], [0, 1.0]), 65 | ], 66 | ) 67 | def test_regular_bounding_box_changes_the_shape_of_the_bounding_box(x, y, z): 68 | cloud = PyntCloud( 69 | pd.DataFrame( 70 | data={ 71 | "x": np.array(x, dtype=np.float32), 72 | "y": np.array(y, dtype=np.float32), 73 | "z": np.array(z, dtype=np.float32), 74 | } 75 | ) 76 | ) 77 | 78 | voxelgrid = VoxelGrid( 79 | points=cloud.xyz, n_x=2, n_y=2, n_z=2, regular_bounding_box=False 80 | ) 81 | voxelgrid.compute() 82 | 83 | irregular_last_centroid = voxelgrid.voxel_centers[-1] 84 | 85 | voxelgrid = VoxelGrid(points=cloud.xyz, n_x=2, n_y=2, n_z=2) 86 | voxelgrid.compute() 87 | 88 | regular_last_centroid = voxelgrid.voxel_centers[-1] 89 | 90 | assert np.all(irregular_last_centroid <= regular_last_centroid) 91 | 92 | 93 | @pytest.mark.parametrize( 94 | "mode", 95 | [ 96 | "binary", 97 | "density", 98 | "TDF", 99 | "x_mean", 100 | "y_mean", 101 | "z_mean", 102 | "x_max", 103 | "y_max", 104 | "z_max", 105 | ], 106 | ) 107 | def test_output_shape_of_all_feature_vector_modes(mode, simple_pyntcloud): 108 | voxelgrid = VoxelGrid( 109 | points=simple_pyntcloud.xyz, n_x=2, n_y=2, n_z=2, regular_bounding_box=False 110 | ) 111 | voxelgrid.compute() 112 | 113 | feature_vector = voxelgrid.get_feature_vector(mode=mode) 114 | 115 | assert feature_vector.shape == (2, 2, 2) 116 | --------------------------------------------------------------------------------
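The fixtures referenced throughout these tests (simple_pyntcloud, diamond, plane_pyntcloud, sphere_pyntcloud, pyntcloud_with_rgb_and_normals, data_path, and the related *_k_neighbors / *_voxelgrid_id helpers) are defined in tests/conftest.py, which appears in the tree above but is not reproduced in this section. For orientation only, the sketch below shows the general shape such a fixture and test can take; the fixture name tiny_pyntcloud and its point coordinates are illustrative assumptions, not the project's actual conftest definitions.

import numpy as np
import pandas as pd
import pytest

from pyntcloud import PyntCloud


@pytest.fixture
def tiny_pyntcloud():
    # Hypothetical six-point cloud; the real fixtures live in tests/conftest.py
    # and may use different coordinates, columns, and dtypes.
    coords = np.array([0.0, 0.1, 0.2, 0.5, 0.9, 1.1], dtype=np.float32)
    return PyntCloud(pd.DataFrame({"x": coords, "y": coords, "z": coords}))


def test_voxelgrid_centers_sample_is_pyntcloud(tiny_pyntcloud):
    # Mirrors the add_structure -> get_sample pattern used by the integration tests above.
    voxelgrid_id = tiny_pyntcloud.add_structure("voxelgrid", n_x=2, n_y=2, n_z=2)
    sample = tiny_pyntcloud.get_sample(
        "voxelgrid_centers", voxelgrid_id=voxelgrid_id, as_PyntCloud=True
    )
    assert isinstance(sample, PyntCloud)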