├── .dockerignore
├── .github
│   ├── dependabot.yml
│   └── workflows
│       ├── ci.yml
│       └── pypi.yml
├── .gitignore
├── .gitmodules
├── .pre-commit-config.yaml
├── .readthedocs.yml
├── CITATION.cff
├── CMakeLists.txt
├── COPYING.md
├── Dockerfile
├── LICENSE.md
├── MANIFEST.in
├── README.md
├── benchmarks
│   ├── CMakeLists.txt
│   ├── changepoint_detection_b.cpp
│   ├── cylindersearch_b.cpp
│   ├── distances_b.cpp
│   ├── multiscale_normal.cpp
│   ├── scalability.ipynb
│   └── scaling.cpp
├── codecov.yml
├── demo
│   ├── 4dobc-change_analysis.ipynb
│   ├── kmeans_clustering_of_time_series.ipynb
│   ├── m3c2-change_analysis.ipynb
│   ├── m3c2ep_change_analysis.ipynb
│   ├── principal_component_analysis_of_time_series.ipynb
│   └── registration_standard_ICP.ipynb
├── doc
│   ├── 4dobc-analysis.nblink
│   ├── 4dobc-creation.nblink
│   ├── 4dobc-customization.nblink
│   ├── CMakeLists.txt
│   ├── basic.rst
│   ├── callbacks.rst
│   ├── conf.py
│   ├── cppapi.rst
│   ├── customization.nblink
│   ├── faq.rst
│   ├── img
│   │   ├── data_vos_2022_kijkduin.png
│   │   ├── data_zahs_2022_ahk_2019_tls.png
│   │   ├── thumb_youtube_anders_isprs2021.png
│   │   ├── thumb_youtube_auto3dscapes.png
│   │   └── thumb_youtube_zahs_isprs2022.png
│   ├── index.rst
│   ├── intro.rst
│   ├── m3c2.nblink
│   ├── m3c2ep.nblink
│   ├── pbm3c2-longterm.nblink
│   ├── pbm3c2-segmented.nblink
│   ├── pbm3c2-tools.nblink
│   ├── pbm3c2.nblink
│   ├── pythonapi.rst
│   └── registration.nblink
├── img
│   ├── 4dobc_extraction_thumbnail.png
│   ├── 4dobc_extraction_thumbnail.svg
│   ├── kmeans_clustering_thumbnail.png
│   ├── kmeans_clustering_thumbnail.svg
│   ├── m3c2_change_analysis_thumbnail.png
│   ├── m3c2_change_analysis_thumbnail.svg
│   ├── m3c2ep_change_analysis_thumbnail.png
│   ├── m3c2ep_change_analysis_thumbnail.svg
│   ├── pbm3c2_thumbnail.png
│   ├── pbm3c2_thumbnail.svg
│   ├── pca_thumbnail.png
│   ├── pca_thumbnail.svg
│   ├── standard_icp_thumbnail.png
│   └── standard_icp_thumbnail.svg
├── include
│   └── py4dgeo
│       ├── compute.hpp
│       ├── epoch.hpp
│       ├── kdtree.hpp
│       ├── octree.hpp
│       ├── openmp.hpp
│       ├── py4dgeo.hpp
│       ├── pybind11_numpy_interop.hpp
│       ├── registration.hpp
│       ├── searchtree.hpp
│       └── segmentation.hpp
├── jupyter
│   ├── 4dobc-analysis.ipynb
│   ├── 4dobc-creation.ipynb
│   ├── 4dobc-customization.ipynb
│   ├── customization.ipynb
│   ├── m3c2.ipynb
│   ├── m3c2ep.ipynb
│   ├── pbm3c2-longterm.ipynb
│   ├── pbm3c2-segmented.ipynb
│   ├── pbm3c2-tools.ipynb
│   ├── pbm3c2.ipynb
│   └── registration.ipynb
├── lib
│   ├── directions.cpp
│   ├── distances.cpp
│   ├── epoch.cpp
│   ├── kdtree.cpp
│   ├── octree.cpp
│   ├── registration.cpp
│   ├── searchtree.cpp
│   └── segmentation.cpp
├── py4dgeo_logo.png
├── pyproject.toml
├── requirements-dev.txt
├── src
│   └── py4dgeo
│       ├── UpdateableZipFile.py
│       ├── __init__.py
│       ├── cloudcompare.py
│       ├── epoch.py
│       ├── fallback.py
│       ├── logger.py
│       ├── m3c2.py
│       ├── m3c2ep.py
│       ├── pbm3c2.py
│       ├── py4dgeo_python.cpp
│       ├── registration.py
│       ├── segmentation.py
│       └── util.py
└── tests
    ├── CMakeLists.txt
    ├── c++
    │   ├── CMakeLists.txt
    │   ├── directions_t.cpp
    │   ├── distances_t.cpp
    │   ├── epoch_t.cpp
    │   ├── kdtree_t.cpp
    │   ├── octree_t.cpp
    │   ├── registration_t.cpp
    │   ├── searchtrees_t.cpp
    │   ├── segmentation_t.cpp
    │   ├── tests.cpp
    │   ├── testsetup.cpp
    │   └── testsetup.hpp
    └── python
        ├── __init__.py
        ├── conftest.py
        ├── helpers.py
        ├── test_cloudcompare.py
        ├── test_epoch.py
        ├── test_fallback.py
        ├── test_kdtree.py
        ├── test_logger.py
        ├── test_m3c2.py
        ├── test_m3c2ep.py
        ├── test_octree.py
        ├── test_pbm3c2_compute_distances.py
        ├── test_pbm3c2_consistency.py
        ├── test_pbm3c2_predict.py
        ├── test_registration.py
        ├── test_segmentation.py
        └── test_util.py

/.dockerignore:
--------------------------------------------------------------------------------
1 | .git
2 | build
3 | Dockerfile
4 | env
5 |
6 | # scikit-build
editable directory 7 | _skbuild 8 | 9 | # VSCode Configuration 10 | .vscode 11 | 12 | # Prerequisites 13 | *.d 14 | 15 | # Compiled Object files 16 | *.slo 17 | *.lo 18 | *.o 19 | *.obj 20 | 21 | # Precompiled Headers 22 | *.gch 23 | *.pch 24 | 25 | # Compiled Dynamic libraries 26 | *.so 27 | *.dylib 28 | *.dll 29 | 30 | # Fortran module files 31 | *.mod 32 | *.smod 33 | 34 | # Compiled Static libraries 35 | *.lai 36 | *.la 37 | *.a 38 | *.lib 39 | 40 | # Executables 41 | *.exe 42 | *.out 43 | *.app 44 | 45 | # Byte-compiled / optimized / DLL files 46 | __pycache__/ 47 | *.py[cod] 48 | *$py.class 49 | 50 | # C extensions 51 | *.so 52 | 53 | # Distribution / packaging 54 | .Python 55 | build/ 56 | develop-eggs/ 57 | dist/ 58 | downloads/ 59 | eggs/ 60 | .eggs/ 61 | lib64/ 62 | parts/ 63 | sdist/ 64 | var/ 65 | wheels/ 66 | share/python-wheels/ 67 | *.egg-info/ 68 | .installed.cfg 69 | *.egg 70 | MANIFEST 71 | 72 | # PyInstaller 73 | # Usually these files are written by a python script from a template 74 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 75 | *.manifest 76 | *.spec 77 | 78 | # Installer logs 79 | pip-log.txt 80 | pip-delete-this-directory.txt 81 | 82 | # Unit test / coverage reports 83 | htmlcov/ 84 | .tox/ 85 | .nox/ 86 | .coverage 87 | .coverage.* 88 | .cache 89 | nosetests.xml 90 | coverage.xml 91 | *.cover 92 | *.py,cover 93 | .hypothesis/ 94 | .pytest_cache/ 95 | cover/ 96 | 97 | # Translations 98 | *.mo 99 | *.pot 100 | 101 | # Django stuff: 102 | *.log 103 | local_settings.py 104 | db.sqlite3 105 | db.sqlite3-journal 106 | 107 | # Flask stuff: 108 | instance/ 109 | .webassets-cache 110 | 111 | # Scrapy stuff: 112 | .scrapy 113 | 114 | # Sphinx documentation 115 | docs/_build/ 116 | 117 | # PyBuilder 118 | .pybuilder/ 119 | target/ 120 | 121 | # Jupyter Notebook 122 | .ipynb_checkpoints 123 | 124 | # IPython 125 | profile_default/ 126 | ipython_config.py 127 | 128 | # pyenv 129 | # For a library or package, you might want to ignore these files since the code is 130 | # intended to run in multiple environments; otherwise, check them in: 131 | # .python-version 132 | 133 | # pipenv 134 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 135 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 136 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 137 | # install all needed dependencies. 138 | #Pipfile.lock 139 | 140 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow
141 | __pypackages__/
142 |
143 | # Celery stuff
144 | celerybeat-schedule
145 | celerybeat.pid
146 |
147 | # SageMath parsed files
148 | *.sage.py
149 |
150 | # Environments
151 | .env
152 | .venv
153 | env/
154 | venv/
155 | ENV/
156 | env.bak/
157 | venv.bak/
158 |
159 | # Spyder project settings
160 | .spyderproject
161 | .spyproject
162 |
163 | # Rope project settings
164 | .ropeproject
165 |
166 | # mkdocs documentation
167 | /site
168 |
169 | # mypy
170 | .mypy_cache/
171 | .dmypy.json
172 | dmypy.json
173 |
174 | # Pyre type checker
175 | .pyre/
176 |
177 | # pytype static type analyzer
178 | .pytype/
179 |
180 | # Cython debug symbols
181 | cython_debug/
182 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 |   - package-ecosystem: "github-actions"
4 |     directory: "/"
5 |     schedule:
6 |       interval: "monthly"
7 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on:
4 |   # We run CI on pushes to the main branch
5 |   push:
6 |     branches:
7 |       - main
8 |   # and on all pull requests to the main branch
9 |   pull_request:
10 |     branches:
11 |       - main
12 |   # as well as upon manual triggers through the 'Actions' tab of the GitHub UI
13 |   workflow_dispatch:
14 |
15 | jobs:
16 |   build-and-test:
17 |     name: Testing on ${{matrix.os}} Py${{matrix.python-version}} - OpenMP ${{matrix.openmp}}
18 |     runs-on: ${{matrix.os}}
19 |     strategy:
20 |       matrix:
21 |         os: [ubuntu-latest, macos-13, macos-latest, windows-latest]
22 |         python-version: ["3.9", "3.13"]
23 |         openmp: ['ON', 'OFF']
24 |
25 |     steps:
26 |       - uses: actions/checkout@v4
27 |         with:
28 |           submodules: 'recursive'
29 |
30 |       - name: Set up Python
31 |         uses: actions/setup-python@v5
32 |         with:
33 |           python-version: ${{ matrix.python-version }}
34 |
35 |       - name: Install OpenMP
36 |         if: runner.os == 'macOS' && matrix.openmp == 'ON'
37 |         run: brew install libomp
38 |
39 |       - name: make build directory
40 |         run: cmake -E make_directory ${{runner.workspace}}/build
41 |
42 |       - name: Install Python development requirements
43 |         run: |
44 |           python -m pip install -r requirements-dev.txt
45 |           python -m pip install scikit-build-core pybind11
46 |
47 |       - name: Install Python package
48 |         run: |
49 |           python -m pip install --no-build-isolation --config-settings=build-dir="build" --config-settings=cmake.build-type="Debug" --config-settings=cmake.define.BUILD_TESTING="ON" -v .
50 | 51 | - name: Install py4dgeo test data 52 | shell: bash 53 | run: | 54 | mkdir -p $GITHUB_WORKSPACE/tests/data 55 | copy_py4dgeo_test_data $GITHUB_WORKSPACE/tests/data 56 | 57 | - name: run tests 58 | shell: bash 59 | working-directory: ${{runner.workspace}}/build 60 | run: ctest --output-on-failure -C Debug 61 | 62 | - name: Run Python tests 63 | run: | 64 | python -m pytest --nbval 65 | 66 | coverage-test: 67 | name: Coverage Testing 68 | runs-on: ubuntu-latest 69 | 70 | steps: 71 | - uses: actions/checkout@v4 72 | with: 73 | submodules: 'recursive' 74 | 75 | - name: Set up Python 76 | uses: actions/setup-python@v5 77 | with: 78 | python-version: "3.9" 79 | 80 | - name: Install LCov 81 | run: | 82 | sudo apt-get install -y lcov 83 | 84 | - name: Install development requirements 85 | run: | 86 | python -m pip install -r requirements-dev.txt 87 | python -m pip install scikit-build-core pybind11 88 | 89 | - name: Install Python package 90 | run: | 91 | python -m pip install --no-build-isolation --config-settings=build-dir="build" --config-settings=cmake.build-type="Debug" --config-settings=cmake.define.BUILD_TESTING="ON" --config-settings=cmake.define.CMAKE_CXX_FLAGS="--coverage" -v -e . 92 | 93 | - name: Install py4dgeo test data 94 | shell: bash 95 | run: | 96 | mkdir -p $GITHUB_WORKSPACE/tests/data 97 | copy_py4dgeo_test_data $GITHUB_WORKSPACE/tests/data 98 | 99 | - name: run tests 100 | shell: bash 101 | working-directory: ${{runner.workspace}}/py4dgeo/build 102 | run: | 103 | ctest -C Debug 104 | 105 | - name: collect coverage report 106 | shell: bash 107 | working-directory: ${{runner.workspace}}/py4dgeo 108 | run: | 109 | lcov --directory ./build --capture --output-file coverage.info --ignore-errors mismatch 110 | lcov_cobertura coverage.info -o coverage2.xml 111 | 112 | - name: Run coverage tests 113 | working-directory: ${{runner.workspace}}/py4dgeo 114 | run: | 115 | python -m pytest --cov=src --cov-report=xml 116 | 117 | - name: Upload coverage to Codecov.io 118 | working-directory: ${{runner.workspace}}/py4dgeo 119 | run: | 120 | curl --connect-timeout 10 --retry 5 -Os https://uploader.codecov.io/latest/linux/codecov 121 | chmod +x codecov 122 | ./codecov -f py4dgeo/coverage.xml -F python 123 | ./codecov -f coverage2.xml -F cxx 124 | 125 | address-sanitizer: 126 | name: Address sanitizer run 127 | runs-on: ubuntu-latest 128 | env: 129 | ASAN_OPTIONS: "detect_stack_use_after_return=1:check_initialization_order=1:strict_init_order=1" 130 | 131 | steps: 132 | - uses: actions/checkout@v4 133 | with: 134 | submodules: 'recursive' 135 | 136 | - name: Set up Python 137 | uses: actions/setup-python@v5 138 | with: 139 | python-version: "3.10" 140 | 141 | - name: Install Python development requirements 142 | run: | 143 | python -m pip install -r requirements-dev.txt 144 | python -m pip install scikit-build-core pybind11 145 | 146 | - name: Install Python package 147 | run: | 148 | python -m pip install --no-build-isolation --config-settings=build-dir="build" --config-settings=cmake.build-type="Debug" --config-settings=cmake.define.BUILD_TESTING="ON" --config-settings=cmake.define.CMAKE_CXX_FLAGS="-fsanitize=address -fsanitize-address-use-after-scope -fno-omit-frame-pointer" --config-settings=cmake.define.CMAKE_EXE_LINKER_FLAGS="-fsanitize=address -fsanitize-address-use-after-scope -fno-omit-frame-pointer" -v -e . 
149 | 150 | - name: Install py4dgeo test data 151 | shell: bash 152 | working-directory: ${{runner.workspace}}/py4dgeo 153 | run: | 154 | mkdir -p $GITHUB_WORKSPACE/tests/data 155 | ASAN_OPTIONS="halt_on_error=0" LD_PRELOAD="$(gcc -print-file-name=libasan.so) $(gcc -print-file-name=libstdc++.so)" PYTHONPATH=src copy_py4dgeo_test_data $GITHUB_WORKSPACE/tests/data 156 | 157 | - name: run c++ tests 158 | shell: bash 159 | working-directory: ${{runner.workspace}}/py4dgeo/build 160 | run: LD_PRELOAD="$(gcc -print-file-name=libasan.so) $(gcc -print-file-name=libstdc++.so)" ctest --rerun-failed --output-on-failure -C Debug 161 | 162 | - name: run python tests 163 | shell: bash 164 | working-directory: ${{runner.workspace}}/py4dgeo 165 | # for LD_PRELOAD see https://github.com/google/sanitizers/issues/934#issuecomment-649516500 166 | run: | 167 | mv ./build/_py4dgeo.*.so src/py4dgeo/. 168 | echo "leak:/usr/bin/bash" > supp.txt 169 | echo "leak:_PyObject_New" >> supp.txt 170 | echo "leak:_PyObject_GC" >> supp.txt 171 | echo "leak:_PyUnicodeWriter_Finish">> supp.txt 172 | echo "leak:insert_to_emptydict" >> supp.txt 173 | echo "leak:new_keys_object" >> supp.txt 174 | echo "leak:PyArrayMethod_FromSpec_int" >> supp.txt 175 | echo "leak:PyDict_Copy" >> supp.txt 176 | echo "leak:PyUnicode_New" >> supp.txt 177 | echo "leak:pyo3::types::function::PyCFunction::internal_new_from_pointers" >> supp.txt 178 | echo "leak:pyo3::types::function::PyCFunction::internal_new::" >> supp.txt 179 | # hack to prevent external libs from dlclosing libraries, 180 | # which otherwise results in LSAN leaks that cannot be suppressed 181 | # https://github.com/google/sanitizers/issues/89#issuecomment-406316683 182 | echo "#include " > dlclose.c 183 | echo "int dlclose(void *handle) { return 0; }" >> dlclose.c 184 | clang -shared dlclose.c -o libdlclose.so 185 | LSAN_OPTIONS=suppressions="$(pwd)/supp.txt" PYTHONPATH=src LD_PRELOAD="$(gcc -print-file-name=libasan.so) $(gcc -print-file-name=libstdc++.so) $(pwd)/libdlclose.so" PYTHONMALLOC=malloc pytest -s 186 | -------------------------------------------------------------------------------- /.github/workflows/pypi.yml: -------------------------------------------------------------------------------- 1 | name: Build Wheels + PyPI deploy 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | deploy_to_testpypi: 7 | description: "Whether the build should be deployed to test.pypi.org" 8 | required: true 9 | default: "false" 10 | deploy_to_pypi: 11 | description: "Whether the build should be deployed to pypi.org" 12 | required: true 13 | default: "true" 14 | 15 | jobs: 16 | build-wheels: 17 | name: Build wheels on ${{ matrix.os }} 18 | runs-on: ${{ matrix.os }} 19 | 20 | strategy: 21 | matrix: 22 | include: 23 | - os: ubuntu-24.04 24 | - os: windows-2022 25 | - os: macos-13 26 | target: "13.0" 27 | - os: macos-14 28 | target: "14.0" 29 | 30 | steps: 31 | - uses: actions/checkout@v4 32 | with: 33 | submodules: 'recursive' 34 | 35 | - name: Build wheels 36 | uses: pypa/cibuildwheel@v2.23 37 | env: 38 | MACOSX_DEPLOYMENT_TARGET: ${{ matrix.target }} 39 | 40 | - uses: actions/upload-artifact@v4 41 | with: 42 | name: wheels-${{ matrix.os }} 43 | path: ./wheelhouse/*.whl 44 | 45 | build-sdist: 46 | name: Build source distribution 47 | runs-on: ubuntu-24.04 48 | 49 | steps: 50 | - uses: actions/checkout@v4 51 | with: 52 | submodules: 'recursive' 53 | 54 | - name: Build SDist 55 | run: pipx run build --sdist 56 | 57 | - uses: actions/upload-artifact@v4 58 | with: 59 | name: sdist 60 | path: 
dist/*.tar.gz 61 | 62 | test-sdist: 63 | name: Test source distribution on ${{ matrix.os }} 64 | runs-on: ${{ matrix.os }} 65 | needs: [build-sdist] 66 | strategy: 67 | matrix: 68 | os: 69 | - ubuntu-24.04 70 | - windows-2022 71 | - macos-14 72 | 73 | steps: 74 | - uses: actions/checkout@v4 75 | with: 76 | submodules: 'recursive' 77 | 78 | - uses: actions/setup-python@v5 79 | name: Install Python 80 | with: 81 | python-version: '3.9' 82 | 83 | - uses: actions/download-artifact@v4 84 | with: 85 | name: sdist 86 | path: dist 87 | 88 | - name: Install OpenMP 89 | if: runner.os == 'macOS' 90 | run: brew install libomp 91 | 92 | - name: Install from SDist 93 | shell: bash 94 | run: 95 | python -m pip install dist/*.tar.gz 96 | 97 | - name: Install test requirements 98 | run: 99 | python -m pip install -r requirements-dev.txt 100 | 101 | - name: Run test suite 102 | run: 103 | python -m pytest 104 | 105 | upload_testpypi: 106 | needs: [build-sdist, test-sdist, build-wheels] 107 | runs-on: ubuntu-24.04 108 | permissions: 109 | id-token: write 110 | 111 | steps: 112 | - uses: actions/download-artifact@v4 113 | with: 114 | name: sdist 115 | merge-multiple: true 116 | path: dist 117 | 118 | - uses: actions/download-artifact@v4 119 | with: 120 | pattern: wheels-* 121 | merge-multiple: true 122 | path: dist 123 | 124 | - uses: pypa/gh-action-pypi-publish@release/v1 125 | if: github.event.inputs.deploy_to_testpypi == 'true' 126 | with: 127 | repository_url: https://test.pypi.org/legacy/ 128 | 129 | upload_pypi: 130 | needs: [build-sdist, build-wheels, upload_testpypi] 131 | runs-on: ubuntu-24.04 132 | permissions: 133 | id-token: write 134 | 135 | steps: 136 | - uses: actions/download-artifact@v4 137 | with: 138 | name: sdist 139 | merge-multiple: true 140 | path: dist 141 | 142 | - uses: actions/download-artifact@v4 143 | with: 144 | pattern: wheels-* 145 | merge-multiple: true 146 | path: dist 147 | 148 | - uses: pypa/gh-action-pypi-publish@release/v1 149 | if: github.event.inputs.deploy_to_pypi == 'true' 150 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore our test directory, it is automatically populated 2 | tests/data 3 | 4 | # scikit-build editable directory 5 | _skbuild 6 | 7 | # VSCode Configuration 8 | .vscode 9 | 10 | # Prerequisites 11 | *.d 12 | 13 | # Compiled Object files 14 | *.slo 15 | *.lo 16 | *.o 17 | *.obj 18 | 19 | # Precompiled Headers 20 | *.gch 21 | *.pch 22 | 23 | # Compiled Dynamic libraries 24 | *.so 25 | *.dylib 26 | *.dll 27 | 28 | # Fortran module files 29 | *.mod 30 | *.smod 31 | 32 | # Compiled Static libraries 33 | *.lai 34 | *.la 35 | *.a 36 | *.lib 37 | 38 | # Executables 39 | *.exe 40 | *.out 41 | *.app 42 | 43 | # Byte-compiled / optimized / DLL files 44 | __pycache__/ 45 | *.py[cod] 46 | *$py.class 47 | 48 | # C extensions 49 | *.so 50 | 51 | # Distribution / packaging 52 | .Python 53 | build/ 54 | develop-eggs/ 55 | dist/ 56 | downloads/ 57 | eggs/ 58 | .eggs/ 59 | lib64/ 60 | parts/ 61 | sdist/ 62 | var/ 63 | wheels/ 64 | share/python-wheels/ 65 | *.egg-info/ 66 | .installed.cfg 67 | *.egg 68 | MANIFEST 69 | 70 | # PyInstaller 71 | # Usually these files are written by a python script from a template 72 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
73 | *.manifest 74 | *.spec 75 | 76 | # Installer logs 77 | pip-log.txt 78 | pip-delete-this-directory.txt 79 | 80 | # Unit test / coverage reports 81 | htmlcov/ 82 | .tox/ 83 | .nox/ 84 | .coverage 85 | .coverage.* 86 | .cache 87 | nosetests.xml 88 | coverage.xml 89 | *.cover 90 | *.py,cover 91 | .hypothesis/ 92 | .pytest_cache/ 93 | cover/ 94 | 95 | # Translations 96 | *.mo 97 | *.pot 98 | 99 | # Django stuff: 100 | *.log 101 | local_settings.py 102 | db.sqlite3 103 | db.sqlite3-journal 104 | 105 | # Flask stuff: 106 | instance/ 107 | .webassets-cache 108 | 109 | # Scrapy stuff: 110 | .scrapy 111 | 112 | # Sphinx documentation 113 | docs/_build/ 114 | 115 | # PyBuilder 116 | .pybuilder/ 117 | target/ 118 | 119 | # Jupyter Notebook 120 | .ipynb_checkpoints 121 | 122 | # IPython 123 | profile_default/ 124 | ipython_config.py 125 | 126 | # pyenv 127 | # For a library or package, you might want to ignore these files since the code is 128 | # intended to run in multiple environments; otherwise, check them in: 129 | # .python-version 130 | 131 | # pipenv 132 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 133 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 134 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 135 | # install all needed dependencies. 136 | #Pipfile.lock 137 | 138 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 139 | __pypackages__/ 140 | 141 | # Celery stuff 142 | celerybeat-schedule 143 | celerybeat.pid 144 | 145 | # SageMath parsed files 146 | *.sage.py 147 | 148 | # Environments 149 | .env 150 | .venv 151 | env/ 152 | venv/ 153 | ENV/ 154 | env.bak/ 155 | venv.bak/ 156 | 157 | # Spyder project settings 158 | .spyderproject 159 | .spyproject 160 | 161 | # Rope project settings 162 | .ropeproject 163 | 164 | # mkdocs documentation 165 | /site 166 | 167 | # mypy 168 | .mypy_cache/ 169 | .dmypy.json 170 | dmypy.json 171 | 172 | # Pyre type checker 173 | .pyre/ 174 | 175 | # pytype static type analyzer 176 | .pytype/ 177 | 178 | # Cython debug symbols 179 | cython_debug/ 180 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "ext/Catch2"] 2 | path = ext/Catch2 3 | url = https://github.com/catchorg/Catch2.git 4 | [submodule "ext/eigen"] 5 | path = ext/eigen 6 | url = https://gitlab.com/libeigen/eigen.git 7 | [submodule "ext/nanoflann"] 8 | path = ext/nanoflann 9 | url = https://github.com/jlblancoc/nanoflann.git 10 | [submodule "ext/benchmark"] 11 | path = ext/benchmark 12 | url = https://github.com/google/benchmark 13 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | # Make sure that Jupyter notebooks under version control 3 | # have their outputs stripped before committing 4 | - repo: https://github.com/kynan/nbstripout 5 | rev: 0.8.1 6 | hooks: 7 | - id: nbstripout 8 | files: ".ipynb" 9 | 10 | # Run Black - the uncompromising Python code formatter 11 | - repo: https://github.com/psf/black 12 | rev: 25.1.0 13 | hooks: 14 | - id: black-jupyter 15 | 16 | # Format C++ code with Clang-Format - automatically applying the changes 17 | - repo: https://github.com/ssciwr/clang-format-precommit 18 | rev: v16.0.2 19 | hooks: 20 | - id: clang-format 21 
| args: 22 | - -i 23 | - --style=Mozilla 24 | exclude: ^ext/ 25 | 26 | # Add some general purpose useful hooks 27 | - repo: https://github.com/pre-commit/pre-commit-hooks 28 | rev: v5.0.0 29 | hooks: 30 | # Make sure that contained YAML files are well-formed 31 | - id: check-yaml 32 | # Trim trailing whitespace of all sorts 33 | - id: trailing-whitespace 34 | # Sort lines in requirements files 35 | - id: requirements-txt-fixer 36 | # Apply a file size limit of 500kB 37 | - id: check-added-large-files 38 | # Simple parser validation of e.g. pyproject.toml 39 | - id: check-toml 40 | # Unify file endings 41 | - id: end-of-file-fixer 42 | 43 | 44 | # CMake Formatting/Linting Utility 45 | - repo: https://github.com/cheshirekow/cmake-format-precommit 46 | rev: v0.6.13 47 | hooks: 48 | - id: cmake-format 49 | - id: cmake-lint 50 | args: 51 | # We *need* to set a variable with mixed casing - relax the linter to allow that. 52 | - --public-var-pattern 53 | - "[a-zA-Z][a-zA-Z0-9_]+" 54 | 55 | # Check Configuration as Code files for integrations 56 | - repo: https://github.com/python-jsonschema/check-jsonschema 57 | rev: 0.33.0 58 | hooks: 59 | - id: check-github-workflows 60 | - id: check-readthedocs 61 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | sphinx: 4 | builder: html 5 | configuration: doc/conf.py 6 | 7 | formats: all 8 | 9 | submodules: 10 | include: all 11 | recursive: true 12 | 13 | build: 14 | os: ubuntu-22.04 15 | tools: 16 | python: "3.10" 17 | 18 | python: 19 | install: 20 | - requirements: requirements-dev.txt 21 | - method: pip 22 | path: .[docs] 23 | -------------------------------------------------------------------------------- /CITATION.cff: -------------------------------------------------------------------------------- 1 | cff-version: 1.2.0 2 | message: "If you use this software, please cite it as below." 3 | authors: 4 | - family-names: py4dgeo Development Core Team 5 | title: "py4dgeo: library for change analysis in 4D point clouds." 
6 | version: 0.7.0
7 | date-released: 2025-02-18
8 | license: ["MIT"]
9 | repository-code: "https://github.com/3dgeo-heidelberg/py4dgeo"
10 |
--------------------------------------------------------------------------------
/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 3.30)
2 |
3 | # Set a name and a version number for your project:
4 | project(
5 |   py4dgeo
6 |   VERSION 0.0.1
7 |   LANGUAGES CXX)
8 |
9 | # Take into account <PackageName>_ROOT environment variable (used in packaging
10 | # process)
11 | cmake_policy(SET CMP0074 NEW)
12 |
13 | # Initialize some default paths
14 | include(GNUInstallDirs)
15 |
16 | # Define the minimum C++ standard that is required
17 | set(CMAKE_CXX_STANDARD 17)
18 | set(CMAKE_CXX_STANDARD_REQUIRED ON)
19 |
20 | # Enable PIC for Python bindings
21 | set(CMAKE_POSITION_INDEPENDENT_CODE ON)
22 |
23 | # Compilation options
24 | set(BUILD_PYTHON
25 |     ON
26 |     CACHE BOOL "Enable building of Python bindings")
27 | set(BUILD_DOCS
28 |     ON
29 |     CACHE BOOL "Enable building of documentation")
30 | set(BUILD_BENCHMARKS
31 |     OFF
32 |     CACHE BOOL "Enable building of benchmark applications")
33 | set(PY4DGEO_WITH_OPENMP
34 |     ON
35 |     CACHE BOOL "Enable OpenMP parallelization")
36 |
37 | # Check that the repository was cloned recursively
38 | if(NOT EXISTS ${PROJECT_SOURCE_DIR}/ext/Catch2/CMakeLists.txt)
39 |   message(
40 |     FATAL_ERROR
41 |       "Submodules not found. py4dgeo needs to be either cloned with the"
42 |       "'--recursive' flag or 'git submodule update --init' needs to be called")
43 | endif()
44 |
45 | # Compile the library
46 | add_library(
47 |   py4dgeo
48 |   lib/directions.cpp
49 |   lib/distances.cpp
50 |   lib/epoch.cpp
51 |   lib/kdtree.cpp
52 |   lib/octree.cpp
53 |   lib/registration.cpp
54 |   lib/segmentation.cpp
55 |   lib/searchtree.cpp)
56 | target_include_directories(
57 |   py4dgeo PUBLIC ${CMAKE_SOURCE_DIR}/include ${CMAKE_SOURCE_DIR}/ext/eigen
58 |                  ${CMAKE_SOURCE_DIR}/ext/nanoflann/include)
59 | if(PY4DGEO_WITH_OPENMP)
60 |   if(MSVC)
61 |     include(CheckCXXCompilerFlag)
62 |     check_cxx_compiler_flag("/openmp:llvm" MSVC_SUPPORTS_OPENMP_LLVM)
63 |     if(MSVC_SUPPORTS_OPENMP_LLVM)
64 |       set(OpenMP_RUNTIME_MSVC "llvm")
65 |     endif()
66 |   endif()
67 |
68 |   find_package(OpenMP)
69 |   if(OpenMP_FOUND)
70 |     target_link_libraries(py4dgeo PUBLIC OpenMP::OpenMP_CXX)
71 |     target_compile_definitions(py4dgeo PUBLIC PY4DGEO_WITH_OPENMP
72 |                                               EIGEN_DONT_PARALLELIZE)
73 |   endif()
74 | endif()
75 |
76 | # Compile the tests and benchmarks
77 | include(CTest)
78 | if(BUILD_TESTING)
79 |   add_subdirectory(ext/Catch2)
80 |   include(./ext/Catch2/contrib/Catch.cmake)
81 |   add_subdirectory(tests)
82 | endif()
83 |
84 | if(BUILD_BENCHMARKS)
85 |   set(BENCHMARK_ENABLE_GTEST_TESTS OFF)
86 |   set(BENCHMARK_ENABLE_TESTING OFF)
87 |   add_subdirectory(./ext/benchmark)
88 |   add_subdirectory(benchmarks)
89 | endif()
90 |
91 | # Add the documentation
92 | if(BUILD_DOCS)
93 |   add_subdirectory(doc)
94 | endif()
95 |
96 | if(BUILD_PYTHON)
97 |   # Add Python bindings
98 |   find_package(pybind11 CONFIG REQUIRED)
99 |   pybind11_add_module(_py4dgeo MODULE src/py4dgeo/py4dgeo_python.cpp)
100 |   target_link_libraries(_py4dgeo PUBLIC py4dgeo)
101 |
102 |   # Install the Python module library target
103 |   install(TARGETS _py4dgeo DESTINATION .)
104 | endif() 105 | 106 | # This prints a summary of found dependencies 107 | include(FeatureSummary) 108 | feature_summary(WHAT ALL) 109 | -------------------------------------------------------------------------------- /COPYING.md: -------------------------------------------------------------------------------- 1 | This is the list of copyright holders of py4dgeo. 2 | 3 | For information on the license, see LICENSE.md. 4 | 5 | * Dominic Kempf, 2020-2021 6 | 7 | The file `src/py4dgeo/zipfile.py` is licensed under Python Software Foundation License Version 2. 8 | Copyright (c) 2001-2022 Python Software Foundation; All Rights Reserved 9 | 10 | The file includes a change suggested for inclusion in CPython that adds a `remove` method to 11 | `zipfile.ZipFile` that allows deletion of files from archives opened in `append` mode. 12 | The original suggestion is found here: https://github.com/python/cpython/pull/19358 The 13 | original author, Yudi Levi, has agreed to distribution under the PSF license. 14 | 15 | The following data files included in the repository for testing purposes are under a different license and copyright: 16 | 17 | * `tests/data/ahk_2017_small.xyz` under CC-BY-SA 4.0. Citation: Pfeiffer, Jan; Höfle, Bernhard; Hämmerle, Martin; Zahs, Vivien; Rutzinger, Martin; Scaioni, Marco; Lindenbergh, Roderik; Oude Elberink, Sander; Pirotti, Francesco; Bremer, Magnus; Wujanz, Daniel; Zieher, Thomas (2019): Terrestrial laser scanning data of the Äußeres Hochebenkar rock glacier close to Obergurgl, Austria acquired during the Innsbruck Summer School of Alpine Research. Institute of Geography, University of Innsbruck, PANGAEA, https://doi.org/10.1594/PANGAEA.902042 18 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM jupyter/base-notebook:584f43f06586 2 | 3 | # Install dependencies from Conda 4 | RUN conda install -c conda-forge \ 5 | cmake \ 6 | gxx_linux-64 \ 7 | jupyter-resource-usage \ 8 | make && \ 9 | conda clean -a -q -y 10 | 11 | # Copy the repository into the container 12 | COPY --chown=${NB_UID} . /opt/py4dgeo 13 | 14 | # Build and install the project 15 | RUN conda run -n base python -m pip install /opt/py4dgeo 16 | 17 | # Copy all the notebook files into the home directory 18 | RUN rm -rf ${HOME}/work && \ 19 | cp /opt/py4dgeo/jupyter/* ${HOME} 20 | 21 | # Make JupyterLab the default for this application 22 | ENV JUPYTER_ENABLE_LAB=yes 23 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 2 | 3 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 4 | 5 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
6 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include CMakeLists.txt
2 | include COPYING.md
3 | include LICENSE.md
4 | include pyproject.toml
5 | include setup.py
6 |
7 | graft app
8 | graft ext
9 | graft include
10 | graft lib
11 | graft src
12 |
--------------------------------------------------------------------------------
/benchmarks/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | # Individual benchmark programs
2 | add_executable(changepoint_detection_b changepoint_detection_b.cpp)
3 | target_link_libraries(changepoint_detection_b PUBLIC benchmark::benchmark
4 |                                                      py4dgeo_test)
5 | add_test(NAME changepoint_detection_b COMMAND changepoint_detection_b)
6 |
7 | add_executable(cylindersearch_b cylindersearch_b.cpp)
8 | target_link_libraries(cylindersearch_b PUBLIC benchmark::benchmark py4dgeo_test)
9 | add_test(NAME cylindersearch_b COMMAND cylindersearch_b)
10 |
11 | add_executable(distances_b distances_b.cpp)
12 | target_link_libraries(distances_b PUBLIC benchmark::benchmark py4dgeo_test)
13 | add_test(NAME distances_b COMMAND distances_b)
14 |
15 | add_executable(scaling scaling.cpp)
16 | target_link_libraries(scaling PUBLIC benchmark::benchmark py4dgeo_test)
17 | add_test(NAME scaling COMMAND scaling)
18 |
19 | # Copy the scalability notebook into the build for ease of use
20 | file(COPY scalability.ipynb DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
21 |
22 | add_executable(multiscale_normal multiscale_normal.cpp)
23 | target_link_libraries(multiscale_normal PUBLIC benchmark::benchmark
24 |                                                py4dgeo_test)
25 | add_test(NAME multiscale_normal COMMAND multiscale_normal)
26 |
--------------------------------------------------------------------------------
/benchmarks/changepoint_detection_b.cpp:
--------------------------------------------------------------------------------
1 | #include "testsetup.hpp"
2 |
3 | #include <py4dgeo/segmentation.hpp>
4 |
5 | #include <benchmark/benchmark.h>
6 |
7 | using namespace py4dgeo;
8 |
9 | static void
10 | changepoint_detection_benchmark(benchmark::State& state)
11 | {
12 |   auto n = state.range(0);
13 |   EigenTimeSeries ts(n);
14 |   for (std::size_t i = n / 2; i < n; ++i)
15 |     ts[i] += 1.0;
16 |
17 |   ChangePointDetectionData data{ ts, 24, 12, 1, 1.0 };
18 |
19 |   for (auto _ : state) {
20 |     auto cp = change_point_detection(data);
21 |   }
22 | }
23 |
24 | BENCHMARK(changepoint_detection_benchmark)
25 |   ->Unit(benchmark::kMicrosecond)
26 |   ->RangeMultiplier(10)
27 |   ->Range(100, 100000);
28 | BENCHMARK_MAIN();
29 |
--------------------------------------------------------------------------------
/benchmarks/cylindersearch_b.cpp:
--------------------------------------------------------------------------------
1 | #include "testsetup.hpp"
2 |
3 | #include <py4dgeo/compute.hpp>
4 | #include <py4dgeo/epoch.hpp>
5 |
6 | #include <benchmark/benchmark.h>
7 |
8 | using namespace py4dgeo;
9 |
10 | static void
11 | cylindersearch_benchmark(benchmark::State& state)
12 | {
13 |   auto [cloud, corepoints] = ahk_benchcloud();
14 |   Epoch epoch(*cloud);
15 |   Epoch::set_default_radius_search_tree(SearchTree::KDTree);
16 |   epoch.kdtree.build_tree(10);
17 |
18 |   EigenNormalSet directions(1, 3);
19 |   directions << 0, 0, 1;
20 |
21 |   WorkingSetFinderParameters params{ epoch,
22 |                                      1.0,
23 |                                      corepoints->row(0),
24 |                                      directions,
25 |                                      static_cast<double>(state.range(0)) };
26 |
27 |   for (auto _ : state) {
28 |     auto points = cylinder_workingset_finder(params);
29 |   }
30 | }
31 |
32 | BENCHMARK(cylindersearch_benchmark)
33 |   ->Unit(benchmark::kMicrosecond)
34 |   ->DenseRange(2.0, 8.0, 1.0);
35 | BENCHMARK_MAIN();
36 |
--------------------------------------------------------------------------------
/benchmarks/distances_b.cpp:
--------------------------------------------------------------------------------
1 | #include "testsetup.hpp"
2 |
3 | #include <py4dgeo/compute.hpp>
4 | #include <py4dgeo/epoch.hpp>
5 |
6 | #include <benchmark/benchmark.h>
7 |
8 | using namespace py4dgeo;
9 |
10 | static void
11 | distances_benchmark(benchmark::State& state)
12 | {
13 |   auto [cloud, corepoints] = ahk_benchcloud();
14 |   Epoch epoch(*cloud);
15 |   Epoch::set_default_radius_search_tree(SearchTree::KDTree);
16 |   epoch.kdtree.build_tree(10);
17 |   std::vector<double> normal_radii{ 1.0 };
18 |   std::vector<double> used_radii;
19 |   EigenNormalSet directions(corepoints->rows(), 3);
20 |   EigenNormalSet orientation(1, 3);
21 |   orientation << 0, 0, 1;
22 |
23 |   // Precompute the multiscale directions
24 |   compute_multiscale_directions(
25 |     epoch, *corepoints, normal_radii, orientation, directions, used_radii);
26 |
27 |   // We try to test all callback combinations
28 |   auto wsfinder = radius_workingset_finder;
29 |   auto distancecalc = mean_stddev_distance;
30 |
31 |   for (auto _ : state) {
32 |     // Calculate the distances
33 |     DistanceVector distances;
34 |     UncertaintyVector uncertainties;
35 |
36 |     compute_distances(*corepoints,
37 |                       2.0,
38 |                       epoch,
39 |                       epoch,
40 |                       directions,
41 |                       0.0,
42 |                       0.0,
43 |                       distances,
44 |                       uncertainties,
45 |                       wsfinder,
46 |                       distancecalc);
47 |   }
48 | }
49 |
50 | BENCHMARK(distances_benchmark)->Unit(benchmark::kMillisecond);
51 | BENCHMARK_MAIN();
52 |
--------------------------------------------------------------------------------
/benchmarks/multiscale_normal.cpp:
--------------------------------------------------------------------------------
1 | #include "testsetup.hpp"
2 |
3 | #include <py4dgeo/compute.hpp>
4 | #include <py4dgeo/epoch.hpp>
5 | #include <py4dgeo/searchtree.hpp>
6 |
7 | #include <benchmark/benchmark.h>
8 |
9 | using namespace py4dgeo;
10 |
11 | static void
12 | multiscale_normal_benchmark_kdtree(benchmark::State& state)
13 | {
14 |   auto [cloud, corepoints] = ahk_benchcloud();
15 |   Epoch epoch(*cloud);
16 |   Epoch::set_default_radius_search_tree(SearchTree::KDTree);
17 |   epoch.kdtree.build_tree(10);
18 |   std::vector<double> normal_radii{ 0.1, 0.5, 1.0, 2.0, 5.0 };
19 |   std::vector<double> used_radii;
20 |   EigenNormalSet directions(corepoints->rows(), 3);
21 |   EigenNormalSet orientation(1, 3);
22 |   orientation << 0, 0, 1;
23 |
24 |   for (auto _ : state) {
25 |     // Precompute the multiscale directions
26 |     compute_multiscale_directions(
27 |       epoch, *corepoints, normal_radii, orientation, directions, used_radii);
28 |   }
29 | }
30 |
31 | static void
32 | multiscale_normal_benchmark_octree(benchmark::State& state)
33 | {
34 |   auto [cloud, corepoints] = ahk_benchcloud();
35 |   Epoch epoch(*cloud);
36 |   Epoch::set_default_radius_search_tree(SearchTree::Octree);
37 |   epoch.octree.build_tree();
38 |   std::vector<double> radii{ 0.1, 0.5, 1.0, 2.0, 5.0 };
39 |   std::vector<double> used_radii;
40 |   EigenNormalSet directions(corepoints->rows(), 3);
41 |   EigenNormalSet orientation(1, 3);
42 |   orientation << 0, 0, 1;
43 |
44 |   for (auto _ : state) {
45 |     compute_multiscale_directions(
46 |       epoch, *corepoints, radii, orientation, directions, used_radii);
47 |   }
48 | }
49 |
50 |
BENCHMARK(multiscale_normal_benchmark_kdtree)->Unit(benchmark::kMicrosecond); 51 | BENCHMARK(multiscale_normal_benchmark_octree)->Unit(benchmark::kMicrosecond); 52 | BENCHMARK_MAIN(); 53 | -------------------------------------------------------------------------------- /benchmarks/scalability.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "0", 6 | "metadata": {}, 7 | "source": [ 8 | "# Scalability Analysis" 9 | ] 10 | }, 11 | { 12 | "cell_type": "markdown", 13 | "id": "1", 14 | "metadata": {}, 15 | "source": [ 16 | "Parameters to this notebook that you might want to tweak:" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": null, 22 | "id": "2", 23 | "metadata": {}, 24 | "outputs": [], 25 | "source": [ 26 | "benchmark_name = \"scalability_benchmark\" # The name of the benchmark as defined with Google Benchmark\n", 27 | "output_filename = \"scaling.png\" # The image name to save the result to\n", 28 | "benchmark_program = \"./bench\" # The path to the compiled benchmark program\n", 29 | "hyperthreading = True # Whether hyperthreading is enabled on the machine (will halve the number of threads)" 30 | ] 31 | }, 32 | { 33 | "cell_type": "code", 34 | "execution_count": null, 35 | "id": "3", 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [ 39 | "import json\n", 40 | "import matplotlib.pyplot as plt\n", 41 | "import os\n", 42 | "import pandas\n", 43 | "import subprocess" 44 | ] 45 | }, 46 | { 47 | "cell_type": "markdown", 48 | "id": "4", 49 | "metadata": {}, 50 | "source": [ 51 | "Create the environment for our benchmark run:" 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": null, 57 | "id": "5", 58 | "metadata": {}, 59 | "outputs": [], 60 | "source": [ 61 | "env = os.environ.copy()\n", 62 | "count = os.cpu_count()\n", 63 | "if hyperthreading:\n", 64 | " count = count // 2\n", 65 | "env[\"OMP_NUM_THREADS\"] = str(count)\n", 66 | "env.setdefault(\"OMP_PROC_BIND\", \"spread\")" 67 | ] 68 | }, 69 | { 70 | "cell_type": "markdown", 71 | "id": "6", 72 | "metadata": {}, 73 | "source": [ 74 | "Run the actual benchmark and load the generated data into a JSON data structure:" 75 | ] 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": null, 80 | "id": "7", 81 | "metadata": {}, 82 | "outputs": [], 83 | "source": [ 84 | "process = subprocess.run(\n", 85 | " f\"{benchmark_program} --benchmark_filter={benchmark_name}/* --benchmark_format=json\".split(),\n", 86 | " env=env,\n", 87 | " stdout=subprocess.PIPE,\n", 88 | ")" 89 | ] 90 | }, 91 | { 92 | "cell_type": "code", 93 | "execution_count": null, 94 | "id": "8", 95 | "metadata": {}, 96 | "outputs": [], 97 | "source": [ 98 | "data = json.loads(process.stdout.decode())" 99 | ] 100 | }, 101 | { 102 | "cell_type": "markdown", 103 | "id": "9", 104 | "metadata": {}, 105 | "source": [ 106 | "Parse the scalability data into a pandas dataframe:" 107 | ] 108 | }, 109 | { 110 | "cell_type": "code", 111 | "execution_count": null, 112 | "id": "10", 113 | "metadata": {}, 114 | "outputs": [], 115 | "source": [ 116 | "df = pandas.read_json(json.dumps(data[\"benchmarks\"]))" 117 | ] 118 | }, 119 | { 120 | "cell_type": "markdown", 121 | "id": "11", 122 | "metadata": {}, 123 | "source": [ 124 | "Do some processing that adds the relevant columns:" 125 | ] 126 | }, 127 | { 128 | "cell_type": "code", 129 | "execution_count": null, 130 | "id": "12", 131 | "metadata": {}, 132 | "outputs": [], 133 | "source": [ 134 
| "df = df[df.run_type == \"iteration\"]\n", 135 | "df[\"num_threads\"] = df[\"per_family_instance_index\"] + 1\n", 136 | "tseq = df.loc[lambda df: df[\"num_threads\"] == 1][\"cpu_time\"][0]\n", 137 | "df[\"speedup\"] = tseq / df[\"cpu_time\"]" 138 | ] 139 | }, 140 | { 141 | "cell_type": "markdown", 142 | "id": "13", 143 | "metadata": {}, 144 | "source": [ 145 | "Plot in Jupyter notebook:" 146 | ] 147 | }, 148 | { 149 | "cell_type": "code", 150 | "execution_count": null, 151 | "id": "14", 152 | "metadata": {}, 153 | "outputs": [], 154 | "source": [ 155 | "fig, ax = plt.subplots()\n", 156 | "ax.plot(df[\"num_threads\"], df[\"num_threads\"], linestyle=\"--\", label=\"Perfect Speedup\")\n", 157 | "df.plot(\"num_threads\", \"speedup\", ax=ax, label=\"Measured Speedup\")\n", 158 | "ax = ax.legend()" 159 | ] 160 | }, 161 | { 162 | "cell_type": "markdown", 163 | "id": "15", 164 | "metadata": {}, 165 | "source": [ 166 | "Additionally, save to an image file:" 167 | ] 168 | }, 169 | { 170 | "cell_type": "code", 171 | "execution_count": null, 172 | "id": "16", 173 | "metadata": {}, 174 | "outputs": [], 175 | "source": [ 176 | "fig.savefig(output_filename)" 177 | ] 178 | } 179 | ], 180 | "metadata": { 181 | "kernelspec": { 182 | "display_name": "Python 3 (ipykernel)", 183 | "language": "python", 184 | "name": "python3" 185 | }, 186 | "language_info": { 187 | "codemirror_mode": { 188 | "name": "ipython", 189 | "version": 3 190 | }, 191 | "file_extension": ".py", 192 | "mimetype": "text/x-python", 193 | "name": "python", 194 | "nbconvert_exporter": "python", 195 | "pygments_lexer": "ipython3", 196 | "version": "3.9.7" 197 | } 198 | }, 199 | "nbformat": 4, 200 | "nbformat_minor": 5 201 | } 202 | -------------------------------------------------------------------------------- /benchmarks/scaling.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #ifdef PY4DGEO_WITH_OPENMP 4 | 5 | #include "testsetup.hpp" 6 | 7 | #include 8 | 9 | #include 10 | #include 11 | 12 | using namespace py4dgeo; 13 | 14 | static void 15 | scalability_benchmark(benchmark::State& state) 16 | { 17 | auto [cloud, corepoints] = ahk_benchcloud(); 18 | Epoch epoch(*cloud); 19 | Epoch::set_default_radius_search_tree(SearchTree::KDTree); 20 | epoch.kdtree.build_tree(10); 21 | 22 | for (auto _ : state) { 23 | // Set the number of threads according to benchmark state 24 | omp_set_num_threads(state.range(0)); 25 | 26 | std::vector normal_radii{ 1.0 }; 27 | EigenNormalSet directions(corepoints->rows(), 3); 28 | std::vector used_radii; 29 | EigenNormalSet orientation(1, 3); 30 | orientation << 0, 0, 1; 31 | 32 | // Precompute the multiscale directions 33 | compute_multiscale_directions( 34 | epoch, *corepoints, normal_radii, orientation, directions, used_radii); 35 | 36 | // We try to test all callback combinations 37 | auto wsfinder = radius_workingset_finder; 38 | auto distancecalc = mean_stddev_distance; 39 | 40 | // Calculate the distances 41 | DistanceVector distances; 42 | UncertaintyVector uncertainties; 43 | 44 | compute_distances(*corepoints, 45 | 2.0, 46 | epoch, 47 | epoch, 48 | directions, 49 | 0.0, 50 | 0.0, 51 | distances, 52 | uncertainties, 53 | wsfinder, 54 | distancecalc); 55 | } 56 | state.SetComplexityN(state.range(0)); 57 | } 58 | 59 | BENCHMARK(scalability_benchmark) 60 | ->Unit(benchmark::kMillisecond) 61 | ->DenseRange(1, omp_get_max_threads(), 1) 62 | ->Complexity(); 63 | 64 | #endif // PY4DGEO_WITH_OPENMP 65 | 66 | BENCHMARK_MAIN(); 67 | 
--------------------------------------------------------------------------------
/codecov.yml:
--------------------------------------------------------------------------------
1 | codecov:
2 |   require_ci_to_pass: true
3 |
4 | coverage:
5 |   status:
6 |     project:
7 |       default:
8 |         target: 80
9 |     patch:
10 |       default:
11 |         target: 80
12 |
13 | parsers:
14 |   gcov:
15 |     branch_detection:
16 |       conditional: yes
17 |       loop: yes
18 |       method: no
19 |       macro: no
20 |
21 | comment:
22 |   layout: "reach,diff,flags,files,footer"
23 |   behavior: default
24 |   require_changes: false
25 |
26 | ignore:
27 |   - "**/tests"
28 |   - "**/Catch2"
29 |   - "**/eigen"
30 |   - "**/nanoflann"
31 |
32 | fixes:
33 |   - "py4dgeo/::"
34 |
--------------------------------------------------------------------------------
/doc/4dobc-analysis.nblink:
--------------------------------------------------------------------------------
1 | {
2 |   "path": "../jupyter/4dobc-analysis.ipynb"
3 | }
4 |
--------------------------------------------------------------------------------
/doc/4dobc-creation.nblink:
--------------------------------------------------------------------------------
1 | {
2 |   "path": "../jupyter/4dobc-creation.ipynb"
3 | }
4 |
--------------------------------------------------------------------------------
/doc/4dobc-customization.nblink:
--------------------------------------------------------------------------------
1 | {
2 |   "path": "../jupyter/4dobc-customization.ipynb"
3 | }
4 |
--------------------------------------------------------------------------------
/doc/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | find_package(Doxygen REQUIRED)
2 |
3 | set(DOXYGEN_EXCLUDE_PATTERNS "${CMAKE_SOURCE_DIR}/ext/*")
4 | set(DOXYGEN_SHORT_NAMES YES)
5 | set(DOXYGEN_GENERATE_XML YES)
6 |
7 | doxygen_add_docs(
8 |   doxygen ${CMAKE_SOURCE_DIR}/include ${CMAKE_SOURCE_DIR}/lib
9 |   WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
10 |   COMMENT Building doxygen documentation...)
11 |
12 | add_custom_target(
13 |   sphinx-doc
14 |   COMMAND
15 |     sphinx-build -b html
16 |     -Dbreathe_projects.py4dgeo="${CMAKE_CURRENT_BINARY_DIR}/xml" -c
17 |     ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}
18 |     ${CMAKE_CURRENT_BINARY_DIR}/sphinx
19 |   WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
20 |   COMMENT "Generating documentation with Sphinx...")
21 |
22 | add_dependencies(sphinx-doc doxygen)
23 |
--------------------------------------------------------------------------------
/doc/basic.rst:
--------------------------------------------------------------------------------
1 | Basic usage tutorials
2 | =====================
3 |
4 | In the following, you find a number of tutorials that demonstrate the basic capabilities of :code:`py4dgeo`.
5 |
6 | .. toctree::
7 |
8 |    m3c2
9 |    customization
10 |    registration
11 |    m3c2ep
12 |    4dobc-creation
13 |    4dobc-analysis
14 |    4dobc-customization
15 |    pbm3c2
16 |    pbm3c2-segmented
17 |    pbm3c2-tools
18 |    pbm3c2-longterm
19 |
--------------------------------------------------------------------------------
/doc/callbacks.rst:
--------------------------------------------------------------------------------
1 | Callback reference
2 | ==================
3 |
4 | As described in the customization tutorial, :code:`py4dgeo` uses a callback software architecture
5 | that allows components of the core algorithm to be exchanged flexibly while maintaining its performance.
6 | Callbacks can be implemented in Python (for rapid prototyping) or C++ (for performance). In this
7 | section, we summarize the available types of callbacks and their implementations.
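For illustration, a minimal sketch of swapping in a Python callback by subclassing the Python
fallback implementation documented below. The epochs, core points and radii are placeholder
values that would be prepared as in the M3C2 tutorial, and the override follows the pattern
shown in the customization tutorial:

.. code-block:: python

    from py4dgeo.fallback import PythonFallbackM3C2, median_iqr_distance

    class MedianM3C2(PythonFallbackM3C2):
        def callback_distance_calculation(self):
            # Return the median/IQR measure instead of the mean/stddev default
            return median_iqr_distance

    # epoch1, epoch2 and corepoints are assumed to be prepared beforehand,
    # e.g. via py4dgeo.read_from_xyz
    m3c2 = MedianM3C2(
        epochs=(epoch1, epoch2),
        corepoints=corepoints,
        cyl_radii=(2.0,),
        normal_radii=(1.0, 2.0),
    )
    distances, uncertainties = m3c2.run()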
8 |
9 | Distance + Uncertainty Calculation
10 | ----------------------------------
11 |
12 | This callback is responsible for calculating the distance measure and its uncertainty at one core point.
13 | The C++ signature for this callback is the following:
14 |
15 | .. doxygentypedef:: py4dgeo::DistanceUncertaintyCalculationCallback
16 |
17 | The default implementation calculates the mean and standard deviation:
18 |
19 | .. doxygenfunction:: py4dgeo::mean_stddev_distance
20 | .. autofunction:: py4dgeo.fallback.mean_stddev_distance
21 |
22 | Alternatively, a distance measure based on the median and interquartile range is available:
23 |
24 | .. doxygenfunction:: py4dgeo::median_iqr_distance
25 | .. autofunction:: py4dgeo.fallback.median_iqr_distance
26 |
27 | Working Set Finder
28 | ------------------
29 |
30 | This callback determines which points of a given epoch, located around a given core point,
31 | should be taken into consideration by the M3C2 algorithm.
32 | The C++ signature is the following:
33 |
34 | .. doxygentypedef:: py4dgeo::WorkingSetFinderCallback
35 |
36 | The available implementations perform a radius and a cylinder search:
37 |
38 | .. doxygenfunction:: py4dgeo::radius_workingset_finder
39 | .. autofunction:: py4dgeo.fallback.radius_workingset_finder
40 |
41 | .. doxygenfunction:: py4dgeo::cylinder_workingset_finder
42 | .. autofunction:: py4dgeo.fallback.cylinder_workingset_finder
43 |
--------------------------------------------------------------------------------
/doc/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | import os
8 | import py4dgeo
9 | import subprocess
10 | import sys
11 |
12 | # -- Path setup --------------------------------------------------------------
13 |
14 | # If extensions (or modules to document with autodoc) are in another directory,
15 | # add these directories to sys.path here. If the directory is relative to the
16 | # documentation root, use os.path.abspath to make it absolute, like shown here.
17 | sys.path.insert(0, os.path.abspath("../../src"))
18 |
19 | # We need to be able to locate data files to include Jupyter notebooks
20 | os.environ["XDG_DATA_DIRS"] = os.path.abspath("../tests/data")
21 |
22 | # -- Project information -----------------------------------------------------
23 |
24 | project = "py4dgeo"
25 | copyright = "2021, Scientific Software Center, Heidelberg University"
26 | author = "Dominic Kempf"
27 | release = py4dgeo.__version__
28 |
29 | # -- General configuration ---------------------------------------------------
30 |
31 | # Add any Sphinx extension module names here, as strings. They can be
32 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
33 | # ones.
34 | extensions = [
35 |     "sphinx.ext.autodoc",
36 |     "breathe",
37 |     "nbsphinx",
38 |     "nbsphinx_link",
39 |     "sphinx_mdinclude",
40 |     "sphinx_rtd_theme",
41 | ]
42 |
43 | # Add any paths that contain templates here, relative to this directory.
44 | templates_path = []
45 |
46 | # List of patterns, relative to source directory, that match files and
47 | # directories to ignore when looking for source files.
48 | # This pattern also affects html_static_path and html_extra_path.
49 | exclude_patterns = []
50 |
51 | # -- Options for HTML output -------------------------------------------------
52 |
53 | # The theme to use for HTML and HTML Help pages. See the documentation for
54 | # a list of builtin themes.
55 | #
56 | html_theme = "sphinx_rtd_theme"
57 |
58 | # Add any paths that contain custom static files (such as style sheets) here,
59 | # relative to this directory. They are copied after the builtin static files,
60 | # so a file named "default.css" will overwrite the builtin "default.css".
61 | html_static_path = []
62 |
63 | # Breathe Configuration: Breathe is the bridge between the information extracted
64 | # from the C++ sources by Doxygen and Sphinx.
65 | breathe_projects = {}
66 | breathe_default_project = "py4dgeo"
67 |
68 | # Implement the Doxygen generation logic on RTD servers
69 | if os.environ.get("READTHEDOCS", "False") == "True":
70 |     cwd = os.getcwd()
71 |     os.makedirs("build-cmake", exist_ok=True)
72 |     builddir = os.path.join(cwd, "build-cmake")
73 |     subprocess.check_call(
74 |         "cmake -DBUILD_DOCS=ON -DBUILD_TESTING=OFF -DBUILD_PYTHON=OFF ../..".split(),
75 |         cwd=builddir,
76 |     )
77 |     subprocess.check_call("cmake --build . --target doxygen".split(), cwd=builddir)
78 |     breathe_projects["py4dgeo"] = os.path.join(builddir, "doc", "xml")
79 |
--------------------------------------------------------------------------------
/doc/cppapi.rst:
--------------------------------------------------------------------------------
1 | C++ API reference
2 | =================
3 |
4 | This is the complete reference for the (internal) C++ API of :code:`py4dgeo`.
5 | You only need to consult this documentation if you are aiming to extend the
6 | C++ core of :code:`py4dgeo`.
7 |
8 | .. doxygenfile:: py4dgeo/py4dgeo.hpp
9 |
10 | .. doxygenfile:: py4dgeo/epoch.hpp
11 |
12 | .. doxygenfile:: py4dgeo/kdtree.hpp
13 |
14 | .. doxygenfunction:: py4dgeo::compute_distances
15 |
16 | .. doxygenfunction:: py4dgeo::compute_multiscale_directions
17 |
18 | .. doxygenfile:: py4dgeo/segmentation.hpp
19 |
20 | .. doxygenfile:: py4dgeo/openmp.hpp
21 |
--------------------------------------------------------------------------------
/doc/customization.nblink:
--------------------------------------------------------------------------------
1 | {
2 |   "path": "../jupyter/customization.ipynb"
3 | }
4 |
--------------------------------------------------------------------------------
/doc/faq.rst:
--------------------------------------------------------------------------------
1 | Frequently Asked Questions
2 | ==========================
3 |
4 | py4dgeo requires more RAM than e.g. CloudCompare / I run out of memory processing a point cloud that CloudCompare can process on the same computer
5 | ---------------------------------------------------------------------------------------------------------------------------------------------------
6 |
7 | This might be related to a deliberate choice in the design of :code:`py4dgeo`:
8 | Point clouds are stored as double precision values (:code:`np.float64` or C++'s :code:`double`),
9 | while many other software packages that process point clouds (e.g. CloudCompare) go
10 | for a mixed precision approach, where the point cloud is stored in single precision
11 | and computational results (e.g. M3C2 distances) are represented in double precision.
12 | In order to store point clouds in single precision without significant loss of precision,
13 | they need to be correctly shifted close to the origin of the space, and the resulting
14 | offset needs to be taken into account in many computations. Since :code:`py4dgeo`
15 | is an open system in which users develop their own algorithms, we felt that the risk
16 | of errors related to this transformation process was quite high and therefore decided
17 | to choose the very robust (but memory-inefficient) method of storing point clouds in
18 | global coordinates as double precision values. Apart from this choice,
19 | the design of the :code:`py4dgeo` library aims for maximum RAM efficiency.
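To make the memory implication concrete, a small back-of-the-envelope sketch of the footprint
of the raw coordinates alone (the point count is purely illustrative):

.. code-block:: python

    import numpy as np

    n_points = 50_000_000  # illustrative: a 50 million point cloud
    # Each point has 3 coordinates; compare 8-byte vs. 4-byte floats
    double_gib = n_points * 3 * np.dtype(np.float64).itemsize / 1024**3
    single_gib = n_points * 3 * np.dtype(np.float32).itemsize / 1024**3
    print(f"double precision (py4dgeo): {double_gib:.2f} GiB")                # ~1.12 GiB
    print(f"single precision (mixed-precision tools): {single_gib:.2f} GiB")  # ~0.56 GiB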
20 |
--------------------------------------------------------------------------------
/doc/img/data_vos_2022_kijkduin.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/3dgeo-heidelberg/py4dgeo/48fb7520066b08f84a393594f2c075ba212a8769/doc/img/data_vos_2022_kijkduin.png
--------------------------------------------------------------------------------
/doc/img/data_zahs_2022_ahk_2019_tls.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/3dgeo-heidelberg/py4dgeo/48fb7520066b08f84a393594f2c075ba212a8769/doc/img/data_zahs_2022_ahk_2019_tls.png
--------------------------------------------------------------------------------
/doc/img/thumb_youtube_anders_isprs2021.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/3dgeo-heidelberg/py4dgeo/48fb7520066b08f84a393594f2c075ba212a8769/doc/img/thumb_youtube_anders_isprs2021.png
--------------------------------------------------------------------------------
/doc/img/thumb_youtube_auto3dscapes.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/3dgeo-heidelberg/py4dgeo/48fb7520066b08f84a393594f2c075ba212a8769/doc/img/thumb_youtube_auto3dscapes.png
--------------------------------------------------------------------------------
/doc/img/thumb_youtube_zahs_isprs2022.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/3dgeo-heidelberg/py4dgeo/48fb7520066b08f84a393594f2c075ba212a8769/doc/img/thumb_youtube_zahs_isprs2022.png
--------------------------------------------------------------------------------
/doc/index.rst:
--------------------------------------------------------------------------------
1 | .. toctree::
2 |    :maxdepth: 2
3 |    :caption: Contents:
4 |
5 |    intro
6 |    basic
7 |    pythonapi
8 |    cppapi
9 |    callbacks
10 |    faq
11 |
--------------------------------------------------------------------------------
/doc/intro.rst:
--------------------------------------------------------------------------------
mdinclude:: ../README.md 2 | -------------------------------------------------------------------------------- /doc/m3c2.nblink: -------------------------------------------------------------------------------- 1 | { 2 | "path": "../jupyter/m3c2.ipynb" 3 | } 4 | -------------------------------------------------------------------------------- /doc/m3c2ep.nblink: -------------------------------------------------------------------------------- 1 | { 2 | "path": "../jupyter/m3c2ep.ipynb" 3 | } 4 | -------------------------------------------------------------------------------- /doc/pbm3c2-longterm.nblink: -------------------------------------------------------------------------------- 1 | { 2 | "path": "../jupyter/pbm3c2-longterm.ipynb" 3 | } 4 | -------------------------------------------------------------------------------- /doc/pbm3c2-segmented.nblink: -------------------------------------------------------------------------------- 1 | { 2 | "path": "../jupyter/pbm3c2-segmented.ipynb" 3 | } 4 | -------------------------------------------------------------------------------- /doc/pbm3c2-tools.nblink: -------------------------------------------------------------------------------- 1 | { 2 | "path": "../jupyter/pbm3c2-tools.ipynb" 3 | } 4 | -------------------------------------------------------------------------------- /doc/pbm3c2.nblink: -------------------------------------------------------------------------------- 1 | { 2 | "path": "../jupyter/pbm3c2.ipynb" 3 | } 4 | -------------------------------------------------------------------------------- /doc/pythonapi.rst: -------------------------------------------------------------------------------- 1 | Python API reference 2 | ==================== 3 | 4 | User API reference 5 | ------------------ 6 | 7 | This is the complete reference of the Python API for the :code:`py4dgeo` package. 8 | It focuses on those aspects relevant to end users that are not interested in algorithm development. 9 | 10 | .. autoclass:: py4dgeo.Epoch 11 | :members: 12 | 13 | .. autofunction:: py4dgeo.read_from_las 14 | 15 | .. autofunction:: py4dgeo.read_from_xyz 16 | 17 | .. autofunction:: py4dgeo.save_epoch 18 | 19 | .. autofunction:: py4dgeo.load_epoch 20 | 21 | .. autoclass:: py4dgeo.M3C2 22 | :members: 23 | :inherited-members: 24 | :show-inheritance: 25 | 26 | .. autoclass:: py4dgeo.CloudCompareM3C2 27 | :members: 28 | :inherited-members: 29 | :show-inheritance: 30 | 31 | .. autoclass:: py4dgeo.SpatiotemporalAnalysis 32 | :members: 33 | 34 | .. autoclass:: py4dgeo.RegionGrowingAlgorithm 35 | :members: 36 | :inherited-members: 37 | :show-inheritance: 38 | 39 | .. autofunction:: py4dgeo.regular_corepoint_grid 40 | 41 | .. autofunction:: py4dgeo.set_py4dgeo_logfile 42 | 43 | .. autofunction:: py4dgeo.set_memory_policy 44 | 45 | .. autoclass:: py4dgeo.MemoryPolicy 46 | 47 | .. autofunction:: py4dgeo.get_num_threads 48 | 49 | .. autofunction:: py4dgeo.set_num_threads 50 | 51 | .. autoclass:: py4dgeo.PBM3C2 52 | :members: 53 | 54 | Developer API reference 55 | ----------------------- 56 | 57 | .. autofunction:: py4dgeo.epoch.as_epoch 58 | 59 | .. autofunction:: py4dgeo.epoch.normalize_timestamp 60 | 61 | .. autoclass:: py4dgeo.m3c2.M3C2LikeAlgorithm 62 | :members: 63 | 64 | .. autoclass:: py4dgeo.fallback.PythonFallbackM3C2 65 | :members: 66 | :inherited-members: 67 | :show-inheritance: 68 | 69 | .. autoclass:: py4dgeo.segmentation.RegionGrowingAlgorithmBase 70 | :members: 71 | 72 | .. autoclass:: py4dgeo.segmentation.RegionGrowingSeed 73 | :members: 74 | 75 | .. 
autoclass:: py4dgeo.segmentation.ObjectByChange 76 | :members: 77 | 78 | .. autofunction:: py4dgeo.segmentation.check_epoch_timestamp 79 | 80 | .. autoclass:: py4dgeo.util.Py4DGeoError 81 | 82 | .. autofunction:: py4dgeo.util.find_file 83 | 84 | .. autofunction:: py4dgeo.util.as_double_precision 85 | 86 | .. autofunction:: py4dgeo.util.make_contiguous 87 | 88 | .. autofunction:: py4dgeo.util.memory_policy_is_minimum 89 | 90 | .. autofunction:: py4dgeo.util.append_file_extension 91 | 92 | .. autofunction:: py4dgeo.util.is_iterable 93 | 94 | .. autoclass:: py4dgeo.zipfile.UpdateableZipFile 95 | 96 | .. autofunction:: py4dgeo.logger.create_default_logger 97 | 98 | .. autofunction:: py4dgeo.logger.logger_context 99 | -------------------------------------------------------------------------------- /doc/registration.nblink: -------------------------------------------------------------------------------- 1 | { 2 | "path": "../jupyter/registration.ipynb" 3 | } 4 | -------------------------------------------------------------------------------- /img/4dobc_extraction_thumbnail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3dgeo-heidelberg/py4dgeo/48fb7520066b08f84a393594f2c075ba212a8769/img/4dobc_extraction_thumbnail.png -------------------------------------------------------------------------------- /img/kmeans_clustering_thumbnail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3dgeo-heidelberg/py4dgeo/48fb7520066b08f84a393594f2c075ba212a8769/img/kmeans_clustering_thumbnail.png -------------------------------------------------------------------------------- /img/m3c2_change_analysis_thumbnail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3dgeo-heidelberg/py4dgeo/48fb7520066b08f84a393594f2c075ba212a8769/img/m3c2_change_analysis_thumbnail.png -------------------------------------------------------------------------------- /img/m3c2ep_change_analysis_thumbnail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3dgeo-heidelberg/py4dgeo/48fb7520066b08f84a393594f2c075ba212a8769/img/m3c2ep_change_analysis_thumbnail.png -------------------------------------------------------------------------------- /img/pbm3c2_thumbnail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3dgeo-heidelberg/py4dgeo/48fb7520066b08f84a393594f2c075ba212a8769/img/pbm3c2_thumbnail.png -------------------------------------------------------------------------------- /img/pca_thumbnail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3dgeo-heidelberg/py4dgeo/48fb7520066b08f84a393594f2c075ba212a8769/img/pca_thumbnail.png -------------------------------------------------------------------------------- /img/standard_icp_thumbnail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3dgeo-heidelberg/py4dgeo/48fb7520066b08f84a393594f2c075ba212a8769/img/standard_icp_thumbnail.png -------------------------------------------------------------------------------- /include/py4dgeo/compute.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | 5 | #include 6 | #include 7 | #include 8 | 9 | 
namespace py4dgeo { 10 | 11 | /* Signatures for the callbacks used in the M3C2 algorithm implementation */ 12 | 13 | /** @brief The parameter struct for @ref WorkingSetFinderCallback */ 14 | struct WorkingSetFinderParameters 15 | { 16 | /** @brief The epoch that we are operating on */ 17 | const Epoch& epoch; 18 | /** @brief The search radius*/ 19 | double radius; 20 | /** @brief The (single) core point that we are dealing with */ 21 | EigenPointCloudConstRef corepoint; 22 | /** @brief The cylinder axis direction */ 23 | EigenNormalSetConstRef cylinder_axis; 24 | /** @brief The maximum cylinder (half) length*/ 25 | double max_distance; 26 | }; 27 | 28 | /** @brief The callback type that determines the point cloud working subset in 29 | * the vicinity of a core point */ 30 | using WorkingSetFinderCallback = 31 | std::function; 32 | 33 | /** @brief The parameter struct for @ref DistanceUncertaintyCalculationCallback 34 | */ 35 | struct DistanceUncertaintyCalculationParameters 36 | { 37 | /** @brief The point cloud in the first epoch to operate on */ 38 | EigenPointCloudConstRef workingset1; 39 | /** @brief The point cloud in the second epoch to operate on */ 40 | EigenPointCloudConstRef workingset2; 41 | /** @brief The (single) core point that we are dealing with */ 42 | EigenPointCloudConstRef corepoint; 43 | /** @brief The surface normal at the current core point */ 44 | EigenNormalSetConstRef normal; 45 | /** @brief The registration error */ 46 | double registration_error; 47 | }; 48 | 49 | /** @brief The callback type for calculating the distance between two point 50 | * clouds */ 51 | using DistanceUncertaintyCalculationCallback = 52 | std::function( 53 | const DistanceUncertaintyCalculationParameters&)>; 54 | 55 | /* Variety of callback declarations usable in M3C2 algorithms */ 56 | 57 | /** @brief Implementation of working set finder that performs a regular radius 58 | * search */ 59 | EigenPointCloud 60 | radius_workingset_finder(const WorkingSetFinderParameters&); 61 | 62 | /** @brief Implementation of a working set finder that performs a cylinder 63 | * search */ 64 | EigenPointCloud 65 | cylinder_workingset_finder(const WorkingSetFinderParameters&); 66 | 67 | /** @brief Mean-based implementation of point cloud distance 68 | * 69 | * This is the default implementation of point cloud distance that takes 70 | * the mean of both point clouds (center of mass), projects it onto the 71 | * cylinder axis and calculates the distance. 72 | */ 73 | std::tuple 74 | mean_stddev_distance(const DistanceUncertaintyCalculationParameters&); 75 | 76 | /** @brief Median-based implementation of point cloud distance 77 | * 78 | * Use median of distances in pointcloud instead of mean. This 79 | * results in a more expensive but more robust distance measure. 
80 | */ 81 | std::tuple 82 | median_iqr_distance(const DistanceUncertaintyCalculationParameters&); 83 | 84 | /* Compute interfaces used in the M3C2 main algorithm */ 85 | 86 | /** @brief Compute M3C2 multi scale directions */ 87 | void 88 | compute_multiscale_directions(const Epoch&, 89 | EigenPointCloudConstRef, 90 | const std::vector&, 91 | EigenNormalSetConstRef, 92 | EigenNormalSetRef, 93 | std::vector&); 94 | 95 | /** @brief Compute M3C2 distances */ 96 | void 97 | compute_distances(EigenPointCloudConstRef, 98 | double, 99 | const Epoch&, 100 | const Epoch&, 101 | EigenNormalSetConstRef, 102 | double, 103 | double, 104 | DistanceVector&, 105 | UncertaintyVector&, 106 | const WorkingSetFinderCallback&, 107 | const DistanceUncertaintyCalculationCallback&); 108 | 109 | /** @brief Compute correspondence distances */ 110 | std::vector 111 | compute_correspondence_distances(const Epoch&, 112 | EigenPointCloudConstRef, 113 | std::vector, 114 | unsigned int); 115 | 116 | } 117 | -------------------------------------------------------------------------------- /include/py4dgeo/epoch.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | #include 9 | 10 | namespace py4dgeo { 11 | 12 | /** @brief A data structure representing an epoch 13 | * 14 | * Stores the point cloud itself (without taking ownership of it) and 15 | * provides two search trees: a KDTree and an Octree. This structure allows 16 | * efficient spatial queries without duplicating data. 17 | */ 18 | class Epoch 19 | { 20 | public: 21 | // Constructors 22 | Epoch(const EigenPointCloudRef&); 23 | Epoch(std::shared_ptr); 24 | 25 | // Methods for (de)serialization 26 | static std::unique_ptr from_stream(std::istream&); 27 | std::ostream& to_stream(std::ostream&) const; 28 | 29 | static void set_default_radius_search_tree(SearchTree tree) 30 | { 31 | default_radius_search_tree = tree; 32 | } 33 | 34 | static void set_default_nearest_neighbor_tree(SearchTree tree) 35 | { 36 | if (tree == SearchTree::Octree) { 37 | std::cerr << "[Warning] Octree is not yet implemented for nearest " 38 | "neighbor queries. Use KDTree instead.\n"; 39 | return; 40 | } 41 | default_nearest_neighbor_tree = tree; 42 | } 43 | 44 | static SearchTree get_default_radius_search_tree() 45 | { 46 | return default_radius_search_tree; 47 | } 48 | 49 | static SearchTree get_default_nearest_neighbor_tree() 50 | { 51 | return default_nearest_neighbor_tree; 52 | } 53 | 54 | private: 55 | // If this epoch is unserialized, it owns the point cloud 56 | std::shared_ptr owned_cloud; 57 | 58 | // Default for search operations 59 | static SearchTree default_radius_search_tree; 60 | static SearchTree default_nearest_neighbor_tree; 61 | 62 | public: 63 | // The data members are accessible from the outside. This could be 64 | // realized through getter methods. 
65 | EigenPointCloudRef cloud; 66 | KDTree kdtree; 67 | Octree octree; 68 | 69 | // We can add a collection of metadata here 70 | }; 71 | 72 | } // namespace py4dgeo 73 | -------------------------------------------------------------------------------- /include/py4dgeo/kdtree.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | #include "nanoflann.hpp" 12 | #include "py4dgeo.hpp" 13 | #include "py4dgeo/searchtree.hpp" 14 | 15 | namespace py4dgeo { 16 | 17 | // Forward declaration of Epoch 18 | class Epoch; 19 | 20 | /** @brief Efficient KDTree data structure for nearest neighbor/radius searches 21 | * 22 | * This data structure allows efficient radius searches in 3D point cloud data. 23 | * It is based on NanoFLANN: https://github.com/jlblancoc/nanoflann 24 | */ 25 | class KDTree 26 | { 27 | 28 | private: 29 | /** @brief An adaptor between our Eigen data structures and NanoFLANN */ 30 | struct Adaptor 31 | { 32 | EigenPointCloudRef cloud; 33 | 34 | inline std::size_t kdtree_get_point_count() const { return cloud.rows(); } 35 | 36 | double kdtree_get_pt(const IndexType idx, const IndexType dim) const 37 | { 38 | return cloud(idx, dim); 39 | } 40 | 41 | template 42 | bool kdtree_get_bbox(BBOX&) const 43 | { 44 | return false; 45 | } 46 | }; 47 | 48 | /** @brief A structure to perform efficient radius searches with NanoFLANN 49 | * 50 | * The built-in return set of NanoFLANN does automatically export the 51 | * distances as well, which we want to omit if we already know that we do not 52 | * need the distance information. 53 | */ 54 | struct NoDistancesReturnSet 55 | { 56 | double radius; 57 | RadiusSearchResult& indices; 58 | 59 | inline std::size_t size() const { return indices.size(); } 60 | 61 | inline bool full() const { return true; } 62 | 63 | inline bool addPoint(double dist, IndexType idx) 64 | { 65 | if (dist < radius) 66 | indices.push_back(idx); 67 | return true; 68 | } 69 | 70 | inline double worstDist() const { return radius; } 71 | }; 72 | 73 | //! The NanoFLANN index implementation that we use 74 | using KDTreeImpl = nanoflann::KDTreeSingleIndexAdaptor< 75 | nanoflann::L2_Simple_Adaptor, 76 | Adaptor, 77 | 3, 78 | IndexType>; 79 | 80 | // We allow the Epoch class to directly call below constructor 81 | friend Epoch; 82 | 83 | //! Private constructor from pointcloud - use through @ref KDTree::create 84 | KDTree(const EigenPointCloudRef&); 85 | 86 | public: 87 | /** @brief Construct instance of KDTree from a given point cloud 88 | * 89 | * This is implemented as a static function instead of a public constructor 90 | * to ease the implementation of Python bindings. 91 | * 92 | * @param cloud The point cloud to construct the search tree for 93 | */ 94 | static KDTree create(const EigenPointCloudRef& cloud); 95 | 96 | /** @brief Save the index to a (file) stream */ 97 | std::ostream& saveIndex(std::ostream& stream) const; 98 | 99 | /** @brief Load the index from a (file) stream */ 100 | std::istream& loadIndex(std::istream& stream); 101 | 102 | /** @brief Build the KDTree index 103 | * 104 | * This initializes the KDTree search index. Calling this method is required 105 | * before performing any nearest neighbors or radius searches. 106 | * 107 | * @param leaf The threshold parameter definining at what size the search 108 | * tree is cutoff. Below the cutoff, a brute force search is 109 | * performed. 
This parameter controls a trade-off decision between search tree 110 | * build time and query time. 111 | */ 112 | void build_tree(int leaf); 113 | 114 | /** @brief Invalidate the KDTree index */ 115 | void invalidate(); 116 | 117 | /** @brief Perform radius search around given query point 118 | * 119 | * This method determines all the points from the point cloud within the given 120 | * radius of the query point. It returns only the indices and the result is 121 | * not sorted according to distance. 122 | * 123 | * @param[in] querypoint A pointer to the 3D coordinate of the query point 124 | * @param[in] radius The radius to search within 125 | * @param[out] result A data structure to hold the result. It will be cleared 126 | * during application. 127 | * 128 | * @return The number of points in the return set 129 | */ 130 | std::size_t radius_search(const double* querypoint, 131 | double radius, 132 | RadiusSearchResult& result) const; 133 | 134 | /** @brief Perform radius search around given query point exporting distance 135 | * information 136 | * 137 | * This method determines all the points from the point cloud within the given 138 | * radius of the query point. It returns their indices and their distances 139 | * from the query point. The result is sorted by ascending distance from the 140 | * query point. 141 | * 142 | * @param[in] querypoint A pointer to the 3D coordinate of the query point 143 | * @param[in] radius The radius to search within 144 | * @param[out] result A data structure to hold the result. It will be cleared 145 | * during application. 146 | * 147 | * @return The number of points in the return set 148 | */ 149 | std::size_t radius_search_with_distances( 150 | const double* querypoint, 151 | double radius, 152 | RadiusSearchDistanceResult& result) const; 153 | 154 | /** @brief Calculate the nearest neighbors with Euclidean distance for an 155 | * entire point cloud 156 | * 157 | * @param[in] cloud The point cloud to use as query points 158 | * @param[out] result The indices and distances of the k nearest neighbors for 159 | * each point 160 | * @param[in] k The number of nearest neighbors to calculate 161 | * 162 | */ 163 | void nearest_neighbors_with_distances(EigenPointCloudConstRef cloud, 164 | NearestNeighborsDistanceResult& result, 165 | int k) const; 166 | 167 | /** @brief Calculate the nearest neighbors for an entire point cloud 168 | * 169 | * @param[in] cloud The point cloud to use as query points 170 | * @param[out] result The indices of the k nearest neighbors for each point 171 | * @param[in] k The number of nearest neighbors to calculate 172 | * 173 | */ 174 | void nearest_neighbors(EigenPointCloudConstRef cloud, 175 | NearestNeighborsResult& result, 176 | int k) const; 177 | 178 | /** @brief Return the leaf parameter this KDTree has been built with */ 179 | int get_leaf_parameter() const; 180 | 181 | private: 182 | Adaptor adaptor; 183 | std::shared_ptr<KDTreeImpl> search; 184 | int leaf_parameter = 0; 185 | }; 186 | 187 | } // namespace py4dgeo 188 | -------------------------------------------------------------------------------- /include/py4dgeo/openmp.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #ifdef PY4DGEO_WITH_OPENMP 4 | #include <omp.h> 5 | #endif 6 | 7 | #include <exception> 8 | 9 | /** @brief A container to handle exceptions in parallel regions 10 | * 11 | * OpenMP has only limited support for C++ exceptions in parallel 12 | * regions: Exceptions need to be caught on the same thread they 13 | * have been thrown
on. This class allows to store the first thrown 14 | * exception to then rethrow it after we left the parallel region. 15 | * This is a necessary construct to propagate exceptions from Python 16 | * callbacks through the multithreaded C++ layer back to the calling 17 | * Python scope. Inspiration is taken from: 18 | * https://stackoverflow.com/questions/11828539/elegant-exceptionhandling-in-openmp 19 | */ 20 | class CallbackExceptionVault 21 | { 22 | std::exception_ptr ptr = nullptr; 23 | 24 | public: 25 | template 26 | void run(Function&& f, Parameters&&... parameters) 27 | { 28 | try { 29 | std::forward(f)(std::forward(parameters)...); 30 | } catch (...) { 31 | #ifdef PY4DGEO_WITH_OPENMP 32 | #pragma omp critical 33 | #endif 34 | { 35 | if (!this->ptr) 36 | this->ptr = std::current_exception(); 37 | } 38 | } 39 | } 40 | 41 | void rethrow() const 42 | { 43 | if (this->ptr) 44 | std::rethrow_exception(this->ptr); 45 | } 46 | }; 47 | -------------------------------------------------------------------------------- /include/py4dgeo/py4dgeo.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | 5 | namespace py4dgeo { 6 | 7 | /* Declare the most important types used in py4dgeo */ 8 | 9 | /** @brief The C++ type for a point cloud 10 | * 11 | * Point clouds are represented as (nx3) matrices from the Eigen library. 12 | * 13 | * The choice of this type allows us both very efficient implementation 14 | * of numeric algorithms using the Eigen library, as well as no-copy 15 | * interoperability with numpy's multidimensional arrays. 16 | */ 17 | using EigenPointCloud = 18 | Eigen::Matrix; 19 | 20 | /** @brief A non-const reference type for passing around @ref EigenPointCloud 21 | * 22 | * You should use this in function signatures that accept a point cloud 23 | * as a parameter and need to modify the point cloud. 24 | */ 25 | using EigenPointCloudRef = Eigen::Ref; 26 | 27 | /** @brief A const reference type for passing around @ref EigenPointCloud 28 | * 29 | * You should use this in function signatures that accept a read-only 30 | * point cloud. 31 | */ 32 | using EigenPointCloudConstRef = Eigen::Ref; 33 | 34 | /** @brief The C++ type to represent a set of normal vectors on a point cloud */ 35 | using EigenNormalSet = 36 | Eigen::Matrix; 37 | 38 | /** @brief A mutable reference to a set of normal vectors on a point cloud */ 39 | using EigenNormalSetRef = Eigen::Ref; 40 | 41 | /** @brief An immutable reference to a set of normal vectors on a point cloud */ 42 | using EigenNormalSetConstRef = Eigen::Ref; 43 | 44 | /** @brief The type used for point cloud indices */ 45 | using IndexType = Eigen::Index; 46 | 47 | /** @brief Return structure for the uncertainty of the distance computation 48 | * 49 | * This structured type is used to describe the uncertainty of point cloud 50 | * distance at a single corepoint. It contains the level of detection, 51 | * the spread within both point clouds (e.g. the standard deviation of the 52 | * distance measure) and the number of sampled points. 
53 | */ 54 | struct DistanceUncertainty 55 | { 56 | double lodetection; 57 | double spread1; 58 | IndexType num_samples1; 59 | double spread2; 60 | IndexType num_samples2; 61 | }; 62 | 63 | /** @brief The variable-sized vector type used for distances */ 64 | using DistanceVector = std::vector; 65 | 66 | /** @brief The variable-sized vector type used for uncertainties */ 67 | using UncertaintyVector = std::vector; 68 | 69 | /** @brief An enumerator for py4dgeo's memory policy 70 | * 71 | * This is used and documented through its Python binding equivalent. 72 | */ 73 | enum class MemoryPolicy 74 | { 75 | STRICT = 0, 76 | MINIMAL = 1, 77 | COREPOINTS = 2, 78 | RELAXED = 3 79 | }; 80 | 81 | struct Supervoxel 82 | { 83 | EigenPointCloud cloud; 84 | EigenNormalSet normals; 85 | Eigen::Vector3d centroid; 86 | EigenPointCloud boundary_points; 87 | 88 | // Default constructor 89 | Supervoxel() 90 | : cloud() 91 | , normals() 92 | , centroid() 93 | , boundary_points() 94 | { 95 | } 96 | 97 | // constructor 98 | Supervoxel(EigenPointCloudConstRef c, 99 | EigenNormalSetConstRef n, 100 | const Eigen::Vector3d& center, 101 | EigenPointCloudConstRef boundary) 102 | : cloud(c) 103 | , normals(n) 104 | , centroid(center) 105 | , boundary_points(boundary) 106 | { 107 | } 108 | }; 109 | 110 | } 111 | -------------------------------------------------------------------------------- /include/py4dgeo/pybind11_numpy_interop.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | #include 5 | #include 6 | 7 | #include 8 | #include 9 | 10 | namespace py = pybind11; 11 | 12 | namespace py4dgeo { 13 | 14 | // helper function to avoid making a copy when returning a py::array_t 15 | // author: https://github.com/YannickJadoul 16 | // source: https://github.com/pybind/pybind11/issues/1042#issuecomment-642215028 17 | template 18 | inline py::array_t 19 | as_pyarray(Sequence&& seq) 20 | { 21 | auto size = seq.size(); 22 | auto data = seq.data(); 23 | std::unique_ptr seq_ptr = 24 | std::make_unique(std::move(seq)); 25 | auto capsule = py::capsule(seq_ptr.get(), [](void* p) { 26 | std::unique_ptr(reinterpret_cast(p)); 27 | }); 28 | seq_ptr.release(); 29 | return py::array(size, data, capsule); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /include/py4dgeo/registration.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | 10 | #include 11 | #include 12 | 13 | #include 14 | 15 | namespace py4dgeo { 16 | 17 | /** @brief The type that used for affine transformations on point clouds */ 18 | using Transformation = Eigen::Transform; 19 | 20 | /** @brief Apply an affine transformation to a point cloud (in-place) */ 21 | void 22 | transform_pointcloud_inplace(EigenPointCloudRef cloud, 23 | const Transformation& trafo, 24 | EigenPointCloudConstRef reduction_point, 25 | EigenNormalSetRef normals); 26 | 27 | /** Union/Find data structure */ 28 | class DisjointSet 29 | { 30 | public: 31 | /** @brief Construct the data structure for a given size */ 32 | DisjointSet(IndexType size); 33 | 34 | /** @brief Find the subset identifier that the i-th element currently belongs 35 | * to */ 36 | IndexType Find(IndexType i) const; 37 | 38 | /** @brief Merge two subsets into one 39 | * 40 | * @param i First subset identifier to merge 41 | * @param j Second subset identifier to merge 42 | * 
@param balance_sizes If true, the larger subset is merged into the smaller one. 43 | * 44 | * @return The subset identifier of the merged subset 45 | */ 46 | IndexType Union(IndexType i, IndexType j, bool balance_sizes); 47 | 48 | private: 49 | /** @brief The number of points in the data structure */ 50 | IndexType size_; 51 | 52 | /** @brief The subset sizes */ 53 | std::vector<IndexType> numbers_; 54 | 55 | /** @brief The subset index for all our points */ 56 | mutable std::vector<IndexType> subsets_; 57 | }; 58 | /** @brief Calculate the number of supervoxels, based on seed_resolution */ 59 | std::size_t 60 | estimate_supervoxel_count(EigenPointCloudConstRef cloud, 61 | double seed_resolution); 62 | 63 | /** @brief Perform supervoxel segmentation 64 | * 65 | * @param epoch The epoch to be segmented. 66 | * @param kdtree The KDTree corresponding to the epoch's points. 67 | * @param seed_resolution The seed resolution used in supervoxel count 68 | * calculation. 69 | * @param k The number of neighbors considered for each point during 70 | * segmentation. 71 | * @param normals The normal vectors of the epoch's points. 72 | * 73 | */ 74 | std::vector<std::vector<IndexType>> 75 | supervoxel_segmentation( 76 | Epoch& epoch, 77 | const KDTree& kdtree, 78 | double seed_resolution, 79 | int k, 80 | EigenNormalSet normals = EigenNormalSet::Zero( 81 | 1, 82 | 3)); // it will be changed to EigenNormalSetRef afterwards 83 | 84 | /** @brief Perform the supervoxel segmentation algorithm to distribute the 85 | * points of an epoch into supervoxels. 86 | * @param epoch The epoch to be segmented. 87 | * @param kdtree The KDTree corresponding to the epoch's points. 88 | * @param normals The normal vectors of the epoch's points. 89 | * @param resolution The resolution of the supervoxels. 90 | * @param k The number of neighbors considered for each point during 91 | * segmentation. 92 | * @param minSVPvalue The minimum number of points in a supervoxel. 93 | * */ 94 | 95 | std::vector<Supervoxel> 96 | segment_pc(Epoch& epoch, 97 | const KDTree& kdtree, 98 | EigenNormalSet normals, 99 | double resolution, 100 | int k, 101 | int minSVPvalue); 102 | 103 | /** @brief Search for a transformation that fits two point clouds onto each 104 | * other using the Gauss-Newton method 105 | * @param trans_cloud The point cloud to be transformed onto the reference. 106 | * @param reference_cloud The reference point cloud that trans_cloud is 107 | * fitted onto. 108 | * @param reference_normals The normal vectors of the reference point 109 | * cloud. 110 | * @return The 4x4 homogeneous transformation matrix that fits trans_cloud 111 | * onto the reference cloud. 112 | * */ 113 | 114 | Eigen::Matrix4d 115 | fit_transform_GN(EigenPointCloudConstRef trans_cloud, 116 | EigenPointCloudConstRef reference_cloud, 117 | EigenNormalSetConstRef reference_normals); 118 | 119 | } // namespace py4dgeo 120 | -------------------------------------------------------------------------------- /include/py4dgeo/searchtree.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include <py4dgeo/py4dgeo.hpp> 4 | 5 | #include <Eigen/Core> 6 | 7 | #include <functional> 8 | #include <vector> 9 | 10 | namespace py4dgeo { 11 | 12 | //! Return type used for radius searches 13 | using RadiusSearchResult = std::vector<IndexType>; 14 | 15 | //! Return type used for radius searches that export calculated **squared** 16 | //! distances 17 | using RadiusSearchDistanceResult = std::vector<std::pair<IndexType, double>>; 18 | 19 | //! 
Return type used for nearest neighbor with Euclidian distances searches 20 | using NearestNeighborsDistanceResult = 21 | std::vector, std::vector>>; 22 | 23 | //! Return type used for nearest neighbor searches 24 | using NearestNeighborsResult = std::vector>; 25 | 26 | enum class SearchTree 27 | { 28 | KDTree, 29 | Octree, 30 | }; 31 | 32 | class Epoch; 33 | 34 | /** 35 | * @brief Function type for performing a single-radius search. 36 | * 37 | * This function takes a 3D query point (as an Eigen vector) and outputs a 38 | * vector of point indices that lie within a sphere around the query point with 39 | * specified radius. The specific search algorithm (KDTree or Octree) is 40 | * determined at runtime. 41 | * 42 | * @param query_point The 3D coordinates of the point to search around. 43 | * @param result A vector of indices of points within the search radius. 44 | */ 45 | using RadiusSearchFuncSingle = 46 | std::function; 47 | 48 | /** 49 | * @brief Function type for performing a multi-radius search. 50 | * 51 | * This function takes a 3D point and an index representing the radius to use 52 | * from a precomputed list of radii. It outputs a vector of point indices that 53 | * lie within the selected radius. 54 | * 55 | * @param query_point The 3D coordinates of the point to search around. 56 | * @param radius_index The index to select the radius from a list of radii. 57 | * @param result A vector of indices of points within the search radius. 58 | */ 59 | using RadiusSearchFunc = 60 | std::function; 61 | 62 | /** 63 | * @brief Returns a function for performing a single-radius search. 64 | * 65 | * Depending on the default search tree type set in the Epoch class (KDTree or 66 | * Octree), this function returns a callable object that efficiently performs 67 | * radius searches around a given point. 68 | * 69 | * In the case of the Octree, the function also computes the appropriate level 70 | * of the tree based on the given radius. 71 | * 72 | * @param epoch The Epoch object containing the search tree. 73 | * @param radius The radius within which to search for neighboring points. 74 | * 75 | * @return A callable function that performs the radius search. 76 | */ 77 | RadiusSearchFuncSingle 78 | get_radius_search_function(const Epoch& epoch, double radius); 79 | 80 | /** 81 | * @brief Returns a function for performing multi-radius searches. 82 | * 83 | * Depending on the default search tree type set in the Epoch class (KDTree or 84 | * Octree), this function returns a callable object that efficiently performs 85 | * radius searches for a given list of radii. 86 | * 87 | * In the case of the Octree, the function precomputes the appropriate level 88 | * for each radius to optimize search performance. 89 | * 90 | * @param epoch The Epoch object containing the search tree. 91 | * @param radii A vector of radii for which search functions are needed. 92 | * 93 | * @return A callable function that performs the radius search for each radius. 
94 | */ 95 | RadiusSearchFunc 96 | get_radius_search_function(const Epoch& epoch, 97 | const std::vector& radii); 98 | 99 | } // namespace py4dgeo 100 | -------------------------------------------------------------------------------- /include/py4dgeo/segmentation.hpp: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include "py4dgeo/epoch.hpp" 4 | #include "py4dgeo/py4dgeo.hpp" 5 | 6 | #include 7 | 8 | #include 9 | #include 10 | #include 11 | 12 | namespace py4dgeo { 13 | 14 | /** @brief The type to use for the spatiotemporal distance array. */ 15 | using EigenSpatiotemporalArray = 16 | Eigen::Matrix; 17 | using EigenSpatiotemporalArrayRef = Eigen::Ref; 18 | using EigenSpatiotemporalArrayConstRef = 19 | Eigen::Ref; 20 | 21 | /** @brief The type to use for a TimeSeries */ 22 | using EigenTimeSeries = Eigen::Vector; 23 | using EigenTimeSeriesRef = Eigen::Ref; 24 | using EigenTimeSeriesConstRef = Eigen::Ref; 25 | 26 | /** @brief The data object passed to time series distance functions */ 27 | struct TimeseriesDistanceFunctionData 28 | { 29 | EigenTimeSeriesConstRef ts1; 30 | EigenTimeSeriesConstRef ts2; 31 | double norm1; 32 | double norm2; 33 | }; 34 | 35 | /** @brief The signature to use for a distance function */ 36 | using TimeseriesDistanceFunction = 37 | std::function; 38 | 39 | /** @brief Basic data structure for 4D change object */ 40 | struct ObjectByChange 41 | { 42 | std::unordered_map indices_distances; 43 | IndexType start_epoch; 44 | IndexType end_epoch; 45 | double threshold; 46 | }; 47 | 48 | struct RegionGrowingSeed 49 | { 50 | IndexType index; 51 | IndexType start_epoch; 52 | IndexType end_epoch; 53 | }; 54 | 55 | struct RegionGrowingAlgorithmData 56 | { 57 | EigenSpatiotemporalArrayConstRef distances; 58 | const Epoch& corepoints; 59 | double radius; 60 | RegionGrowingSeed seed; 61 | std::vector thresholds; 62 | std::size_t min_segments; 63 | std::size_t max_segments; 64 | }; 65 | 66 | /** @brief The main region growing algorithm */ 67 | ObjectByChange 68 | region_growing(const RegionGrowingAlgorithmData&, 69 | const TimeseriesDistanceFunction&); 70 | 71 | /** @brief The DTW distance measure implementation used in 4DOBC */ 72 | double 73 | dtw_distance(const TimeseriesDistanceFunctionData&); 74 | 75 | /** @brief Normalized DTW distance measure for 4DOBC */ 76 | double 77 | normalized_dtw_distance(const TimeseriesDistanceFunctionData&); 78 | 79 | struct ChangePointDetectionData 80 | { 81 | EigenTimeSeriesConstRef ts; 82 | IndexType window_width; 83 | IndexType min_size; 84 | IndexType jump; 85 | double penalty; 86 | }; 87 | 88 | /** @brief Calculate the median of double vector. 
The function modifies the 89 | * input vector!*/ 90 | double 91 | median_calculation(std::vector<double>&); 92 | double 93 | median_calculation_simp(std::vector<double>&); 94 | /** @brief Calculate the local maxima, i.e. values larger than the "order" 95 | * neighboring values to their left and right */ 96 | std::vector<IndexType> 97 | local_maxima_calculation(std::vector<double>&, IndexType); 98 | /** @brief Calculate cost error */ 99 | double cost_L1_error(EigenTimeSeriesConstRef, IndexType, IndexType, IndexType); 100 | 101 | /** @brief Calculate signal sum of costs */ 102 | double 103 | sum_of_costs(EigenTimeSeriesConstRef, std::vector<IndexType>&, IndexType); 104 | 105 | /** @brief Change point detection using a sliding window approach; runs the fit 106 | * and then the predict function */ 107 | std::vector<IndexType> 108 | change_point_detection(const ChangePointDetectionData&); 109 | 110 | /** @brief Compute parameters for the change point detection function; returns 111 | * the scores array. */ 112 | std::vector<double> fit_change_point_detection(EigenTimeSeriesConstRef, 113 | IndexType, 114 | IndexType, 115 | IndexType); 116 | 117 | /** @brief Predict the change points, returning the optimal breakpoints; must be 118 | * called after the fit function */ 119 | std::vector<IndexType> 120 | predict_change_point_detection(EigenTimeSeriesConstRef, 121 | std::vector<double>&, 122 | IndexType, 123 | IndexType, 124 | IndexType, 125 | double); 126 | 127 | } // namespace py4dgeo 128 | -------------------------------------------------------------------------------- /jupyter/m3c2.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "0", 6 | "metadata": {}, 7 | "source": [ 8 | "# Basic M3C2 algorithm" 9 | ] 10 | }, 11 | { 12 | "cell_type": "markdown", 13 | "id": "1", 14 | "metadata": {}, 15 | "source": [ 16 | "This notebook presents how the M3C2 algorithm ([Lague et al., 2013](#References)) for point cloud distance computation can be run using the `py4dgeo` package. As a first step, we import the `py4dgeo` and `numpy` packages:" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": null, 22 | "id": "2", 23 | "metadata": {}, 24 | "outputs": [], 25 | "source": [ 26 | "import numpy as np\n", 27 | "import py4dgeo" 28 | ] 29 | }, 30 | { 31 | "cell_type": "markdown", 32 | "id": "3", 33 | "metadata": {}, 34 | "source": [ 35 | "Next, we need to load two datasets that cover the same scene at two different points in time. Point cloud datasets are represented by `numpy` arrays of shape `n x 3` using a 64 bit floating point type (`np.float64`). Here, we work with a rather small synthetic data set:" 36 | ] 37 | }, 38 | { 39 | "cell_type": "code", 40 | "execution_count": null, 41 | "id": "4", 42 | "metadata": {}, 43 | "outputs": [], 44 | "source": [ 45 | "epoch1, epoch2 = py4dgeo.read_from_xyz(\n", 46 | " \"plane_horizontal_t1.xyz\", \"plane_horizontal_t2.xyz\"\n", 47 | ")" 48 | ] 49 | }, 50 | { 51 | "cell_type": "markdown", 52 | "id": "5", 53 | "metadata": {}, 54 | "source": [ 55 | "The analysis of point cloud distances is executed on so-called *core points* (cf. Lague et al., 2013). These could be, e.g., one of the input point clouds, a subsampled version thereof, points in an equidistant grid, etc. 
Here, we choose a subsampling by taking every 50th point of the reference point cloud:" 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | "execution_count": null, 61 | "id": "6", 62 | "metadata": {}, 63 | "outputs": [], 64 | "source": [ 65 | "corepoints = epoch1.cloud[::50]" 66 | ] 67 | }, 68 | { 69 | "cell_type": "markdown", 70 | "id": "7", 71 | "metadata": {}, 72 | "source": [ 73 | "Next, we instantiate the algorithm class and run the distance calculation:" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": null, 79 | "id": "8", 80 | "metadata": {}, 81 | "outputs": [], 82 | "source": [ 83 | "m3c2 = py4dgeo.M3C2(\n", 84 | " epochs=(epoch1, epoch2),\n", 85 | " corepoints=corepoints,\n", 86 | " cyl_radius=2.0,\n", 87 | " normal_radii=[0.5, 1.0, 2.0],\n", 88 | ")\n", 89 | "\n", 90 | "distances, uncertainties = m3c2.run()" 91 | ] 92 | }, 93 | { 94 | "cell_type": "markdown", 95 | "id": "9", 96 | "metadata": {}, 97 | "source": [ 98 | "The calculated result is an array with one distance per core point. The order of distances corresponds exactly to the order of input core points." 99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": null, 104 | "id": "10", 105 | "metadata": {}, 106 | "outputs": [], 107 | "source": [ 108 | "distances" 109 | ] 110 | }, 111 | { 112 | "cell_type": "markdown", 113 | "id": "11", 114 | "metadata": {}, 115 | "source": [ 116 | "Corresponding to the derived distances, an uncertainty array is returned which contains several quantities that can be accessed individually: The level of detection `lodetection`, the spread of the distance across points in either cloud (`spread1` and `spread2`, by default measured as the standard deviation of distances) and the total number of points taken into consideration in either cloud (`num_samples1` and `num_samples2`):" 117 | ] 118 | }, 119 | { 120 | "cell_type": "code", 121 | "execution_count": null, 122 | "id": "12", 123 | "metadata": {}, 124 | "outputs": [], 125 | "source": [ 126 | "uncertainties[\"lodetection\"]" 127 | ] 128 | }, 129 | { 130 | "cell_type": "code", 131 | "execution_count": null, 132 | "id": "13", 133 | "metadata": {}, 134 | "outputs": [], 135 | "source": [ 136 | "uncertainties[\"spread1\"]" 137 | ] 138 | }, 139 | { 140 | "cell_type": "code", 141 | "execution_count": null, 142 | "id": "14", 143 | "metadata": {}, 144 | "outputs": [], 145 | "source": [ 146 | "uncertainties[\"num_samples1\"]" 147 | ] 148 | }, 149 | { 150 | "cell_type": "markdown", 151 | "id": "15", 152 | "metadata": {}, 153 | "source": [ 154 | "The direction of surface change in the M3C2 algorithm is determined via local normal vectors per core point. The normal vectors used in the calculation can be accessed via the `directions()` method of the M3C2 algorithm in `py4dgeo`, which returns an array (Nx3) of length N corresponding to the number of core points with three entries for the normal vector components in x, y, and z direction. " 155 | ] 156 | }, 157 | { 158 | "cell_type": "code", 159 | "execution_count": null, 160 | "id": "16", 161 | "metadata": {}, 162 | "outputs": [], 163 | "source": [ 164 | "m3c2.directions()" 165 | ] 166 | }, 167 | { 168 | "cell_type": "markdown", 169 | "id": "17", 170 | "metadata": {}, 171 | "source": [ 172 | "The property `directions_radii` returns an array (Nx1) of length N corresponding to the number of core points and one entry for the radius used for normal computation at the respective core point. This is relevant for the multi-scale functionality of the M3C2, i.e. 
the possibility to specify multiple normal radii of which the one with maximized planarity is used for change analysis." 173 | ] 174 | }, 175 | { 176 | "cell_type": "code", 177 | "execution_count": null, 178 | "id": "18", 179 | "metadata": {}, 180 | "outputs": [], 181 | "source": [ 182 | "m3c2.directions_radii()" 183 | ] 184 | }, 185 | { 186 | "cell_type": "markdown", 187 | "id": "19", 188 | "metadata": {}, 189 | "source": [ 190 | "### References\n", 191 | "\n", 192 | "* Lague, D., Brodu, N., & Leroux, J. (2013). Accurate 3D comparison of complex topography with terrestrial laser scanner: Application to the Rangitikei canyon (N-Z). ISPRS Journal of Photogrammetry and Remote Sensing, 82, pp. 10-26. doi: [10.1016/j.isprsjprs.2013.04.009](https://doi.org/10.1016/j.isprsjprs.2013.04.009)." 193 | ] 194 | } 195 | ], 196 | "metadata": { 197 | "kernelspec": { 198 | "display_name": "Python 3 (ipykernel)", 199 | "language": "python", 200 | "name": "python3" 201 | }, 202 | "language_info": { 203 | "codemirror_mode": { 204 | "name": "ipython", 205 | "version": 3 206 | }, 207 | "file_extension": ".py", 208 | "mimetype": "text/x-python", 209 | "name": "python", 210 | "nbconvert_exporter": "python", 211 | "pygments_lexer": "ipython3", 212 | "version": "3.12.3" 213 | } 214 | }, 215 | "nbformat": 4, 216 | "nbformat_minor": 5 217 | } 218 | -------------------------------------------------------------------------------- /jupyter/pbm3c2-longterm.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "0", 6 | "metadata": {}, 7 | "source": [ 8 | "# Applying PB-M3C2 in long term monitoring\n", 9 | "\n", 10 | "
<div class=\"alert alert-block alert-warning\"><b>\n", 11 | "WARNING: The implementation of this method is experimental and under active development.\n", 12 | "</b></div>
" 13 | ] 14 | }, 15 | { 16 | "cell_type": "markdown", 17 | "id": "1", 18 | "metadata": {}, 19 | "source": [ 20 | "In applications where data from the same observation site is acquired over a long period of time, it is desirable to carry out the training of the PB-M3C2 algorithm once and then apply the trained model to newly acquired epochs. This notebook explains how this process is implemented in `py4dgeo`. First, we are carrying out the training procedure like we did in the explanation of the [base workflow](pbm3c2.ipynb):" 21 | ] 22 | }, 23 | { 24 | "cell_type": "code", 25 | "execution_count": null, 26 | "id": "2", 27 | "metadata": {}, 28 | "outputs": [], 29 | "source": [ 30 | "import py4dgeo" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": null, 36 | "id": "3", 37 | "metadata": {}, 38 | "outputs": [], 39 | "source": [ 40 | "py4dgeo.set_interactive_backend(\"vtk\")" 41 | ] 42 | }, 43 | { 44 | "cell_type": "code", 45 | "execution_count": null, 46 | "id": "4", 47 | "metadata": {}, 48 | "outputs": [], 49 | "source": [ 50 | "epoch0, epoch1 = py4dgeo.read_from_xyz(\n", 51 | " \"plane_horizontal_t1.xyz\", \"plane_horizontal_t2.xyz\"\n", 52 | ")" 53 | ] 54 | }, 55 | { 56 | "cell_type": "code", 57 | "execution_count": null, 58 | "id": "5", 59 | "metadata": {}, 60 | "outputs": [], 61 | "source": [ 62 | "alg = py4dgeo.PBM3C2()" 63 | ] 64 | }, 65 | { 66 | "cell_type": "code", 67 | "execution_count": null, 68 | "id": "6", 69 | "metadata": {}, 70 | "outputs": [], 71 | "source": [ 72 | "xyz_epoch0, xyz_epoch1, segment_id = alg.export_segmented_point_cloud_and_segments(\n", 73 | " epoch0=epoch0,\n", 74 | " epoch1=epoch1,\n", 75 | ")" 76 | ] 77 | }, 78 | { 79 | "cell_type": "code", 80 | "execution_count": null, 81 | "id": "7", 82 | "metadata": {}, 83 | "outputs": [], 84 | "source": [ 85 | "alg.training(\n", 86 | " extracted_segments_file_name=\"extracted_segments.seg\",\n", 87 | " extended_y_file_name=\"testdata-labelling.csv\",\n", 88 | ")" 89 | ] 90 | }, 91 | { 92 | "cell_type": "markdown", 93 | "id": "8", 94 | "metadata": {}, 95 | "source": [ 96 | "Having the pre-trained algorithm object `alg`, we would like to save it for reuse in later analysis sessions. We do use Python's `pickle` module for that:" 97 | ] 98 | }, 99 | { 100 | "cell_type": "code", 101 | "execution_count": null, 102 | "id": "9", 103 | "metadata": {}, 104 | "outputs": [], 105 | "source": [ 106 | "import pickle" 107 | ] 108 | }, 109 | { 110 | "cell_type": "code", 111 | "execution_count": null, 112 | "id": "10", 113 | "metadata": {}, 114 | "outputs": [], 115 | "source": [ 116 | "with open(\"alg.pickle\", \"wb\") as outfile:\n", 117 | " pickle.dump(alg, outfile)" 118 | ] 119 | }, 120 | { 121 | "cell_type": "markdown", 122 | "id": "11", 123 | "metadata": {}, 124 | "source": [ 125 | "Then, in a subsequent session, we can reload the algorithm using `pickle`:" 126 | ] 127 | }, 128 | { 129 | "cell_type": "code", 130 | "execution_count": null, 131 | "id": "12", 132 | "metadata": {}, 133 | "outputs": [], 134 | "source": [ 135 | "with open(\"alg.pickle\", \"rb\") as infile:\n", 136 | " alg = pickle.load(infile)" 137 | ] 138 | }, 139 | { 140 | "cell_type": "markdown", 141 | "id": "13", 142 | "metadata": {}, 143 | "source": [ 144 | "We can then feed new epochs (here, we just use `epoch0` again) into the algorithm. 
It will apply segmentation on the new epoch and then run the prediction for the new epoch" 145 | ] 146 | }, 147 | { 148 | "cell_type": "code", 149 | "execution_count": null, 150 | "id": "14", 151 | "metadata": {}, 152 | "outputs": [], 153 | "source": [ 154 | "(\n", 155 | " _0,\n", 156 | " _1,\n", 157 | " extracted_segments_epoch0,\n", 158 | ") = alg.export_segmented_point_cloud_and_segments(\n", 159 | " # must be a new epoch\n", 160 | " epoch0=epoch0,\n", 161 | " # epoch1=None,\n", 162 | " x_y_z_id_epoch0_file_name=None,\n", 163 | " x_y_z_id_epoch1_file_name=None,\n", 164 | " extracted_segments_file_name=None,\n", 165 | ")" 166 | ] 167 | }, 168 | { 169 | "cell_type": "markdown", 170 | "id": "15", 171 | "metadata": {}, 172 | "source": [ 173 | "We can then calculate distances for the new epoch. Note, that in order to disable those parts of the analysis pipeline that are already computed for the reference epoch, we pass the constant dictionary `**py4dgeo.config_epoch0_as_segments`. If you have customized the analysis pipeline, you should adapt the configuration settings accordingly and disable all those steps that are not required for the reference epoch:" 174 | ] 175 | }, 176 | { 177 | "cell_type": "code", 178 | "execution_count": null, 179 | "id": "16", 180 | "metadata": {}, 181 | "outputs": [], 182 | "source": [ 183 | "distances, uncertainties = alg.compute_distances(\n", 184 | " epoch0=extracted_segments_epoch0, epoch1=epoch1, **py4dgeo.config_epoch0_as_segments\n", 185 | ")" 186 | ] 187 | } 188 | ], 189 | "metadata": { 190 | "kernelspec": { 191 | "display_name": "Python 3 (ipykernel)", 192 | "language": "python", 193 | "name": "python3" 194 | }, 195 | "language_info": { 196 | "codemirror_mode": { 197 | "name": "ipython", 198 | "version": 3 199 | }, 200 | "file_extension": ".py", 201 | "mimetype": "text/x-python", 202 | "name": "python", 203 | "nbconvert_exporter": "python", 204 | "pygments_lexer": "ipython3", 205 | "version": "3.11.0" 206 | } 207 | }, 208 | "nbformat": 4, 209 | "nbformat_minor": 5 210 | } 211 | -------------------------------------------------------------------------------- /jupyter/pbm3c2-segmented.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "0", 6 | "metadata": {}, 7 | "source": [ 8 | "# Correspondence-driven plane-based M3C2 (PBM3C2) with known segmentation\n", 9 | "\n", 10 | "
<div class=\"alert alert-block alert-warning\"><b>\n", 11 | "WARNING: The implementation of this method is experimental and under active development.\n", 12 | "</b></div>
" 13 | ] 14 | }, 15 | { 16 | "cell_type": "markdown", 17 | "id": "1", 18 | "metadata": {}, 19 | "source": [ 20 | "In this notebook, we are extending the [PB-M3C2 implementation](pbm3c2.ipynb) to work with segmentation information that is already present in the input data. This is useful if you are embedding the calculation into a larger workflow where a segmentation has already been produced." 21 | ] 22 | }, 23 | { 24 | "cell_type": "code", 25 | "execution_count": null, 26 | "id": "2", 27 | "metadata": {}, 28 | "outputs": [], 29 | "source": [ 30 | "import py4dgeo" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": null, 36 | "id": "3", 37 | "metadata": {}, 38 | "outputs": [], 39 | "source": [ 40 | "py4dgeo.set_interactive_backend(\"vtk\")" 41 | ] 42 | }, 43 | { 44 | "cell_type": "markdown", 45 | "id": "4", 46 | "metadata": {}, 47 | "source": [ 48 | "We are reading the two input epochs from XYZ files which contain a total of four columns: X, Y and Z coordinates, as well a segment ID mapping each point to a segment. The `read_from_xyz` functionality allows us to read additional data columns through its `additional_dimensions` parameter. It is expecting a dictionary that maps the column index to a column name." 49 | ] 50 | }, 51 | { 52 | "cell_type": "code", 53 | "execution_count": null, 54 | "id": "5", 55 | "metadata": {}, 56 | "outputs": [], 57 | "source": [ 58 | "epoch0, epoch1 = py4dgeo.read_from_xyz(\n", 59 | " \"plane_horizontal_t1_segmented.xyz\",\n", 60 | " \"plane_horizontal_t2_segmented.xyz\",\n", 61 | " additional_dimensions={3: \"segment_id\"},\n", 62 | " delimiter=\",\",\n", 63 | ")" 64 | ] 65 | }, 66 | { 67 | "cell_type": "markdown", 68 | "id": "6", 69 | "metadata": {}, 70 | "source": [ 71 | "Again, we instantiate the algorithm. Due to fundamental differences in the algorithm workflow, we are using a separated algorithm class for this use case:" 72 | ] 73 | }, 74 | { 75 | "cell_type": "code", 76 | "execution_count": null, 77 | "id": "7", 78 | "metadata": {}, 79 | "outputs": [], 80 | "source": [ 81 | "alg = py4dgeo.PBM3C2WithSegments()" 82 | ] 83 | }, 84 | { 85 | "cell_type": "markdown", 86 | "id": "8", 87 | "metadata": {}, 88 | "source": [ 89 | "Next, we will read the segmented point cloud, which is part of the input epochs, and reconstruct the required segments from it. As a result, we get the same information that we got from the `export_segments_for_labelling` method in the [base PB-M3C2 implementation](pbm3c2.ipynb). Again, we need to provide labelling and can choose to do so either interactively or with external tools. 
In contrast to `export_segments_for_labelling`, `reconstruct_post_segmentation_output` only writes one file - the full segmentation information file (which defaults to `extracted_segments.seg`):" 90 | ] 91 | }, 92 | { 93 | "cell_type": "code", 94 | "execution_count": null, 95 | "id": "9", 96 | "metadata": {}, 97 | "outputs": [], 98 | "source": [ 99 | "xyz_epoch0, xyz_epoch1, segments = alg.reconstruct_post_segmentation_output(\n", 100 | " epoch0=epoch0,\n", 101 | " epoch1=epoch1,\n", 102 | ")" 103 | ] 104 | }, 105 | { 106 | "cell_type": "markdown", 107 | "id": "10", 108 | "metadata": {}, 109 | "source": [ 110 | "Having completed the labelling process, we read it back in and start the trainging procedure:" 111 | ] 112 | }, 113 | { 114 | "cell_type": "code", 115 | "execution_count": null, 116 | "id": "11", 117 | "metadata": {}, 118 | "outputs": [], 119 | "source": [ 120 | "alg.training(\n", 121 | " extracted_segments_file_name=\"extracted_segments.seg\",\n", 122 | " extended_y_file_name=\"testdata-labelling2.csv\",\n", 123 | ")" 124 | ] 125 | }, 126 | { 127 | "cell_type": "code", 128 | "execution_count": null, 129 | "id": "12", 130 | "metadata": {}, 131 | "outputs": [], 132 | "source": [ 133 | "distances, uncertainties = alg.compute_distances(epoch0, epoch1)" 134 | ] 135 | }, 136 | { 137 | "cell_type": "markdown", 138 | "id": "13", 139 | "metadata": {}, 140 | "source": [ 141 | "*Note*: When comparing distance results between this notebook and the [base algorithm notebook](pbm3c2.ipynb), you might notice, that results do not necessarily agree even if the given segmentation information is exactly the same as the one computed in the base algorithm. This is due to the reconstruction process in this algorithm being forced to select the segment position (exported as the *core point*) from the segment points instead of reconstructing the correct position from the base algorithm." 142 | ] 143 | }, 144 | { 145 | "cell_type": "code", 146 | "execution_count": null, 147 | "id": "14", 148 | "metadata": {}, 149 | "outputs": [], 150 | "source": [] 151 | } 152 | ], 153 | "metadata": { 154 | "kernelspec": { 155 | "display_name": "Python 3 (ipykernel)", 156 | "language": "python", 157 | "name": "python3" 158 | }, 159 | "language_info": { 160 | "codemirror_mode": { 161 | "name": "ipython", 162 | "version": 3 163 | }, 164 | "file_extension": ".py", 165 | "mimetype": "text/x-python", 166 | "name": "python", 167 | "nbconvert_exporter": "python", 168 | "pygments_lexer": "ipython3", 169 | "version": "3.11.0" 170 | } 171 | }, 172 | "nbformat": 4, 173 | "nbformat_minor": 5 174 | } 175 | -------------------------------------------------------------------------------- /jupyter/pbm3c2-tools.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "0", 6 | "metadata": {}, 7 | "source": [ 8 | "# Additional tools for PB-M3C2\n", 9 | "\n", 10 | "
<div class=\"alert alert-block alert-warning\"><b>\n", 11 | "WARNING: The implementation of this method is experimental and under active development.\n", 12 | "</b></div>
" 13 | ] 14 | }, 15 | { 16 | "cell_type": "markdown", 17 | "id": "1", 18 | "metadata": {}, 19 | "source": [ 20 | "In this notebook, we will provide extension to the [PB-M3C2 workflow](pbm3c2.ipynb) that will be occasionally useful based on your application." 21 | ] 22 | }, 23 | { 24 | "cell_type": "markdown", 25 | "id": "2", 26 | "metadata": {}, 27 | "source": [ 28 | "## Generation of non-correspondent pairs" 29 | ] 30 | }, 31 | { 32 | "cell_type": "markdown", 33 | "id": "3", 34 | "metadata": {}, 35 | "source": [ 36 | "For best training results, the user should provide both pairs of segments that do correspond to each other, as well as pairs of segments that do not correspond. In manual labelling workflows, it is much easier to produce high quality corresponding pairs that it is to produce non-corresponding pairs. Here, we provide a function that allows you to generate pairs of non-corresponding segments automatically based on heuristic. The general procedure is exactly the same as in [the base workflow](pbm3c2.ipynb) and will not be further explained here." 37 | ] 38 | }, 39 | { 40 | "cell_type": "code", 41 | "execution_count": null, 42 | "id": "4", 43 | "metadata": {}, 44 | "outputs": [], 45 | "source": [ 46 | "import py4dgeo" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": null, 52 | "id": "5", 53 | "metadata": {}, 54 | "outputs": [], 55 | "source": [ 56 | "py4dgeo.set_interactive_backend(\"vtk\")" 57 | ] 58 | }, 59 | { 60 | "cell_type": "code", 61 | "execution_count": null, 62 | "id": "6", 63 | "metadata": {}, 64 | "outputs": [], 65 | "source": [ 66 | "epoch0, epoch1 = py4dgeo.read_from_xyz(\n", 67 | " \"plane_horizontal_t1.xyz\", \"plane_horizontal_t2.xyz\"\n", 68 | ")" 69 | ] 70 | }, 71 | { 72 | "cell_type": "code", 73 | "execution_count": null, 74 | "id": "7", 75 | "metadata": {}, 76 | "outputs": [], 77 | "source": [ 78 | "alg = py4dgeo.PBM3C2()" 79 | ] 80 | }, 81 | { 82 | "cell_type": "code", 83 | "execution_count": null, 84 | "id": "8", 85 | "metadata": {}, 86 | "outputs": [], 87 | "source": [ 88 | "(\n", 89 | " xyz_epoch0,\n", 90 | " xyz_epoch1,\n", 91 | " extracted_segments,\n", 92 | ") = alg.export_segmented_point_cloud_and_segments(\n", 93 | " epoch0=epoch0,\n", 94 | " epoch1=epoch1,\n", 95 | ")" 96 | ] 97 | }, 98 | { 99 | "cell_type": "markdown", 100 | "id": "9", 101 | "metadata": {}, 102 | "source": [ 103 | "Now, we will use labelling data from the file `testdata-labelling-correspondent-only.csv`, which does not contain any pairs of non-corresponding segments. Running `add_no_corresponding_seg` on this data, we automatically generate these. There are two heuristics that can be selected through the `algorithm` parameter:\n", 104 | "* `random`: For each segment in one epoch, label a random segment from the neighborhood in the other epoch as non-corresponding.\n", 105 | "* `closes`: For each segment in one epoch, take the closest segment in the other epoch and label it non-corresponding.\n", 106 | "\n", 107 | "The neighborhood of a segment is defined by the threshold parameter given as `threshold_max_distance`." 
108 | ] 109 | }, 110 | { 111 | "cell_type": "code", 112 | "execution_count": null, 113 | "id": "10", 114 | "metadata": {}, 115 | "outputs": [], 116 | "source": [ 117 | "augmented_extended_y = py4dgeo.add_no_corresponding_seg(\n", 118 | "    segments=extracted_segments,\n", 119 | "    threshold_max_distance=5,\n", 120 | "    algorithm=\"random\",\n", 121 | "    extended_y_file_name=\"testdata-labelling-correspondent-only.csv\",\n", 122 | ")" 123 | ] 124 | }, 125 | { 126 | "cell_type": "markdown", 127 | "id": "11", 128 | "metadata": {}, 129 | "source": [ 130 | "We can then run the training algorithm, directly passing the augmented labelling data:" 131 | ] 132 | }, 133 | { 134 | "cell_type": "code", 135 | "execution_count": null, 136 | "id": "12", 137 | "metadata": {}, 138 | "outputs": [], 139 | "source": [ 140 | "alg.training(\n", 141 | "    extracted_segments_file_name=\"extracted_segments.seg\",\n", 142 | "    extended_y=augmented_extended_y,\n", 143 | ")" 144 | ] 145 | }, 146 | { 147 | "cell_type": "code", 148 | "execution_count": null, 149 | "id": "13", 150 | "metadata": {}, 151 | "outputs": [], 152 | "source": [ 153 | "distances, uncertainties = alg.compute_distances(epoch0=epoch0, epoch1=epoch1)" 154 | ] 155 | } 156 | ], 157 | "metadata": { 158 | "kernelspec": { 159 | "display_name": "Python 3 (ipykernel)", 160 | "language": "python", 161 | "name": "python3" 162 | }, 163 | "language_info": { 164 | "codemirror_mode": { 165 | "name": "ipython", 166 | "version": 3 167 | }, 168 | "file_extension": ".py", 169 | "mimetype": "text/x-python", 170 | "name": "python", 171 | "nbconvert_exporter": "python", 172 | "pygments_lexer": "ipython3", 173 | "version": "3.13.2" 174 | } 175 | }, 176 | "nbformat": 4, 177 | "nbformat_minor": 5 178 | } 179 | -------------------------------------------------------------------------------- /jupyter/pbm3c2.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "0", 6 | "metadata": {}, 7 | "source": [ 8 | "# Correspondence-driven plane-based M3C2 (PBM3C2)\n", 9 | "\n", 10 | "<div>\n", 11 | "WARNING: The implementation of this method is experimental and under active development.\n", 12 | "</div>
" 13 | ] 14 | }, 15 | { 16 | "cell_type": "code", 17 | "execution_count": null, 18 | "id": "1", 19 | "metadata": {}, 20 | "outputs": [], 21 | "source": [ 22 | "import py4dgeo" 23 | ] 24 | }, 25 | { 26 | "cell_type": "markdown", 27 | "id": "2", 28 | "metadata": {}, 29 | "source": [ 30 | "In this notebook, we present how the *Correspondence-driven plane-based M3C2* (PB-M3C2, [Zahs et al., 2022](#References)) algorithm for point cloud distance computation using the `py4dgeo` package.\n", 31 | "\n", 32 | "The concept and method of PBM3C2 are explained in this scientific talk:\n", 33 | "\n", 34 | "\"\"\n" 35 | ] 36 | }, 37 | { 38 | "cell_type": "markdown", 39 | "id": "3", 40 | "metadata": {}, 41 | "source": [ 42 | "As PB-M3C2 is a learning algorithm, it requires user-labelled input data in the process. This input can either be provided through external tools or be generated using a simple graphical user interface. For the graphical user interface to work best from Jupyter notebooks, we select the `vtk` backend." 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": null, 48 | "id": "4", 49 | "metadata": {}, 50 | "outputs": [], 51 | "source": [ 52 | "py4dgeo.set_interactive_backend(\"vtk\")" 53 | ] 54 | }, 55 | { 56 | "cell_type": "markdown", 57 | "id": "5", 58 | "metadata": {}, 59 | "source": [ 60 | "We will work on the same demonstrator data we used in the explanation of the [M3C2 algorithm](m3c2.ipynb):" 61 | ] 62 | }, 63 | { 64 | "cell_type": "code", 65 | "execution_count": null, 66 | "id": "6", 67 | "metadata": {}, 68 | "outputs": [], 69 | "source": [ 70 | "epoch0, epoch1 = py4dgeo.read_from_xyz(\n", 71 | " \"plane_horizontal_t1.xyz\", \"plane_horizontal_t2.xyz\"\n", 72 | ")" 73 | ] 74 | }, 75 | { 76 | "cell_type": "markdown", 77 | "id": "7", 78 | "metadata": {}, 79 | "source": [ 80 | "Again, we instantiate an instance of the algorithm class. For now, we use only the defaults for its parameters and leave explanation of customization aspects for later." 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": null, 86 | "id": "8", 87 | "metadata": {}, 88 | "outputs": [], 89 | "source": [ 90 | "alg = py4dgeo.PBM3C2()" 91 | ] 92 | }, 93 | { 94 | "cell_type": "markdown", 95 | "id": "9", 96 | "metadata": {}, 97 | "source": [ 98 | "In a first step, PB-M3C2 will run a plane segmentation algorithm on the provided input point clouds. As a learning algorithm, it then requires user input about corresponding planes. `py4dgeo` offers two ways of doing this:\n", 99 | "* You can export the segmentation data in XYZ format with four columns: `x`, `y` and `z` of the point cloud, as well as the `segment_id` of the segment the point is associated with. Using that data, you can determine correspondance using your favorite tools or existing workflows. Your input is again expected in a comma-separated text file (CSV). It should contain three columns: The `segment_id` from the first point cloud, the `segment_id` from the second point cloud and a value of `0` or `1` depending on whether the two segments matched. The APIs for this case are shown in this notebook.\n", 100 | "* You can interactively build the correspondence information in an interactive session. For this, you can call `alg.build_labelled_similarity_features_interactively()`." 101 | ] 102 | }, 103 | { 104 | "cell_type": "markdown", 105 | "id": "10", 106 | "metadata": {}, 107 | "source": [ 108 | "Here, we use the first method of using an external tool for labelling. 
This call will write a total of three files: the above-mentioned XYZ files for both epochs, as well as a third file that contains the full results of the segmentation process. The latter allows you to resume computation later without rerunning the segmentation part of the algorithm. You can modify the default file names by passing them to the respective arguments." 109 | ] 110 | }, 111 | { 112 | "cell_type": "code", 113 | "execution_count": null, 114 | "id": "11", 115 | "metadata": {}, 116 | "outputs": [], 117 | "source": [ 118 | "xyz_epoch0, xyz_epoch1, segment_id = alg.export_segmented_point_cloud_and_segments(\n", 119 | "    epoch0=epoch0,\n", 120 | "    epoch1=epoch1,\n", 121 | ")" 122 | ] 123 | }, 124 | { 125 | "cell_type": "markdown", 126 | "id": "12", 127 | "metadata": {}, 128 | "source": [ 129 | "After completing the labelling with your preferred method, you can read it back into `py4dgeo`. We pass the previously exported segmentation information and the externally produced CSV file to the training procedure. For this test case, the labelled data is distributed with the test data:" 130 | ] 131 | }, 132 | { 133 | "cell_type": "code", 134 | "execution_count": null, 135 | "id": "13", 136 | "metadata": {}, 137 | "outputs": [], 138 | "source": [ 139 | "alg.training(\n", 140 | "    extracted_segments_file_name=\"extracted_segments.seg\",\n", 141 | "    extended_y_file_name=\"testdata-labelling.csv\",\n", 142 | ")" 143 | ] 144 | }, 145 | { 146 | "cell_type": "markdown", 147 | "id": "14", 148 | "metadata": {}, 149 | "source": [ 150 | "We have now trained the algorithm using a `scikit-learn` classifier. By default, this is a random forest. We are now ready to compute the distances analogously to how distances are calculated in standard M3C2. This will run the prediction with the trained model and derive distance and uncertainty information from the results:" 151 | ] 152 | }, 153 | { 154 | "cell_type": "code", 155 | "execution_count": null, 156 | "id": "15", 157 | "metadata": {}, 158 | "outputs": [], 159 | "source": [ 160 | "distances, uncertainties = alg.compute_distances(epoch0=epoch0, epoch1=epoch1)" 161 | ] 162 | }, 163 | { 164 | "cell_type": "markdown", 165 | "id": "16", 166 | "metadata": {}, 167 | "source": [ 168 | "### References\n", 169 | "\n", 170 | "* Zahs, V., Winiwarter, L., Anders, K., Williams, J.G., Rutzinger, M. & Höfle, B. (2022): Correspondence-driven plane-based M3C2 for lower uncertainty in 3D topographic change quantification. ISPRS Journal of Photogrammetry and Remote Sensing, 183, pp. 541-559. DOI: [10.1016/j.isprsjprs.2021.11.018](https://doi.org/10.1016/j.isprsjprs.2021.11.018)."
171 | ] 172 | }, 173 | { 174 | "cell_type": "code", 175 | "execution_count": null, 176 | "id": "17", 177 | "metadata": {}, 178 | "outputs": [], 179 | "source": [] 180 | } 181 | ], 182 | "metadata": { 183 | "kernelspec": { 184 | "display_name": "Python 3 (ipykernel)", 185 | "language": "python", 186 | "name": "python3" 187 | }, 188 | "language_info": { 189 | "codemirror_mode": { 190 | "name": "ipython", 191 | "version": 3 192 | }, 193 | "file_extension": ".py", 194 | "mimetype": "text/x-python", 195 | "name": "python", 196 | "nbconvert_exporter": "python", 197 | "pygments_lexer": "ipython3", 198 | "version": "3.12.0" 199 | } 200 | }, 201 | "nbformat": 4, 202 | "nbformat_minor": 5 203 | } 204 | -------------------------------------------------------------------------------- /lib/directions.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | 4 | #include "py4dgeo/compute.hpp" 5 | #include "py4dgeo/kdtree.hpp" 6 | #include "py4dgeo/octree.hpp" 7 | #include "py4dgeo/openmp.hpp" 8 | #include "py4dgeo/py4dgeo.hpp" 9 | #include "py4dgeo/searchtree.hpp" 10 | 11 | #include 12 | #include 13 | #include 14 | 15 | #include 16 | 17 | namespace py4dgeo { 18 | 19 | void 20 | compute_multiscale_directions(const Epoch& epoch, 21 | EigenPointCloudConstRef corepoints, 22 | const std::vector& normal_radii, 23 | EigenNormalSetConstRef orientation, 24 | EigenNormalSetRef result, 25 | std::vector& used_radii) 26 | { 27 | used_radii.resize(corepoints.rows()); 28 | const Eigen::Vector3d orientation_vector = orientation.row(0).transpose(); 29 | 30 | auto radius_search = get_radius_search_function(epoch, normal_radii); 31 | 32 | // Instantiate a container for the first thrown exception in 33 | // the following parallel region. 34 | CallbackExceptionVault vault; 35 | #ifdef PY4DGEO_WITH_OPENMP 36 | #pragma omp parallel for schedule(dynamic, 1) 37 | #endif 38 | for (IndexType i = 0; i < corepoints.rows(); ++i) { 39 | vault.run([&]() { 40 | double highest_planarity = 0.0; 41 | Eigen::Matrix3d cov; 42 | Eigen::SelfAdjointEigenSolver solver{}; 43 | RadiusSearchResult points; 44 | for (size_t r = 0; r < normal_radii.size(); ++r) { 45 | 46 | radius_search(corepoints.row(i), r, points); 47 | 48 | EigenPointCloud subset = epoch.cloud(points, Eigen::all); 49 | 50 | // Calculate covariance matrix 51 | const Eigen::Vector3d mean = subset.colwise().mean(); 52 | subset.rowwise() -= mean.transpose(); 53 | // only need the lower-triangular elements of the covariance matrix 54 | cov.diagonal() = subset.colwise().squaredNorm(); 55 | cov(1, 0) = subset.col(0).dot(subset.col(1)); 56 | cov(2, 0) = subset.col(0).dot(subset.col(2)); 57 | cov(2, 1) = subset.col(1).dot(subset.col(2)); 58 | cov /= double(subset.rows() - 1); 59 | 60 | // Calculate Eigen vectors 61 | solver.computeDirect(cov); 62 | const Eigen::Vector3d& evalues = solver.eigenvalues(); 63 | const Eigen::Vector3d evec = solver.eigenvectors().col(0); 64 | 65 | // Calculate planarity 66 | double planarity = (evalues[1] - evalues[0]) / evalues[2]; 67 | if (planarity > highest_planarity) { 68 | highest_planarity = planarity; 69 | 70 | double sign = (evec.dot(orientation_vector) < 0.0) ? 
-1.0 : 1.0; 71 | result.row(i) = sign * evec; 72 | used_radii[i] = normal_radii[r]; 73 | } 74 | } 75 | }); 76 | } 77 | 78 | // Potentially rethrow an exception that occurred in above parallel region 79 | vault.rethrow(); 80 | } 81 | 82 | std::vector 83 | compute_correspondence_distances(const Epoch& epoch, 84 | EigenPointCloudConstRef transformated_pc, 85 | std::vector corepoints, 86 | unsigned int check_size) 87 | { 88 | 89 | NearestNeighborsDistanceResult result; 90 | epoch.kdtree.nearest_neighbors_with_distances(transformated_pc, result, 1); 91 | std::vector p2pdist(transformated_pc.rows()); 92 | 93 | #ifdef PY4DGEO_WITH_OPENMP 94 | #pragma omp parallel for schedule(dynamic, 1) 95 | #endif 96 | for (IndexType i = 0; i < transformated_pc.rows(); ++i) { 97 | if (epoch.cloud.rows() != check_size) { 98 | EigenPointCloud subset = corepoints[result[i].first[0]]; 99 | 100 | // Calculate covariance matrix 101 | Eigen::Matrix3d cov; 102 | const Eigen::Vector3d mean = subset.colwise().mean(); 103 | subset.rowwise() -= mean.transpose(); 104 | // only need the lower-triangular elements of the covariance matrix 105 | cov.diagonal() = subset.colwise().squaredNorm(); 106 | cov(1, 0) = subset.col(0).dot(subset.col(1)); 107 | cov(2, 0) = subset.col(0).dot(subset.col(2)); 108 | cov(2, 1) = subset.col(1).dot(subset.col(2)); 109 | cov /= double(subset.rows() - 1); 110 | 111 | // Calculate eigenvectors using direct 3x3 solver 112 | Eigen::SelfAdjointEigenSolver solver; 113 | solver.computeDirect(cov); 114 | 115 | // Calculate Eigen vectors 116 | Eigen::Vector3d normal_vector = solver.eigenvectors().col(0); 117 | // Calculate cor distance 118 | Eigen::Vector3d displacement_vector = 119 | epoch.cloud.row(result[i].first[0]) - transformated_pc.row(i); 120 | p2pdist[i] = std::abs(displacement_vector.dot(normal_vector)); 121 | 122 | } 123 | 124 | else 125 | p2pdist[i] = std::sqrt(result[i].second[0]); 126 | } 127 | return p2pdist; 128 | } 129 | 130 | } // namespace py4dgeo 131 | -------------------------------------------------------------------------------- /lib/epoch.cpp: -------------------------------------------------------------------------------- 1 | #include "py4dgeo/epoch.hpp" 2 | #include "py4dgeo/kdtree.hpp" 3 | #include "py4dgeo/octree.hpp" 4 | #include "py4dgeo/py4dgeo.hpp" 5 | 6 | #include 7 | 8 | #include 9 | 10 | namespace py4dgeo { 11 | 12 | Epoch::Epoch(const EigenPointCloudRef& cloud_) 13 | : owned_cloud(nullptr) 14 | , cloud(cloud_) 15 | , kdtree(cloud_) 16 | , octree(cloud_) 17 | { 18 | } 19 | 20 | Epoch::Epoch(std::shared_ptr cloud_) 21 | : owned_cloud(cloud_) 22 | , cloud(*cloud_) 23 | , kdtree(*cloud_) 24 | , octree(*cloud_) 25 | { 26 | } 27 | 28 | SearchTree Epoch::default_radius_search_tree = SearchTree::KDTree; 29 | SearchTree Epoch::default_nearest_neighbor_tree = SearchTree::KDTree; 30 | 31 | std::ostream& 32 | Epoch::to_stream(std::ostream& stream) const 33 | { 34 | // Write the cloud itself 35 | IndexType rows = cloud.rows(); 36 | stream.write(reinterpret_cast(&rows), sizeof(IndexType)); 37 | stream.write(reinterpret_cast(&cloud(0, 0)), 38 | sizeof(double) * rows * 3); 39 | 40 | // Write the kdtree leaf parameter 41 | int leaf_parameter = kdtree.leaf_parameter; 42 | stream.write(reinterpret_cast(&leaf_parameter), sizeof(int)); 43 | 44 | // Write the KDTree search index iff the index was built 45 | if (leaf_parameter != 0) 46 | kdtree.search->saveIndex(stream); 47 | 48 | // Write the Octree iff it was built 49 | unsigned int octree_points = octree.get_number_of_points(); 50 | 
stream.write(reinterpret_cast(&octree_points), 51 | sizeof(unsigned int)); 52 | if (octree_points != 0) 53 | octree.saveIndex(stream); 54 | 55 | return stream; 56 | } 57 | 58 | std::unique_ptr 59 | Epoch::from_stream(std::istream& stream) 60 | { 61 | // Read the cloud itself 62 | IndexType rows; 63 | stream.read(reinterpret_cast(&rows), sizeof(IndexType)); 64 | auto cloud = std::make_shared(rows, 3); 65 | stream.read(reinterpret_cast(&(*cloud)(0, 0)), 66 | sizeof(double) * rows * 3); 67 | 68 | // Create the epoch 69 | auto epoch = std::make_unique(cloud); 70 | 71 | // Read the leaf parameter 72 | stream.read(reinterpret_cast(&(epoch->kdtree.leaf_parameter)), 73 | sizeof(int)); 74 | 75 | // Read the search index iff the index was built 76 | if (epoch->kdtree.leaf_parameter != 0) { 77 | epoch->kdtree.search = std::make_shared( 78 | 3, 79 | epoch->kdtree.adaptor, 80 | nanoflann::KDTreeSingleIndexAdaptorParams(epoch->kdtree.leaf_parameter)); 81 | epoch->kdtree.search->loadIndex(stream); 82 | } 83 | 84 | // Read the octree iff it was built 85 | unsigned int octree_points; 86 | stream.read(reinterpret_cast(&octree_points), sizeof(unsigned int)); 87 | if (octree_points != 0) { 88 | epoch->octree.loadIndex(stream); 89 | } 90 | 91 | return epoch; 92 | } 93 | 94 | } // namespace py4dgeo 95 | -------------------------------------------------------------------------------- /lib/kdtree.cpp: -------------------------------------------------------------------------------- 1 | #ifdef PY4DGEO_WITH_OPENMP 2 | #include 3 | #endif 4 | 5 | #include "py4dgeo/kdtree.hpp" 6 | #include "py4dgeo/py4dgeo.hpp" 7 | 8 | #include 9 | 10 | namespace py4dgeo { 11 | 12 | KDTree::KDTree(const EigenPointCloudRef& cloud) 13 | : adaptor{ cloud } 14 | { 15 | } 16 | 17 | KDTree 18 | KDTree::create(const EigenPointCloudRef& cloud) 19 | { 20 | return KDTree(cloud); 21 | } 22 | 23 | void 24 | KDTree::build_tree(int leaf) 25 | { 26 | search = std::make_shared( 27 | 3, adaptor, nanoflann::KDTreeSingleIndexAdaptorParams(leaf)); 28 | search->buildIndex(); 29 | leaf_parameter = leaf; 30 | } 31 | 32 | void 33 | KDTree::invalidate() 34 | { 35 | search = nullptr; 36 | leaf_parameter = 0; 37 | } 38 | 39 | std::ostream& 40 | KDTree::saveIndex(std::ostream& stream) const 41 | { 42 | stream.write(reinterpret_cast(&leaf_parameter), sizeof(int)); 43 | 44 | if (leaf_parameter != 0) 45 | search->saveIndex(stream); 46 | 47 | return stream; 48 | } 49 | 50 | std::istream& 51 | KDTree::loadIndex(std::istream& stream) 52 | { 53 | // Read the leaf parameter 54 | stream.read(reinterpret_cast(&leaf_parameter), sizeof(int)); 55 | 56 | if (leaf_parameter != 0) { 57 | search = std::make_shared( 58 | 3, adaptor, nanoflann::KDTreeSingleIndexAdaptorParams(leaf_parameter)); 59 | search->loadIndex(stream); 60 | } 61 | 62 | return stream; 63 | } 64 | 65 | std::size_t 66 | KDTree::radius_search(const double* query, 67 | double radius, 68 | RadiusSearchResult& result) const 69 | { 70 | NoDistancesReturnSet set{ radius * radius, result }; 71 | nanoflann::SearchParams params; 72 | params.sorted = false; 73 | return search->radiusSearchCustomCallback(query, set, params); 74 | } 75 | 76 | std::size_t 77 | KDTree::radius_search_with_distances(const double* query, 78 | double radius, 79 | RadiusSearchDistanceResult& result) const 80 | { 81 | nanoflann::SearchParams params; 82 | return search->radiusSearch(query, radius * radius, result, params); 83 | } 84 | 85 | void 86 | KDTree::nearest_neighbors_with_distances(EigenPointCloudConstRef cloud, 87 | 
NearestNeighborsDistanceResult& result, 88 | int k) const 89 | { 90 | result.resize(cloud.rows()); 91 | nanoflann::SearchParams params; 92 | 93 | #ifdef PY4DGEO_WITH_OPENMP 94 | #pragma omp parallel for schedule(dynamic, 1) 95 | #endif 96 | for (IndexType i = 0; i < cloud.rows(); ++i) { 97 | std::pair, std::vector> pointResult; 98 | 99 | std::vector& ret_indices = pointResult.first; 100 | std::vector& out_dists_sqr = pointResult.second; 101 | ret_indices.resize(k); 102 | out_dists_sqr.resize(k); 103 | 104 | nanoflann::KNNResultSet resultset(k); 105 | auto qp = cloud.row(i).eval(); 106 | resultset.init(ret_indices.data(), out_dists_sqr.data()); 107 | search->findNeighbors(resultset, &(qp(0, 0)), params); 108 | result[i] = pointResult; 109 | } 110 | } 111 | 112 | void 113 | KDTree::nearest_neighbors(EigenPointCloudConstRef cloud, 114 | NearestNeighborsResult& result, 115 | int k) const 116 | { 117 | result.resize(cloud.rows()); 118 | nanoflann::SearchParams params; 119 | 120 | #ifdef PY4DGEO_WITH_OPENMP 121 | #pragma omp parallel for schedule(dynamic, 1) 122 | #endif 123 | for (IndexType i = 0; i < cloud.rows(); ++i) { 124 | std::vector pointResult; 125 | std::vector dis_skip; 126 | 127 | std::vector& ret_indices = pointResult; 128 | std::vector& out_dists_sqr = dis_skip; 129 | ret_indices.resize(k); 130 | out_dists_sqr.resize(k); 131 | 132 | nanoflann::KNNResultSet resultset(k); 133 | auto qp = cloud.row(i).eval(); 134 | resultset.init(ret_indices.data(), out_dists_sqr.data()); 135 | search->findNeighbors(resultset, &(qp(0, 0)), params); 136 | result[i] = pointResult; 137 | } 138 | } 139 | 140 | int 141 | KDTree::get_leaf_parameter() const 142 | { 143 | return leaf_parameter; 144 | } 145 | 146 | } // namespace py4dgeo 147 | -------------------------------------------------------------------------------- /lib/searchtree.cpp: -------------------------------------------------------------------------------- 1 | #include "py4dgeo/searchtree.hpp" 2 | 3 | #include 4 | 5 | #include 6 | 7 | #include 8 | 9 | namespace py4dgeo { 10 | 11 | // For a single radius 12 | RadiusSearchFuncSingle 13 | get_radius_search_function(const Epoch& epoch, double radius) 14 | { 15 | if (Epoch::get_default_radius_search_tree() == SearchTree::Octree) { 16 | unsigned int level = 17 | epoch.octree.find_appropriate_level_for_radius_search(radius); 18 | 19 | return [&, radius, level](const Eigen::Vector3d& point, 20 | RadiusSearchResult& out) { 21 | epoch.octree.radius_search(point, radius, level, out); 22 | }; 23 | } else { 24 | return [&, radius](const Eigen::Vector3d& point, RadiusSearchResult& out) { 25 | epoch.kdtree.radius_search(point.data(), radius, out); 26 | }; 27 | } 28 | } 29 | 30 | // For a vector of radii 31 | RadiusSearchFunc 32 | get_radius_search_function(const Epoch& epoch, const std::vector& radii) 33 | { 34 | if (Epoch::get_default_radius_search_tree() == SearchTree::Octree) { 35 | std::vector levels(radii.size()); 36 | for (size_t i = 0; i < radii.size(); ++i) { 37 | levels[i] = 38 | epoch.octree.find_appropriate_level_for_radius_search(radii[i]); 39 | } 40 | 41 | return [&, radii, levels = std::move(levels)]( 42 | const Eigen::Vector3d& point, size_t r, RadiusSearchResult& out) { 43 | epoch.octree.radius_search(point, radii[r], levels[r], out); 44 | }; 45 | } else { 46 | return [&, radii]( 47 | const Eigen::Vector3d& point, size_t r, RadiusSearchResult& out) { 48 | epoch.kdtree.radius_search(point.data(), radii[r], out); 49 | }; 50 | } 51 | } 52 | 53 | } // namespace py4dgeo 54 | 
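The dispatch above is the single point where py4dgeo decides whether a radius query is answered by the KDTree or by the Octree (including the per-radius level precomputation for the latter). As a rough sketch of the Python-level code path that ultimately ends up here, using parameter names that appear elsewhere in this repository (`cyl_radius`, `normal_radii`, `max_distance` are the mapping targets in `src/py4dgeo/cloudcompare.py`); the file names and parameter values below are illustrative only, not prescribed by the library:

import py4dgeo

# Read two epochs from ASCII point clouds (test files shipped with the repository)
epoch0, epoch1 = py4dgeo.read_from_xyz("plane_horizontal_t1.xyz", "plane_horizontal_t2.xyz")

# Every radius search issued during normal estimation and cylinder search is
# routed through get_radius_search_function(), i.e. the KDTree by default
m3c2 = py4dgeo.M3C2(
    epochs=(epoch0, epoch1),
    corepoints=epoch0.cloud,       # use all points of the first epoch as core points
    cyl_radius=2.0,                # assumed values, chosen for illustration
    normal_radii=(0.5, 1.0, 2.0),  # multiscale normal estimation radii
    max_distance=10.0,             # maximum cylinder length
)
distances, uncertainties = m3c2.run()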
-------------------------------------------------------------------------------- /py4dgeo_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/3dgeo-heidelberg/py4dgeo/48fb7520066b08f84a393594f2c075ba212a8769/py4dgeo_logo.png -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | # This section describes the requirements of the build/installation 2 | # process itself. Being able to do this was the original reason to 3 | # introduce pyproject.toml 4 | [build-system] 5 | requires = [ 6 | "pybind11", 7 | "scikit-build-core >=0.6.1", 8 | ] 9 | build-backend = "scikit_build_core.build" 10 | 11 | # This section provides general project metadata that is used across 12 | # a variety of build tools. Notably, the version specified here is the 13 | # single source of truth for py4dgeo's version 14 | [project] 15 | name = "py4dgeo" 16 | version = "0.7.0" 17 | description = "Library for change detection in 4D point cloud data" 18 | readme = "README.md" 19 | maintainers = [ 20 | { name = "Dominic Kempf", email = "ssc@iwr.uni-heidelberg.de" }, 21 | ] 22 | requires-python = ">=3.8" 23 | license = { file = "LICENSE.md" } 24 | classifiers = [ 25 | "Programming Language :: Python :: 3", 26 | "Programming Language :: C++", 27 | "Operating System :: OS Independent", 28 | "License :: OSI Approved :: MIT License", 29 | "Topic :: Scientific/Engineering :: GIS", 30 | "Intended Audience :: Science/Research", 31 | ] 32 | dependencies = [ 33 | "dateparser", 34 | "laspy[lazrs]>=2.0,<3.0", 35 | "matplotlib", 36 | "numpy", 37 | "pooch", 38 | "requests", 39 | "seaborn", 40 | "scikit-learn", 41 | "vedo", 42 | "xdg", 43 | "psutil" 44 | ] 45 | 46 | # Command line scripts installed as part of the installation 47 | [project.scripts] 48 | copy_py4dgeo_test_data = "py4dgeo.util:copy_test_data_entrypoint" 49 | 50 | [tool.scikit-build.cmake] 51 | minimum-version = "3.30" 52 | 53 | [tool.scikit-build.cmake.define] 54 | BUILD_DOCS = "OFF" 55 | BUILD_TESTING = "OFF" 56 | 57 | # The next section configures building wheels in Continuous Integration 58 | # The cibuildwheel documentation covers the available options in detail: 59 | # https://cibuildwheel.readthedocs.io/en/stable/options/ 60 | [tool.cibuildwheel] 61 | # Super-verbose output for debugging purpose 62 | build-verbosity = 3 63 | 64 | # We only do 64 bit builds 65 | archs = ["auto64"] 66 | 67 | # We restrict ourselves to recent Python versions. 68 | # We temporarily skip win32 builds, because lazrs 69 | # does not provide Win32 wheels. 70 | skip = "pp* cp38-* *musllinux* *-win32" 71 | 72 | # Testing commands for our wheels 73 | test-command = "pytest {package}/tests/python" 74 | test-requires = ["pytest", "ruptures"] 75 | 76 | [tool.cibuildwheel.macos] 77 | before-all = "brew install libomp" 78 | environment = { OpenMP_ROOT="$(brew --prefix libomp)" } 79 | 80 | # The following is the configuration for the pytest test suite 81 | [tool.pytest.ini_options] 82 | testpaths = [ 83 | "tests/python", 84 | "jupyter", 85 | ] 86 | filterwarnings = [ 87 | "ignore:The localize method is no longer necessary, as this time zone supports the fold attribute", 88 | "ignore:distutils Version classes are deprecated. 
Use packaging.version instead.", 89 | ] 90 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | breathe 2 | cmake 3 | ipykernel 4 | lcov_cobertura 5 | nbsphinx 6 | nbsphinx-link 7 | nbval 8 | pandas 9 | pre-commit 10 | pytest 11 | pytest-cov 12 | ruptures 13 | sphinx 14 | sphinx_mdinclude 15 | sphinx_rtd_theme 16 | -------------------------------------------------------------------------------- /src/py4dgeo/UpdateableZipFile.py: -------------------------------------------------------------------------------- 1 | import operator 2 | import zipfile 3 | 4 | 5 | class UpdateableZipFile(zipfile.ZipFile): 6 | """A patched version of Python's zipfile that supports removing files in 'a' mode 7 | 8 | Python does not implement this although the issue has been 9 | known for over a decade and a fix exists: 10 | https://github.com/python/cpython/pull/19358 11 | 12 | This class ports the CPython fix forward. 13 | """ 14 | 15 | def remove(self, member): 16 | """Remove a file from the archive. The archive must be open with mode 'a'""" 17 | 18 | if self.mode != "a": 19 | raise RuntimeError("remove() requires mode 'a'") 20 | if not self.fp: 21 | raise ValueError("Attempt to write to ZIP archive that was already closed") 22 | if self._writing: 23 | raise ValueError( 24 | "Can't write to ZIP archive while an open writing handle exists." 25 | ) 26 | 27 | # Make sure we have an info object 28 | if isinstance(member, zipfile.ZipInfo): 29 | # 'member' is already an info object 30 | zinfo = member 31 | else: 32 | # get the info object 33 | zinfo = self.getinfo(member) 34 | 35 | return self._remove_member(zinfo) 36 | 37 | def _remove_member(self, member): 38 | # get a sorted filelist by header offset, in case the dir order 39 | # doesn't match the actual entry order 40 | fp = self.fp 41 | entry_offset = 0 42 | filelist = sorted(self.filelist, key=operator.attrgetter("header_offset")) 43 | for i in range(len(filelist)): 44 | info = filelist[i] 45 | # find the target member 46 | if info.header_offset < member.header_offset: 47 | continue 48 | 49 | # get the total size of the entry 50 | entry_size = None 51 | if i == len(filelist) - 1: 52 | entry_size = self.start_dir - info.header_offset 53 | else: 54 | entry_size = filelist[i + 1].header_offset - info.header_offset 55 | 56 | # found the member, set the entry offset 57 | if member == info: 58 | entry_offset = entry_size 59 | continue 60 | 61 | # Move entry 62 | # read the actual entry data 63 | fp.seek(info.header_offset) 64 | entry_data = fp.read(entry_size) 65 | 66 | # update the header 67 | info.header_offset -= entry_offset 68 | 69 | # write the entry to the new position 70 | fp.seek(info.header_offset) 71 | fp.write(entry_data) 72 | fp.flush() 73 | 74 | # update state 75 | self.start_dir -= entry_offset 76 | self.filelist.remove(member) 77 | del self.NameToInfo[member.filename] 78 | self._didModify = True 79 | 80 | # seek to the start of the central dir 81 | fp.seek(self.start_dir) 82 | -------------------------------------------------------------------------------- /src/py4dgeo/__init__.py: -------------------------------------------------------------------------------- 1 | from py4dgeo.logger import set_py4dgeo_logfile 2 | from py4dgeo.cloudcompare import CloudCompareM3C2 3 | from py4dgeo.epoch import ( 4 | Epoch, 5 | read_from_las, 6 | read_from_xyz, 7 | save_epoch, 8 | load_epoch, 9 | ) 10 | from _py4dgeo import SearchTree 
11 | from py4dgeo.m3c2 import M3C2, write_m3c2_results_to_las 12 | from py4dgeo.m3c2ep import M3C2EP 13 | from py4dgeo.registration import ( 14 | iterative_closest_point, 15 | point_to_plane_icp, 16 | icp_with_stable_areas, 17 | ) 18 | from py4dgeo.segmentation import ( 19 | RegionGrowingAlgorithm, 20 | SpatiotemporalAnalysis, 21 | regular_corepoint_grid, 22 | temporal_averaging, 23 | ) 24 | from py4dgeo.util import ( 25 | __version__, 26 | find_file, 27 | MemoryPolicy, 28 | set_memory_policy, 29 | get_num_threads, 30 | set_num_threads, 31 | initialize_openmp_defaults, 32 | ) 33 | 34 | initialize_openmp_defaults() 35 | 36 | from py4dgeo.pbm3c2 import * 37 | -------------------------------------------------------------------------------- /src/py4dgeo/cloudcompare.py: -------------------------------------------------------------------------------- 1 | from py4dgeo.m3c2 import M3C2 2 | 3 | 4 | _cloudcompare_param_mapping = { 5 | "normalscale": "normal_radii", 6 | "registrationerror": "reg_error", 7 | "searchdepth": "max_distance", 8 | "searchscale": "cyl_radius", 9 | "usemedian": "robust_aggr", 10 | } 11 | 12 | 13 | class CloudCompareM3C2(M3C2): 14 | def __init__(self, **params): 15 | """An M3C2 implementation that uses parameter names from CloudCompare""" 16 | # Remap parameters using above mapping 17 | py4dgeo_params = { 18 | _cloudcompare_param_mapping.get(k, k): v for k, v in params.items() 19 | } 20 | 21 | # Apply changes that are not a mere renaming 22 | 23 | # Scale parameters are diameters in CloudCompare and radii in py4dgeo 24 | if "cyl_radius" in py4dgeo_params: 25 | py4dgeo_params["cyl_radius"] = py4dgeo_params["cyl_radius"] * 0.5 26 | if "normal_radii" in py4dgeo_params: 27 | py4dgeo_params["normal_radii"] = tuple( 28 | 0.5 * r for r in py4dgeo_params["normal_radii"] 29 | ) 30 | 31 | # Initialize base class with remapped parameters 32 | super().__init__(**py4dgeo_params) 33 | -------------------------------------------------------------------------------- /src/py4dgeo/fallback.py: -------------------------------------------------------------------------------- 1 | """Fallback implementations for C++ components of the M3C2 algorithms""" 2 | 3 | from py4dgeo.m3c2 import M3C2 4 | 5 | import numpy as np 6 | import _py4dgeo 7 | 8 | 9 | def radius_workingset_finder(params: _py4dgeo.WorkingSetFinderParameters) -> np.ndarray: 10 | indices = params.epoch._radius_search(params.corepoint, params.radius) 11 | return params.epoch._cloud[indices, :] 12 | 13 | 14 | def cylinder_workingset_finder( 15 | params: _py4dgeo.WorkingSetFinderParameters, 16 | ) -> np.ndarray: 17 | # Cut the cylinder into N segments, perform radius searches around the 18 | # segment midpoints and create the union of indices 19 | N = 1 20 | max_cylinder_length = params.max_distance 21 | if max_cylinder_length >= params.radius: 22 | N = np.ceil(max_cylinder_length / params.radius) 23 | else: 24 | max_cylinder_length = params.radius 25 | 26 | r_cyl = np.sqrt( 27 | params.radius * params.radius 28 | + max_cylinder_length * max_cylinder_length / (N * N) 29 | ) 30 | 31 | slabs = [] 32 | for i in range(int(N)): 33 | # Find indices around slab midpoint 34 | qp = ( 35 | params.corepoint[0, :] 36 | + (2 * i - N + 1) / N * max_cylinder_length * params.cylinder_axis[0, :] 37 | ) 38 | indices = params.epoch._radius_search(qp, r_cyl) 39 | 40 | # Gather the points from the point cloud 41 | superset = params.epoch._cloud[indices, :] 42 | 43 | # Calculate distance from the axis and the plane perpendicular to the axis 44 | to_corepoint =
superset - qp 45 | to_corepoint_plane = to_corepoint.dot(params.cylinder_axis[0, :]) 46 | to_axis2 = np.sum( 47 | np.square( 48 | to_corepoint 49 | - np.multiply( 50 | to_corepoint_plane[:, np.newaxis], params.cylinder_axis[0, :] 51 | ) 52 | ), 53 | axis=1, 54 | ) 55 | 56 | # Filter the points that are not within the slab 57 | filtered = superset[ 58 | np.logical_and( 59 | to_axis2 <= params.radius * params.radius, 60 | np.abs(to_corepoint_plane) < max_cylinder_length / N, 61 | ) 62 | ] 63 | 64 | slabs.append(filtered) 65 | 66 | return np.concatenate(tuple(slabs)) 67 | 68 | 69 | def mean_stddev_distance( 70 | params: _py4dgeo.DistanceUncertaintyCalculationParameters, 71 | ) -> tuple: 72 | # Calculate distance 73 | distance = params.normal[0, :].dot( 74 | params.workingset2.mean(axis=0) - params.workingset1.mean(axis=0) 75 | ) 76 | 77 | # Calculate variances 78 | variance1 = params.normal @ np.cov(params.workingset1.T) @ params.normal.T 79 | variance2 = params.normal @ np.cov(params.workingset2.T) @ params.normal.T 80 | 81 | # The structured array that describes the full uncertainty 82 | uncertainty = _py4dgeo.DistanceUncertainty( 83 | lodetection=1.96 84 | * ( 85 | np.sqrt( 86 | variance1 / params.workingset1.shape[0] 87 | + variance2 / params.workingset2.shape[0] 88 | ).item() 89 | + params.registration_error 90 | ), 91 | spread1=np.sqrt(variance1).item(), 92 | num_samples1=params.workingset1.shape[0], 93 | spread2=np.sqrt(variance2).item(), 94 | num_samples2=params.workingset2.shape[0], 95 | ) 96 | 97 | return distance, uncertainty 98 | 99 | 100 | def average_pos(a, pos, div): 101 | # This is an unfortunate helper, but numpy.percentile does not do 102 | # the correct thing. It sometimes averages although we have an exact 103 | # match for the position we are searching. 
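    # For example, the median (pos=0.5, div=2) of an even-length sorted array is
    # the average of the two middle entries, while for odd length it is the
    # middle entry itself.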
104 | if len(a) % div == 0: 105 | return ( 106 | a[int(np.floor(pos * len(a)))] + a[int(np.floor(pos * len(a))) - 1] 107 | ) / 2.0 108 | else: 109 | return a[int(np.floor(pos * len(a)))] 110 | 111 | 112 | def median_iqr_distance( 113 | params: _py4dgeo.DistanceUncertaintyCalculationParameters, 114 | ) -> tuple: 115 | # Calculate distributions 116 | dist1 = (params.workingset1 - params.corepoint[0, :]).dot(params.normal[0, :]) 117 | dist2 = (params.workingset2 - params.corepoint[0, :]).dot(params.normal[0, :]) 118 | dist1.sort() 119 | dist2.sort() 120 | 121 | median1 = average_pos(dist1, 0.5, 2) 122 | median2 = average_pos(dist2, 0.5, 2) 123 | iqr1 = average_pos(dist1, 0.75, 4) - average_pos(dist1, 0.25, 4) 124 | iqr2 = average_pos(dist2, 0.75, 4) - average_pos(dist2, 0.25, 4) 125 | 126 | # The structured array that describes the full uncertainty 127 | uncertainty = _py4dgeo.DistanceUncertainty( 128 | lodetection=1.96 129 | * ( 130 | np.sqrt( 131 | iqr1 * iqr1 / params.workingset1.shape[0] 132 | + iqr2 * iqr2 / params.workingset2.shape[0] 133 | ) 134 | + params.registration_error 135 | ), 136 | spread1=iqr1, 137 | num_samples1=params.workingset1.shape[0], 138 | spread2=iqr2, 139 | num_samples2=params.workingset2.shape[0], 140 | ) 141 | 142 | return median2 - median1, uncertainty 143 | 144 | 145 | class PythonFallbackM3C2(M3C2): 146 | """An implementation of M3C2 that makes use of Python fallback implementations""" 147 | 148 | @property 149 | def name(self): 150 | return "M3C2 (Python Fallback)" 151 | 152 | def callback_workingset_finder(self): 153 | return cylinder_workingset_finder 154 | 155 | def callback_distance_calculation(self): 156 | if self.robust_aggr: 157 | return median_iqr_distance 158 | else: 159 | return mean_stddev_distance 160 | -------------------------------------------------------------------------------- /src/py4dgeo/logger.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | import logging 3 | import sys 4 | import time 5 | 6 | 7 | def create_default_logger(filename=None): 8 | # Create the logger instance 9 | logger = logging.getLogger("py4dgeo") 10 | 11 | # Reset the handlers to avoid handler duplication 12 | logger.handlers.clear() 13 | 14 | # Apply default for logfile name 15 | if filename is None: 16 | filename = "py4dgeo.log" 17 | 18 | # We format messages including the date 19 | _formatter = logging.Formatter( 20 | "[%(asctime)s][%(levelname)s] %(message)s", "%Y-%m-%d %H:%M:%S" 21 | ) 22 | 23 | # We use stdout for DEBUG and INFO messages 24 | _stdoutandler = logging.StreamHandler(sys.stdout) 25 | _stdoutandler.setLevel(logging.DEBUG) 26 | _stdoutandler.addFilter(lambda r: r.levelno <= logging.INFO) 27 | _stdoutandler.setFormatter(_formatter) 28 | logger.addHandler(_stdoutandler) 29 | 30 | # We use stderr for WARNING and ERROR messages 31 | _stderrhandler = logging.StreamHandler(sys.stderr) 32 | _stderrhandler.setLevel(logging.WARNING) 33 | _stderrhandler.setFormatter(_formatter) 34 | logger.addHandler(_stderrhandler) 35 | 36 | # We additionally use a file that is automatically generated 37 | _filehandler = logging.FileHandler(filename, mode="a", delay=True) 38 | _filehandler.setLevel(logging.DEBUG) 39 | _filehandler.setFormatter(_formatter) 40 | logger.addHandler(_filehandler) 41 | 42 | logger.setLevel(logging.INFO) 43 | 44 | return logger 45 | 46 | 47 | # Storage to keep the logger instance alive + initial creation 48 | _logger = create_default_logger() 49 | 50 | 51 | def set_py4dgeo_logfile(filename): 52 
| """Set the logfile used by py4dgeo 53 | 54 | All log messages produced by py4dgeo are logged into this file 55 | in addition to be logged to stdout/stderr. By default, that file 56 | is called 'py4dgeo.log'. 57 | 58 | :param filename: 59 | The name of the logfile to use 60 | :type filename: str 61 | """ 62 | global _logger 63 | _logger = create_default_logger(filename) 64 | 65 | 66 | @contextlib.contextmanager 67 | def logger_context(msg, level=logging.INFO): 68 | # Log a message that we started the task described by message 69 | logger = logging.getLogger("py4dgeo") 70 | logger.log(level, f"Starting: {msg}") 71 | 72 | # Measure time 73 | start = time.perf_counter() 74 | yield 75 | duration = time.perf_counter() - start 76 | 77 | logger.log(level, f"Finished in {duration:.4f}s: {msg}") 78 | -------------------------------------------------------------------------------- /tests/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | add_subdirectory(c++) 2 | -------------------------------------------------------------------------------- /tests/c++/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | # Test support library that implements e.g. easy access to data files 2 | add_library(py4dgeo_test testsetup.cpp) 3 | target_link_libraries(py4dgeo_test PUBLIC py4dgeo) 4 | target_compile_definitions( 5 | py4dgeo_test 6 | PUBLIC PY4DGEO_TEST_DATA_DIRECTORY="${CMAKE_SOURCE_DIR}/tests/data") 7 | target_include_directories(py4dgeo_test PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}) 8 | 9 | # The Catch test executable 10 | add_executable( 11 | tests 12 | tests.cpp 13 | directions_t.cpp 14 | distances_t.cpp 15 | epoch_t.cpp 16 | kdtree_t.cpp 17 | octree_t.cpp 18 | registration_t.cpp 19 | segmentation_t.cpp 20 | searchtrees_t.cpp) 21 | target_link_libraries(tests PUBLIC py4dgeo py4dgeo_test Catch2::Catch2) 22 | 23 | # allow user to run tests with `make test` or `ctest` 24 | catch_discover_tests(tests) 25 | -------------------------------------------------------------------------------- /tests/c++/directions_t.cpp: -------------------------------------------------------------------------------- 1 | #include "Eigen/Eigen" 2 | #include "catch2/catch.hpp" 3 | #include "py4dgeo/compute.hpp" 4 | #include "py4dgeo/kdtree.hpp" 5 | #include "py4dgeo/py4dgeo.hpp" 6 | #include "testsetup.hpp" 7 | 8 | #include 9 | 10 | using namespace py4dgeo; 11 | 12 | TEST_CASE("M3C2 Multiscale direction calculation", "[compute]") 13 | { 14 | // Get a test epoch 15 | auto [cloud, corepoints] = testcloud(); 16 | Epoch epoch(*cloud); 17 | Epoch::set_default_radius_search_tree(SearchTree::KDTree); 18 | epoch.kdtree.build_tree(10); 19 | 20 | std::vector normal_radii{ 1.0, 2.0, 3.0 }; 21 | EigenNormalSet result(epoch.cloud.rows(), 3); 22 | std::vector used_radii; 23 | EigenNormalSet orientation(1, 3); 24 | orientation << 0, 0, 1; 25 | 26 | REQUIRE(result.rows() == 441); 27 | 28 | // Do the calculation 29 | compute_multiscale_directions( 30 | epoch, *corepoints, normal_radii, orientation, result, used_radii); 31 | 32 | for (IndexType i = 0; i < result.rows(); ++i) 33 | REQUIRE(std::abs(result.row(i).norm() - 1.0) < 1e-8); 34 | } 35 | -------------------------------------------------------------------------------- /tests/c++/distances_t.cpp: -------------------------------------------------------------------------------- 1 | #include "catch2/catch.hpp" 2 | #include "py4dgeo/compute.hpp" 3 | #include "py4dgeo/kdtree.hpp" 4 | #include 
"py4dgeo/py4dgeo.hpp" 5 | #include "testsetup.hpp" 6 | 7 | #include 8 | 9 | using namespace py4dgeo; 10 | 11 | TEST_CASE("M3C2 distance calculation", "[compute]") 12 | { 13 | // Get a test epoch 14 | auto [cloud, corepoints] = testcloud(); 15 | Epoch epoch(*cloud); 16 | epoch.kdtree.build_tree(10); 17 | 18 | std::vector normal_radii{ 3.0 }; 19 | std::vector used_radii; 20 | EigenNormalSet directions(epoch.cloud.rows(), 3); 21 | EigenNormalSet orientation(1, 3); 22 | orientation << 0, 0, 1; 23 | 24 | // Precompute the multiscale directions 25 | compute_multiscale_directions( 26 | epoch, *corepoints, normal_radii, orientation, directions, used_radii); 27 | 28 | // Calculate the distances 29 | DistanceVector distances; 30 | UncertaintyVector uncertainties; 31 | 32 | // We try to test all callback combinations 33 | auto wsfinder = 34 | GENERATE(radius_workingset_finder, cylinder_workingset_finder); 35 | auto distancecalc = GENERATE(mean_stddev_distance, median_iqr_distance); 36 | 37 | compute_distances(epoch.cloud, 38 | 2.0, 39 | epoch, 40 | epoch, 41 | directions, 42 | 0.0, 43 | 0.0, 44 | distances, 45 | uncertainties, 46 | wsfinder, 47 | distancecalc); 48 | 49 | REQUIRE(distances.size() == epoch.cloud.rows()); 50 | REQUIRE(uncertainties.size() == epoch.cloud.rows()); 51 | 52 | for (std::size_t i = 0; i < distances.size(); ++i) 53 | REQUIRE(std::abs(distances[i]) < 1e-8); 54 | } 55 | 56 | TEST_CASE("Single-direction M3C2 distance calculation", "[compute]") 57 | { 58 | // Get a test epoch 59 | auto [cloud, corepoints] = testcloud(); 60 | Epoch epoch(*cloud); 61 | Epoch::set_default_radius_search_tree(SearchTree::KDTree); 62 | epoch.kdtree.build_tree(10); 63 | 64 | // Single distance vector 65 | EigenNormalSet directions(1, 3); 66 | directions << 0, 0, 1; 67 | 68 | // Calculate the distances 69 | DistanceVector distances; 70 | UncertaintyVector uncertainties; 71 | 72 | // We try to test all callback combinations 73 | auto wsfinder = 74 | GENERATE(radius_workingset_finder, cylinder_workingset_finder); 75 | auto distancecalc = GENERATE(mean_stddev_distance, median_iqr_distance); 76 | 77 | compute_distances(*corepoints, 78 | 2.0, 79 | epoch, 80 | epoch, 81 | directions, 82 | 0.0, 83 | 0.0, 84 | distances, 85 | uncertainties, 86 | wsfinder, 87 | distancecalc); 88 | 89 | for (std::size_t i = 0; i < distances.size(); ++i) 90 | REQUIRE(std::abs(distances[i]) < 1e-8); 91 | } 92 | 93 | TEST_CASE("Cylinder Search Correctness", "[compute]") 94 | { 95 | auto [cloud, corepoints] = testcloud(); 96 | Epoch epoch(*cloud); 97 | Epoch::set_default_radius_search_tree(SearchTree::KDTree); 98 | epoch.kdtree.build_tree(10); 99 | 100 | EigenPointCloud corepoint(1, 3); 101 | corepoint << 10, 10, 0; 102 | 103 | EigenNormalSet normal(1, 3); 104 | normal << 0.70710678, 0.70710678, 0.0; 105 | 106 | WorkingSetFinderParameters params{ 107 | epoch, 1.0, corepoint.row(0), normal.row(0), 5.0 108 | }; 109 | auto cyl = cylinder_workingset_finder(params); 110 | 111 | REQUIRE(cyl.rows() == 23); 112 | 113 | for (IndexType i = 0; i < cyl.rows(); ++i) { 114 | auto to_midpoint = 115 | cyl.cast().row(i) - corepoint.cast().row(0); 116 | auto to_midpoint_plane = (to_midpoint * normal.row(0).transpose()).eval(); 117 | auto to_axis2 = 118 | (to_midpoint - to_midpoint_plane * normal).rowwise().squaredNorm().eval(); 119 | 120 | REQUIRE(to_axis2(0, 0) <= 1.0); 121 | REQUIRE(std::abs(to_midpoint_plane(0, 0)) <= 5.0); 122 | } 123 | } 124 | -------------------------------------------------------------------------------- /tests/c++/epoch_t.cpp: 
-------------------------------------------------------------------------------- 1 | #include "catch2/catch.hpp" 2 | #include "py4dgeo/epoch.hpp" 3 | #include "testsetup.hpp" 4 | 5 | #include 6 | 7 | using namespace py4dgeo; 8 | 9 | TEST_CASE("Epoch is working correctly", "[epoch]") 10 | { 11 | // Instantiate a test epoch 12 | auto [cloud, corepoints] = testcloud(); 13 | Epoch epoch(*cloud); 14 | 15 | SECTION("Serialize + deserialize") 16 | { 17 | std::stringstream buf; 18 | epoch.to_stream(buf); 19 | auto deserialized = Epoch::from_stream(buf); 20 | 21 | REQUIRE(epoch.cloud.rows() == deserialized->cloud.rows()); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /tests/c++/kdtree_t.cpp: -------------------------------------------------------------------------------- 1 | #include "catch2/catch.hpp" 2 | #include "py4dgeo/epoch.hpp" 3 | #include "py4dgeo/kdtree.hpp" 4 | #include "py4dgeo/py4dgeo.hpp" 5 | #include "py4dgeo/searchtree.hpp" 6 | #include "testsetup.hpp" 7 | 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | 14 | using namespace py4dgeo; 15 | 16 | TEST_CASE("KDTree is correctly build", "[kdtree]") 17 | { 18 | // Get a test epoch 19 | auto [cloud, corepoints] = testcloud(); 20 | Epoch epoch(*cloud); 21 | 22 | // Construct the KDTree 23 | auto tree = KDTree::create(epoch.cloud); 24 | tree.build_tree(10); 25 | 26 | SECTION("Perform radius search") 27 | { 28 | // Find all nodes with a radius search 29 | std::array o{ 0.0, 0.0, 0.0 }; 30 | RadiusSearchResult result; 31 | 32 | // Do radius search with radius wide enough to cover the entire cloud 33 | auto num = tree.radius_search(o.data(), 100.0, result); 34 | REQUIRE(num == epoch.cloud.rows()); 35 | REQUIRE(result.size() == epoch.cloud.rows()); 36 | } 37 | 38 | SECTION("Perform radius search with distances") 39 | { 40 | // Find all nodes with a radius search 41 | std::array o{ 0.0, 0.0, 0.0 }; 42 | RadiusSearchDistanceResult result; 43 | 44 | // Do radius search with radius wide enough to cover the entire cloud 45 | auto num = tree.radius_search_with_distances(o.data(), 100.0, result); 46 | REQUIRE(num == epoch.cloud.rows()); 47 | REQUIRE(result.size() == epoch.cloud.rows()); 48 | REQUIRE(std::is_sorted(result.begin(), result.end(), [](auto a, auto b) { 49 | return a.second < b.second; 50 | })); 51 | } 52 | 53 | SECTION("Nearest neighbor search with distances") 54 | { 55 | NearestNeighborsDistanceResult result; 56 | int k = 5; 57 | tree.nearest_neighbors_with_distances(epoch.cloud, result, k); 58 | REQUIRE(result.size() == epoch.cloud.rows()); 59 | REQUIRE(result[0].first.size() == k); 60 | REQUIRE(result[0].first[k - 1] > 0); 61 | } 62 | 63 | SECTION("Nearest neighbor search:") 64 | { 65 | NearestNeighborsResult result; 66 | int k = 5; 67 | tree.nearest_neighbors(epoch.cloud, result, k); 68 | REQUIRE(result.size() == epoch.cloud.rows()); 69 | REQUIRE(result[0].size() == k); 70 | REQUIRE(result[0][k - 1] > 0); 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /tests/c++/octree_t.cpp: -------------------------------------------------------------------------------- 1 | #include "catch2/catch.hpp" 2 | #include "py4dgeo/epoch.hpp" 3 | #include "py4dgeo/octree.hpp" 4 | #include "py4dgeo/py4dgeo.hpp" 5 | #include "py4dgeo/searchtree.hpp" 6 | #include "testsetup.hpp" 7 | 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | 14 | using namespace py4dgeo; 15 | 16 | TEST_CASE("Octree is correctly build", 
"[octree]") 17 | { 18 | // Get a test epoch 19 | auto [cloud, corepoints] = testcloud(); 20 | Epoch epoch(*cloud); 21 | 22 | // Construct the Octree 23 | auto tree = Octree::create(epoch.cloud); 24 | tree.build_tree(); 25 | 26 | SECTION("Perform radius search") 27 | { 28 | // Find all nodes with a radius search 29 | Eigen::Vector3d query_point{ 0.0, 0.0, 0.0 }; 30 | RadiusSearchResult result; 31 | 32 | // Do radius search with radius wide enough to cover the entire cloud 33 | double radius = 100.; 34 | 35 | for (unsigned int level = 0; level < 7; ++level) { 36 | auto num = tree.radius_search(query_point, radius, level, result); 37 | REQUIRE(num == epoch.cloud.rows()); 38 | REQUIRE(result.size() == epoch.cloud.rows()); 39 | } 40 | } 41 | 42 | SECTION("Perform radius search with distances") 43 | { 44 | // Find all nodes with a radius search 45 | Eigen::Vector3d query_point{ 0.0, 0.0, 0.0 }; 46 | RadiusSearchDistanceResult result; 47 | 48 | // Do radius search with radius wide enough to cover the entire cloud 49 | double radius = 100.; 50 | unsigned int level = 51 | epoch.octree.find_appropriate_level_for_radius_search(radius); 52 | auto num = 53 | tree.radius_search_with_distances(query_point, radius, level, result); 54 | REQUIRE(num == epoch.cloud.rows()); 55 | REQUIRE(result.size() == epoch.cloud.rows()); 56 | REQUIRE(std::is_sorted(result.begin(), result.end(), [](auto a, auto b) { 57 | return a.second < b.second; 58 | })); 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /tests/c++/registration_t.cpp: -------------------------------------------------------------------------------- 1 | #include "catch2/catch.hpp" 2 | #include "py4dgeo/epoch.hpp" 3 | #include "py4dgeo/registration.hpp" 4 | #include "testsetup.hpp" 5 | #include 6 | 7 | #include "py4dgeo/compute.hpp" 8 | #include 9 | 10 | #include 11 | 12 | using namespace py4dgeo; 13 | 14 | TEST_CASE("Affine Transformation", "[compute]") 15 | { 16 | auto [cloud1, corepoints] = testcloud(); 17 | auto [cloud2, corepoints2] = testcloud(); 18 | 19 | SECTION("Perform Transformation: ") 20 | { 21 | // Define a transformation 22 | Transformation t(Transformation::Identity()); 23 | t(0, 3) = 1; 24 | 25 | EigenPointCloud ref(1, 3); 26 | ref << 1, 2, 3; 27 | 28 | EigenNormalSet normals; 29 | // Apply the transformation 30 | transform_pointcloud_inplace(*cloud1, t, ref, normals); 31 | 32 | for (IndexType i = 0; i < cloud1->rows(); ++i) { 33 | if (std::abs((*cloud1)(i, 0) - (*cloud2)(i, 0) - 1.0) >= 1e-8) { 34 | CAPTURE((*cloud1)(i, 0)); 35 | CAPTURE((*cloud2)(i, 0)); 36 | } 37 | REQUIRE(std::abs((*cloud1)(i, 0) - (*cloud2)(i, 0) - 1.0) < 1e-8); 38 | REQUIRE(std::abs((*cloud1)(i, 1) - (*cloud2)(i, 1)) < 1e-8); 39 | REQUIRE(std::abs((*cloud1)(i, 2) - (*cloud2)(i, 2)) < 1e-8); 40 | } 41 | } 42 | 43 | auto [cloud2a, cloud2b] = testcloud_dif_files(); 44 | Epoch epoch_test1(*cloud2a); 45 | Epoch epoch_test2(*cloud2b); 46 | DisjointSet set1(epoch_test1.cloud.rows()); 47 | DisjointSet set2(epoch_test2.cloud.rows()); 48 | 49 | SECTION("Disjoint Set find: ") 50 | { 51 | int label = 10; 52 | auto set1_find_test = set1.Find(label); 53 | auto set2_find_test = set2.Find(label); 54 | REQUIRE(set1_find_test == set2_find_test); 55 | } 56 | 57 | SECTION("Disjoint Set union: ") 58 | { 59 | int label1 = 10; 60 | int same_label = label1; 61 | auto same_label_union_test = set1.Union(label1, same_label, true); 62 | REQUIRE(same_label_union_test == label1); 63 | 64 | int label2 = 11; 65 | auto nonsize_union_test = 
set2.Union(label1, label2, false); 66 | REQUIRE(nonsize_union_test == label2); 67 | 68 | int label3 = 12; 69 | auto merged_label = set1.Union(label2, label3, false); 70 | auto size_union_test = set1.Union(label1, merged_label, true); 71 | REQUIRE(size_union_test == label1); 72 | } 73 | 74 | SECTION("SupervoxelSegmentation: ") 75 | { 76 | auto [cloud_s, corepoints_s] = testcloud(); 77 | Epoch epoch_test_s(*cloud_s); 78 | epoch_test_s.kdtree.build_tree(10); //???? 79 | EigenNormalSet normals(epoch_test_s.cloud.rows(), 3); 80 | std::vector normal_radii{ 3.0 }; 81 | std::vector used_radii; 82 | EigenNormalSet orientation(1, 3); 83 | orientation << 0, 0, 1; 84 | compute_multiscale_directions(epoch_test_s, 85 | *corepoints_s, 86 | normal_radii, 87 | orientation, 88 | normals, 89 | used_radii); 90 | 91 | double resolution = 10; 92 | int k = 10; 93 | auto n_supervoxels = 94 | estimate_supervoxel_count(epoch_test_s.cloud, resolution); 95 | 96 | std::vector> result = supervoxel_segmentation( 97 | epoch_test_s, epoch_test_s.kdtree, resolution, k, normals); 98 | 99 | REQUIRE(result.size() == n_supervoxels); 100 | } 101 | } 102 | -------------------------------------------------------------------------------- /tests/c++/searchtrees_t.cpp: -------------------------------------------------------------------------------- 1 | #include "catch2/catch.hpp" 2 | #include "py4dgeo/epoch.hpp" 3 | #include "py4dgeo/kdtree.hpp" 4 | #include "py4dgeo/octree.hpp" 5 | #include "py4dgeo/py4dgeo.hpp" 6 | #include "py4dgeo/searchtree.hpp" 7 | #include "testsetup.hpp" 8 | 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | 15 | using namespace py4dgeo; 16 | 17 | TEST_CASE("KDTree and Octree are correctly built") 18 | { 19 | // Get a test epoch 20 | auto [cloud, corepoints] = testcloud(); 21 | Epoch epoch(*cloud); 22 | 23 | // Construct the KDTree 24 | auto kdtree = KDTree::create(epoch.cloud); 25 | kdtree.build_tree(10); 26 | 27 | // Construct the Octree 28 | auto octree = Octree::create(epoch.cloud); 29 | octree.build_tree(); 30 | 31 | std::array query_point_indices = { 32 | 0, cloud->rows() / 3, 2 * cloud->rows() / 3, cloud->rows() - 1 33 | }; 34 | 35 | std::array radii{ 1.5, 2.0, 5.0 }; 36 | 37 | for (const IndexType idx : query_point_indices) { 38 | const Eigen::Vector3d& point = epoch.cloud.row(idx); 39 | for (double radius : radii) { 40 | std::string tag = "Query at (" + std::to_string(point.x()) + ", " + 41 | std::to_string(point.y()) + ", " + 42 | std::to_string(point.z()) + 43 | "), radius = " + std::to_string(radius); 44 | 45 | SECTION("Index-only " + tag) 46 | { 47 | RadiusSearchResult kd_result, oct_result; 48 | 49 | kdtree.radius_search(point.data(), radius, kd_result); 50 | 51 | unsigned int level = 52 | epoch.octree.find_appropriate_level_for_radius_search(radius); 53 | octree.radius_search(point, radius, level, oct_result); 54 | 55 | std::sort(kd_result.begin(), kd_result.end()); 56 | std::sort(oct_result.begin(), oct_result.end()); 57 | 58 | REQUIRE(kd_result == oct_result); 59 | } 60 | 61 | SECTION("With distances " + tag) 62 | { 63 | RadiusSearchDistanceResult kd_result, oct_result; 64 | 65 | kdtree.radius_search_with_distances(point.data(), radius, kd_result); 66 | 67 | unsigned int level = 68 | epoch.octree.find_appropriate_level_for_radius_search(radius); 69 | octree.radius_search_with_distances(point, radius, level, oct_result); 70 | 71 | REQUIRE(kd_result.size() == oct_result.size()); 72 | 73 | for (std::size_t i = 0; i < kd_result.size(); ++i) { 74 | 
REQUIRE(kd_result[i].first == oct_result[i].first); 75 | REQUIRE(kd_result[i].second == 76 | Approx(oct_result[i].second).epsilon(1e-7)); 77 | } 78 | } 79 | } 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /tests/c++/tests.cpp: -------------------------------------------------------------------------------- 1 | #define CATCH_CONFIG_MAIN 2 | #include "catch2/catch.hpp" 3 | -------------------------------------------------------------------------------- /tests/c++/testsetup.cpp: -------------------------------------------------------------------------------- 1 | #include "testsetup.hpp" 2 | 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | #include 9 | #include 10 | 11 | using namespace py4dgeo; 12 | 13 | std::shared_ptr 14 | benchcloud_from_file(const std::string& filename) 15 | { 16 | std::ifstream stream(filename); 17 | if (!stream) { 18 | std::cerr << "Was not successfully opened. Please check that the file " 19 | "currently exists: " 20 | << filename << std::endl; 21 | std::exit(1); 22 | } 23 | 24 | std::vector points; 25 | Eigen::Vector3d mincoord = 26 | Eigen::Vector3d::Constant(std::numeric_limits::infinity()); 27 | 28 | std::string line; 29 | while (std::getline(stream, line)) { 30 | std::istringstream s(line); 31 | Eigen::Vector3d point; 32 | s >> point[0] >> point[1] >> point[2]; 33 | 34 | if (!s) 35 | continue; 36 | 37 | mincoord = mincoord.cwiseMin(point); 38 | points.push_back(point); 39 | } 40 | 41 | auto cloud = std::make_shared(points.size(), 3); 42 | for (std::size_t i = 0; i < points.size(); ++i) { 43 | (*cloud).row(i) = points[i] - mincoord; 44 | } 45 | 46 | return cloud; 47 | } 48 | 49 | std::shared_ptr 50 | slice_cloud(EigenPointCloudConstRef cloud, int sampling_factor) 51 | { 52 | auto sliced = 53 | std::make_shared(cloud.rows() / sampling_factor, 3); 54 | for (IndexType i = 0; i < cloud.rows() / sampling_factor; ++i) 55 | (*sliced)(i, Eigen::all) = cloud(i * sampling_factor, Eigen::all); 56 | return sliced; 57 | } 58 | 59 | std::pair, std::shared_ptr> 60 | ahk_benchcloud() 61 | { 62 | // auto cloud = benchcloud_from_file(DATAPATH(ahk_2017_small.xyz)); 63 | auto cloud = benchcloud_from_file(DATAPATH(plane_horizontal_t1.xyz)); 64 | return std::make_pair(cloud, slice_cloud(*cloud, 100)); 65 | } 66 | 67 | std::pair, std::shared_ptr> 68 | testcloud() 69 | { 70 | auto cloud = benchcloud_from_file(DATAPATH(plane_horizontal_t1.xyz)); 71 | return std::make_pair(cloud, cloud); 72 | } 73 | 74 | std::pair, std::shared_ptr> 75 | testcloud_dif_files() 76 | { 77 | auto cloud1 = benchcloud_from_file(DATAPATH(plane_horizontal_t1.xyz)); 78 | auto cloud2 = benchcloud_from_file(DATAPATH(plane_horizontal_t2.xyz)); 79 | return std::make_pair(cloud1, cloud2); 80 | } 81 | -------------------------------------------------------------------------------- /tests/c++/testsetup.hpp: -------------------------------------------------------------------------------- 1 | #include "py4dgeo/py4dgeo.hpp" 2 | 3 | #include 4 | #include 5 | #include 6 | 7 | #ifndef PY4DGEO_TEST_DATA_DIRECTORY 8 | #error Test data directory needs to be set from CMake 9 | #endif 10 | 11 | #define DATAPATH(filename) PY4DGEO_TEST_DATA_DIRECTORY "/" #filename 12 | 13 | std::shared_ptr 14 | benchcloud_from_file(const std::string& filename); 15 | 16 | std::pair, 17 | std::shared_ptr> 18 | ahk_benchcloud(); 19 | 20 | std::pair, 21 | std::shared_ptr> 22 | testcloud(); 23 | 24 | std::pair, 25 | std::shared_ptr> 26 | testcloud_dif_files(); 27 | 
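The C++ tests above locate their input files through the `PY4DGEO_TEST_DATA_DIRECTORY` compile definition set in `tests/c++/CMakeLists.txt`. The Python test suite that follows uses the same data files, fetched through the `copy_py4dgeo_test_data` entry point declared in `pyproject.toml`. A minimal sketch of fetching the data manually (the target directory is an arbitrary choice, not mandated by the repository):

import os
import subprocess

# Copy the bundled py4dgeo test data into a local directory
data_dir = os.path.join(os.getcwd(), "py4dgeo-test-data")
os.makedirs(data_dir, exist_ok=True)
subprocess.call(["copy_py4dgeo_test_data", data_dir])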
-------------------------------------------------------------------------------- /tests/python/__init__.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | pytest.register_assert_rewrite("python.helpers") 5 | -------------------------------------------------------------------------------- /tests/python/conftest.py: -------------------------------------------------------------------------------- 1 | from py4dgeo.epoch import read_from_xyz, read_from_las 2 | from py4dgeo.logger import set_py4dgeo_logfile 3 | from py4dgeo.m3c2 import M3C2 4 | from py4dgeo.segmentation import SpatiotemporalAnalysis 5 | from py4dgeo.util import MemoryPolicy, set_memory_policy 6 | 7 | import json 8 | import numpy as np 9 | import os 10 | import pytest 11 | import shutil 12 | import subprocess 13 | import tempfile 14 | 15 | # The path to our data directory 16 | data_dir = os.path.join(os.path.split(__file__)[0], "..", "data") 17 | log_dir = tempfile.TemporaryDirectory() 18 | 19 | # Ensure that the data directory has been downloaded 20 | if not os.path.exists(data_dir): 21 | os.makedirs(data_dir) 22 | subprocess.call(["copy_py4dgeo_test_data", data_dir]) 23 | 24 | 25 | def find_data_file(filename): 26 | return os.path.join(data_dir, filename) 27 | 28 | 29 | def epoch_fixture(*filenames): 30 | """Wrap the given data files in Epochs and make them a pytest fixture""" 31 | 32 | @pytest.fixture 33 | def _epoch_fixture(): 34 | return read_from_xyz(*tuple(find_data_file(fn) for fn in filenames)) 35 | 36 | return _epoch_fixture 37 | 38 | 39 | def epoch_las_fixture(*filenames): 40 | """Wrap the given LAS/LAZ data files in Epochs and make them a pytest fixture""" 41 | 42 | @pytest.fixture 43 | def _epoch_fixture(): 44 | normal_columns = ( 45 | ["NormalX", "NormalY", "NormalZ"] if ("normals" in filenames[0]) else None 46 | ) 47 | return read_from_las( 48 | *tuple(find_data_file(fn) for fn in filenames), 49 | normal_columns=normal_columns, 50 | ) 51 | 52 | return _epoch_fixture 53 | 54 | 55 | # Instantiate one fixture per data file 56 | epochs = epoch_fixture("plane_horizontal_t1.xyz", "plane_horizontal_t2.xyz") 57 | epochs_las = epoch_las_fixture("plane_horizontal_t1.laz", "plane_horizontal_t2.laz") 58 | epochs_las_w_normals = epoch_las_fixture( 59 | "plane_horizontal_t1_w_normals.laz", "plane_horizontal_t2_w_normals.laz" 60 | ) 61 | 62 | 63 | @pytest.fixture 64 | def analysis(tmp_path): 65 | shutil.copy(os.path.join(data_dir, "synthetic.zip"), tmp_path) 66 | return SpatiotemporalAnalysis(os.path.join(tmp_path, "synthetic.zip")) 67 | 68 | 69 | @pytest.fixture(autouse=True) 70 | def log_into_temporary_directory(): 71 | set_py4dgeo_logfile(os.path.join(log_dir.name, "py4dgeo.log")) 72 | 73 | 74 | @pytest.fixture(autouse=True) 75 | def memory_policy_fixture(): 76 | """This fixture ensures that all tests start with the default memory policy""" 77 | set_memory_policy(MemoryPolicy.COREPOINTS) 78 | 79 | 80 | @pytest.fixture() 81 | def scanpos_info(): 82 | filename = find_data_file("sps.json") 83 | with open(filename, "r") as load_f: 84 | try: 85 | json_dict = json.load(load_f) 86 | except ValueError: 87 | # Invalid JSON - dependent tests will receive None 88 | return None 89 | return json_dict 90 | 91 | 92 | def epoch_m3c2ep_fixture(*filenames, additional_dimensions): 93 | """Wrap the given data files in Epochs and make them a pytest fixture""" 94 | 95 | @pytest.fixture 96 | def _epoch_m3c2ep_fixture(): 97 | return read_from_las( 98 | *tuple(find_data_file(fn) for fn in filenames), 99 |
additional_dimensions=additional_dimensions, 100 | ) 101 | 102 | return _epoch_m3c2ep_fixture 103 | 104 | 105 | epochs_m3c2ep = epoch_m3c2ep_fixture( 106 | "ahk_2017_652900_5189100_gnd_subarea.laz", 107 | "ahk_2018A_652900_5189100_gnd_subarea.laz", 108 | additional_dimensions={"point_source_id": "scanpos_id"}, 109 | ) 110 | 111 | 112 | @pytest.fixture() 113 | def Cxx(): 114 | covariance_matrix = np.loadtxt( 115 | find_data_file("Cxx.csv"), dtype=np.float64, delimiter="," 116 | ) 117 | return covariance_matrix 118 | 119 | 120 | @pytest.fixture() 121 | def tfM(): 122 | tf_matrix = np.loadtxt(find_data_file("tfM.csv"), dtype=np.float64, delimiter=",") 123 | return tf_matrix 124 | 125 | 126 | @pytest.fixture() 127 | def redPoint(): 128 | reduction_point = np.loadtxt( 129 | find_data_file("redPoint.csv"), dtype=np.float64, delimiter="," 130 | ) 131 | return reduction_point 132 | -------------------------------------------------------------------------------- /tests/python/helpers.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def compare_uncertainties(uncertainties1, uncertainties2): 5 | assert np.allclose(uncertainties1["lodetection"], uncertainties2["lodetection"]) 6 | assert np.allclose(uncertainties1["spread1"], uncertainties2["spread1"]) 7 | assert np.allclose(uncertainties1["spread2"], uncertainties2["spread2"]) 8 | assert np.allclose(uncertainties1["num_samples1"], uncertainties2["num_samples1"]) 9 | assert np.allclose(uncertainties1["num_samples2"], uncertainties2["num_samples2"]) 10 | 11 | 12 | def compare_algorithms(alg1, alg2): 13 | """A helper to compare the output of two algorithms that should be equivalent""" 14 | # Run the two algorithms 15 | distances1, uncertainties1 = alg1.run() 16 | distances2, uncertainties2 = alg2.run() 17 | 18 | assert np.allclose(distances1, distances2) 19 | compare_uncertainties(uncertainties1, uncertainties2) 20 | 21 | 22 | def compare_segmentations(seg1, seg2): 23 | assert np.allclose(seg1.distances, seg2.distances) 24 | compare_uncertainties(seg1.uncertainties, seg2.uncertainties) 25 | 26 | for td1, td2 in zip(seg1.timedeltas, seg2.timedeltas): 27 | assert td1 == td2 28 | 29 | 30 | def simple_jump(): 31 | # A simple time series with a jump 32 | ts = np.linspace(0, 0.1, 100) 33 | ts[50:] += 1 34 | 35 | return ts 36 | 37 | 38 | def complex_timeseries(): 39 | # A non-trivial time series 40 | return np.array( 41 | [ 42 | -9.30567119, 43 | -6.50542506, 44 | -5.25334064, 45 | -3.95708071, 46 | -10.62295044, 47 | -6.00679331, 48 | -7.10972198, 49 | -4.75825001, 50 | -6.7100845, 51 | -8.11943878, 52 | -9.56607421, 53 | -4.94672353, 54 | -6.67989247, 55 | -4.65803801, 56 | -7.37845623, 57 | -7.48818285, 58 | -5.78868842, 59 | -5.63853894, 60 | -6.82157223, 61 | -4.36439707, 62 | -10.23610407, 63 | -7.17848293, 64 | -10.35211983, 65 | -11.3509054, 66 | -11.61486292, 67 | -9.35727084, 68 | -10.34027666, 69 | -10.86800044, 70 | -9.93429977, 71 | -8.48897875, 72 | -15.3695928, 73 | -14.26784041, 74 | -9.31700749, 75 | -10.71438333, 76 | -6.79820964, 77 | -9.38362261, 78 | -10.55322992, 79 | -10.752245, 80 | -13.40571741, 81 | -14.18765576, 82 | -9.54996564, 83 | -19.35102448, 84 | -20.7635553, 85 | -19.26623954, 86 | -20.00548551, 87 | -17.99023991, 88 | -18.37537823, 89 | -17.53357472, 90 | -23.12740104, 91 | -17.55591524, 92 | -20.86915052, 93 | -17.75074536, 94 | -20.28542954, 95 | -20.5310157, 96 | -18.73412243, 97 | -19.07024164, 98 | -20.43017388, 99 | -21.11249791, 100 | -18.45269281,
101 | -18.07089436, 102 | -19.96608851, 103 | -26.63058919, 104 | -28.97962105, 105 | -25.47049917, 106 | -26.02732842, 107 | -25.42334059, 108 | -24.96684487, 109 | -22.69612178, 110 | -24.85381947, 111 | -25.34017963, 112 | -27.46753899, 113 | -24.05766122, 114 | -29.416168, 115 | -19.5100961, 116 | -25.98309514, 117 | -26.24838995, 118 | -25.40961916, 119 | -23.21292358, 120 | -26.63478854, 121 | -29.04883697, 122 | -16.35986185, 123 | -12.87413666, 124 | -16.2863866, 125 | -15.55762212, 126 | -21.88964465, 127 | -17.68327092, 128 | -16.60648862, 129 | -13.49961018, 130 | -15.97483962, 131 | -17.23104101, 132 | -18.17963806, 133 | -16.19978101, 134 | -17.62239823, 135 | -17.14176606, 136 | -14.73581351, 137 | -14.62187256, 138 | -20.06316631, 139 | -14.96743604, 140 | -19.01623592, 141 | -15.53150285, 142 | ] 143 | ) 144 | -------------------------------------------------------------------------------- /tests/python/test_cloudcompare.py: -------------------------------------------------------------------------------- 1 | from py4dgeo.cloudcompare import CloudCompareM3C2 2 | from py4dgeo.m3c2 import M3C2 3 | 4 | from .helpers import compare_algorithms 5 | 6 | 7 | def test_cloudcompare_m3c2(epochs): 8 | epoch1, epoch2 = epochs 9 | 10 | # Instantiate an M3C2 instance 11 | m3c2 = M3C2( 12 | epochs=(epoch1, epoch2), 13 | corepoints=epoch1.cloud, 14 | cyl_radius=1.6, 15 | normal_radii=[ 16 | 1.1, 17 | ], 18 | ) 19 | 20 | # Instantiate the CloudCompare variant 21 | cc_m3c2 = CloudCompareM3C2( 22 | epochs=(epoch1, epoch2), 23 | corepoints=epoch1.cloud, 24 | searchscale=3.2, 25 | normalscale=(2.2,), 26 | ) 27 | 28 | compare_algorithms(m3c2, cc_m3c2) 29 | -------------------------------------------------------------------------------- /tests/python/test_fallback.py: -------------------------------------------------------------------------------- 1 | from py4dgeo.fallback import * 2 | from _py4dgeo import ( 3 | cylinder_workingset_finder as cxx_cylinder_workingset_finder, 4 | mean_stddev_distance as cxx_mean_stddev_distance, 5 | median_iqr_distance as cxx_median_iqr_distance, 6 | radius_workingset_finder as cxx_radius_workingset_finder, 7 | ) 8 | from py4dgeo.m3c2 import M3C2 9 | 10 | from .helpers import compare_algorithms 11 | 12 | import pytest 13 | 14 | 15 | @pytest.mark.parametrize( 16 | "distance_callback", 17 | [ 18 | (cxx_mean_stddev_distance, mean_stddev_distance), 19 | (cxx_median_iqr_distance, median_iqr_distance), 20 | ], 21 | ) 22 | @pytest.mark.parametrize( 23 | "workingset_callback", 24 | [ 25 | (cxx_radius_workingset_finder, radius_workingset_finder), 26 | # (cxx_cylinder_workingset_finder, cylinder_workingset_finder), 27 | ], 28 | ) 29 | def test_fallback_implementations(epochs, distance_callback, workingset_callback): 30 | class CxxTestM3C2(M3C2): 31 | def callback_distance_calculation(self): 32 | return distance_callback[0] 33 | 34 | def callback_workingset_finder(self): 35 | return workingset_callback[0] 36 | 37 | class PythonTestM3C2(M3C2): 38 | def callback_distance_calculation(self): 39 | return distance_callback[1] 40 | 41 | def callback_workingset_finder(self): 42 | return workingset_callback[1] 43 | 44 | # Instantiate an M3C2 instance using the C++ callbacks 45 | m3c2 = CxxTestM3C2( 46 | epochs=epochs, 47 | corepoints=epochs[0].cloud, 48 | cyl_radius=3.0, 49 | normal_radii=(2.0,), 50 | max_distance=6.0, 51 | ) 52 | 53 | # And one using the Python fallback implementations 54 | pym3c2 = PythonTestM3C2( 55 | epochs=epochs, 56 | corepoints=epochs[0].cloud, 57 | cyl_radius=3.0, 58 | normal_radii=(2.0,), 59 |
max_distance=6.0, 60 | ) 61 | 62 | compare_algorithms(m3c2, pym3c2) 63 | 64 | 65 | def test_python_fallback_m3c2(epochs): 66 | # Instantiate a fallback M3C2 instance 67 | pym3c2 = PythonFallbackM3C2( 68 | epochs=epochs, corepoints=epochs[0].cloud, cyl_radius=3.0, normal_radii=(2.0,) 69 | ) 70 | 71 | # And a regular C++ based one 72 | m3c2 = M3C2( 73 | epochs=epochs, corepoints=epochs[0].cloud, cyl_radius=3.0, normal_radii=(2.0,) 74 | ) 75 | 76 | compare_algorithms(m3c2, pym3c2) 77 | 78 | 79 | def test_python_exception_in_callback(epochs): 80 | # Define a faulty algorithm 81 | class ExcM3C2(M3C2): 82 | def callback_workingset_finder(self): 83 | def callback(*args): 84 | 1 / 0 85 | 86 | return callback 87 | 88 | # Instantiate it 89 | m3c2 = ExcM3C2( 90 | epochs=epochs, corepoints=epochs[0].cloud, cyl_radius=3.0, normal_radii=(2.0,) 91 | ) 92 | 93 | # Running it should throw the proper exception despite taking a detour 94 | # through multi-threaded C++ code. 95 | with pytest.raises(ZeroDivisionError): 96 | m3c2.run() 97 | -------------------------------------------------------------------------------- /tests/python/test_kdtree.py: -------------------------------------------------------------------------------- 1 | from py4dgeo.util import get_memory_policy 2 | 3 | import numpy as np 4 | import os 5 | import pickle 6 | import pytest 7 | import tempfile 8 | 9 | 10 | def test_kdtree(epochs): 11 | epoch1, _ = epochs 12 | epoch1.build_kdtree() 13 | data = epoch1.cloud 14 | 15 | # Find all points in a sufficiently large radius 16 | result = epoch1.kdtree.radius_search(np.array([0, 0, 0]), 100) 17 | assert result.shape[0] == data.shape[0] 18 | 19 | 20 | def test_kdtree_pickle(epochs): 21 | epoch1, _ = epochs 22 | with pytest.raises(RuntimeError): 23 | with tempfile.TemporaryDirectory() as dir: 24 | fn = os.path.join(dir, "kdtree.pickle") 25 | with open(fn, "wb") as f: 26 | pickle.dump(epoch1.kdtree, f) 27 | 28 | 29 | def test_rebuilding(epochs): 30 | epoch1, _ = epochs 31 | 32 | # Not built yet - leaf parameter is 0 33 | assert epoch1.kdtree.leaf_parameter() == 0 34 | 35 | # Building with default - leaf parameter is 10 36 | epoch1.build_kdtree() 37 | assert epoch1.kdtree.leaf_parameter() == 10 38 | 39 | # Non-forced rebuild is ignored - leaf parameter stays 10 40 | epoch1.build_kdtree(leaf_size=20) 41 | assert epoch1.kdtree.leaf_parameter() == 10 42 | 43 | # Forced rebuild - leaf parameter is 20 44 | epoch1.build_kdtree(leaf_size=20, force_rebuild=True) 45 | assert epoch1.kdtree.leaf_parameter() == 20 46 | 47 | 48 | def test_nearest_neighbors(epochs): 49 | epoch1, epoch2 = epochs 50 | epoch1.build_kdtree() 51 | epoch2.build_kdtree() 52 | 53 | checklist_pr = np.asarray(epoch1.kdtree.nearest_neighbors(epoch2.cloud, 1)) 54 | assert len(checklist_pr) > 0 55 | indices, distances = np.split(checklist_pr, 2, axis=0) 56 | indices = indices.flatten().tolist() 57 | distances = distances.flatten().tolist() 58 | indices = tuple(int(i) for i in indices) 59 | 60 | for i in range(epoch1.cloud.shape[0]): 61 | assert i == indices[i] 62 | assert np.isclose( 63 | ((epoch1.cloud[i, :] - epoch2.cloud[i, :]) ** 2).sum(), distances[i] 64 | ) 65 | -------------------------------------------------------------------------------- /tests/python/test_logger.py: -------------------------------------------------------------------------------- 1 | from py4dgeo.logger import * 2 | from py4dgeo.util import Py4DGeoError 3 | 4 | import logging 5 | import os 6 | import pytest 7 | 8 | 9 | def test_set_loggerfile(tmp_path): 10 | filename =
os.path.join(tmp_path, "test.log") 11 | set_py4dgeo_logfile(filename) 12 | assert not os.path.exists(filename) 13 | logging.getLogger("py4dgeo").info("Some log message") 14 | assert os.stat(filename).st_size > 0 15 | 16 | 17 | def test_log_exception(tmp_path): 18 | filename = os.path.join(tmp_path, "test.log") 19 | set_py4dgeo_logfile(filename) 20 | assert not os.path.exists(filename) 21 | 22 | with pytest.raises(Py4DGeoError): 23 | raise Py4DGeoError("This is some exception") 24 | 25 | assert os.stat(filename).st_size > 0 26 | -------------------------------------------------------------------------------- /tests/python/test_m3c2.py: -------------------------------------------------------------------------------- 1 | from py4dgeo.m3c2 import * 2 | from py4dgeo.util import Py4DGeoError, set_memory_policy, MemoryPolicy 3 | 4 | import numpy as np 5 | import pytest 6 | 7 | @pytest.mark.parametrize("robust_aggr", (True, False)) 8 | def test_m3c2(epochs, robust_aggr): 9 | epoch1, epoch2 = epochs 10 | # Try with wrong number of epochs 11 | with pytest.raises(Py4DGeoError): 12 | M3C2(epochs=(epoch1,), corepoints=epoch1.cloud, cyl_radius=1.0) 13 | 14 | # Instantiate an M3C2 instance 15 | m3c2 = M3C2( 16 | epochs=(epoch1, epoch2), 17 | corepoints=epoch1.cloud, 18 | cyl_radius=3.0, 19 | normal_radii=(2.0,), 20 | robust_aggr=robust_aggr, 21 | ) 22 | 23 | # Run it 24 | distances, uncertainties = m3c2.run() 25 | 26 | # Running with the same epoch twice should yield all zeroes 27 | distances, uncertainties = M3C2( 28 | epochs=(epoch1, epoch1), 29 | corepoints=epoch1.cloud, 30 | cyl_radius=3.0, 31 | normal_radii=(2.0,), 32 | robust_aggr=robust_aggr, 33 | ).run() 34 | assert np.allclose(distances, 0) 35 | 36 | 37 | def test_minimal_m3c2(epochs): 38 | epoch1, epoch2 = epochs 39 | set_memory_policy(MemoryPolicy.MINIMAL) 40 | 41 | # Instantiate an M3C2 instance 42 | m3c2 = M3C2( 43 | epochs=(epoch1, epoch2), 44 | corepoints=epoch1.cloud, 45 | cyl_radius=3.0, 46 | normal_radii=(2.0,), 47 | ) 48 | 49 | # Run it 50 | distances, uncertainties = m3c2.run() 51 | 52 | 53 | def test_registration_error(epochs): 54 | epoch1, _ = epochs 55 | 56 | m3c2 = M3C2( 57 | epochs=(epoch1, epoch1), 58 | corepoints=epoch1.cloud, 59 | cyl_radius=3.0, 60 | normal_radii=(2.0,), 61 | registration_error=1.0, 62 | ) 63 | 64 | # Run it and check that lodetection exceeds 1.96 65 | _, uncertainties = m3c2.run() 66 | assert (uncertainties["lodetection"] > 1.96).all() 67 | 68 | 69 | def test_external_normals(epochs): 70 | epoch1, epoch2 = epochs 71 | # Instantiate an M3C2 instance 72 | d, u = M3C2( 73 | epochs=(epoch1, epoch2), 74 | corepoints=epoch1.cloud, 75 | cyl_radius=3.0, 76 | normal_radii=(2.0,), 77 | corepoint_normals=np.array([[0, 0, 1]]), 78 | ).run() 79 | 80 | with pytest.raises(Py4DGeoError): 81 | d, u = M3C2( 82 | epochs=(epoch1, epoch2), 83 | corepoints=epoch1.cloud, 84 | cyl_radius=3.0, 85 | normal_radii=(2.0,), 86 | corepoint_normals=np.array([[0, 0, 1], [0, 0, 1]]), 87 | ).run() 88 | 89 | 90 | def test_directions_radii(epochs): 91 | epoch1, epoch2 = epochs 92 | # Instantiate an M3C2 instance 93 | m3c2 = M3C2( 94 | epochs=(epoch1, epoch2), 95 | corepoints=epoch1.cloud, 96 | cyl_radii=(3.0,), 97 | normal_radii=(1.0, 2.0, 3.0), 98 | ) 99 | 100 | # Compute the normal directions 101 | m3c2.directions() 102 | 103 | assert m3c2._directions_radii is not None 104 | for i in range(m3c2.directions_radii().shape[0]): 105 | assert m3c2.directions_radii()[i] in (1.0, 2.0, 3.0) 106 | --------------------------------------------------------------------------------
/tests/python/test_m3c2ep.py: -------------------------------------------------------------------------------- 1 | from py4dgeo.m3c2ep import * 2 | from py4dgeo.util import Py4DGeoError 3 | from py4dgeo import write_m3c2_results_to_las 4 | 5 | import os 6 | import py4dgeo 7 | import pytest 8 | import tempfile 9 | 10 | def test_m3c2ep(epochs_m3c2ep, Cxx, tfM, redPoint, scanpos_info): 11 | epoch1, epoch2 = epochs_m3c2ep 12 | epoch1.scanpos_info = scanpos_info 13 | epoch2.scanpos_info = scanpos_info 14 | corepoints = epoch1.cloud[::8000] 15 | # Subsampling every 8000th point keeps the test fast 16 | # Instantiate an M3C2-EP instance 17 | m3c2ep = M3C2EP( 18 | epochs=(epoch1, epoch2), 19 | corepoints=corepoints, 20 | normal_radii=(0.5, 1.0, 2.0), 21 | cyl_radius=0.5, 22 | max_distance=3.0, 23 | Cxx=Cxx, 24 | tfM=tfM, 25 | refPointMov=redPoint, 26 | ) 27 | 28 | # Run it and check that the results exist with correct shapes 29 | distances, uncertainties, covariance = m3c2ep.run() 30 | 31 | assert distances.shape[0] == corepoints.shape[0] 32 | assert uncertainties["num_samples1"].shape[0] == corepoints.shape[0] 33 | assert uncertainties["num_samples2"].shape[0] == corepoints.shape[0] 34 | assert uncertainties["spread1"].shape[0] == corepoints.shape[0] 35 | assert uncertainties["spread2"].shape[0] == corepoints.shape[0] 36 | assert uncertainties["lodetection"].shape[0] == corepoints.shape[0] 37 | cov1 = covariance["cov1"] 38 | cov2 = covariance["cov2"] 39 | assert ( 40 | cov1.shape[0] == corepoints.shape[0] 41 | and cov1.shape[1] == 3 42 | and cov1.shape[2] == 3 43 | ) 44 | assert ( 45 | cov2.shape[0] == corepoints.shape[0] 46 | and cov2.shape[1] == 3 47 | and cov2.shape[2] == 3 48 | ) 49 | 50 | 51 | def test_m3c2ep_external_normals(epochs_m3c2ep, Cxx, tfM, redPoint, scanpos_info): 52 | epoch1, epoch2 = epochs_m3c2ep 53 | epoch1.scanpos_info = scanpos_info 54 | epoch2.scanpos_info = scanpos_info 55 | corepoints = epoch1.cloud[::8000] 56 | 57 | # Corepoint normals must be a single direction or one normal per corepoint; anything else should raise.
58 | with pytest.raises(Py4DGeoError): 59 | d, u, c = M3C2EP( 60 | epochs=(epoch1, epoch2), 61 | corepoints=corepoints, 62 | corepoint_normals=np.array([[0, 0, 1], [0, 0, 1]]), 63 | cyl_radius=0.5, 64 | max_distance=3.0, 65 | Cxx=Cxx, 66 | tfM=tfM, 67 | refPointMov=redPoint, 68 | ).run() 69 | 70 | # Instantiate a default M3C2-EP instance to get normals 71 | m3c2ep = M3C2EP( 72 | epochs=(epoch1, epoch2), 73 | corepoints=corepoints, 74 | normal_radii=(0.5, 1.0, 2.0), 75 | cyl_radius=0.5, 76 | max_distance=3.0, 77 | Cxx=Cxx, 78 | tfM=tfM, 79 | refPointMov=redPoint, 80 | ) 81 | 82 | # Instantiate an M3C2-EP instance with specified corepoint normals 83 | corepoint_normals = m3c2ep.directions() 84 | m3c2ep_n = py4dgeo.M3C2EP( 85 | epochs=(epoch1, epoch2), 86 | corepoints=corepoints, 87 | corepoint_normals=corepoint_normals, 88 | cyl_radius=0.5, 89 | max_distance=3.0, 90 | Cxx=Cxx, 91 | tfM=tfM, 92 | refPointMov=redPoint, 93 | ) 94 | # Check that the corepoint normals are the same as the algorithm directions 95 | assert np.allclose(m3c2ep_n.directions(), corepoint_normals) 96 | 97 | # Instantiate an M3C2-EP instance with a single direction 98 | corepoint_normals = np.array([[0, 0, 1]]) 99 | m = py4dgeo.M3C2EP( 100 | epochs=(epoch1, epoch2), 101 | corepoints=corepoints, 102 | corepoint_normals=corepoint_normals, 103 | cyl_radius=0.5, 104 | max_distance=3.0, 105 | Cxx=Cxx, 106 | tfM=tfM, 107 | refPointMov=redPoint, 108 | ) 109 | # Check that the corepoint normals are the same as the algorithm directions 110 | assert np.allclose(m.directions(), corepoint_normals) 111 | 112 | 113 | def test_m3c2ep_epoch_saveload(epochs_m3c2ep, scanpos_info): 114 | epoch1, epoch2 = epochs_m3c2ep 115 | epoch1._validate_search_tree() 116 | epoch2._validate_search_tree() 117 | epoch1.scanpos_info = scanpos_info 118 | epoch2.scanpos_info = scanpos_info 119 | with tempfile.TemporaryDirectory() as dir: 120 | # Save and load it 121 | filename1 = os.path.join(dir, "epoch1") 122 | filename2 = os.path.join(dir, "epoch2") 123 | epoch1.save(filename1) 124 | epoch2.save(filename2) 125 | load1 = py4dgeo.load_epoch(filename1) 126 | load2 = py4dgeo.load_epoch(filename2) 127 | load1._validate_search_tree() 128 | load2._validate_search_tree() 129 | # Assert that the two objects behave the same 130 | assert load1.cloud.shape[0] == epoch1.cloud.shape[0] 131 | assert load2.cloud.shape[0] == epoch2.cloud.shape[0] 132 | 133 | assert np.allclose(load1.cloud - epoch1.cloud, 0) 134 | assert np.allclose(load2.cloud - epoch2.cloud, 0) 135 | 136 | bbox_extent_epoch1 = epoch1.cloud.max(axis=0) - epoch1.cloud.min(axis=0) 137 | radius1 = 0.25 * np.min( 138 | bbox_extent_epoch1 139 | ) # Quarter of the extent of the smallest dimension 140 | query_point_epoch1 = 0.5 * (epoch1.cloud.min(axis=0) + epoch1.cloud.max(axis=0)) 141 | assert np.allclose( 142 | load1._radius_search(query_point_epoch1, radius1), 143 | epoch1._radius_search(query_point_epoch1, radius1), 144 | ) 145 | 146 | bbox_extent_epoch2 = epoch2.cloud.max(axis=0) - epoch2.cloud.min(axis=0) 147 | radius2 = 0.25 * np.min( 148 | bbox_extent_epoch2 149 | ) # Quarter of the extent of the smallest dimension 150 | query_point_epoch2 = 0.5 * (epoch2.cloud.min(axis=0) + epoch2.cloud.max(axis=0)) 151 | assert np.allclose( 152 | load2._radius_search(query_point_epoch2, radius2), 153 | epoch2._radius_search(query_point_epoch2, radius2), 154 | ) 155 | 156 | 157 | def test_m3c2ep_write_las(epochs_m3c2ep, Cxx, tfM, redPoint, scanpos_info): 158 | epoch1, epoch2 = epochs_m3c2ep 159 | epoch1.scanpos_info = scanpos_info 160 | epoch2.scanpos_info =
scanpos_info 161 | corepoints = epoch1.cloud[::8000] 162 | 163 | # Instantiate an M3C2-EP instance 164 | m3c2ep = M3C2EP( 165 | epochs=(epoch1, epoch2), 166 | corepoints=corepoints, 167 | normal_radii=(0.5, 1.0, 2.0), 168 | cyl_radius=0.5, 169 | max_distance=3.0, 170 | Cxx=Cxx, 171 | tfM=tfM, 172 | refPointMov=redPoint, 173 | ) 174 | 175 | # Run it 176 | distances, uncertainties, covariance = m3c2ep.run() 177 | 178 | def read_cp_from_las(path): 179 | import laspy 180 | 181 | inFile = laspy.read(path) 182 | coords = np.vstack((inFile.x, inFile.y, inFile.z)).transpose() 183 | # getattr with a default already covers missing extra dimensions 184 | distances = getattr(inFile, "distances", None) 185 | lod = getattr(inFile, "lod", None) 186 | return coords, distances, lod 187 | 188 | # Save to and load from a LAS file and check that the results match 189 | with tempfile.TemporaryDirectory() as dir: 190 | attr = {"distances": distances, "lod": uncertainties["lodetection"]} 191 | file = os.path.join(dir, "cp.las") 192 | write_m3c2_results_to_las(file, m3c2ep, attribute_dict=attr) 193 | c, d, l = read_cp_from_las(file) 194 | diff_c = corepoints - c 195 | diff_d = distances - d 196 | diff_d[np.isnan(diff_d)] = 0 197 | diff_l = uncertainties["lodetection"] - l 198 | diff_l[np.isnan(diff_l)] = 0 199 | assert np.allclose(diff_c, 0) 200 | assert np.allclose(diff_d, 0) 201 | assert np.allclose(diff_l, 0) 202 | -------------------------------------------------------------------------------- /tests/python/test_octree.py: -------------------------------------------------------------------------------- 1 | from py4dgeo.util import get_memory_policy 2 | 3 | import numpy as np 4 | import os 5 | import pickle 6 | import pytest 7 | import tempfile 8 | 9 | 10 | def test_octree(epochs): 11 | epoch1, _ = epochs 12 | epoch1.build_octree() 13 | data = epoch1.cloud 14 | 15 | # Find all points in a sufficiently large radius 16 | result = epoch1.octree.radius_search(np.array([0, 0, 0]), 100) 17 | assert result.shape[0] == data.shape[0] 18 | 19 | 20 | def test_octree_pickle(epochs): 21 | epoch1, _ = epochs 22 | with pytest.raises(RuntimeError): 23 | with tempfile.TemporaryDirectory() as dir: 24 | fn = os.path.join(dir, "octree.pickle") 25 | with open(fn, "wb") as f: 26 | pickle.dump(epoch1.octree, f) 27 | 28 | 29 | def test_rebuilding(epochs): 30 | epoch1, _ = epochs 31 | 32 | # Not built yet - number of points is 0 33 | assert epoch1.octree.get_number_of_points() == 0 34 | 35 | # Building with default - number of points is > 0 36 | epoch1.build_octree() 37 | assert epoch1.octree.get_number_of_points() > 0 38 | -------------------------------------------------------------------------------- /tests/python/test_pbm3c2_compute_distances.py: -------------------------------------------------------------------------------- 1 | import py4dgeo 2 | 3 | import numpy as np 4 | from sklearn.ensemble import RandomForestClassifier 5 | 6 | 7 | def test_compute_distances(epochs): 8 | epoch0, epoch1 = epochs 9 | 10 | alg = py4dgeo.PBM3C2( 11 | classifier=py4dgeo.ClassifierWrapper( 12 | classifier=RandomForestClassifier(random_state=42) 13 | ) 14 | ) 15 | 16 | ( 17 | x_y_z_id_epoch0, 18 | x_y_z_id_epoch1, 19 | extracted_segments, 20 | ) = alg.export_segmented_point_cloud_and_segments( 21 | epoch0=epoch0, 22 | epoch1=epoch1, 23 | **{ 24 | # "c": True, # used for testing 25 | # "get_pipeline_options": True, 26 | # "Transform_Segmentation__output_file_name": "segmented_point_cloud.out" 27 | }, 28 | ) 29 | # segmented_point_cloud =
py4dgeo.Viewer.read_np_ndarray_from_xyz( 30 | # input_file_name="segmented_point_cloud.out" 31 | # ) 32 | # py4dgeo.Viewer.segmented_point_cloud_visualizer(X=segmented_point_cloud) 33 | 34 | ( 35 | _0, 36 | _1, 37 | extracted_segments_epoch0, 38 | ) = alg.export_segmented_point_cloud_and_segments( 39 | epoch0=epoch0, 40 | # epoch1=None, 41 | x_y_z_id_epoch0_file_name=None, 42 | x_y_z_id_epoch1_file_name=None, 43 | extracted_segments_file_name=None, 44 | ) 45 | 46 | extended_y = py4dgeo.generate_random_extended_y( 47 | extracted_segments, extended_y_file_name="extended_y.csv" 48 | ) 49 | 50 | alg.training(extracted_segments, extended_y) 51 | # alg.training( 52 | # extracted_segments_file_name="extracted_segments.seg", 53 | # extended_y_file_name="extended_y.csv", 54 | # ) 55 | 56 | rez0 = alg.compute_distances(epoch0=epoch0, epoch1=epoch1) 57 | # print(alg.predict(epoch0=epoch0, epoch1=epoch1, get_pipeline_option=True)) 58 | 59 | rez1 = alg.compute_distances( 60 | epoch0=extracted_segments_epoch0, 61 | epoch1=epoch1, 62 | get_pipeline_options=True, 63 | epoch0_Transform_PerPointComputation__skip=True, 64 | epoch0_Transform_Segmentation__skip=True, 65 | epoch0_Transform_Second_Segmentation__skip=True, 66 | epoch0_Transform_ExtractSegments__skip=True, 67 | ) 68 | 69 | config_epoch0_as_segments = { 70 | "get_pipeline_options": True, 71 | "epoch0_Transform_PerPointComputation__skip": True, 72 | "epoch0_Transform_Segmentation__skip": True, 73 | "epoch0_Transform_Second_Segmentation__skip": True, 74 | "epoch0_Transform_ExtractSegments__skip": True, 75 | } 76 | 77 | rez2 = alg.compute_distances( 78 | epoch0=extracted_segments_epoch0, epoch1=epoch1, **config_epoch0_as_segments 79 | ) 80 | 81 | assert np.array_equal(rez0[0], rez1[0]), "results are no longer equal" 82 | assert np.array_equal(rez0[1], rez1[1]), "results are no longer equal" 83 | assert np.array_equal(rez1[0], rez2[0]), "results are no longer equal" 84 | assert np.array_equal(rez1[1], rez2[1]), "results are no longer equal" 85 | -------------------------------------------------------------------------------- /tests/python/test_pbm3c2_consistency.py: -------------------------------------------------------------------------------- 1 | import py4dgeo 2 | 3 | import numpy as np 4 | from sklearn.ensemble import RandomForestClassifier 5 | 6 | 7 | def test_consistency(epochs): 8 | epoch0, epoch1 = epochs 9 | 10 | # =============== 11 | 12 | alg_cpy = py4dgeo.PBM3C2( 13 | classifier=py4dgeo.ClassifierWrapper( 14 | classifier=RandomForestClassifier(random_state=42) 15 | ) 16 | ) 17 | 18 | ( 19 | x_y_z_id_epoch0, 20 | x_y_z_id_epoch1, 21 | extracted_segments, 22 | ) = alg_cpy.export_segmented_point_cloud_and_segments( 23 | epoch0=epoch0, 24 | epoch1=epoch1, 25 | x_y_z_id_epoch0_file_name=None, 26 | x_y_z_id_epoch1_file_name=None, 27 | extracted_segments_file_name=None, 28 | ) 29 | extended_y = py4dgeo.generate_random_extended_y( 30 | extracted_segments, extended_y_file_name="extended_y.csv" 31 | ) 32 | alg_cpy.training(extracted_segments, extended_y) 33 | 34 | rez_cpy = alg_cpy.predict(epoch0=epoch0, epoch1=epoch1) 35 | 36 | # =============== 37 | 38 | alg_original = py4dgeo.PBM3C2( 39 | classifier=py4dgeo.ClassifierWrapper( 40 | classifier=RandomForestClassifier(random_state=42) 41 | ) 42 | ) 43 | 44 | # ( 45 | # x_y_z_id_epoch0, 46 | # x_y_z_id_epoch1, 47 | # extracted_segments, 48 | # ) = \ 49 | alg_original.export_segmented_point_cloud_and_segments( 50 |
epoch0=epoch0, 51 | epoch1=epoch1, 52 | x_y_z_id_epoch0_file_name=None, 53 | x_y_z_id_epoch1_file_name=None, 54 | extracted_segments_file_name=None, 55 | ) 56 | # extended_y = py4dgeo.generate_random_extended_y(extracted_segments, extended_y_file_name="extended_y.csv") 57 | 58 | alg_original.training(extracted_segments, extended_y) 59 | 60 | rez_original = alg_original.predict(epoch0=epoch0, epoch1=epoch1) 61 | 62 | assert np.array_equal(rez_original, rez_cpy), "results are no longer equal" 63 | -------------------------------------------------------------------------------- /tests/python/test_pbm3c2_predict.py: -------------------------------------------------------------------------------- 1 | import py4dgeo 2 | 3 | import numpy as np 4 | from sklearn.ensemble import RandomForestClassifier 5 | 6 | 7 | def test_predict(epochs): 8 | epoch0, epoch1 = epochs 9 | 10 | alg = py4dgeo.PBM3C2( 11 | classifier=py4dgeo.ClassifierWrapper( 12 | classifier=RandomForestClassifier(random_state=42) 13 | ) 14 | ) 15 | 16 | ( 17 | x_y_z_id_epoch0, 18 | x_y_z_id_epoch1, 19 | extracted_segments, 20 | ) = alg.export_segmented_point_cloud_and_segments( 21 | epoch0=epoch0, 22 | epoch1=epoch1, 23 | **{ 24 | # "c": True, # used for testing 25 | # "get_pipeline_options": True, 26 | # "Transform_Segmentation__output_file_name": "segmented_point_cloud.out" 27 | }, 28 | ) 29 | # segmented_point_cloud = py4dgeo.Viewer.read_np_ndarray_from_xyz( 30 | # input_file_name="segmented_point_cloud.out" 31 | # ) 32 | # py4dgeo.Viewer.segmented_point_cloud_visualizer(X=segmented_point_cloud) 33 | 34 | extended_y = py4dgeo.generate_random_extended_y( 35 | extracted_segments, extended_y_file_name="extended_y.csv" 36 | ) 37 | 38 | alg.training(extracted_segments, extended_y) 39 | # alg.training( 40 | # extracted_segments_file_name="extracted_segments.seg", 41 | # extended_y_file_name="extended_y.csv", 42 | # ) 43 | 44 | rez0 = alg.predict(epoch0=epoch0, epoch1=epoch1) 45 | # print(alg.predict(epoch0=epoch0, epoch1=epoch1, get_pipeline_option=True)) 46 | 47 | ( 48 | _0, 49 | _1, 50 | extracted_segments_epoch0, 51 | ) = alg.export_segmented_point_cloud_and_segments( 52 | epoch0=epoch0, 53 | # epoch1=None, 54 | x_y_z_id_epoch0_file_name=None, 55 | x_y_z_id_epoch1_file_name=None, 56 | extracted_segments_file_name=None, 57 | ) 58 | 59 | rez1 = alg.predict( 60 | epoch0=extracted_segments_epoch0, 61 | epoch1=epoch1, 62 | get_pipeline_options=True, 63 | epoch0_Transform_PerPointComputation__skip=True, 64 | epoch0_Transform_Segmentation__skip=True, 65 | epoch0_Transform_Second_Segmentation__skip=True, 66 | epoch0_Transform_ExtractSegments__skip=True, 67 | ) 68 | 69 | config_epoch0_as_segments = { 70 | "get_pipeline_options": True, 71 | "epoch0_Transform_PerPointComputation__skip": True, 72 | "epoch0_Transform_Segmentation__skip": True, 73 | "epoch0_Transform_Second_Segmentation__skip": True, 74 | "epoch0_Transform_ExtractSegments__skip": True, 75 | } 76 | 77 | rez2 = alg.predict( 78 | epoch0=extracted_segments_epoch0, epoch1=epoch1, **config_epoch0_as_segments 79 | ) 80 | 81 | assert np.array_equal(rez0, rez1), "results are no longer equal" 82 | assert np.array_equal(rez1, rez2), "results are no longer equal" 83 | -------------------------------------------------------------------------------- /tests/python/test_registration.py: -------------------------------------------------------------------------------- 1 | from py4dgeo.registration import * 2 | 3 | 4 | def test_icp(epochs): 5 | epoch1, epoch2 = epochs 6 |
epoch1.calculate_normals(radius=2.5) 7 | epoch2.calculate_normals(radius=2.5) 8 | 9 | trafo2 = point_to_plane_icp(epoch1, epoch2) 10 | 11 | assert trafo2.affine_transformation.shape == (4, 4) 12 | 13 | 14 | def test_stable_area_icp(epochs): 15 | epoch1, epoch2 = epochs 16 | epoch1.calculate_normals(radius=10) 17 | epoch2.calculate_normals(radius=10) 18 | trafo2 = icp_with_stable_areas(epoch1, epoch2, 10, 10, 0.2, 0.2, 1) 19 | 20 | assert trafo2.affine_transformation.shape == (4, 4) 21 | -------------------------------------------------------------------------------- /tests/python/test_segmentation.py: -------------------------------------------------------------------------------- 1 | from py4dgeo.segmentation import * 2 | from py4dgeo.m3c2 import M3C2 3 | from py4dgeo.util import Py4DGeoError 4 | 5 | import pytest 6 | import ruptures 7 | 8 | from .helpers import complex_timeseries, simple_jump 9 | 10 | 11 | def test_segmentation(analysis): 12 | # Basic assertions about the analysis loaded in fixture 13 | assert len(analysis.distances.shape) == 2 14 | assert len(analysis.uncertainties.shape) == 2 15 | assert len(analysis.corepoints.cloud.shape) == 2 16 | assert len(analysis.timedeltas) > 0 17 | 18 | 19 | def test_access_unassigned_properties(tmp_path, epochs): 20 | analysis = SpatiotemporalAnalysis(os.path.join(tmp_path, "unassigned.zip")) 21 | 22 | with pytest.raises(Py4DGeoError): 23 | analysis.corepoints 24 | 25 | with pytest.raises(Py4DGeoError): 26 | analysis.reference_epoch 27 | 28 | assert len(analysis.timedeltas) == 0 29 | 30 | # Set reference_epoch and corepoints to check unassigned distances 31 | # and uncertainties 32 | epoch, _ = epochs 33 | epoch.timestamp = "March 9th 2022, 16:33" 34 | analysis.reference_epoch = epochs[0] 35 | analysis.corepoints = epochs[0] 36 | 37 | assert analysis.distances.shape[1] == 0 38 | assert analysis.uncertainties.shape[1] == 0 39 | 40 | 41 | def test_construct_from_epochs(epochs, tmp_path): 42 | ref_epoch, epoch1 = epochs 43 | 44 | ref_epoch.timestamp = "March 9th 2022, 16:32" 45 | epoch1.timestamp = "March 9th 2022, 16:33" 46 | 47 | m3c2 = M3C2( 48 | epochs=(ref_epoch, epoch1), 49 | corepoints=ref_epoch.cloud, 50 | cyl_radius=2.0, 51 | normal_radii=[2.0], 52 | ) 53 | 54 | analysis = SpatiotemporalAnalysis(os.path.join(tmp_path, "testanalysis.zip")) 55 | analysis.m3c2 = m3c2 56 | analysis.reference_epoch = ref_epoch 57 | analysis.corepoints = ref_epoch.cloud 58 | analysis.add_epochs(epoch1) 59 | 60 | assert analysis.distances.shape[1] == 1 61 | assert analysis.uncertainties.shape[1] == 1 62 | 63 | # Adding epoch again to trigger the code path overriding existing results 64 | analysis.add_epochs(epoch1) 65 | 66 | 67 | def test_construct_from_scratch(tmp_path, epochs): 68 | ref_epoch, epoch = epochs 69 | ref_epoch.timestamp = "March 9th 2022, 16:32" 70 | epoch.timestamp = "March 9th 2022, 16:33" 71 | analysis = SpatiotemporalAnalysis(os.path.join(tmp_path, "scratch.zip")) 72 | 73 | analysis.reference_epoch = ref_epoch 74 | analysis.corepoints = epoch.cloud 75 | analysis.distances = np.zeros(shape=(analysis.corepoints.cloud.shape[0], 1)) 76 | analysis.uncertainties = np.empty( 77 | (analysis.corepoints.cloud.shape[0], 0), 78 | dtype=np.dtype( 79 | [ 80 | ("lodetection", "