├── .github ├── FUNDING.yml └── workflows │ ├── draft-pdf.yaml │ └── draft-tex.yaml ├── CITATION.bib ├── Dockerfile ├── LICENSE ├── README.md ├── docs ├── .buildinfo ├── .nojekyll ├── _images │ ├── interactive.png │ └── results.png ├── _sources │ ├── citation.rst.txt │ ├── index.rst.txt │ ├── installation.rst.txt │ ├── license.rst.txt │ ├── module.rst.txt │ └── tutorial.rst.txt ├── _static │ ├── _sphinx_javascript_frameworks_compat.js │ ├── basic.css │ ├── css │ │ ├── badge_only.css │ │ ├── fonts │ │ │ ├── Roboto-Slab-Bold.woff │ │ │ ├── Roboto-Slab-Bold.woff2 │ │ │ ├── Roboto-Slab-Regular.woff │ │ │ ├── Roboto-Slab-Regular.woff2 │ │ │ ├── fontawesome-webfont.eot │ │ │ ├── fontawesome-webfont.svg │ │ │ ├── fontawesome-webfont.ttf │ │ │ ├── fontawesome-webfont.woff │ │ │ ├── fontawesome-webfont.woff2 │ │ │ ├── lato-bold-italic.woff │ │ │ ├── lato-bold-italic.woff2 │ │ │ ├── lato-bold.woff │ │ │ ├── lato-bold.woff2 │ │ │ ├── lato-normal-italic.woff │ │ │ ├── lato-normal-italic.woff2 │ │ │ ├── lato-normal.woff │ │ │ └── lato-normal.woff2 │ │ └── theme.css │ ├── doctools.js │ ├── documentation_options.js │ ├── file.png │ ├── jquery.js │ ├── js │ │ ├── badge_only.js │ │ ├── html5shiv-printshiv.min.js │ │ ├── html5shiv.min.js │ │ └── theme.js │ ├── language_data.js │ ├── minus.png │ ├── plus.png │ ├── pygments.css │ ├── searchtools.js │ └── sphinx_highlight.js ├── citation.html ├── genindex.html ├── index.html ├── installation.html ├── license.html ├── module.html ├── objects.inv ├── py-modindex.html ├── search.html ├── searchindex.js └── tutorial.html ├── paper ├── figures │ ├── extrinsic.jpg │ ├── intrinsic.jpg │ ├── pinhole_inference.png │ ├── pinhole_results.png │ ├── top_inference.png │ ├── top_results.png │ └── views.png ├── paper.bib └── paper.md ├── requirements.txt ├── segment_lidar ├── __init__.py ├── samlidar.py └── view.py └── setup.py /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: [yarroudh] 2 | -------------------------------------------------------------------------------- /.github/workflows/draft-pdf.yaml: -------------------------------------------------------------------------------- 1 | on: [push] 2 | 3 | jobs: 4 | paper: 5 | runs-on: ubuntu-latest 6 | name: Paper Draft 7 | steps: 8 | - name: Checkout 9 | uses: actions/checkout@v3 10 | - name: Build draft PDF 11 | uses: openjournals/openjournals-draft-action@master 12 | with: 13 | journal: joss 14 | # This should be the path to the paper within your repo. 15 | paper-path: paper/paper.md 16 | - name: Upload 17 | uses: actions/upload-artifact@v1 18 | with: 19 | name: paper 20 | # This is the output path where Pandoc will write the compiled 21 | # PDF. 
Note, this should be the same directory as the input 22 | # paper.md 23 | path: paper/paper.pdf -------------------------------------------------------------------------------- /.github/workflows/draft-tex.yaml: -------------------------------------------------------------------------------- 1 | on: [push] 2 | 3 | jobs: 4 | tests: 5 | runs-on: ubuntu-latest 6 | steps: 7 | - uses: actions/checkout@v2 8 | - name: TeX 9 | uses: docker://openjournals/paperdraft:latest 10 | with: 11 | args: joss/paper.md --to=latex --output=paper.tex 12 | env: 13 | GIT_SHA: $GITHUB_SHA 14 | JOURNAL: joss 15 | - name: PDF 16 | uses: docker://openjournals/paperdraft:latest 17 | with: 18 | args: joss/paper.md 19 | env: 20 | GIT_SHA: $GITHUB_SHA 21 | JOURNAL: joss 22 | - uses: actions/upload-artifact@v2 23 | with: 24 | path: joss 25 | -------------------------------------------------------------------------------- /CITATION.bib: -------------------------------------------------------------------------------- 1 | @misc{yarroudh:2023:samlidar, 2 | author = {Yarroudh, Anass}, 3 | title = {LiDAR Automatic Unsupervised Segmentation using Segment-Anything Model (SAM) from Meta AI}, 4 | year = {2023}, 5 | howpublished = {GitHub Repository}, 6 | url = {https://github.com/Yarroudh/segment-lidar} 7 | } 8 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:22.04 2 | 3 | RUN useradd --create-home --shell /bin/bash user 4 | WORKDIR /home/user 5 | 6 | # Install Anaconda 7 | RUN apt-get update && apt-get install -y wget bzip2 8 | RUN wget --quiet https://repo.anaconda.com/archive/Anaconda3-2021.05-Linux-x86_64.sh -O ~/anaconda.sh 9 | RUN /bin/bash ~/anaconda.sh -b -p /opt/conda && \ 10 | rm ~/anaconda.sh && \ 11 | echo 'export PATH="/opt/conda/bin:$PATH"' >> ~/.bashrc 12 | 13 | # Configure the environment 14 | ENV PATH /opt/conda/bin:$PATH 15 | RUN conda create -n samlidar python=3.9 16 | RUN echo "conda activate samlidar" >> ~/.bashrc 17 | ENV PATH /opt/conda/envs/samlidar/bin:$PATH 18 | USER root 19 | 20 | # Install GCC 21 | RUN apt-get install gcc -y 22 | 23 | # Install segment-lidar 24 | RUN python -m pip install --upgrade pip setuptools wheel cython 25 | RUN python -m pip install segment-lidar 26 | 27 | CMD ["python", "-c", "while True: pass"] -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2023, University of Liège - Author: Anass Yarroudh (ayarroudh@uliege.be), Geomatics Unit of ULiege 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | 1. Redistributions of source code must retain the above copyright notice, this 9 | list of conditions and the following disclaimer. 10 | 11 | 2. Redistributions in binary form must reproduce the above copyright notice, 12 | this list of conditions and the following disclaimer in the documentation 13 | and/or other materials provided with the distribution. 14 | 15 | 3. Neither the name of the copyright holder nor the names of its 16 | contributors may be used to endorse or promote products derived from 17 | this software without specific prior written permission. 
18 | 19 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 20 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 21 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 23 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 24 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 25 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 26 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 27 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 28 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 29 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | logo 2 | 3 | # segment-lidar 4 | [![License](https://img.shields.io/badge/License-BSD_3--Clause-blue.svg)](https://github.com/Yarroudh/ZRect3D/blob/main/LICENSE) 5 | [![Geomatics Unit of ULiege - Development](https://img.shields.io/badge/Geomatics_Unit_of_ULiege-Development-2ea44f)](http://geomatics.ulg.ac.be/) 6 | [![read - documentation](https://img.shields.io/static/v1?label=read&message=documentation&color=orange)](https://yarroudh.github.io/segment-lidar/) 7 | 8 | logo 9 | 10 | *Python package for segmenting aerial LiDAR data using Segment-Anything Model (SAM) from Meta AI.* 11 | 12 | This package is specifically designed for **unsupervised instance segmentation** of **LiDAR data**. It brings together the power of the **Segment-Anything Model (SAM)** developed by [Meta Research](https://github.com/facebookresearch) and the **segment-geospatial** package from [Open Geospatial Solutions](https://github.com/opengeos) to automate instance segmentation of 3D point cloud data. 13 | 14 | ![results](https://github.com/Yarroudh/segment-lidar/assets/72500344/089a603b-697e-4483-af1e-3687a79adcc1) 15 | 16 | ## Installation 17 | 18 | We recommend using `Python>=3.9`. First, you need to install `PyTorch`. Please follow the instructions [here](https://pytorch.org/). 19 | 20 | Then, you can easily install `segment-lidar` from [PyPI](https://pypi.org/project/segment-lidar/): 21 | 22 | ```bash 23 | pip install segment-lidar 24 | ``` 25 | 26 | Or, you can install it from source by running the following commands: 27 | 28 | ```bash 29 | git clone https://github.com/Yarroudh/segment-lidar 30 | cd segment-lidar 31 | python setup.py install 32 | ``` 33 | 34 | Please, note that the current version is still under testing. If you find any issues or bugs, please report them in the [issues](https://github.com/Yarroudh/segment-lidar/issues) section. The second version should implement more advanced features and functionalities. 36 | ## Documentation 37 | 38 | If you are using `segment-lidar`, we highly recommend that you take the time to read the [documentation](https://yarroudh.gitbook.io/segment-lidar/). The documentation is an essential resource that will help you understand the features of the package, as well as provide guidance on how to use it effectively. 39 | 40 | ## Basic tutorial 41 | 42 | A basic tutorial is available [here](https://yarroudh.github.io/segment-lidar/tutorial.html). 43 | 44 | You can also refer to the [API](https://yarroudh.github.io/segment-lidar/module.html) for more information about the different parameters. 
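The examples below assume that a SAM checkpoint (here, the ViT-H weights listed under [Model checkpoints](#model-checkpoints)) has already been downloaded to your working directory, for example:

```bash
wget https://dl.fbaipublicfiles.com/segment_anything/sam_vit_h_4b8939.pth
```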
45 | 46 | ### Without ground filtering 47 | 48 | ```python 49 | from segment_lidar import samlidar, view 50 | 51 | viewpoint = view.TopView() 52 | 53 | model = samlidar.SamLidar(ckpt_path="sam_vit_h_4b8939.pth") 54 | points = model.read("pointcloud.las") 55 | labels, *_ = model.segment(points=points, view=viewpoint, image_path="raster.tif", labels_path="labeled.tif") 56 | model.write(points=points, segment_ids=labels, save_path="segmented.las") 57 | ``` 58 | 59 | ### With ground filtering 60 | 61 | ```python 62 | from segment_lidar import samlidar, view 63 | 64 | viewpoint = view.TopView() 65 | 66 | model = samlidar.SamLidar(ckpt_path="sam_vit_h_4b8939.pth") 67 | points = model.read("pointcloud.las") 68 | cloud, non_ground, ground = model.csf(points) 69 | labels, *_ = model.segment(points=cloud, view=viewpoint, image_path="raster.tif", labels_path="labeled.tif") 70 | model.write(points=points, non_ground=non_ground, ground=ground, segment_ids=labels, save_path="segmented.las") 71 | ``` 72 | 73 | **Note**: The latest version of `segment-lidar` supports defining a custom pinhole camera, with or without interactive visualization, and saving the view as an image. Please, refer to the [documentation](https://yarroudh.github.io/segment-lidar/tutorial.html#interactive-mode) for more details. 74 | 75 | ## Sample data 76 | 77 | For testing purposes, you can download a sample here: [pointcloud.las](https://drive.google.com/file/d/16EF2aRSvo8u0pXvwtaQ6sjhP5h0sWw3o/view?usp=sharing). 78 | 79 | This data was retrieved from **AHN-4**. For more data, please visit [GeoTiles.nl](https://geotiles.nl/). 80 | 81 | ## Model checkpoints 82 | 83 | Click the links below to download the checkpoint for the corresponding model type. 84 | 85 | - `vit_h`: [ViT-H SAM model.](https://dl.fbaipublicfiles.com/segment_anything/sam_vit_h_4b8939.pth) 86 | - `vit_l`: [ViT-L SAM model.](https://dl.fbaipublicfiles.com/segment_anything/sam_vit_l_0b3195.pth) 87 | - `vit_b`: [ViT-B SAM model.](https://dl.fbaipublicfiles.com/segment_anything/sam_vit_b_01ec64.pth) 88 | 89 | ## Docker Image 90 | 91 | **segment-lidar** is also available as a [Docker image](https://hub.docker.com/r/yarroudh/segment-lidar). 92 | 93 | These are the steps to run `segment-lidar` as a Docker container: 94 | 95 | 1. First, pull the image using the docker pull command: 96 | 97 | ```bash 98 | docker pull yarroudh/segment-lidar 99 | ``` 100 | 101 | 2. To run the Docker container and mount your data and script file inside it, use the docker run command with the -v option to specify the path to the host directory and the path to the container directory where you want to mount the data folder. For example: 102 | 103 | ```bash 104 | docker run -d -v ABSOLUTE_PATH_TO_HOST_DATA:/home/user yarroudh/segment-lidar 105 | ``` 106 | 107 | This command will start a Docker container in detached mode, mount the **ABSOLUTE_PATH_TO_HOST_DATA** directory on the host machine to the **/home/user** directory inside the container, and run the yarroudh/segment-lidar image. Do not change the path of the directory inside the container. 108 | 109 | 3. Find the container ID and copy it. You can use the docker ps command to list all running containers and their IDs. 110 | 4. Launch a command inside the container using docker exec, with the container ID or name and the command you want to run. For example, to run a Python script (a minimal example of such a script is sketched below): 111 | 112 | ```bash 113 | docker exec CONTAINER_ID python SCRIPT_FILE 114 | ``` 115 | 
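For reference, `SCRIPT_FILE` could be a minimal segmentation script like the sketch below, which simply mirrors the basic usage shown above (the file names are placeholders and assume the point cloud and the SAM checkpoint were mounted into `/home/user`):

```python
# segment.py - hypothetical script to run inside the container
from segment_lidar import samlidar, view

# Use a simple top view and the ViT-H checkpoint mounted alongside the data
viewpoint = view.TopView()
model = samlidar.SamLidar(ckpt_path="sam_vit_h_4b8939.pth")

# Read, segment and write back the labeled point cloud
points = model.read("pointcloud.las")
labels, *_ = model.segment(points=points, view=viewpoint, image_path="raster.tif", labels_path="labeled.tif")
model.write(points=points, segment_ids=labels, save_path="segmented.las")
```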
116 | 5. To copy the output of the command from the container to a local path, use the docker cp command with the container ID or name, the path to the file inside the container, and the path to the destination on the host machine. For example: 117 | 118 | ```bash 119 | docker cp CONTAINER_ID:/home/user/PATH_TO_OUTPUT PATH_ON_HOST_MACHINE 120 | ``` 121 | 122 | 6. Finally, after executing all the commands and copying the results to your local machine, you can stop the Docker container using the docker stop command followed by the container ID or name: 123 | 124 | ```bash 125 | docker stop CONTAINER_ID 126 | ``` 127 | 128 | ## Related repositories 129 | 130 | We would like to express our acknowledgments to the creators of: 131 | 132 | - [segment-anything](https://github.com/facebookresearch/segment-anything) 133 | - [segment-geospatial](https://github.com/opengeos/segment-geospatial) 134 | 135 | Please, visit these repositories for more information about automatic raster image segmentation using SAM from Meta AI. 136 | 137 | ## License 138 | 139 | This software is released under the BSD 3-Clause "New" or "Revised" license, a permissive license that allows almost unrestricted use of the software as long as you include the BSD copyright and license notice. Please refer to the [LICENSE](https://github.com/Yarroudh/segment-lidar/blob/main/LICENSE) file for more detailed information. 140 | 141 | ## Citation 142 | 143 | The use of open-source software repositories has become increasingly prevalent in scientific research. If you use this repository for your research, please make sure to cite it appropriately in your work. The recommended citation format for this repository is provided in the accompanying [BibTeX citation](https://github.com/Yarroudh/segment-lidar/blob/main/CITATION.bib). Additionally, please make sure to comply with any licensing terms and conditions associated with the use of this repository. 144 | 145 | ```bib 146 | @misc{yarroudh:2023:samlidar, 147 | author = {Yarroudh, Anass}, 148 | title = {LiDAR Automatic Unsupervised Segmentation using Segment-Anything Model (SAM) from Meta AI}, 149 | year = {2023}, 150 | howpublished = {GitHub Repository}, 151 | url = {https://github.com/Yarroudh/segment-lidar} 152 | } 153 | ``` 154 | 155 | Yarroudh, A. (2023). *LiDAR Automatic Unsupervised Segmentation using Segment-Anything Model (SAM) from Meta AI* [GitHub repository]. Retrieved from https://github.com/Yarroudh/segment-lidar 156 | 157 | ## Author 158 | 159 | This software was developed by [Anass Yarroudh](https://www.linkedin.com/in/anass-yarroudh/), a Research Engineer in the [Geomatics Unit of the University of Liege](http://geomatics.ulg.ac.be/fr/home.php). 160 | For more detailed information, please contact us via email at ayarroudh@uliege.be and we will be pleased to send you the necessary information. 161 | 162 | ----- 163 | 164 | Copyright © 2023, [Geomatics Unit of ULiège](http://geomatics.ulg.ac.be/fr/home.php). Released under [BSD-3 Clause License](https://github.com/Yarroudh/segment-lidar/blob/main/LICENSE). 165 | -------------------------------------------------------------------------------- /docs/.buildinfo: -------------------------------------------------------------------------------- 1 | # Sphinx build info version 1 2 | # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. 
3 | config: 47cf6d7ca16a60cd2a291f6125aeba81 4 | tags: 645f666f9bcd5a90fca523b33c5a78b7 5 | -------------------------------------------------------------------------------- /docs/.nojekyll: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /docs/_images/interactive.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_images/interactive.png -------------------------------------------------------------------------------- /docs/_images/results.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_images/results.png -------------------------------------------------------------------------------- /docs/_sources/citation.rst.txt: -------------------------------------------------------------------------------- 1 | Citation 2 | ========= 3 | 4 | The use of open-source software repositories has become increasingly 5 | prevalent in scientific research. If you use this repository for your 6 | research, please make sure to cite it appropriately in your work. The 7 | recommended citation format for this repository is provided in the 8 | accompanying `BibTeX 9 | citation `__. 10 | Additionally, please make sure to comply with any licensing terms and 11 | conditions associated with the use of this repository. 12 | 13 | .. code:: bibtex 14 | 15 | @misc{yarroudh:2023:samlidar, 16 | author = {Yarroudh, Anass}, 17 | title = {LiDAR Automatic Unsupervised Segmentation using Segment-Anything Model (SAM) from Meta AI}, 18 | year = {2023}, 19 | howpublished = {GitHub Repository}, 20 | url = {https://github.com/Yarroudh/segment-lidar} 21 | } 22 | 23 | Yarroudh, A. (2023). LiDAR Automatic Unsupervised Segmentation using Segment-Anything Model (SAM) from Meta AI [GitHub repository]. Retrieved from https://github.com/Yarroudh/segment-lidar -------------------------------------------------------------------------------- /docs/_sources/index.rst.txt: -------------------------------------------------------------------------------- 1 | 2 | Welcome to segment-lidar's documentation! 3 | ========================================= 4 | 5 | About 6 | ----- 7 | 8 | The package **segment-lidar** is specifically designed for unsupervised instance segmentation of aerial LiDAR data. It brings together the power of Segment-Anything Model (SAM) developed by Meta Research and segment-geospatial (SamGeo) package from Open Geospatial Solutions to automatize the segmentation of LiDAR data. 9 | If you use this package for your research, please cite: 10 | 11 | .. code-block:: bibtex 12 | 13 | @misc{yarroudh:2023:samlidar, 14 | author = {Yarroudh, Anass}, 15 | title = {LiDAR Automatic Unsupervised Segmentation using Segment-Anything Model (SAM) from Meta AI}, 16 | year = {2023}, 17 | howpublished = {GitHub Repository}, 18 | url = {https://github.com/Yarroudh/segment-lidar} 19 | } 20 | 21 | .. note:: 22 | The paper will be published very soon. 23 | 24 | The latest source code is available at `GitHub 25 | `_. The package builds on top of existing works and when using specific algorithms within segment-lidar, please also cite the original authors, as specified in the source code. 26 | 27 | .. 
toctree:: 28 | :maxdepth: 1 29 | :glob: 30 | :caption: Contents 31 | 32 | installation 33 | tutorial 34 | module 35 | citation 36 | license 37 | 38 | Support 39 | ------- 40 | 41 | Please, contact us via email ayarroudh@uliege.be or akharroubi@uliege.be for questions and the `GitHub issue tracker 42 | `_ for bug reports, feature requests/additions, etc. -------------------------------------------------------------------------------- /docs/_sources/installation.rst.txt: -------------------------------------------------------------------------------- 1 | Installation 2 | ============ 3 | 4 | This guide describes the steps to install **segment-lidar** using `PyPI `__ or from source. 5 | 6 | Step 1: Create an environment 7 | ----------------------------- 8 | 9 | Before installing **segment-lidar**, you need to create an environment by 10 | running the following commands: 11 | 12 | .. code:: bash 13 | 14 | conda create -n samlidar python=3.9 15 | conda activate samlidar 16 | 17 | This command will create a new Conda environment named **samlidar**. We recommend using **Python 3.9**, but feel free to test with other versions. 18 | 19 | Please note that using a Conda environment is not mandatory, but it is highly recommended. Alternatively, you can use `virtualenv `__. 20 | 21 | Step 2: Install PyTorch 22 | ----------------------- 23 | 24 | For the installation instructions and options, refer to the official PyTorch website: `PyTorch Get Started `__. 25 | 26 | .. note:: 27 | 28 | If you want to leverage GPU acceleration with PyTorch, 29 | make sure you have a CUDA-supported GPU and install the corresponding 30 | CUDA toolkit. Follow the instructions in the official CUDA installation guide: 31 | `NVIDIA CUDA Installation Guide `__. 32 | 33 | 34 | Step 3: Install segment-lidar 35 | ----------------------------- 36 | 37 | You can easily install **segment-lidar** from `PyPI `__ using the following command: 38 | 39 | .. code:: bash 40 | 41 | pip install segment-lidar 42 | 43 | Or, you can install it from source: 44 | 45 | .. code:: bash 46 | 47 | git clone https://github.com/Yarroudh/segment-lidar 48 | cd segment-lidar 49 | python setup.py install 50 | 51 | To make sure that **segment-lidar** is installed correctly, you can run the following command: 52 | 53 | .. code:: bash 54 | 55 | python -c "import segment_lidar; print(segment_lidar.__version__)" 56 | 57 | If the installation is successful, you should see the version that you have installed. -------------------------------------------------------------------------------- /docs/_sources/license.rst.txt: -------------------------------------------------------------------------------- 1 | License 2 | ======= 3 | 4 | BSD 3-Clause License 5 | 6 | Copyright (c) 2023, University of Liège - Author: Anass Yarroudh, Geomatics Unit of ULiege 7 | 8 | Redistribution and use in source and binary forms, with or without 9 | modification, are permitted provided that the following conditions are met: 10 | 11 | 1. Redistributions of source code must retain the above copyright notice, this 12 | list of conditions and the following disclaimer. 13 | 14 | 2. Redistributions in binary form must reproduce the above copyright notice, 15 | this list of conditions and the following disclaimer in the documentation 16 | and/or other materials provided with the distribution. 17 | 18 | 3. 
Neither the name of the copyright holder nor the names of its 19 | contributors may be used to endorse or promote products derived from 20 | this software without specific prior written permission. 21 | 22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 23 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 24 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 25 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 26 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 27 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 28 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 29 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 30 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 31 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- /docs/_sources/module.rst.txt: -------------------------------------------------------------------------------- 1 | API 2 | ====== 3 | 4 | .. automodule:: segment_lidar.samlidar 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | .. automodule:: segment_lidar.view 10 | :members: 11 | :undoc-members: 12 | :show-inheritance: 13 | -------------------------------------------------------------------------------- /docs/_sources/tutorial.rst.txt: -------------------------------------------------------------------------------- 1 | Basic tutorial 2 | ============== 3 | 4 | 5 | In this tutorial, we will learn how to use the `segment_lidar` module for 6 | automatic unsupervised instance segmentation of LiDAR data. 7 | 8 | Prerequisites 9 | ------------- 10 | 11 | Before getting started, make sure you have the following: 12 | 13 | 1. Python installed on your system. 14 | 2. The `segment_lidar` module installed. You can install it using pip: 15 | 16 | .. code-block:: bash 17 | 18 | pip install segment-lidar 19 | 20 | For more information on how to install the module, please refer to the :doc:`installation` page. 21 | 22 | 23 | Sample data 24 | ------------ 25 | 26 | For testing purposes, you can download sample data here: `pointcloud.las <https://drive.google.com/file/d/16EF2aRSvo8u0pXvwtaQ6sjhP5h0sWw3o/view?usp=sharing>`__. 27 | This data was retrieved from **AHN-4**. For more data, please visit `AHN-Viewer `__. 28 | 29 | 30 | Model checkpoints 31 | ----------------- 32 | 33 | Click the links below to download the checkpoint for the corresponding Segment-Anything model (SAM) type. 34 | 35 | - `default` or `vit_h`: `ViT-H SAM model <https://dl.fbaipublicfiles.com/segment_anything/sam_vit_h_4b8939.pth>`__. 36 | - `vit_l`: `ViT-L SAM model <https://dl.fbaipublicfiles.com/segment_anything/sam_vit_l_0b3195.pth>`__. 37 | - `vit_b`: `ViT-B SAM model <https://dl.fbaipublicfiles.com/segment_anything/sam_vit_b_01ec64.pth>`__. 38 | 39 | 40 | Basic usage 41 | ----------- 42 | 43 | 1. Import the necessary modules: 44 | 45 | .. code-block:: python 46 | 47 | from segment_lidar import samlidar, view 48 | 49 | 2. Define the viewpoint using the **view** module. You can choose between the following: 50 | 51 | - `TopView`: Top view of the point cloud. 52 | - `PinholeView`: Pinhole camera view of the point cloud, defined by its intrinsic and extrinsic parameters. 53 | 54 | For example, to define a top view, you can do the following: 55 | 56 | .. code-block:: python 57 | 58 | viewpoint = view.TopView() 59 | 60 | The pinhole view can be defined either by providing the intrinsic and extrinsic parameters: 61 | 
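As an illustration, placeholder values for `K`, `R` and `T` could be created with NumPy before being passed to `PinholeView` (the numbers below are arbitrary stand-ins, not calibrated parameters, and passing plain NumPy arrays is an assumption here):

.. code-block:: python

    import numpy as np

    # Hypothetical 3x3 intrinsic matrix: focal lengths and principal point in pixels
    K = np.array([[1000.0, 0.0, 960.0],
                  [0.0, 1000.0, 540.0],
                  [0.0, 0.0, 1.0]])

    # Placeholder extrinsics: identity rotation and zero translation
    R = np.eye(3)
    T = np.zeros((3, 1))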
62 | .. code-block:: python 63 | 64 | viewpoint = view.PinholeView(intrinsic=K, rotation=R, translation=T) 65 | 66 | K is a 3x3 intrinsic matrix, R is a 3x3 rotation matrix and T is a 3x1 translation vector. 67 | 68 | or by using the interactive mode: 69 | 70 | .. code-block:: python 71 | 72 | viewpoint = view.PinholeView(interactive=True) 73 | 74 | 3. Create an instance of the SamLidar class and specify the path to the checkpoint 75 | file **ckpt_path** when instantiating the class: 76 | 77 | .. code-block:: python 78 | 79 | model = samlidar.SamLidar(ckpt_path="sam_vit_h_4b8939.pth") 80 | 81 | 4. Read the point cloud data from a **.las/.laz** file using the read method of the 82 | SamLidar instance. Provide the path to the point cloud file `pointcloud.las` as an argument: 83 | 84 | .. code-block:: python 85 | 86 | points = model.read("pointcloud.las") 87 | 88 | 5. Apply the Cloth Simulation Filter (CSF) algorithm for ground filtering using the **csf** 89 | method of the SamLidar instance. This method returns the filtered point cloud `cloud`, 90 | the non-ground `non_ground` and the ground `ground` indices: 91 | 92 | .. code-block:: python 93 | 94 | cloud, non_ground, ground = model.csf(points, class_threshold=0.1) 95 | 96 | 6. Perform segmentation using the **segment** method of the SamLidar instance. This 97 | method requires the filtered point cloud `cloud` as input, and you can optionally provide 98 | an image path `image_path` and labels path `labels_path` to save the segmentation 99 | results as an image and labels, respectively. The segment method returns the segmentation 100 | labels `labels`: 101 | 102 | .. code-block:: python 103 | 104 | labels, *_ = model.segment(points=cloud, view=viewpoint, image_path="raster.tif", labels_path="labeled.tif") 105 | 106 | 7. Save the results to a **.las/.laz** file using the **write** method of the SamLidar instance: 107 | 108 | .. code-block:: python 109 | 110 | model.write(points=points, non_ground=non_ground, ground=ground, segment_ids=labels, save_path="segmented.las") 111 | 112 | Now, the entire code should look like this: 113 | 114 | .. code-block:: python 115 | 116 | from segment_lidar import samlidar, view 117 | 118 | # Define viewpoint 119 | viewpoint = view.TopView() 120 | 121 | # Create SamLidar instance 122 | model = samlidar.SamLidar(ckpt_path="sam_vit_h_4b8939.pth") 123 | 124 | # Load point cloud 125 | points = model.read("pointcloud.las") 126 | 127 | # Apply CSF 128 | cloud, non_ground, ground = model.csf(points) 129 | 130 | # Segment the point cloud 131 | labels, *_ = model.segment(points=cloud, view=viewpoint, image_path="raster.tif", labels_path="labeled.tif") 132 | 133 | # Save results 134 | model.write(points=points, non_ground=non_ground, ground=ground, segment_ids=labels, save_path="segmented.las") 135 | 136 | 8. The resulting point cloud contains a new scalar field called `segment_id`. 137 | For visualization and further processing, we recommend using `CloudCompare `__. 138 | 139 | The following figure shows the results of the segmentation on the sample data from AHN-4: 140 | 141 | .. image:: _static/results.png 142 | :width: 100% 143 | :align: center 144 | :alt: Segmented point cloud 145 | 146 | Interactive mode 147 | ---------------- 148 | 149 | The interactive mode allows you to define the viewpoint interactively using a GUI. 150 | 151 | .. code-block:: python 152 | 153 | viewpoint = view.PinholeView(interactive=True) 154 | 155 | .. 
image:: _static/interactive.png 156 | :width: 100% 157 | :align: center 158 | :alt: Interactive mode 159 | 160 | You can rotate, move and zoom the camera using the mouse (please refer to `Open3D documentation `_ for more details). 161 | 162 | Once you are done, press **p** to save the image and the camera parameters, than **esc** to quit the interactive mode. 163 | 164 | Example: 165 | 166 | .. code-block:: python 167 | 168 | import os 169 | from segment_lidar import samlidar, view 170 | 171 | viewpoint = view.PinholeView(interactive=True) 172 | 173 | model = samlidar.SamLidar(ckpt_path='sam_vit_h_4b8939.pth', 174 | device='cuda:0', 175 | algorithm='segment-anything') 176 | 177 | model.mask.min_mask_region_area = 200 178 | model.mask.points_per_side = 5 179 | model.mask.pred_iou_thresh = 0.60 180 | model.mask.stability_score_thresh = 0.85 181 | 182 | points = model.read('laundry.las') 183 | 184 | os.makedirs("results/", exist_ok=True) 185 | 186 | labels, *_ = model.segment(points=points, 187 | view=viewpoint, 188 | image_path="results/raster.tif", 189 | labels_path="results/labeled.tif") 190 | 191 | model.write(points=points, segment_ids=labels, save_path="results/segmented.las") 192 | 193 | 194 | Configuration 195 | ------------- 196 | 197 | The `segment_lidar` module provides a set of parameters that can be used to configure 198 | the segmentation process. These parameters are passed to the `SamLidar` class as arguments 199 | when instantiating the class. The following table shows the parameters and their default values: 200 | 201 | +------------------+----------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ 202 | | Parameter | Default value | Description | 203 | +==================+======================+=====================================================================================================================================================================================================================================================================================================================================================+ 204 | | algorithm | "segment-geospatial" | Algorithm to use for segmentation. Possible values are: "segment-geospatial", "segment-anything". | 205 | +------------------+----------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ 206 | | ckpt_path | None | Path to the checkpoint file. | 207 | +------------------+----------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ 208 | | device | "cuda:0" | Device to use for inference. 
| 209 | +------------------+----------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ 210 | | model_type | "vit_h" | Type of the SAM model. Possible values are: "vit_h", "vit_l", "vit_b". | 211 | +------------------+----------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ 212 | | resolution | 0.25 | The resolution value of the created image raster. | 213 | +------------------+----------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ 214 | | sam_kwargs | False | Whether to use the SAM kwargs when using "segment-geospatial" as algorithm | 215 | +------------------+----------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ 216 | 217 | Here is an example of how to configure the parameters: 218 | 219 | .. code-block:: python 220 | 221 | model = samlidar.SamLidar(ckpt_path="sam_vit_h_4b8939.pth", 222 | algorithm="segment-geo-spatial", 223 | model_type="vit_h", 224 | resolution=0.5, 225 | sam_kwargs=True) 226 | 227 | Additionally, the parameters of `segment-anything` can be configured as follows: 228 | 229 | .. code-block:: python 230 | 231 | model.mask.crop_n_layers = 1 232 | model.mask.crop_n_points_downscale_factor = 2 233 | model.mask.min_mask_region_area = 500 234 | model.mask.points_per_side = 10 235 | model.mask.pred_iou_thresh = 0.90 236 | model.mask.stability_score_thresh = 0.92 237 | 238 | Please, refer to the `segment-anything `__ repository for more details about these parameters. 239 | See the complete arguments list of the `SamLidar` class :doc:`here `. -------------------------------------------------------------------------------- /docs/_static/_sphinx_javascript_frameworks_compat.js: -------------------------------------------------------------------------------- 1 | /* Compatability shim for jQuery and underscores.js. 
2 | * 3 | * Copyright Sphinx contributors 4 | * Released under the two clause BSD licence 5 | */ 6 | 7 | /** 8 | * small helper function to urldecode strings 9 | * 10 | * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL 11 | */ 12 | jQuery.urldecode = function(x) { 13 | if (!x) { 14 | return x 15 | } 16 | return decodeURIComponent(x.replace(/\+/g, ' ')); 17 | }; 18 | 19 | /** 20 | * small helper function to urlencode strings 21 | */ 22 | jQuery.urlencode = encodeURIComponent; 23 | 24 | /** 25 | * This function returns the parsed url parameters of the 26 | * current request. Multiple values per key are supported, 27 | * it will always return arrays of strings for the value parts. 28 | */ 29 | jQuery.getQueryParameters = function(s) { 30 | if (typeof s === 'undefined') 31 | s = document.location.search; 32 | var parts = s.substr(s.indexOf('?') + 1).split('&'); 33 | var result = {}; 34 | for (var i = 0; i < parts.length; i++) { 35 | var tmp = parts[i].split('=', 2); 36 | var key = jQuery.urldecode(tmp[0]); 37 | var value = jQuery.urldecode(tmp[1]); 38 | if (key in result) 39 | result[key].push(value); 40 | else 41 | result[key] = [value]; 42 | } 43 | return result; 44 | }; 45 | 46 | /** 47 | * highlight a given string on a jquery object by wrapping it in 48 | * span elements with the given class name. 49 | */ 50 | jQuery.fn.highlightText = function(text, className) { 51 | function highlight(node, addItems) { 52 | if (node.nodeType === 3) { 53 | var val = node.nodeValue; 54 | var pos = val.toLowerCase().indexOf(text); 55 | if (pos >= 0 && 56 | !jQuery(node.parentNode).hasClass(className) && 57 | !jQuery(node.parentNode).hasClass("nohighlight")) { 58 | var span; 59 | var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); 60 | if (isInSVG) { 61 | span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); 62 | } else { 63 | span = document.createElement("span"); 64 | span.className = className; 65 | } 66 | span.appendChild(document.createTextNode(val.substr(pos, text.length))); 67 | node.parentNode.insertBefore(span, node.parentNode.insertBefore( 68 | document.createTextNode(val.substr(pos + text.length)), 69 | node.nextSibling)); 70 | node.nodeValue = val.substr(0, pos); 71 | if (isInSVG) { 72 | var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); 73 | var bbox = node.parentElement.getBBox(); 74 | rect.x.baseVal.value = bbox.x; 75 | rect.y.baseVal.value = bbox.y; 76 | rect.width.baseVal.value = bbox.width; 77 | rect.height.baseVal.value = bbox.height; 78 | rect.setAttribute('class', className); 79 | addItems.push({ 80 | "parent": node.parentNode, 81 | "target": rect}); 82 | } 83 | } 84 | } 85 | else if (!jQuery(node).is("button, select, textarea")) { 86 | jQuery.each(node.childNodes, function() { 87 | highlight(this, addItems); 88 | }); 89 | } 90 | } 91 | var addItems = []; 92 | var result = this.each(function() { 93 | highlight(this, addItems); 94 | }); 95 | for (var i = 0; i < addItems.length; ++i) { 96 | jQuery(addItems[i].parent).before(addItems[i].target); 97 | } 98 | return result; 99 | }; 100 | 101 | /* 102 | * backward compatibility for jQuery.browser 103 | * This will be supported until firefox bug is fixed. 
104 | */ 105 | if (!jQuery.browser) { 106 | jQuery.uaMatch = function(ua) { 107 | ua = ua.toLowerCase(); 108 | 109 | var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || 110 | /(webkit)[ \/]([\w.]+)/.exec(ua) || 111 | /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || 112 | /(msie) ([\w.]+)/.exec(ua) || 113 | ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || 114 | []; 115 | 116 | return { 117 | browser: match[ 1 ] || "", 118 | version: match[ 2 ] || "0" 119 | }; 120 | }; 121 | jQuery.browser = {}; 122 | jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; 123 | } 124 | -------------------------------------------------------------------------------- /docs/_static/basic.css: -------------------------------------------------------------------------------- 1 | /* 2 | * basic.css 3 | * ~~~~~~~~~ 4 | * 5 | * Sphinx stylesheet -- basic theme. 6 | * 7 | * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 9 | * 10 | */ 11 | 12 | /* -- main layout ----------------------------------------------------------- */ 13 | 14 | div.clearer { 15 | clear: both; 16 | } 17 | 18 | div.section::after { 19 | display: block; 20 | content: ''; 21 | clear: left; 22 | } 23 | 24 | /* -- relbar ---------------------------------------------------------------- */ 25 | 26 | div.related { 27 | width: 100%; 28 | font-size: 90%; 29 | } 30 | 31 | div.related h3 { 32 | display: none; 33 | } 34 | 35 | div.related ul { 36 | margin: 0; 37 | padding: 0 0 0 10px; 38 | list-style: none; 39 | } 40 | 41 | div.related li { 42 | display: inline; 43 | } 44 | 45 | div.related li.right { 46 | float: right; 47 | margin-right: 5px; 48 | } 49 | 50 | /* -- sidebar --------------------------------------------------------------- */ 51 | 52 | div.sphinxsidebarwrapper { 53 | padding: 10px 5px 0 10px; 54 | } 55 | 56 | div.sphinxsidebar { 57 | float: left; 58 | width: 230px; 59 | margin-left: -100%; 60 | font-size: 90%; 61 | word-wrap: break-word; 62 | overflow-wrap : break-word; 63 | } 64 | 65 | div.sphinxsidebar ul { 66 | list-style: none; 67 | } 68 | 69 | div.sphinxsidebar ul ul, 70 | div.sphinxsidebar ul.want-points { 71 | margin-left: 20px; 72 | list-style: square; 73 | } 74 | 75 | div.sphinxsidebar ul ul { 76 | margin-top: 0; 77 | margin-bottom: 0; 78 | } 79 | 80 | div.sphinxsidebar form { 81 | margin-top: 10px; 82 | } 83 | 84 | div.sphinxsidebar input { 85 | border: 1px solid #98dbcc; 86 | font-family: sans-serif; 87 | font-size: 1em; 88 | } 89 | 90 | div.sphinxsidebar #searchbox form.search { 91 | overflow: hidden; 92 | } 93 | 94 | div.sphinxsidebar #searchbox input[type="text"] { 95 | float: left; 96 | width: 80%; 97 | padding: 0.25em; 98 | box-sizing: border-box; 99 | } 100 | 101 | div.sphinxsidebar #searchbox input[type="submit"] { 102 | float: left; 103 | width: 20%; 104 | border-left: none; 105 | padding: 0.25em; 106 | box-sizing: border-box; 107 | } 108 | 109 | 110 | img { 111 | border: 0; 112 | max-width: 100%; 113 | } 114 | 115 | /* -- search page ----------------------------------------------------------- */ 116 | 117 | ul.search { 118 | margin: 10px 0 0 20px; 119 | padding: 0; 120 | } 121 | 122 | ul.search li { 123 | padding: 5px 0 5px 20px; 124 | background-image: url(file.png); 125 | background-repeat: no-repeat; 126 | background-position: 0 7px; 127 | } 128 | 129 | ul.search li a { 130 | font-weight: bold; 131 | } 132 | 133 | ul.search li p.context { 134 | color: #888; 135 | margin: 2px 0 0 30px; 136 | text-align: left; 137 | } 138 | 139 
| ul.keywordmatches li.goodmatch a { 140 | font-weight: bold; 141 | } 142 | 143 | /* -- index page ------------------------------------------------------------ */ 144 | 145 | table.contentstable { 146 | width: 90%; 147 | margin-left: auto; 148 | margin-right: auto; 149 | } 150 | 151 | table.contentstable p.biglink { 152 | line-height: 150%; 153 | } 154 | 155 | a.biglink { 156 | font-size: 1.3em; 157 | } 158 | 159 | span.linkdescr { 160 | font-style: italic; 161 | padding-top: 5px; 162 | font-size: 90%; 163 | } 164 | 165 | /* -- general index --------------------------------------------------------- */ 166 | 167 | table.indextable { 168 | width: 100%; 169 | } 170 | 171 | table.indextable td { 172 | text-align: left; 173 | vertical-align: top; 174 | } 175 | 176 | table.indextable ul { 177 | margin-top: 0; 178 | margin-bottom: 0; 179 | list-style-type: none; 180 | } 181 | 182 | table.indextable > tbody > tr > td > ul { 183 | padding-left: 0em; 184 | } 185 | 186 | table.indextable tr.pcap { 187 | height: 10px; 188 | } 189 | 190 | table.indextable tr.cap { 191 | margin-top: 10px; 192 | background-color: #f2f2f2; 193 | } 194 | 195 | img.toggler { 196 | margin-right: 3px; 197 | margin-top: 3px; 198 | cursor: pointer; 199 | } 200 | 201 | div.modindex-jumpbox { 202 | border-top: 1px solid #ddd; 203 | border-bottom: 1px solid #ddd; 204 | margin: 1em 0 1em 0; 205 | padding: 0.4em; 206 | } 207 | 208 | div.genindex-jumpbox { 209 | border-top: 1px solid #ddd; 210 | border-bottom: 1px solid #ddd; 211 | margin: 1em 0 1em 0; 212 | padding: 0.4em; 213 | } 214 | 215 | /* -- domain module index --------------------------------------------------- */ 216 | 217 | table.modindextable td { 218 | padding: 2px; 219 | border-collapse: collapse; 220 | } 221 | 222 | /* -- general body styles --------------------------------------------------- */ 223 | 224 | div.body { 225 | min-width: 360px; 226 | max-width: 800px; 227 | } 228 | 229 | div.body p, div.body dd, div.body li, div.body blockquote { 230 | -moz-hyphens: auto; 231 | -ms-hyphens: auto; 232 | -webkit-hyphens: auto; 233 | hyphens: auto; 234 | } 235 | 236 | a.headerlink { 237 | visibility: hidden; 238 | } 239 | 240 | a:visited { 241 | color: #551A8B; 242 | } 243 | 244 | h1:hover > a.headerlink, 245 | h2:hover > a.headerlink, 246 | h3:hover > a.headerlink, 247 | h4:hover > a.headerlink, 248 | h5:hover > a.headerlink, 249 | h6:hover > a.headerlink, 250 | dt:hover > a.headerlink, 251 | caption:hover > a.headerlink, 252 | p.caption:hover > a.headerlink, 253 | div.code-block-caption:hover > a.headerlink { 254 | visibility: visible; 255 | } 256 | 257 | div.body p.caption { 258 | text-align: inherit; 259 | } 260 | 261 | div.body td { 262 | text-align: left; 263 | } 264 | 265 | .first { 266 | margin-top: 0 !important; 267 | } 268 | 269 | p.rubric { 270 | margin-top: 30px; 271 | font-weight: bold; 272 | } 273 | 274 | img.align-left, figure.align-left, .figure.align-left, object.align-left { 275 | clear: left; 276 | float: left; 277 | margin-right: 1em; 278 | } 279 | 280 | img.align-right, figure.align-right, .figure.align-right, object.align-right { 281 | clear: right; 282 | float: right; 283 | margin-left: 1em; 284 | } 285 | 286 | img.align-center, figure.align-center, .figure.align-center, object.align-center { 287 | display: block; 288 | margin-left: auto; 289 | margin-right: auto; 290 | } 291 | 292 | img.align-default, figure.align-default, .figure.align-default { 293 | display: block; 294 | margin-left: auto; 295 | margin-right: auto; 296 | } 297 | 298 | .align-left { 
299 | text-align: left; 300 | } 301 | 302 | .align-center { 303 | text-align: center; 304 | } 305 | 306 | .align-default { 307 | text-align: center; 308 | } 309 | 310 | .align-right { 311 | text-align: right; 312 | } 313 | 314 | /* -- sidebars -------------------------------------------------------------- */ 315 | 316 | div.sidebar, 317 | aside.sidebar { 318 | margin: 0 0 0.5em 1em; 319 | border: 1px solid #ddb; 320 | padding: 7px; 321 | background-color: #ffe; 322 | width: 40%; 323 | float: right; 324 | clear: right; 325 | overflow-x: auto; 326 | } 327 | 328 | p.sidebar-title { 329 | font-weight: bold; 330 | } 331 | 332 | nav.contents, 333 | aside.topic, 334 | div.admonition, div.topic, blockquote { 335 | clear: left; 336 | } 337 | 338 | /* -- topics ---------------------------------------------------------------- */ 339 | 340 | nav.contents, 341 | aside.topic, 342 | div.topic { 343 | border: 1px solid #ccc; 344 | padding: 7px; 345 | margin: 10px 0 10px 0; 346 | } 347 | 348 | p.topic-title { 349 | font-size: 1.1em; 350 | font-weight: bold; 351 | margin-top: 10px; 352 | } 353 | 354 | /* -- admonitions ----------------------------------------------------------- */ 355 | 356 | div.admonition { 357 | margin-top: 10px; 358 | margin-bottom: 10px; 359 | padding: 7px; 360 | } 361 | 362 | div.admonition dt { 363 | font-weight: bold; 364 | } 365 | 366 | p.admonition-title { 367 | margin: 0px 10px 5px 0px; 368 | font-weight: bold; 369 | } 370 | 371 | div.body p.centered { 372 | text-align: center; 373 | margin-top: 25px; 374 | } 375 | 376 | /* -- content of sidebars/topics/admonitions -------------------------------- */ 377 | 378 | div.sidebar > :last-child, 379 | aside.sidebar > :last-child, 380 | nav.contents > :last-child, 381 | aside.topic > :last-child, 382 | div.topic > :last-child, 383 | div.admonition > :last-child { 384 | margin-bottom: 0; 385 | } 386 | 387 | div.sidebar::after, 388 | aside.sidebar::after, 389 | nav.contents::after, 390 | aside.topic::after, 391 | div.topic::after, 392 | div.admonition::after, 393 | blockquote::after { 394 | display: block; 395 | content: ''; 396 | clear: both; 397 | } 398 | 399 | /* -- tables ---------------------------------------------------------------- */ 400 | 401 | table.docutils { 402 | margin-top: 10px; 403 | margin-bottom: 10px; 404 | border: 0; 405 | border-collapse: collapse; 406 | } 407 | 408 | table.align-center { 409 | margin-left: auto; 410 | margin-right: auto; 411 | } 412 | 413 | table.align-default { 414 | margin-left: auto; 415 | margin-right: auto; 416 | } 417 | 418 | table caption span.caption-number { 419 | font-style: italic; 420 | } 421 | 422 | table caption span.caption-text { 423 | } 424 | 425 | table.docutils td, table.docutils th { 426 | padding: 1px 8px 1px 5px; 427 | border-top: 0; 428 | border-left: 0; 429 | border-right: 0; 430 | border-bottom: 1px solid #aaa; 431 | } 432 | 433 | th { 434 | text-align: left; 435 | padding-right: 5px; 436 | } 437 | 438 | table.citation { 439 | border-left: solid 1px gray; 440 | margin-left: 1px; 441 | } 442 | 443 | table.citation td { 444 | border-bottom: none; 445 | } 446 | 447 | th > :first-child, 448 | td > :first-child { 449 | margin-top: 0px; 450 | } 451 | 452 | th > :last-child, 453 | td > :last-child { 454 | margin-bottom: 0px; 455 | } 456 | 457 | /* -- figures --------------------------------------------------------------- */ 458 | 459 | div.figure, figure { 460 | margin: 0.5em; 461 | padding: 0.5em; 462 | } 463 | 464 | div.figure p.caption, figcaption { 465 | padding: 0.3em; 466 | } 
467 | 468 | div.figure p.caption span.caption-number, 469 | figcaption span.caption-number { 470 | font-style: italic; 471 | } 472 | 473 | div.figure p.caption span.caption-text, 474 | figcaption span.caption-text { 475 | } 476 | 477 | /* -- field list styles ----------------------------------------------------- */ 478 | 479 | table.field-list td, table.field-list th { 480 | border: 0 !important; 481 | } 482 | 483 | .field-list ul { 484 | margin: 0; 485 | padding-left: 1em; 486 | } 487 | 488 | .field-list p { 489 | margin: 0; 490 | } 491 | 492 | .field-name { 493 | -moz-hyphens: manual; 494 | -ms-hyphens: manual; 495 | -webkit-hyphens: manual; 496 | hyphens: manual; 497 | } 498 | 499 | /* -- hlist styles ---------------------------------------------------------- */ 500 | 501 | table.hlist { 502 | margin: 1em 0; 503 | } 504 | 505 | table.hlist td { 506 | vertical-align: top; 507 | } 508 | 509 | /* -- object description styles --------------------------------------------- */ 510 | 511 | .sig { 512 | font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; 513 | } 514 | 515 | .sig-name, code.descname { 516 | background-color: transparent; 517 | font-weight: bold; 518 | } 519 | 520 | .sig-name { 521 | font-size: 1.1em; 522 | } 523 | 524 | code.descname { 525 | font-size: 1.2em; 526 | } 527 | 528 | .sig-prename, code.descclassname { 529 | background-color: transparent; 530 | } 531 | 532 | .optional { 533 | font-size: 1.3em; 534 | } 535 | 536 | .sig-paren { 537 | font-size: larger; 538 | } 539 | 540 | .sig-param.n { 541 | font-style: italic; 542 | } 543 | 544 | /* C++ specific styling */ 545 | 546 | .sig-inline.c-texpr, 547 | .sig-inline.cpp-texpr { 548 | font-family: unset; 549 | } 550 | 551 | .sig.c .k, .sig.c .kt, 552 | .sig.cpp .k, .sig.cpp .kt { 553 | color: #0033B3; 554 | } 555 | 556 | .sig.c .m, 557 | .sig.cpp .m { 558 | color: #1750EB; 559 | } 560 | 561 | .sig.c .s, .sig.c .sc, 562 | .sig.cpp .s, .sig.cpp .sc { 563 | color: #067D17; 564 | } 565 | 566 | 567 | /* -- other body styles ----------------------------------------------------- */ 568 | 569 | ol.arabic { 570 | list-style: decimal; 571 | } 572 | 573 | ol.loweralpha { 574 | list-style: lower-alpha; 575 | } 576 | 577 | ol.upperalpha { 578 | list-style: upper-alpha; 579 | } 580 | 581 | ol.lowerroman { 582 | list-style: lower-roman; 583 | } 584 | 585 | ol.upperroman { 586 | list-style: upper-roman; 587 | } 588 | 589 | :not(li) > ol > li:first-child > :first-child, 590 | :not(li) > ul > li:first-child > :first-child { 591 | margin-top: 0px; 592 | } 593 | 594 | :not(li) > ol > li:last-child > :last-child, 595 | :not(li) > ul > li:last-child > :last-child { 596 | margin-bottom: 0px; 597 | } 598 | 599 | ol.simple ol p, 600 | ol.simple ul p, 601 | ul.simple ol p, 602 | ul.simple ul p { 603 | margin-top: 0; 604 | } 605 | 606 | ol.simple > li:not(:first-child) > p, 607 | ul.simple > li:not(:first-child) > p { 608 | margin-top: 0; 609 | } 610 | 611 | ol.simple p, 612 | ul.simple p { 613 | margin-bottom: 0; 614 | } 615 | 616 | aside.footnote > span, 617 | div.citation > span { 618 | float: left; 619 | } 620 | aside.footnote > span:last-of-type, 621 | div.citation > span:last-of-type { 622 | padding-right: 0.5em; 623 | } 624 | aside.footnote > p { 625 | margin-left: 2em; 626 | } 627 | div.citation > p { 628 | margin-left: 4em; 629 | } 630 | aside.footnote > p:last-of-type, 631 | div.citation > p:last-of-type { 632 | margin-bottom: 0em; 633 | } 634 | aside.footnote > p:last-of-type:after, 635 | div.citation > 
p:last-of-type:after { 636 | content: ""; 637 | clear: both; 638 | } 639 | 640 | dl.field-list { 641 | display: grid; 642 | grid-template-columns: fit-content(30%) auto; 643 | } 644 | 645 | dl.field-list > dt { 646 | font-weight: bold; 647 | word-break: break-word; 648 | padding-left: 0.5em; 649 | padding-right: 5px; 650 | } 651 | 652 | dl.field-list > dd { 653 | padding-left: 0.5em; 654 | margin-top: 0em; 655 | margin-left: 0em; 656 | margin-bottom: 0em; 657 | } 658 | 659 | dl { 660 | margin-bottom: 15px; 661 | } 662 | 663 | dd > :first-child { 664 | margin-top: 0px; 665 | } 666 | 667 | dd ul, dd table { 668 | margin-bottom: 10px; 669 | } 670 | 671 | dd { 672 | margin-top: 3px; 673 | margin-bottom: 10px; 674 | margin-left: 30px; 675 | } 676 | 677 | .sig dd { 678 | margin-top: 0px; 679 | margin-bottom: 0px; 680 | } 681 | 682 | .sig dl { 683 | margin-top: 0px; 684 | margin-bottom: 0px; 685 | } 686 | 687 | dl > dd:last-child, 688 | dl > dd:last-child > :last-child { 689 | margin-bottom: 0; 690 | } 691 | 692 | dt:target, span.highlighted { 693 | background-color: #fbe54e; 694 | } 695 | 696 | rect.highlighted { 697 | fill: #fbe54e; 698 | } 699 | 700 | dl.glossary dt { 701 | font-weight: bold; 702 | font-size: 1.1em; 703 | } 704 | 705 | .versionmodified { 706 | font-style: italic; 707 | } 708 | 709 | .system-message { 710 | background-color: #fda; 711 | padding: 5px; 712 | border: 3px solid red; 713 | } 714 | 715 | .footnote:target { 716 | background-color: #ffa; 717 | } 718 | 719 | .line-block { 720 | display: block; 721 | margin-top: 1em; 722 | margin-bottom: 1em; 723 | } 724 | 725 | .line-block .line-block { 726 | margin-top: 0; 727 | margin-bottom: 0; 728 | margin-left: 1.5em; 729 | } 730 | 731 | .guilabel, .menuselection { 732 | font-family: sans-serif; 733 | } 734 | 735 | .accelerator { 736 | text-decoration: underline; 737 | } 738 | 739 | .classifier { 740 | font-style: oblique; 741 | } 742 | 743 | .classifier:before { 744 | font-style: normal; 745 | margin: 0 0.5em; 746 | content: ":"; 747 | display: inline-block; 748 | } 749 | 750 | abbr, acronym { 751 | border-bottom: dotted 1px; 752 | cursor: help; 753 | } 754 | 755 | .translated { 756 | background-color: rgba(207, 255, 207, 0.2) 757 | } 758 | 759 | .untranslated { 760 | background-color: rgba(255, 207, 207, 0.2) 761 | } 762 | 763 | /* -- code displays --------------------------------------------------------- */ 764 | 765 | pre { 766 | overflow: auto; 767 | overflow-y: hidden; /* fixes display issues on Chrome browsers */ 768 | } 769 | 770 | pre, div[class*="highlight-"] { 771 | clear: both; 772 | } 773 | 774 | span.pre { 775 | -moz-hyphens: none; 776 | -ms-hyphens: none; 777 | -webkit-hyphens: none; 778 | hyphens: none; 779 | white-space: nowrap; 780 | } 781 | 782 | div[class*="highlight-"] { 783 | margin: 1em 0; 784 | } 785 | 786 | td.linenos pre { 787 | border: 0; 788 | background-color: transparent; 789 | color: #aaa; 790 | } 791 | 792 | table.highlighttable { 793 | display: block; 794 | } 795 | 796 | table.highlighttable tbody { 797 | display: block; 798 | } 799 | 800 | table.highlighttable tr { 801 | display: flex; 802 | } 803 | 804 | table.highlighttable td { 805 | margin: 0; 806 | padding: 0; 807 | } 808 | 809 | table.highlighttable td.linenos { 810 | padding-right: 0.5em; 811 | } 812 | 813 | table.highlighttable td.code { 814 | flex: 1; 815 | overflow: hidden; 816 | } 817 | 818 | .highlight .hll { 819 | display: block; 820 | } 821 | 822 | div.highlight pre, 823 | table.highlighttable pre { 824 | margin: 0; 825 | } 826 | 827 
| div.code-block-caption + div { 828 | margin-top: 0; 829 | } 830 | 831 | div.code-block-caption { 832 | margin-top: 1em; 833 | padding: 2px 5px; 834 | font-size: small; 835 | } 836 | 837 | div.code-block-caption code { 838 | background-color: transparent; 839 | } 840 | 841 | table.highlighttable td.linenos, 842 | span.linenos, 843 | div.highlight span.gp { /* gp: Generic.Prompt */ 844 | user-select: none; 845 | -webkit-user-select: text; /* Safari fallback only */ 846 | -webkit-user-select: none; /* Chrome/Safari */ 847 | -moz-user-select: none; /* Firefox */ 848 | -ms-user-select: none; /* IE10+ */ 849 | } 850 | 851 | div.code-block-caption span.caption-number { 852 | padding: 0.1em 0.3em; 853 | font-style: italic; 854 | } 855 | 856 | div.code-block-caption span.caption-text { 857 | } 858 | 859 | div.literal-block-wrapper { 860 | margin: 1em 0; 861 | } 862 | 863 | code.xref, a code { 864 | background-color: transparent; 865 | font-weight: bold; 866 | } 867 | 868 | h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { 869 | background-color: transparent; 870 | } 871 | 872 | .viewcode-link { 873 | float: right; 874 | } 875 | 876 | .viewcode-back { 877 | float: right; 878 | font-family: sans-serif; 879 | } 880 | 881 | div.viewcode-block:target { 882 | margin: -1px -10px; 883 | padding: 0 10px; 884 | } 885 | 886 | /* -- math display ---------------------------------------------------------- */ 887 | 888 | img.math { 889 | vertical-align: middle; 890 | } 891 | 892 | div.body div.math p { 893 | text-align: center; 894 | } 895 | 896 | span.eqno { 897 | float: right; 898 | } 899 | 900 | span.eqno a.headerlink { 901 | position: absolute; 902 | z-index: 1; 903 | } 904 | 905 | div.math:hover a.headerlink { 906 | visibility: visible; 907 | } 908 | 909 | /* -- printout stylesheet --------------------------------------------------- */ 910 | 911 | @media print { 912 | div.document, 913 | div.documentwrapper, 914 | div.bodywrapper { 915 | margin: 0 !important; 916 | width: 100%; 917 | } 918 | 919 | div.sphinxsidebar, 920 | div.related, 921 | div.footer, 922 | #top-link { 923 | display: none; 924 | } 925 | } -------------------------------------------------------------------------------- /docs/_static/css/badge_only.css: -------------------------------------------------------------------------------- 1 | .clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-style:normal;font-weight:400;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#FontAwesome) format("svg")}.fa:before{font-family:FontAwesome;font-style:normal;font-weight:400;line-height:1}.fa:before,a .fa{text-decoration:inherit}.fa:before,a .fa,li .fa{display:inline-block}li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-.8em}ul.fas li .fa{width:.8em}ul.fas li 
.fa-large:before{vertical-align:baseline}.fa-book:before,.icon-book:before{content:"\f02d"}.fa-caret-down:before,.icon-caret-down:before{content:"\f0d7"}.fa-caret-up:before,.icon-caret-up:before{content:"\f0d8"}.fa-caret-left:before,.icon-caret-left:before{content:"\f0d9"}.fa-caret-right:before,.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60}.rst-versions .rst-current-version:after{clear:both;content:"";display:block}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}} -------------------------------------------------------------------------------- /docs/_static/css/fonts/Roboto-Slab-Bold.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_static/css/fonts/Roboto-Slab-Bold.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/Roboto-Slab-Bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_static/css/fonts/Roboto-Slab-Bold.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/Roboto-Slab-Regular.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_static/css/fonts/Roboto-Slab-Regular.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/Roboto-Slab-Regular.woff2: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_static/css/fonts/Roboto-Slab-Regular.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/fontawesome-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_static/css/fonts/fontawesome-webfont.eot -------------------------------------------------------------------------------- /docs/_static/css/fonts/fontawesome-webfont.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_static/css/fonts/fontawesome-webfont.ttf -------------------------------------------------------------------------------- /docs/_static/css/fonts/fontawesome-webfont.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_static/css/fonts/fontawesome-webfont.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/fontawesome-webfont.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_static/css/fonts/fontawesome-webfont.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-bold-italic.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_static/css/fonts/lato-bold-italic.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-bold-italic.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_static/css/fonts/lato-bold-italic.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-bold.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_static/css/fonts/lato-bold.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_static/css/fonts/lato-bold.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-normal-italic.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_static/css/fonts/lato-normal-italic.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-normal-italic.woff2: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_static/css/fonts/lato-normal-italic.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-normal.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_static/css/fonts/lato-normal.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-normal.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_static/css/fonts/lato-normal.woff2 -------------------------------------------------------------------------------- /docs/_static/doctools.js: -------------------------------------------------------------------------------- 1 | /* 2 | * doctools.js 3 | * ~~~~~~~~~~~ 4 | * 5 | * Base JavaScript utilities for all Sphinx HTML documentation. 6 | * 7 | * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 9 | * 10 | */ 11 | "use strict"; 12 | 13 | const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ 14 | "TEXTAREA", 15 | "INPUT", 16 | "SELECT", 17 | "BUTTON", 18 | ]); 19 | 20 | const _ready = (callback) => { 21 | if (document.readyState !== "loading") { 22 | callback(); 23 | } else { 24 | document.addEventListener("DOMContentLoaded", callback); 25 | } 26 | }; 27 | 28 | /** 29 | * Small JavaScript module for the documentation. 30 | */ 31 | const Documentation = { 32 | init: () => { 33 | Documentation.initDomainIndexTable(); 34 | Documentation.initOnKeyListeners(); 35 | }, 36 | 37 | /** 38 | * i18n support 39 | */ 40 | TRANSLATIONS: {}, 41 | PLURAL_EXPR: (n) => (n === 1 ? 0 : 1), 42 | LOCALE: "unknown", 43 | 44 | // gettext and ngettext don't access this so that the functions 45 | // can safely bound to a different name (_ = Documentation.gettext) 46 | gettext: (string) => { 47 | const translated = Documentation.TRANSLATIONS[string]; 48 | switch (typeof translated) { 49 | case "undefined": 50 | return string; // no translation 51 | case "string": 52 | return translated; // translation exists 53 | default: 54 | return translated[0]; // (singular, plural) translation tuple exists 55 | } 56 | }, 57 | 58 | ngettext: (singular, plural, n) => { 59 | const translated = Documentation.TRANSLATIONS[singular]; 60 | if (typeof translated !== "undefined") 61 | return translated[Documentation.PLURAL_EXPR(n)]; 62 | return n === 1 ? 
singular : plural; 63 | }, 64 | 65 | addTranslations: (catalog) => { 66 | Object.assign(Documentation.TRANSLATIONS, catalog.messages); 67 | Documentation.PLURAL_EXPR = new Function( 68 | "n", 69 | `return (${catalog.plural_expr})` 70 | ); 71 | Documentation.LOCALE = catalog.locale; 72 | }, 73 | 74 | /** 75 | * helper function to focus on search bar 76 | */ 77 | focusSearchBar: () => { 78 | document.querySelectorAll("input[name=q]")[0]?.focus(); 79 | }, 80 | 81 | /** 82 | * Initialise the domain index toggle buttons 83 | */ 84 | initDomainIndexTable: () => { 85 | const toggler = (el) => { 86 | const idNumber = el.id.substr(7); 87 | const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); 88 | if (el.src.substr(-9) === "minus.png") { 89 | el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; 90 | toggledRows.forEach((el) => (el.style.display = "none")); 91 | } else { 92 | el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; 93 | toggledRows.forEach((el) => (el.style.display = "")); 94 | } 95 | }; 96 | 97 | const togglerElements = document.querySelectorAll("img.toggler"); 98 | togglerElements.forEach((el) => 99 | el.addEventListener("click", (event) => toggler(event.currentTarget)) 100 | ); 101 | togglerElements.forEach((el) => (el.style.display = "")); 102 | if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); 103 | }, 104 | 105 | initOnKeyListeners: () => { 106 | // only install a listener if it is really needed 107 | if ( 108 | !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && 109 | !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS 110 | ) 111 | return; 112 | 113 | document.addEventListener("keydown", (event) => { 114 | // bail for input elements 115 | if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; 116 | // bail with special keys 117 | if (event.altKey || event.ctrlKey || event.metaKey) return; 118 | 119 | if (!event.shiftKey) { 120 | switch (event.key) { 121 | case "ArrowLeft": 122 | if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; 123 | 124 | const prevLink = document.querySelector('link[rel="prev"]'); 125 | if (prevLink && prevLink.href) { 126 | window.location.href = prevLink.href; 127 | event.preventDefault(); 128 | } 129 | break; 130 | case "ArrowRight": 131 | if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; 132 | 133 | const nextLink = document.querySelector('link[rel="next"]'); 134 | if (nextLink && nextLink.href) { 135 | window.location.href = nextLink.href; 136 | event.preventDefault(); 137 | } 138 | break; 139 | } 140 | } 141 | 142 | // some keyboard layouts may need Shift to get / 143 | switch (event.key) { 144 | case "/": 145 | if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; 146 | Documentation.focusSearchBar(); 147 | event.preventDefault(); 148 | } 149 | }); 150 | }, 151 | }; 152 | 153 | // quick alias for translations 154 | const _ = Documentation.gettext; 155 | 156 | _ready(Documentation.init); 157 | -------------------------------------------------------------------------------- /docs/_static/documentation_options.js: -------------------------------------------------------------------------------- 1 | const DOCUMENTATION_OPTIONS = { 2 | VERSION: '', 3 | LANGUAGE: 'en', 4 | COLLAPSE_INDEX: false, 5 | BUILDER: 'html', 6 | FILE_SUFFIX: '.html', 7 | LINK_SUFFIX: '.html', 8 | HAS_SOURCE: true, 9 | SOURCELINK_SUFFIX: '.txt', 10 | NAVIGATION_WITH_KEYS: false, 11 | SHOW_SEARCH_SUMMARY: true, 12 | ENABLE_SEARCH_SHORTCUTS: true, 13 | }; 
-------------------------------------------------------------------------------- /docs/_static/file.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_static/file.png -------------------------------------------------------------------------------- /docs/_static/js/badge_only.js: -------------------------------------------------------------------------------- 1 | !function(e){var t={};function r(n){if(t[n])return t[n].exports;var o=t[n]={i:n,l:!1,exports:{}};return e[n].call(o.exports,o,o.exports,r),o.l=!0,o.exports}r.m=e,r.c=t,r.d=function(e,t,n){r.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:n})},r.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r.t=function(e,t){if(1&t&&(e=r(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(r.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)r.d(n,o,function(t){return e[t]}.bind(null,o));return n},r.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return r.d(t,"a",t),t},r.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},r.p="",r(r.s=4)}({4:function(e,t,r){}}); -------------------------------------------------------------------------------- /docs/_static/js/html5shiv-printshiv.min.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @preserve HTML5 Shiv 3.7.3-pre | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed 3 | */ 4 | !function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=y.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=y.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),y.elements=c+" "+a,j(b)}function f(a){var b=x[a[v]];return b||(b={},w++,a[v]=w,x[w]=b),b}function g(a,c,d){if(c||(c=b),q)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():u.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||t.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),q)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return y.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(y,b.frag)}function j(a){a||(a=b);var d=f(a);return!y.shivCSS||p||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),q||i(a,d),a}function k(a){for(var b,c=a.getElementsByTagName("*"),e=c.length,f=RegExp("^(?:"+d().join("|")+")$","i"),g=[];e--;)b=c[e],f.test(b.nodeName)&&g.push(b.applyElement(l(b)));return g}function l(a){for(var 
b,c=a.attributes,d=c.length,e=a.ownerDocument.createElement(A+":"+a.nodeName);d--;)b=c[d],b.specified&&e.setAttribute(b.nodeName,b.nodeValue);return e.style.cssText=a.style.cssText,e}function m(a){for(var b,c=a.split("{"),e=c.length,f=RegExp("(^|[\\s,>+~])("+d().join("|")+")(?=[[\\s,>+~#.:]|$)","gi"),g="$1"+A+"\\:$2";e--;)b=c[e]=c[e].split("}"),b[b.length-1]=b[b.length-1].replace(f,g),c[e]=b.join("}");return c.join("{")}function n(a){for(var b=a.length;b--;)a[b].removeNode()}function o(a){function b(){clearTimeout(g._removeSheetTimer),d&&d.removeNode(!0),d=null}var d,e,g=f(a),h=a.namespaces,i=a.parentWindow;return!B||a.printShived?a:("undefined"==typeof h[A]&&h.add(A),i.attachEvent("onbeforeprint",function(){b();for(var f,g,h,i=a.styleSheets,j=[],l=i.length,n=Array(l);l--;)n[l]=i[l];for(;h=n.pop();)if(!h.disabled&&z.test(h.media)){try{f=h.imports,g=f.length}catch(o){g=0}for(l=0;g>l;l++)n.push(f[l]);try{j.push(h.cssText)}catch(o){}}j=m(j.reverse().join("")),e=k(a),d=c(a,j)}),i.attachEvent("onafterprint",function(){n(e),clearTimeout(g._removeSheetTimer),g._removeSheetTimer=setTimeout(b,500)}),a.printShived=!0,a)}var p,q,r="3.7.3",s=a.html5||{},t=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,u=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,v="_html5shiv",w=0,x={};!function(){try{var a=b.createElement("a");a.innerHTML="",p="hidden"in a,q=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){p=!0,q=!0}}();var y={elements:s.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:r,shivCSS:s.shivCSS!==!1,supportsUnknownElements:q,shivMethods:s.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=y,j(b);var z=/^$|\b(?:all|print)\b/,A="html5shiv",B=!q&&function(){var c=b.documentElement;return!("undefined"==typeof b.namespaces||"undefined"==typeof b.parentWindow||"undefined"==typeof c.applyElement||"undefined"==typeof c.removeNode||"undefined"==typeof a.attachEvent)}();y.type+=" print",y.shivPrint=o,o(b),"object"==typeof module&&module.exports&&(module.exports=y)}("undefined"!=typeof window?window:this,document); -------------------------------------------------------------------------------- /docs/_static/js/html5shiv.min.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @preserve HTML5 Shiv 3.7.3 | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed 3 | */ 4 | !function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=t.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=t.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),t.elements=c+" "+a,j(b)}function f(a){var b=s[a[q]];return b||(b={},r++,a[q]=r,s[r]=b),b}function g(a,c,d){if(c||(c=b),l)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():p.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||o.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),l)return 
a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return t.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(t,b.frag)}function j(a){a||(a=b);var d=f(a);return!t.shivCSS||k||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),l||i(a,d),a}var k,l,m="3.7.3-pre",n=a.html5||{},o=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,p=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,q="_html5shiv",r=0,s={};!function(){try{var a=b.createElement("a");a.innerHTML="",k="hidden"in a,l=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){k=!0,l=!0}}();var t={elements:n.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:m,shivCSS:n.shivCSS!==!1,supportsUnknownElements:l,shivMethods:n.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=t,j(b),"object"==typeof module&&module.exports&&(module.exports=t)}("undefined"!=typeof window?window:this,document); -------------------------------------------------------------------------------- /docs/_static/js/theme.js: -------------------------------------------------------------------------------- 1 | !function(n){var e={};function t(i){if(e[i])return e[i].exports;var o=e[i]={i:i,l:!1,exports:{}};return n[i].call(o.exports,o,o.exports,t),o.l=!0,o.exports}t.m=n,t.c=e,t.d=function(n,e,i){t.o(n,e)||Object.defineProperty(n,e,{enumerable:!0,get:i})},t.r=function(n){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(n,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(n,"__esModule",{value:!0})},t.t=function(n,e){if(1&e&&(n=t(n)),8&e)return n;if(4&e&&"object"==typeof n&&n&&n.__esModule)return n;var i=Object.create(null);if(t.r(i),Object.defineProperty(i,"default",{enumerable:!0,value:n}),2&e&&"string"!=typeof n)for(var o in n)t.d(i,o,function(e){return n[e]}.bind(null,o));return i},t.n=function(n){var e=n&&n.__esModule?function(){return n.default}:function(){return n};return t.d(e,"a",e),e},t.o=function(n,e){return Object.prototype.hasOwnProperty.call(n,e)},t.p="",t(t.s=0)}([function(n,e,t){t(1),n.exports=t(3)},function(n,e,t){(function(){var e="undefined"!=typeof window?window.jQuery:t(2);n.exports.ThemeNav={navBar:null,win:null,winScroll:!1,winResize:!1,linkScroll:!1,winPosition:0,winHeight:null,docHeight:null,isRunning:!1,enable:function(n){var t=this;void 
0===n&&(n=!0),t.isRunning||(t.isRunning=!0,e((function(e){t.init(e),t.reset(),t.win.on("hashchange",t.reset),n&&t.win.on("scroll",(function(){t.linkScroll||t.winScroll||(t.winScroll=!0,requestAnimationFrame((function(){t.onScroll()})))})),t.win.on("resize",(function(){t.winResize||(t.winResize=!0,requestAnimationFrame((function(){t.onResize()})))})),t.onResize()})))},enableSticky:function(){this.enable(!0)},init:function(n){n(document);var e=this;this.navBar=n("div.wy-side-scroll:first"),this.win=n(window),n(document).on("click","[data-toggle='wy-nav-top']",(function(){n("[data-toggle='wy-nav-shift']").toggleClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift")})).on("click",".wy-menu-vertical .current ul li a",(function(){var t=n(this);n("[data-toggle='wy-nav-shift']").removeClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift"),e.toggleCurrent(t),e.hashChange()})).on("click","[data-toggle='rst-current-version']",(function(){n("[data-toggle='rst-versions']").toggleClass("shift-up")})),n("table.docutils:not(.field-list,.footnote,.citation)").wrap("
"),n("table.docutils.footnote").wrap("
"),n("table.docutils.citation").wrap("
"),n(".wy-menu-vertical ul").not(".simple").siblings("a").each((function(){var t=n(this);expand=n(''),expand.on("click",(function(n){return e.toggleCurrent(t),n.stopPropagation(),!1})),t.prepend(expand)}))},reset:function(){var n=encodeURI(window.location.hash)||"#";try{var e=$(".wy-menu-vertical"),t=e.find('[href="'+n+'"]');if(0===t.length){var i=$('.document [id="'+n.substring(1)+'"]').closest("div.section");0===(t=e.find('[href="#'+i.attr("id")+'"]')).length&&(t=e.find('[href="#"]'))}if(t.length>0){$(".wy-menu-vertical .current").removeClass("current").attr("aria-expanded","false"),t.addClass("current").attr("aria-expanded","true"),t.closest("li.toctree-l1").parent().addClass("current").attr("aria-expanded","true");for(let n=1;n<=10;n++)t.closest("li.toctree-l"+n).addClass("current").attr("aria-expanded","true");t[0].scrollIntoView()}}catch(n){console.log("Error expanding nav for anchor",n)}},onScroll:function(){this.winScroll=!1;var n=this.win.scrollTop(),e=n+this.winHeight,t=this.navBar.scrollTop()+(n-this.winPosition);n<0||e>this.docHeight||(this.navBar.scrollTop(t),this.winPosition=n)},onResize:function(){this.winResize=!1,this.winHeight=this.win.height(),this.docHeight=$(document).height()},hashChange:function(){this.linkScroll=!0,this.win.one("hashchange",(function(){this.linkScroll=!1}))},toggleCurrent:function(n){var e=n.closest("li");e.siblings("li.current").removeClass("current").attr("aria-expanded","false"),e.siblings().find("li.current").removeClass("current").attr("aria-expanded","false");var t=e.find("> ul li");t.length&&(t.removeClass("current").attr("aria-expanded","false"),e.toggleClass("current").attr("aria-expanded",(function(n,e){return"true"==e?"false":"true"})))}},"undefined"!=typeof window&&(window.SphinxRtdTheme={Navigation:n.exports.ThemeNav,StickyNav:n.exports.ThemeNav}),function(){for(var n=0,e=["ms","moz","webkit","o"],t=0;t0 63 | var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 64 | var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 65 | var s_v = "^(" + C + ")?" 
+ v; // vowel in stem 66 | 67 | this.stemWord = function (w) { 68 | var stem; 69 | var suffix; 70 | var firstch; 71 | var origword = w; 72 | 73 | if (w.length < 3) 74 | return w; 75 | 76 | var re; 77 | var re2; 78 | var re3; 79 | var re4; 80 | 81 | firstch = w.substr(0,1); 82 | if (firstch == "y") 83 | w = firstch.toUpperCase() + w.substr(1); 84 | 85 | // Step 1a 86 | re = /^(.+?)(ss|i)es$/; 87 | re2 = /^(.+?)([^s])s$/; 88 | 89 | if (re.test(w)) 90 | w = w.replace(re,"$1$2"); 91 | else if (re2.test(w)) 92 | w = w.replace(re2,"$1$2"); 93 | 94 | // Step 1b 95 | re = /^(.+?)eed$/; 96 | re2 = /^(.+?)(ed|ing)$/; 97 | if (re.test(w)) { 98 | var fp = re.exec(w); 99 | re = new RegExp(mgr0); 100 | if (re.test(fp[1])) { 101 | re = /.$/; 102 | w = w.replace(re,""); 103 | } 104 | } 105 | else if (re2.test(w)) { 106 | var fp = re2.exec(w); 107 | stem = fp[1]; 108 | re2 = new RegExp(s_v); 109 | if (re2.test(stem)) { 110 | w = stem; 111 | re2 = /(at|bl|iz)$/; 112 | re3 = new RegExp("([^aeiouylsz])\\1$"); 113 | re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); 114 | if (re2.test(w)) 115 | w = w + "e"; 116 | else if (re3.test(w)) { 117 | re = /.$/; 118 | w = w.replace(re,""); 119 | } 120 | else if (re4.test(w)) 121 | w = w + "e"; 122 | } 123 | } 124 | 125 | // Step 1c 126 | re = /^(.+?)y$/; 127 | if (re.test(w)) { 128 | var fp = re.exec(w); 129 | stem = fp[1]; 130 | re = new RegExp(s_v); 131 | if (re.test(stem)) 132 | w = stem + "i"; 133 | } 134 | 135 | // Step 2 136 | re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; 137 | if (re.test(w)) { 138 | var fp = re.exec(w); 139 | stem = fp[1]; 140 | suffix = fp[2]; 141 | re = new RegExp(mgr0); 142 | if (re.test(stem)) 143 | w = stem + step2list[suffix]; 144 | } 145 | 146 | // Step 3 147 | re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; 148 | if (re.test(w)) { 149 | var fp = re.exec(w); 150 | stem = fp[1]; 151 | suffix = fp[2]; 152 | re = new RegExp(mgr0); 153 | if (re.test(stem)) 154 | w = stem + step3list[suffix]; 155 | } 156 | 157 | // Step 4 158 | re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; 159 | re2 = /^(.+?)(s|t)(ion)$/; 160 | if (re.test(w)) { 161 | var fp = re.exec(w); 162 | stem = fp[1]; 163 | re = new RegExp(mgr1); 164 | if (re.test(stem)) 165 | w = stem; 166 | } 167 | else if (re2.test(w)) { 168 | var fp = re2.exec(w); 169 | stem = fp[1] + fp[2]; 170 | re2 = new RegExp(mgr1); 171 | if (re2.test(stem)) 172 | w = stem; 173 | } 174 | 175 | // Step 5 176 | re = /^(.+?)e$/; 177 | if (re.test(w)) { 178 | var fp = re.exec(w); 179 | stem = fp[1]; 180 | re = new RegExp(mgr1); 181 | re2 = new RegExp(meq1); 182 | re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); 183 | if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) 184 | w = stem; 185 | } 186 | re = /ll$/; 187 | re2 = new RegExp(mgr1); 188 | if (re.test(w) && re2.test(w)) { 189 | re = /.$/; 190 | w = w.replace(re,""); 191 | } 192 | 193 | // and turn initial Y back to y 194 | if (firstch == "y") 195 | w = firstch.toLowerCase() + w.substr(1); 196 | return w; 197 | } 198 | } 199 | 200 | -------------------------------------------------------------------------------- /docs/_static/minus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_static/minus.png 
-------------------------------------------------------------------------------- /docs/_static/plus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/_static/plus.png -------------------------------------------------------------------------------- /docs/_static/pygments.css: -------------------------------------------------------------------------------- 1 | pre { line-height: 125%; } 2 | td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } 3 | span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } 4 | td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } 5 | span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } 6 | .highlight .hll { background-color: #ffffcc } 7 | .highlight { background: #f8f8f8; } 8 | .highlight .c { color: #3D7B7B; font-style: italic } /* Comment */ 9 | .highlight .err { border: 1px solid #FF0000 } /* Error */ 10 | .highlight .k { color: #008000; font-weight: bold } /* Keyword */ 11 | .highlight .o { color: #666666 } /* Operator */ 12 | .highlight .ch { color: #3D7B7B; font-style: italic } /* Comment.Hashbang */ 13 | .highlight .cm { color: #3D7B7B; font-style: italic } /* Comment.Multiline */ 14 | .highlight .cp { color: #9C6500 } /* Comment.Preproc */ 15 | .highlight .cpf { color: #3D7B7B; font-style: italic } /* Comment.PreprocFile */ 16 | .highlight .c1 { color: #3D7B7B; font-style: italic } /* Comment.Single */ 17 | .highlight .cs { color: #3D7B7B; font-style: italic } /* Comment.Special */ 18 | .highlight .gd { color: #A00000 } /* Generic.Deleted */ 19 | .highlight .ge { font-style: italic } /* Generic.Emph */ 20 | .highlight .ges { font-weight: bold; font-style: italic } /* Generic.EmphStrong */ 21 | .highlight .gr { color: #E40000 } /* Generic.Error */ 22 | .highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ 23 | .highlight .gi { color: #008400 } /* Generic.Inserted */ 24 | .highlight .go { color: #717171 } /* Generic.Output */ 25 | .highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */ 26 | .highlight .gs { font-weight: bold } /* Generic.Strong */ 27 | .highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ 28 | .highlight .gt { color: #0044DD } /* Generic.Traceback */ 29 | .highlight .kc { color: #008000; font-weight: bold } /* Keyword.Constant */ 30 | .highlight .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */ 31 | .highlight .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */ 32 | .highlight .kp { color: #008000 } /* Keyword.Pseudo */ 33 | .highlight .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */ 34 | .highlight .kt { color: #B00040 } /* Keyword.Type */ 35 | .highlight .m { color: #666666 } /* Literal.Number */ 36 | .highlight .s { color: #BA2121 } /* Literal.String */ 37 | .highlight .na { color: #687822 } /* Name.Attribute */ 38 | .highlight .nb { color: #008000 } /* Name.Builtin */ 39 | .highlight .nc { color: #0000FF; font-weight: bold } /* Name.Class */ 40 | .highlight .no { color: #880000 } /* Name.Constant */ 41 | .highlight .nd { color: #AA22FF } /* Name.Decorator */ 42 | .highlight .ni { color: #717171; font-weight: bold } /* Name.Entity */ 43 | .highlight .ne { color: #CB3F38; font-weight: 
bold } /* Name.Exception */ 44 | .highlight .nf { color: #0000FF } /* Name.Function */ 45 | .highlight .nl { color: #767600 } /* Name.Label */ 46 | .highlight .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */ 47 | .highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */ 48 | .highlight .nv { color: #19177C } /* Name.Variable */ 49 | .highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */ 50 | .highlight .w { color: #bbbbbb } /* Text.Whitespace */ 51 | .highlight .mb { color: #666666 } /* Literal.Number.Bin */ 52 | .highlight .mf { color: #666666 } /* Literal.Number.Float */ 53 | .highlight .mh { color: #666666 } /* Literal.Number.Hex */ 54 | .highlight .mi { color: #666666 } /* Literal.Number.Integer */ 55 | .highlight .mo { color: #666666 } /* Literal.Number.Oct */ 56 | .highlight .sa { color: #BA2121 } /* Literal.String.Affix */ 57 | .highlight .sb { color: #BA2121 } /* Literal.String.Backtick */ 58 | .highlight .sc { color: #BA2121 } /* Literal.String.Char */ 59 | .highlight .dl { color: #BA2121 } /* Literal.String.Delimiter */ 60 | .highlight .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */ 61 | .highlight .s2 { color: #BA2121 } /* Literal.String.Double */ 62 | .highlight .se { color: #AA5D1F; font-weight: bold } /* Literal.String.Escape */ 63 | .highlight .sh { color: #BA2121 } /* Literal.String.Heredoc */ 64 | .highlight .si { color: #A45A77; font-weight: bold } /* Literal.String.Interpol */ 65 | .highlight .sx { color: #008000 } /* Literal.String.Other */ 66 | .highlight .sr { color: #A45A77 } /* Literal.String.Regex */ 67 | .highlight .s1 { color: #BA2121 } /* Literal.String.Single */ 68 | .highlight .ss { color: #19177C } /* Literal.String.Symbol */ 69 | .highlight .bp { color: #008000 } /* Name.Builtin.Pseudo */ 70 | .highlight .fm { color: #0000FF } /* Name.Function.Magic */ 71 | .highlight .vc { color: #19177C } /* Name.Variable.Class */ 72 | .highlight .vg { color: #19177C } /* Name.Variable.Global */ 73 | .highlight .vi { color: #19177C } /* Name.Variable.Instance */ 74 | .highlight .vm { color: #19177C } /* Name.Variable.Magic */ 75 | .highlight .il { color: #666666 } /* Literal.Number.Integer.Long */ -------------------------------------------------------------------------------- /docs/_static/searchtools.js: -------------------------------------------------------------------------------- 1 | /* 2 | * searchtools.js 3 | * ~~~~~~~~~~~~~~~~ 4 | * 5 | * Sphinx JavaScript utilities for the full-text search. 6 | * 7 | * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 9 | * 10 | */ 11 | "use strict"; 12 | 13 | /** 14 | * Simple result scoring code. 15 | */ 16 | if (typeof Scorer === "undefined") { 17 | var Scorer = { 18 | // Implement the following function to further tweak the score for each result 19 | // The function takes a result array [docname, title, anchor, descr, score, filename] 20 | // and returns the new score. 
21 | /* 22 | score: result => { 23 | const [docname, title, anchor, descr, score, filename] = result 24 | return score 25 | }, 26 | */ 27 | 28 | // query matches the full name of an object 29 | objNameMatch: 11, 30 | // or matches in the last dotted part of the object name 31 | objPartialMatch: 6, 32 | // Additive scores depending on the priority of the object 33 | objPrio: { 34 | 0: 15, // used to be importantResults 35 | 1: 5, // used to be objectResults 36 | 2: -5, // used to be unimportantResults 37 | }, 38 | // Used when the priority is not in the mapping. 39 | objPrioDefault: 0, 40 | 41 | // query found in title 42 | title: 15, 43 | partialTitle: 7, 44 | // query found in terms 45 | term: 5, 46 | partialTerm: 2, 47 | }; 48 | } 49 | 50 | const _removeChildren = (element) => { 51 | while (element && element.lastChild) element.removeChild(element.lastChild); 52 | }; 53 | 54 | /** 55 | * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping 56 | */ 57 | const _escapeRegExp = (string) => 58 | string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string 59 | 60 | const _displayItem = (item, searchTerms, highlightTerms) => { 61 | const docBuilder = DOCUMENTATION_OPTIONS.BUILDER; 62 | const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX; 63 | const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX; 64 | const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY; 65 | const contentRoot = document.documentElement.dataset.content_root; 66 | 67 | const [docName, title, anchor, descr, score, _filename] = item; 68 | 69 | let listItem = document.createElement("li"); 70 | let requestUrl; 71 | let linkUrl; 72 | if (docBuilder === "dirhtml") { 73 | // dirhtml builder 74 | let dirname = docName + "/"; 75 | if (dirname.match(/\/index\/$/)) 76 | dirname = dirname.substring(0, dirname.length - 6); 77 | else if (dirname === "index/") dirname = ""; 78 | requestUrl = contentRoot + dirname; 79 | linkUrl = requestUrl; 80 | } else { 81 | // normal html builders 82 | requestUrl = contentRoot + docName + docFileSuffix; 83 | linkUrl = docName + docLinkSuffix; 84 | } 85 | let linkEl = listItem.appendChild(document.createElement("a")); 86 | linkEl.href = linkUrl + anchor; 87 | linkEl.dataset.score = score; 88 | linkEl.innerHTML = title; 89 | if (descr) { 90 | listItem.appendChild(document.createElement("span")).innerHTML = 91 | " (" + descr + ")"; 92 | // highlight search terms in the description 93 | if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js 94 | highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); 95 | } 96 | else if (showSearchSummary) 97 | fetch(requestUrl) 98 | .then((responseData) => responseData.text()) 99 | .then((data) => { 100 | if (data) 101 | listItem.appendChild( 102 | Search.makeSearchSummary(data, searchTerms) 103 | ); 104 | // highlight search terms in the summary 105 | if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js 106 | highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); 107 | }); 108 | Search.output.appendChild(listItem); 109 | }; 110 | const _finishSearch = (resultCount) => { 111 | Search.stopPulse(); 112 | Search.title.innerText = _("Search Results"); 113 | if (!resultCount) 114 | Search.status.innerText = Documentation.gettext( 115 | "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories." 
116 | ); 117 | else 118 | Search.status.innerText = _( 119 | `Search finished, found ${resultCount} page(s) matching the search query.` 120 | ); 121 | }; 122 | const _displayNextItem = ( 123 | results, 124 | resultCount, 125 | searchTerms, 126 | highlightTerms, 127 | ) => { 128 | // results left, load the summary and display it 129 | // this is intended to be dynamic (don't sub resultsCount) 130 | if (results.length) { 131 | _displayItem(results.pop(), searchTerms, highlightTerms); 132 | setTimeout( 133 | () => _displayNextItem(results, resultCount, searchTerms, highlightTerms), 134 | 5 135 | ); 136 | } 137 | // search finished, update title and status message 138 | else _finishSearch(resultCount); 139 | }; 140 | 141 | /** 142 | * Default splitQuery function. Can be overridden in ``sphinx.search`` with a 143 | * custom function per language. 144 | * 145 | * The regular expression works by splitting the string on consecutive characters 146 | * that are not Unicode letters, numbers, underscores, or emoji characters. 147 | * This is the same as ``\W+`` in Python, preserving the surrogate pair area. 148 | */ 149 | if (typeof splitQuery === "undefined") { 150 | var splitQuery = (query) => query 151 | .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu) 152 | .filter(term => term) // remove remaining empty strings 153 | } 154 | 155 | /** 156 | * Search Module 157 | */ 158 | const Search = { 159 | _index: null, 160 | _queued_query: null, 161 | _pulse_status: -1, 162 | 163 | htmlToText: (htmlString) => { 164 | const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html'); 165 | htmlElement.querySelectorAll(".headerlink").forEach((el) => { el.remove() }); 166 | const docContent = htmlElement.querySelector('[role="main"]'); 167 | if (docContent !== undefined) return docContent.textContent; 168 | console.warn( 169 | "Content block not found. Sphinx search tries to obtain it via '[role=main]'. Could you check your theme or template." 
170 | ); 171 | return ""; 172 | }, 173 | 174 | init: () => { 175 | const query = new URLSearchParams(window.location.search).get("q"); 176 | document 177 | .querySelectorAll('input[name="q"]') 178 | .forEach((el) => (el.value = query)); 179 | if (query) Search.performSearch(query); 180 | }, 181 | 182 | loadIndex: (url) => 183 | (document.body.appendChild(document.createElement("script")).src = url), 184 | 185 | setIndex: (index) => { 186 | Search._index = index; 187 | if (Search._queued_query !== null) { 188 | const query = Search._queued_query; 189 | Search._queued_query = null; 190 | Search.query(query); 191 | } 192 | }, 193 | 194 | hasIndex: () => Search._index !== null, 195 | 196 | deferQuery: (query) => (Search._queued_query = query), 197 | 198 | stopPulse: () => (Search._pulse_status = -1), 199 | 200 | startPulse: () => { 201 | if (Search._pulse_status >= 0) return; 202 | 203 | const pulse = () => { 204 | Search._pulse_status = (Search._pulse_status + 1) % 4; 205 | Search.dots.innerText = ".".repeat(Search._pulse_status); 206 | if (Search._pulse_status >= 0) window.setTimeout(pulse, 500); 207 | }; 208 | pulse(); 209 | }, 210 | 211 | /** 212 | * perform a search for something (or wait until index is loaded) 213 | */ 214 | performSearch: (query) => { 215 | // create the required interface elements 216 | const searchText = document.createElement("h2"); 217 | searchText.textContent = _("Searching"); 218 | const searchSummary = document.createElement("p"); 219 | searchSummary.classList.add("search-summary"); 220 | searchSummary.innerText = ""; 221 | const searchList = document.createElement("ul"); 222 | searchList.classList.add("search"); 223 | 224 | const out = document.getElementById("search-results"); 225 | Search.title = out.appendChild(searchText); 226 | Search.dots = Search.title.appendChild(document.createElement("span")); 227 | Search.status = out.appendChild(searchSummary); 228 | Search.output = out.appendChild(searchList); 229 | 230 | const searchProgress = document.getElementById("search-progress"); 231 | // Some themes don't use the search progress node 232 | if (searchProgress) { 233 | searchProgress.innerText = _("Preparing search..."); 234 | } 235 | Search.startPulse(); 236 | 237 | // index already loaded, the browser was quick! 
238 | if (Search.hasIndex()) Search.query(query); 239 | else Search.deferQuery(query); 240 | }, 241 | 242 | /** 243 | * execute search (requires search index to be loaded) 244 | */ 245 | query: (query) => { 246 | const filenames = Search._index.filenames; 247 | const docNames = Search._index.docnames; 248 | const titles = Search._index.titles; 249 | const allTitles = Search._index.alltitles; 250 | const indexEntries = Search._index.indexentries; 251 | 252 | // stem the search terms and add them to the correct list 253 | const stemmer = new Stemmer(); 254 | const searchTerms = new Set(); 255 | const excludedTerms = new Set(); 256 | const highlightTerms = new Set(); 257 | const objectTerms = new Set(splitQuery(query.toLowerCase().trim())); 258 | splitQuery(query.trim()).forEach((queryTerm) => { 259 | const queryTermLower = queryTerm.toLowerCase(); 260 | 261 | // maybe skip this "word" 262 | // stopwords array is from language_data.js 263 | if ( 264 | stopwords.indexOf(queryTermLower) !== -1 || 265 | queryTerm.match(/^\d+$/) 266 | ) 267 | return; 268 | 269 | // stem the word 270 | let word = stemmer.stemWord(queryTermLower); 271 | // select the correct list 272 | if (word[0] === "-") excludedTerms.add(word.substr(1)); 273 | else { 274 | searchTerms.add(word); 275 | highlightTerms.add(queryTermLower); 276 | } 277 | }); 278 | 279 | if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js 280 | localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" ")) 281 | } 282 | 283 | // console.debug("SEARCH: searching for:"); 284 | // console.info("required: ", [...searchTerms]); 285 | // console.info("excluded: ", [...excludedTerms]); 286 | 287 | // array of [docname, title, anchor, descr, score, filename] 288 | let results = []; 289 | _removeChildren(document.getElementById("search-progress")); 290 | 291 | const queryLower = query.toLowerCase(); 292 | for (const [title, foundTitles] of Object.entries(allTitles)) { 293 | if (title.toLowerCase().includes(queryLower) && (queryLower.length >= title.length/2)) { 294 | for (const [file, id] of foundTitles) { 295 | let score = Math.round(100 * queryLower.length / title.length) 296 | results.push([ 297 | docNames[file], 298 | titles[file] !== title ? `${titles[file]} > ${title}` : title, 299 | id !== null ? "#" + id : "", 300 | null, 301 | score, 302 | filenames[file], 303 | ]); 304 | } 305 | } 306 | } 307 | 308 | // search for explicit entries in index directives 309 | for (const [entry, foundEntries] of Object.entries(indexEntries)) { 310 | if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) { 311 | for (const [file, id] of foundEntries) { 312 | let score = Math.round(100 * queryLower.length / entry.length) 313 | results.push([ 314 | docNames[file], 315 | titles[file], 316 | id ? 
"#" + id : "", 317 | null, 318 | score, 319 | filenames[file], 320 | ]); 321 | } 322 | } 323 | } 324 | 325 | // lookup as object 326 | objectTerms.forEach((term) => 327 | results.push(...Search.performObjectSearch(term, objectTerms)) 328 | ); 329 | 330 | // lookup as search terms in fulltext 331 | results.push(...Search.performTermsSearch(searchTerms, excludedTerms)); 332 | 333 | // let the scorer override scores with a custom scoring function 334 | if (Scorer.score) results.forEach((item) => (item[4] = Scorer.score(item))); 335 | 336 | // now sort the results by score (in opposite order of appearance, since the 337 | // display function below uses pop() to retrieve items) and then 338 | // alphabetically 339 | results.sort((a, b) => { 340 | const leftScore = a[4]; 341 | const rightScore = b[4]; 342 | if (leftScore === rightScore) { 343 | // same score: sort alphabetically 344 | const leftTitle = a[1].toLowerCase(); 345 | const rightTitle = b[1].toLowerCase(); 346 | if (leftTitle === rightTitle) return 0; 347 | return leftTitle > rightTitle ? -1 : 1; // inverted is intentional 348 | } 349 | return leftScore > rightScore ? 1 : -1; 350 | }); 351 | 352 | // remove duplicate search results 353 | // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept 354 | let seen = new Set(); 355 | results = results.reverse().reduce((acc, result) => { 356 | let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(','); 357 | if (!seen.has(resultStr)) { 358 | acc.push(result); 359 | seen.add(resultStr); 360 | } 361 | return acc; 362 | }, []); 363 | 364 | results = results.reverse(); 365 | 366 | // for debugging 367 | //Search.lastresults = results.slice(); // a copy 368 | // console.info("search results:", Search.lastresults); 369 | 370 | // print the results 371 | _displayNextItem(results, results.length, searchTerms, highlightTerms); 372 | }, 373 | 374 | /** 375 | * search for object names 376 | */ 377 | performObjectSearch: (object, objectTerms) => { 378 | const filenames = Search._index.filenames; 379 | const docNames = Search._index.docnames; 380 | const objects = Search._index.objects; 381 | const objNames = Search._index.objnames; 382 | const titles = Search._index.titles; 383 | 384 | const results = []; 385 | 386 | const objectSearchCallback = (prefix, match) => { 387 | const name = match[4] 388 | const fullname = (prefix ? prefix + "." : "") + name; 389 | const fullnameLower = fullname.toLowerCase(); 390 | if (fullnameLower.indexOf(object) < 0) return; 391 | 392 | let score = 0; 393 | const parts = fullnameLower.split("."); 394 | 395 | // check for different match types: exact matches of full name or 396 | // "last name" (i.e. 
last dotted part) 397 | if (fullnameLower === object || parts.slice(-1)[0] === object) 398 | score += Scorer.objNameMatch; 399 | else if (parts.slice(-1)[0].indexOf(object) > -1) 400 | score += Scorer.objPartialMatch; // matches in last name 401 | 402 | const objName = objNames[match[1]][2]; 403 | const title = titles[match[0]]; 404 | 405 | // If more than one term searched for, we require other words to be 406 | // found in the name/title/description 407 | const otherTerms = new Set(objectTerms); 408 | otherTerms.delete(object); 409 | if (otherTerms.size > 0) { 410 | const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase(); 411 | if ( 412 | [...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0) 413 | ) 414 | return; 415 | } 416 | 417 | let anchor = match[3]; 418 | if (anchor === "") anchor = fullname; 419 | else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname; 420 | 421 | const descr = objName + _(", in ") + title; 422 | 423 | // add custom score for some objects according to scorer 424 | if (Scorer.objPrio.hasOwnProperty(match[2])) 425 | score += Scorer.objPrio[match[2]]; 426 | else score += Scorer.objPrioDefault; 427 | 428 | results.push([ 429 | docNames[match[0]], 430 | fullname, 431 | "#" + anchor, 432 | descr, 433 | score, 434 | filenames[match[0]], 435 | ]); 436 | }; 437 | Object.keys(objects).forEach((prefix) => 438 | objects[prefix].forEach((array) => 439 | objectSearchCallback(prefix, array) 440 | ) 441 | ); 442 | return results; 443 | }, 444 | 445 | /** 446 | * search for full-text terms in the index 447 | */ 448 | performTermsSearch: (searchTerms, excludedTerms) => { 449 | // prepare search 450 | const terms = Search._index.terms; 451 | const titleTerms = Search._index.titleterms; 452 | const filenames = Search._index.filenames; 453 | const docNames = Search._index.docnames; 454 | const titles = Search._index.titles; 455 | 456 | const scoreMap = new Map(); 457 | const fileMap = new Map(); 458 | 459 | // perform the search on the required terms 460 | searchTerms.forEach((word) => { 461 | const files = []; 462 | const arr = [ 463 | { files: terms[word], score: Scorer.term }, 464 | { files: titleTerms[word], score: Scorer.title }, 465 | ]; 466 | // add support for partial matches 467 | if (word.length > 2) { 468 | const escapedWord = _escapeRegExp(word); 469 | Object.keys(terms).forEach((term) => { 470 | if (term.match(escapedWord) && !terms[word]) 471 | arr.push({ files: terms[term], score: Scorer.partialTerm }); 472 | }); 473 | Object.keys(titleTerms).forEach((term) => { 474 | if (term.match(escapedWord) && !titleTerms[word]) 475 | arr.push({ files: titleTerms[word], score: Scorer.partialTitle }); 476 | }); 477 | } 478 | 479 | // no match but word was a required one 480 | if (arr.every((record) => record.files === undefined)) return; 481 | 482 | // found search word in contents 483 | arr.forEach((record) => { 484 | if (record.files === undefined) return; 485 | 486 | let recordFiles = record.files; 487 | if (recordFiles.length === undefined) recordFiles = [recordFiles]; 488 | files.push(...recordFiles); 489 | 490 | // set score for the word in each file 491 | recordFiles.forEach((file) => { 492 | if (!scoreMap.has(file)) scoreMap.set(file, {}); 493 | scoreMap.get(file)[word] = record.score; 494 | }); 495 | }); 496 | 497 | // create the mapping 498 | files.forEach((file) => { 499 | if (fileMap.has(file) && fileMap.get(file).indexOf(word) === -1) 500 | fileMap.get(file).push(word); 501 | else fileMap.set(file, [word]); 502 | }); 503 | 
}); 504 | 505 | // now check if the files don't contain excluded terms 506 | const results = []; 507 | for (const [file, wordList] of fileMap) { 508 | // check if all requirements are matched 509 | 510 | // as search terms with length < 3 are discarded 511 | const filteredTermCount = [...searchTerms].filter( 512 | (term) => term.length > 2 513 | ).length; 514 | if ( 515 | wordList.length !== searchTerms.size && 516 | wordList.length !== filteredTermCount 517 | ) 518 | continue; 519 | 520 | // ensure that none of the excluded terms is in the search result 521 | if ( 522 | [...excludedTerms].some( 523 | (term) => 524 | terms[term] === file || 525 | titleTerms[term] === file || 526 | (terms[term] || []).includes(file) || 527 | (titleTerms[term] || []).includes(file) 528 | ) 529 | ) 530 | break; 531 | 532 | // select one (max) score for the file. 533 | const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w])); 534 | // add result to the result list 535 | results.push([ 536 | docNames[file], 537 | titles[file], 538 | "", 539 | null, 540 | score, 541 | filenames[file], 542 | ]); 543 | } 544 | return results; 545 | }, 546 | 547 | /** 548 | * helper function to return a node containing the 549 | * search summary for a given text. keywords is a list 550 | * of stemmed words. 551 | */ 552 | makeSearchSummary: (htmlText, keywords) => { 553 | const text = Search.htmlToText(htmlText); 554 | if (text === "") return null; 555 | 556 | const textLower = text.toLowerCase(); 557 | const actualStartPosition = [...keywords] 558 | .map((k) => textLower.indexOf(k.toLowerCase())) 559 | .filter((i) => i > -1) 560 | .slice(-1)[0]; 561 | const startWithContext = Math.max(actualStartPosition - 120, 0); 562 | 563 | const top = startWithContext === 0 ? "" : "..."; 564 | const tail = startWithContext + 240 < text.length ? "..." : ""; 565 | 566 | let summary = document.createElement("p"); 567 | summary.classList.add("context"); 568 | summary.textContent = top + text.substr(startWithContext, 240).trim() + tail; 569 | 570 | return summary; 571 | }, 572 | }; 573 | 574 | _ready(Search.init); 575 | -------------------------------------------------------------------------------- /docs/_static/sphinx_highlight.js: -------------------------------------------------------------------------------- 1 | /* Highlighting utilities for Sphinx HTML documentation. */ 2 | "use strict"; 3 | 4 | const SPHINX_HIGHLIGHT_ENABLED = true 5 | 6 | /** 7 | * highlight a given string on a node by wrapping it in 8 | * span elements with the given class name. 
9 | */ 10 | const _highlight = (node, addItems, text, className) => { 11 | if (node.nodeType === Node.TEXT_NODE) { 12 | const val = node.nodeValue; 13 | const parent = node.parentNode; 14 | const pos = val.toLowerCase().indexOf(text); 15 | if ( 16 | pos >= 0 && 17 | !parent.classList.contains(className) && 18 | !parent.classList.contains("nohighlight") 19 | ) { 20 | let span; 21 | 22 | const closestNode = parent.closest("body, svg, foreignObject"); 23 | const isInSVG = closestNode && closestNode.matches("svg"); 24 | if (isInSVG) { 25 | span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); 26 | } else { 27 | span = document.createElement("span"); 28 | span.classList.add(className); 29 | } 30 | 31 | span.appendChild(document.createTextNode(val.substr(pos, text.length))); 32 | const rest = document.createTextNode(val.substr(pos + text.length)); 33 | parent.insertBefore( 34 | span, 35 | parent.insertBefore( 36 | rest, 37 | node.nextSibling 38 | ) 39 | ); 40 | node.nodeValue = val.substr(0, pos); 41 | /* There may be more occurrences of search term in this node. So call this 42 | * function recursively on the remaining fragment. 43 | */ 44 | _highlight(rest, addItems, text, className); 45 | 46 | if (isInSVG) { 47 | const rect = document.createElementNS( 48 | "http://www.w3.org/2000/svg", 49 | "rect" 50 | ); 51 | const bbox = parent.getBBox(); 52 | rect.x.baseVal.value = bbox.x; 53 | rect.y.baseVal.value = bbox.y; 54 | rect.width.baseVal.value = bbox.width; 55 | rect.height.baseVal.value = bbox.height; 56 | rect.setAttribute("class", className); 57 | addItems.push({ parent: parent, target: rect }); 58 | } 59 | } 60 | } else if (node.matches && !node.matches("button, select, textarea")) { 61 | node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); 62 | } 63 | }; 64 | const _highlightText = (thisNode, text, className) => { 65 | let addItems = []; 66 | _highlight(thisNode, addItems, text, className); 67 | addItems.forEach((obj) => 68 | obj.parent.insertAdjacentElement("beforebegin", obj.target) 69 | ); 70 | }; 71 | 72 | /** 73 | * Small JavaScript module for the documentation. 74 | */ 75 | const SphinxHighlight = { 76 | 77 | /** 78 | * highlight the search words provided in localstorage in the text 79 | */ 80 | highlightSearchWords: () => { 81 | if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight 82 | 83 | // get and clear terms from localstorage 84 | const url = new URL(window.location); 85 | const highlight = 86 | localStorage.getItem("sphinx_highlight_terms") 87 | || url.searchParams.get("highlight") 88 | || ""; 89 | localStorage.removeItem("sphinx_highlight_terms") 90 | url.searchParams.delete("highlight"); 91 | window.history.replaceState({}, "", url); 92 | 93 | // get individual terms from highlight string 94 | const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); 95 | if (terms.length === 0) return; // nothing to do 96 | 97 | // There should never be more than one element matching "div.body" 98 | const divBody = document.querySelectorAll("div.body"); 99 | const body = divBody.length ? 
divBody[0] : document.querySelector("body"); 100 | window.setTimeout(() => { 101 | terms.forEach((term) => _highlightText(body, term, "highlighted")); 102 | }, 10); 103 | 104 | const searchBox = document.getElementById("searchbox"); 105 | if (searchBox === null) return; 106 | searchBox.appendChild( 107 | document 108 | .createRange() 109 | .createContextualFragment( 110 | '" 114 | ) 115 | ); 116 | }, 117 | 118 | /** 119 | * helper function to hide the search marks again 120 | */ 121 | hideSearchWords: () => { 122 | document 123 | .querySelectorAll("#searchbox .highlight-link") 124 | .forEach((el) => el.remove()); 125 | document 126 | .querySelectorAll("span.highlighted") 127 | .forEach((el) => el.classList.remove("highlighted")); 128 | localStorage.removeItem("sphinx_highlight_terms") 129 | }, 130 | 131 | initEscapeListener: () => { 132 | // only install a listener if it is really needed 133 | if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; 134 | 135 | document.addEventListener("keydown", (event) => { 136 | // bail for input elements 137 | if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; 138 | // bail with special keys 139 | if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; 140 | if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { 141 | SphinxHighlight.hideSearchWords(); 142 | event.preventDefault(); 143 | } 144 | }); 145 | }, 146 | }; 147 | 148 | _ready(() => { 149 | /* Do not call highlightSearchWords() when we are on the search page. 150 | * It will highlight words from the *previous* search query. 151 | */ 152 | if (typeof Search === "undefined") SphinxHighlight.highlightSearchWords(); 153 | SphinxHighlight.initEscapeListener(); 154 | }); 155 | -------------------------------------------------------------------------------- /docs/citation.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Citation — segment-lidar documentation 8 | 9 | 10 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 |
28 | 60 | 61 |
65 | 66 |
67 |
68 |
69 | 76 |
77 |
78 |
79 |
80 | 81 |
82 |

Citation

83 |

The use of open-source software repositories has become increasingly 84 | prevalent in scientific research. If you use this repository for your 85 | research, please make sure to cite it appropriately in your work. The 86 | recommended citation format for this repository is provided in the 87 | accompanying BibTeX 88 | citation. 89 | Additionally, please make sure to comply with any licensing terms and 90 | conditions associated with the use of this repository.

91 |
@misc{yarroudh:2023:samlidar,
 92 |     author = {Yarroudh, Anass},
 93 |     title = {LiDAR Automatic Unsupervised Segmentation using Segment-Anything Model (SAM) from Meta AI},
 94 |     year = {2023},
 95 |     howpublished = {GitHub Repository},
 96 |     url = {https://github.com/Yarroudh/segment-lidar}
 97 | }
 98 | 
99 |
100 |

Yarroudh, A. (2023). LiDAR Automatic Unsupervised Segmentation using Segment-Anything Model (SAM) from Meta AI [GitHub repository]. Retrieved from https://github.com/Yarroudh/segment-lidar

101 |
102 | 103 | 104 |
105 |
106 |
110 | 111 |
112 | 113 |
114 |

© Copyright 2023, Geomatics Unit - University of Liège.

115 |
116 | 117 | 118 | 119 |
120 |
121 |
122 |
123 |
124 | 129 | 130 | 131 | -------------------------------------------------------------------------------- /docs/genindex.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Index — segment-lidar documentation 7 | 8 | 9 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 |
25 | 57 | 58 |
62 | 63 |
64 |
65 |
66 | 73 |
74 |
75 |
76 |
77 | 78 | 79 |

Index

80 | 81 |
82 | C 83 | | I 84 | | M 85 | | P 86 | | R 87 | | S 88 | | T 89 | | W 90 | 91 |
92 |

C

93 | 94 | 102 | 106 |
107 | 108 |

I

109 | 110 | 118 |
119 | 120 |

M

121 | 122 | 133 |
134 | 135 |

P

136 | 137 | 141 |
142 | 143 |

R

144 | 145 | 149 |
150 | 151 |

S

152 | 153 | 163 | 179 |
    164 |
  • 165 | segment_lidar.samlidar 166 | 167 |
  • 171 |
  • 172 | segment_lidar.view 173 | 174 |
  • 178 |
180 | 181 |

T

182 | 183 | 187 |
188 | 189 |

W

190 | 191 | 195 |
196 | 197 | 198 | 199 |
200 |
201 |
202 | 203 |
204 | 205 |
206 |

© Copyright 2023, Geomatics Unit - University of Liège.

207 |
208 | 209 | 210 | 211 |
212 |
213 |
214 |
215 |
216 | 221 | 222 | 223 | -------------------------------------------------------------------------------- /docs/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Welcome to segment-lidar’s documentation! — segment-lidar documentation 8 | 9 | 10 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 |
27 | 59 | 60 |
64 | 65 |
66 |
67 |
68 |
    69 |
  • 70 | 71 |
  • 72 | Edit on GitHub 73 |
  • 74 |
75 |
76 |
77 |
78 |
79 | 80 |
81 |

Welcome to segment-lidar’s documentation!

82 |
83 |

About

84 |

The package segment-lidar is specifically designed for unsupervised instance segmentation of aerial LiDAR data. It brings together the power of the Segment-Anything Model (SAM) developed by Meta Research and the segment-geospatial (SamGeo) package from Open Geospatial Solutions to automate the segmentation of LiDAR data. 85 | If you use this package for your research, please cite:

86 |
@misc{yarroudh:2023:samlidar,
 87 |    author = {Yarroudh, Anass},
 88 |    title = {LiDAR Automatic Unsupervised Segmentation using Segment-Anything Model (SAM) from Meta AI},
 89 |    year = {2023},
 90 |    howpublished = {GitHub Repository},
 91 |    url = {https://github.com/Yarroudh/segment-lidar}
 92 | }
 93 | 
94 |
95 |
96 |

Note

97 |

The paper will be published very soon.

98 |
99 |

The latest source code is available on GitHub. The package builds on top of existing work; when using specific algorithms within segment-lidar, please also cite the original authors, as specified in the source code.

100 |
101 |

Contents

102 | 109 |
110 |
111 |
112 |

Support

113 |

Please contact us by email at ayarroudh@uliege.be or akharroubi@uliege.be for questions, and use the GitHub issue tracker for bug reports, feature requests/additions, etc.

114 | 115 |
116 |
117 | 118 | 119 |
120 |
121 |
124 | 125 |
126 | 127 |
128 |

© Copyright 2023, Geomatics Unit - University of Liège.

129 |
130 | 131 | 132 | 133 |
134 |
135 |
136 |
137 |
138 | 143 | 144 | 145 | -------------------------------------------------------------------------------- /docs/installation.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Installation — segment-lidar documentation 8 | 9 | 10 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 |
28 | 65 | 66 |
70 | 71 |
72 |
73 |
74 | 81 |
82 |
83 |
84 |
85 | 86 |
87 |

Installation

88 |

This guide describes the steps to install segment-lidar using PyPI or from source.

89 |
90 |

Step 1: Create an environment

91 |

Before installing segment-lidar, you need to create an environment by 92 | running the following commands:

93 |
conda create -n samlidar python=3.9
 94 | conda activate samlidar
 95 | 
96 |
97 |

These commands will create and activate a new Conda environment named samlidar. We recommend using Python 3.9, but feel free to test with other versions.

98 |

Please note that using a Conda environment is not mandatory, but it is highly recommended. Alternatively, you can use virtualenv.

99 |
100 |
101 |
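As a reference, a roughly equivalent setup with the standard venv module could look like the commands below (indicative only; on Windows, activate the environment with samlidar\Scripts\activate instead of the source command):
python -m venv samlidar
source samlidar/bin/activate
python -m pip install --upgrade pip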

Step 2: Install PyTorch

102 |

For the installation instructions and options, refer to the official PyTorch website: PyTorch Get Started.

103 |
104 |

Note

105 |

If you want to leverage GPU acceleration with PyTorch, 106 | make sure you have a CUDA-supported GPU and install the corresponding 107 | CUDA toolkit. Follow the instructions in the official CUDA installation guide: 108 | NVIDIA CUDA Installation Guide.

109 |
110 |
111 |
112 |

Step 3: Install segment-lidar

113 |

You can easily install segment-lidar from PyPI using the following command:

114 |
pip install segment-lidar
115 | 
116 |
117 |

Or, you can install it from source:

118 |
git clone https://github.com/Yarroudh/segment-lidar
119 | cd segment-lidar
120 | python setup.py install
121 | 
122 |
123 |

To make sure that segment-lidar is installed correctly, you can run the following command:

124 |
python -c "import segment_lidar; print(segment_lidar.__version__)"
125 | 
126 |
127 |

If the installation is successful, you should see the version that you have installed.

128 |
129 |
130 | 131 | 132 |
133 |
134 |
138 | 139 |
140 | 141 |
142 |

© Copyright 2023, Geomatics Unit - University of Liège.

143 |
144 | 145 | 146 | 147 |
148 |
149 |
150 |
151 |
152 | 157 | 158 | 159 | -------------------------------------------------------------------------------- /docs/license.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | License — segment-lidar documentation 8 | 9 | 10 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 |
27 | 59 | 60 |
64 | 65 |
66 |
67 |
68 | 75 |
76 |
77 |
78 |
79 | 80 |
81 |

License

82 |

BSD 3-Clause License

83 |

Copyright (c) 2023, University of Liège - Author: Anass Yarroudh, Geomatics Unit of ULiege

84 |

Redistribution and use in source and binary forms, with or without 85 | modification, are permitted provided that the following conditions are met:

86 |
    87 |
  1. Redistributions of source code must retain the above copyright notice, this 88 | list of conditions and the following disclaimer.

  89 |
  2. Redistributions in binary form must reproduce the above copyright notice, 90 | this list of conditions and the following disclaimer in the documentation 91 | and/or other materials provided with the distribution.

  92 |
  3. Neither the name of the copyright holder nor the names of its 93 | contributors may be used to endorse or promote products derived from 94 | this software without specific prior written permission.

  95 |
96 |

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” 97 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 98 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 99 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 100 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 101 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 102 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 103 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 104 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 105 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

106 |
107 | 108 | 109 |
110 |
111 |
114 | 115 |
116 | 117 |
118 |

© Copyright 2023, Geomatics Unit - University of Liège.

119 |
120 | 121 | 122 | 123 |
124 |
125 |
126 |
127 |
128 | 133 | 134 | 135 | -------------------------------------------------------------------------------- /docs/objects.inv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/docs/objects.inv -------------------------------------------------------------------------------- /docs/py-modindex.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Python Module Index — segment-lidar documentation 7 | 8 | 9 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 |
28 | 60 | 61 |
65 | 66 |
67 |
68 |
69 |
    70 |
  • 71 | 72 |
  • 73 |
  • 74 |
75 |
76 |
77 |
78 |
79 | 80 | 81 |

Python Module Index

82 | 83 |
84 | s 85 |
86 | 87 | 88 | 89 | 91 | 92 | 94 | 97 | 98 | 99 | 102 | 103 | 104 | 107 |
 
90 | s
95 | segment_lidar 96 |
    100 | segment_lidar.samlidar 101 |
    105 | segment_lidar.view 106 |
108 | 109 | 110 |
111 |
112 |
113 | 114 |
115 | 116 |
117 |

© Copyright 2023, Geomatics Unit - University of Liège.

118 |
119 | 120 | 121 | 122 |
123 |
124 |
125 |
126 |
127 | 132 | 133 | 134 | -------------------------------------------------------------------------------- /docs/search.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Search — segment-lidar documentation 7 | 8 | 9 | 10 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 |
28 | 60 | 61 |
65 | 66 |
67 |
68 |
69 |
    70 |
  • 71 | 72 |
  • 73 |
  • 74 |
75 |
76 |
77 |
78 |
79 | 80 | 87 | 88 | 89 |
90 | 91 |
92 | 93 |
94 |
95 |
96 | 97 |
98 | 99 |
100 |

© Copyright 2023, Geomatics Unit - University of Liège.

101 |
102 | 103 | 104 | 105 |
106 |
107 |
108 |
109 |
110 | 115 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | -------------------------------------------------------------------------------- /docs/searchindex.js: -------------------------------------------------------------------------------- 1 | Search.setIndex({"docnames": ["citation", "index", "installation", "license", "module", "tutorial"], "filenames": ["citation.rst", "index.rst", "installation.rst", "license.rst", "module.rst", "tutorial.rst"], "titles": ["Citation", "Welcome to segment-lidar\u2019s documentation!", "Installation", "License", "API", "Basic tutorial"], "terms": {"The": [0, 1, 4, 5], "us": [0, 1, 2, 3, 4, 5], "open": [0, 1], "sourc": [0, 1, 2, 3], "softwar": [0, 3], "repositori": [0, 1, 5], "ha": 0, "becom": 0, "increasingli": 0, "preval": 0, "scientif": 0, "research": [0, 1], "If": [0, 1, 2, 4], "you": [0, 1, 2, 5], "thi": [0, 1, 2, 3, 4, 5], "your": [0, 1, 5], "pleas": [0, 1, 2, 5], "make": [0, 2, 5], "sure": [0, 2, 5], "cite": [0, 1], "appropri": 0, "work": [0, 1], "recommend": [0, 2], "format": [0, 4], "i": [0, 1, 2, 3, 4, 5], "provid": [0, 3, 4, 5], "accompani": 0, "bibtex": 0, "addition": [0, 5], "compli": 0, "ani": [0, 3, 4], "licens": [0, 1], "term": 0, "condit": [0, 3], "associ": 0, "misc": [0, 1], "yarroudh": [0, 1, 2, 3], "2023": [0, 1, 3], "samlidar": [0, 1, 2, 4, 5], "author": [0, 1, 3], "anass": [0, 1, 3], "titl": [0, 1], "lidar": [0, 5], "automat": [0, 1, 5], "unsupervis": [0, 1, 5], "segment": [0, 4, 5], "anyth": [0, 1, 5], "model": [0, 1], "sam": [0, 1, 5], "from": [0, 1, 2, 3, 4, 5], "meta": [0, 1], "ai": [0, 1], "year": [0, 1], "howpublish": [0, 1], "github": [0, 1, 2], "url": [0, 1], "http": [0, 1, 2], "com": [0, 1, 2], "A": [0, 3, 4], "retriev": [0, 5], "packag": 1, "specif": [1, 3], "design": 1, "instanc": [1, 5], "aerial": 1, "data": [1, 3, 4], "It": 1, "bring": 1, "togeth": 1, "power": 1, "develop": 1, "geospati": [1, 4, 5], "samgeo": 1, "solut": 1, "paper": 1, "publish": 1, "veri": 1, "soon": 1, "latest": 1, "code": [1, 3, 5], "avail": 1, "build": 1, "top": [1, 4, 5], "exist": [1, 4], "when": [1, 5], "algorithm": [1, 4, 5], "within": 1, "also": 1, "origin": [1, 4], "specifi": [1, 5], "instal": [1, 5], "basic": 1, "tutori": 1, "api": 1, "citat": 1, "contact": 1, "u": 1, "via": 1, "email": 1, "ayarroudh": 1, "ulieg": [1, 3], "akharroubi": 1, "question": 1, "issu": 1, "tracker": 1, "bug": 1, "report": 1, "featur": 1, "request": 1, "addit": 1, "etc": 1, "guid": 2, "describ": 2, "pypi": 2, "befor": [2, 5], "need": 2, "run": 2, "follow": [2, 3, 5], "command": 2, "conda": 2, "n": [2, 4], "python": [2, 5], "9": [2, 4], "activ": 2, "new": [2, 5], "name": [2, 3], "we": [2, 5], "feel": 2, "free": 2, "test": [2, 5], "other": [2, 3], "version": 2, "note": 2, "mandatori": 2, "highli": 2, "altern": 2, "can": [2, 4, 5], "virtualenv": 2, "For": [2, 5], "instruct": 2, "option": [2, 4, 5], "refer": [2, 5], "offici": 2, "websit": 2, "get": [2, 5], "start": [2, 5], "want": 2, "leverag": 2, "gpu": 2, "acceler": 2, "have": [2, 4, 5], "cuda": [2, 4, 5], "support": [2, 4], "correspond": [2, 4, 5], "toolkit": 2, "nvidia": 2, "easili": 2, "pip": [2, 5], "Or": 2, "git": 2, "clone": 2, "cd": 2, "setup": 2, "py": 2, "To": 2, "correctli": 2, "c": [2, 3], "import": [2, 5], "segment_lidar": [2, 4, 5], "print": 2, "__version__": 2, "success": 2, "should": [2, 5], "see": [2, 5], "bsd": 3, "3": [3, 4, 5], "claus": 3, "copyright": 3, "univers": 3, "li\u00e8g": 3, "geomat": 3, "unit": [3, 4], "redistribut": 3, "binari": 3, "form": [3, 5], "without": 3, "modif": 3, "ar": [3, 4, 5], 
"permit": 3, "met": 3, "must": 3, "retain": 3, "abov": 3, "notic": 3, "list": [3, 5], "disclaim": 3, "reproduc": 3, "document": [3, 5], "materi": 3, "distribut": 3, "neither": 3, "holder": 3, "nor": 3, "its": [3, 5], "contributor": 3, "mai": 3, "endors": 3, "promot": 3, "product": 3, "deriv": 3, "prior": 3, "written": 3, "permiss": 3, "BY": 3, "THE": 3, "AND": 3, "AS": 3, "express": 3, "OR": 3, "impli": 3, "warranti": 3, "includ": 3, "BUT": 3, "NOT": 3, "limit": 3, "TO": 3, "OF": 3, "merchant": 3, "fit": 3, "FOR": 3, "particular": 3, "purpos": [3, 5], "IN": 3, "NO": 3, "event": 3, "shall": 3, "BE": 3, "liabl": 3, "direct": 3, "indirect": 3, "incident": 3, "special": 3, "exemplari": 3, "consequenti": 3, "damag": 3, "procur": 3, "substitut": 3, "good": 3, "servic": 3, "loss": 3, "profit": 3, "busi": 3, "interrupt": 3, "howev": 3, "caus": 3, "ON": 3, "theori": 3, "liabil": 3, "whether": [3, 4, 5], "contract": 3, "strict": 3, "tort": 3, "neglig": 3, "otherwis": 3, "aris": 3, "wai": 3, "out": 3, "even": 3, "IF": 3, "advis": 3, "possibl": [3, 5], "SUCH": 3, "class": [4, 5], "ckpt_path": [4, 5], "str": 4, "model_typ": [4, 5], "vit_h": [4, 5], "resolut": [4, 5], "float": 4, "0": [4, 5], "25": [4, 5], "height": 4, "int": 4, "512": 4, "width": 4, "distance_threshold": 4, "none": [4, 5], "devic": [4, 5], "sam_kwarg": [4, 5], "bool": 4, "fals": [4, 5], "intrins": [4, 5], "ndarrai": 4, "rotat": [4, 5], "translat": [4, 5], "interact": 4, "base": 4, "object": 4, "csf": [4, 5], "point": [4, 5], "class_threshold": [4, 5], "5": [4, 5], "cloth_resolut": 4, "2": [4, 5], "iter": 4, "500": [4, 5], "slope_smooth": 4, "csf_path": 4, "tupl": 4, "appli": [4, 5], "cloth": [4, 5], "simul": [4, 5], "filter": [4, 5], "ground": [4, 5], "cloud": [4, 5], "paramet": [4, 5], "np": 4, "input": [4, 5], "numpi": 4, "arrai": 4, "where": 4, "each": 4, "row": 4, "repres": 4, "x": 4, "y": 4, "z": 4, "coordin": 4, "threshold": 4, "valu": [4, 5], "classifi": 4, "non": [4, 5], "default": [4, 5], "number": 4, "boolean": 4, "indic": [4, 5], "enabl": 4, "slope": 4, "smooth": 4, "path": [4, 5], "save": [4, 5], "result": [4, 5], "alreadi": 4, "return": [4, 5], "contain": [4, 5], "three": 4, "indinc": 4, "type": [4, 5], "mask": [4, 5], "crop_n_lay": [4, 5], "1": [4, 5], "crop_n_points_downscale_factor": [4, 5], "min_mask_region_area": [4, 5], "200": [4, 5], "points_per_sid": [4, 5], "pred_iou_thresh": [4, 5], "stability_score_thresh": [4, 5], "92": [4, 5], "read": [4, 5], "classif": 4, "file": [4, 5], "rais": 4, "valueerror": 4, "view": [4, 5], "topview": [4, 5], "pinholeview": [4, 5], "image_path": [4, 5], "raster": [4, 5], "tif": [4, 5], "labels_path": [4, 5], "label": [4, 5], "image_exist": 4, "label_exist": 4, "id": 4, "imag": [4, 5], "union": 4, "viewpoint": [4, 5], "output": 4, "rgb": 4, "text_prompt": 4, "text": 4, "box_threshold": 4, "24": 4, "text_threshold": 4, "15": 4, "write": [4, 5], "segment_id": [4, 5], "non_ground": [4, 5], "save_path": [4, 5], "la": [4, 5], "ground_path": 4, "laz": [4, 5], "true": [4, 5], "convert": 4, "pinhol": [4, 5], "camera": [4, 5], "vice": 4, "versa": 4, "cloud_to_imag": 4, "an": [4, 5], "extrins": [4, 5], "4x4": 4, "matrix": [4, 5], "fx": 4, "fy": 4, "cx": 4, "cy": 4, "6x1": 4, "shape": 4, "6": [4, 5], "assum": 4, "white": 4, "color": 4, "255": 4, "last": 4, "column": 4, "per": 4, "pixel": 4, "distanc": 4, "greater": 4, "than": [4, 5], "ignor": 4, "valid": 4, "invalid": 4, "image_to_cloud": 4, "": 4, "planar": 4, "minx": 4, "minimum": 4, "bound": 4, "box": 4, "maxx": 4, "maximum": 4, "mini": 4, 
"maxi": 4, "In": 5, "learn": 5, "how": 5, "modul": 5, "system": 5, "more": 5, "inform": 5, "page": 5, "download": 5, "here": 5, "pointcloud": 5, "wa": 5, "ahn": 5, "4": 5, "visit": 5, "viewer": 5, "click": 5, "link": 5, "below": 5, "vit": 5, "h": 5, "vit_l": 5, "l": 5, "vit_b": 5, "b": 5, "necessari": 5, "defin": 5, "choos": 5, "between": 5, "exampl": 5, "do": 5, "either": 5, "k": 5, "r": 5, "t": 5, "3x3": 5, "3x1": 5, "vector": 5, "creat": 5, "instanti": 5, "sam_vit_h_4b8939": 5, "pth": 5, "method": 5, "argument": 5, "perform": 5, "requir": 5, "respect": 5, "_": 5, "now": 5, "entir": 5, "look": 5, "like": 5, "load": 5, "8": 5, "scalar": 5, "field": 5, "call": 5, "visual": 5, "further": 5, "process": 5, "recommand": 5, "cloudcompar": 5, "figur": 5, "show": 5, "allow": 5, "gui": 5, "move": 5, "zoom": 5, "mous": 5, "open3d": 5, "detail": 5, "onc": 5, "done": 5, "press": 5, "p": 5, "esc": 5, "quit": 5, "o": 5, "60": 5, "85": 5, "laundri": 5, "makedir": 5, "exist_ok": 5, "set": 5, "These": 5, "pass": 5, "tabl": 5, "descript": 5, "infer": 5, "kwarg": 5, "geo": 5, "spatial": 5, "10": 5, "90": 5, "about": 5, "complet": 5}, "objects": {"segment_lidar": [[4, 0, 0, "-", "samlidar"], [4, 0, 0, "-", "view"]], "segment_lidar.samlidar": [[4, 1, 1, "", "SamLidar"]], "segment_lidar.samlidar.SamLidar": [[4, 2, 1, "", "csf"], [4, 1, 1, "", "mask"], [4, 2, 1, "", "read"], [4, 2, 1, "", "segment"], [4, 1, 1, "", "text_prompt"], [4, 2, 1, "", "write"]], "segment_lidar.view": [[4, 1, 1, "", "PinholeView"], [4, 1, 1, "", "TopView"]], "segment_lidar.view.PinholeView": [[4, 2, 1, "", "cloud_to_image"], [4, 2, 1, "", "image_to_cloud"]], "segment_lidar.view.TopView": [[4, 2, 1, "", "cloud_to_image"], [4, 2, 1, "", "image_to_cloud"]]}, "objtypes": {"0": "py:module", "1": "py:class", "2": "py:method"}, "objnames": {"0": ["py", "module", "Python module"], "1": ["py", "class", "Python class"], "2": ["py", "method", "Python method"]}, "titleterms": {"citat": 0, "welcom": 1, "segment": [1, 2], "lidar": [1, 2], "": 1, "document": 1, "about": 1, "content": 1, "support": 1, "instal": 2, "step": 2, "1": 2, "creat": 2, "an": 2, "environ": 2, "2": 2, "pytorch": 2, "3": 2, "licens": 3, "api": 4, "basic": 5, "tutori": 5, "prerequisit": 5, "sampl": 5, "data": 5, "model": 5, "checkpoint": 5, "usag": 5, "interact": 5, "mode": 5, "configur": 5}, "envversion": {"sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx": 60}, "alltitles": {"Citation": [[0, "citation"]], "Installation": [[2, "installation"]], "Step 1: Create an environment": [[2, "step-1-create-an-environment"]], "Step 2: Install PyTorch": [[2, "step-2-install-pytorch"]], "Step 3: Install segment-lidar": [[2, "step-3-install-segment-lidar"]], "License": [[3, "license"]], "API": [[4, "module-segment_lidar.samlidar"]], "Welcome to segment-lidar\u2019s documentation!": [[1, "welcome-to-segment-lidar-s-documentation"]], "About": [[1, "about"]], "Contents": [[1, null]], "Support": [[1, "support"]], "Basic tutorial": [[5, "basic-tutorial"]], "Prerequisites": [[5, "prerequisites"]], "Sample data": [[5, "sample-data"]], "Model checkpoints": [[5, "model-checkpoints"]], "Basic usage": [[5, "basic-usage"]], "Interactive mode": [[5, "interactive-mode"]], "Configuration": [[5, "configuration"]]}, "indexentries": {"pinholeview (class in segment_lidar.view)": [[4, 
"segment_lidar.view.PinholeView"]], "samlidar (class in segment_lidar.samlidar)": [[4, "segment_lidar.samlidar.SamLidar"]], "samlidar.mask (class in segment_lidar.samlidar)": [[4, "segment_lidar.samlidar.SamLidar.mask"]], "samlidar.text_prompt (class in segment_lidar.samlidar)": [[4, "segment_lidar.samlidar.SamLidar.text_prompt"]], "topview (class in segment_lidar.view)": [[4, "segment_lidar.view.TopView"]], "cloud_to_image() (segment_lidar.view.pinholeview method)": [[4, "segment_lidar.view.PinholeView.cloud_to_image"]], "cloud_to_image() (segment_lidar.view.topview method)": [[4, "segment_lidar.view.TopView.cloud_to_image"]], "csf() (segment_lidar.samlidar.samlidar method)": [[4, "segment_lidar.samlidar.SamLidar.csf"]], "image_to_cloud() (segment_lidar.view.pinholeview method)": [[4, "segment_lidar.view.PinholeView.image_to_cloud"]], "image_to_cloud() (segment_lidar.view.topview method)": [[4, "segment_lidar.view.TopView.image_to_cloud"]], "module": [[4, "module-segment_lidar.samlidar"], [4, "module-segment_lidar.view"]], "read() (segment_lidar.samlidar.samlidar method)": [[4, "segment_lidar.samlidar.SamLidar.read"]], "segment() (segment_lidar.samlidar.samlidar method)": [[4, "segment_lidar.samlidar.SamLidar.segment"]], "segment_lidar.samlidar": [[4, "module-segment_lidar.samlidar"]], "segment_lidar.view": [[4, "module-segment_lidar.view"]], "write() (segment_lidar.samlidar.samlidar method)": [[4, "segment_lidar.samlidar.SamLidar.write"]]}}) -------------------------------------------------------------------------------- /paper/figures/extrinsic.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/paper/figures/extrinsic.jpg -------------------------------------------------------------------------------- /paper/figures/intrinsic.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/paper/figures/intrinsic.jpg -------------------------------------------------------------------------------- /paper/figures/pinhole_inference.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/paper/figures/pinhole_inference.png -------------------------------------------------------------------------------- /paper/figures/pinhole_results.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/paper/figures/pinhole_results.png -------------------------------------------------------------------------------- /paper/figures/top_inference.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/paper/figures/top_inference.png -------------------------------------------------------------------------------- /paper/figures/top_results.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/paper/figures/top_results.png -------------------------------------------------------------------------------- /paper/figures/views.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/paper/figures/views.png -------------------------------------------------------------------------------- /paper/paper.bib: -------------------------------------------------------------------------------- 1 | @misc{kirillov:2023, 2 | title={Segment Anything}, 3 | author={Alexander Kirillov and Eric Mintun and Nikhila Ravi and Hanzi Mao and Chloe Rolland and Laura Gustafson and Tete Xiao and Spencer Whitehead and Alexander C. Berg and Wan-Yen Lo and Piotr Dollár and Ross Girshick}, 4 | year={2023}, 5 | eprint={2304.02643}, 6 | archivePrefix={arXiv}, 7 | primaryClass={cs.CV} 8 | } 9 | 10 | @article{wu:2023, 11 | doi = {10.21105/joss.05663}, 12 | url = {https://doi.org/10.21105/joss.05663}, 13 | year = {2023}, 14 | publisher = {The Open Journal}, 15 | volume = {8}, 16 | number = {89}, 17 | pages = {5663}, 18 | author = {Qiusheng Wu and Lucas Prado Osco}, 19 | title = {samgeo: A Python package for segmenting geospatial data with the Segment Anything Model (SAM)}, 20 | journal = {Journal of Open Source Software} 21 | } 22 | 23 | @article{zhang:2016, 24 | author = {Zhang, Wuming and Qi, Jianbo and Wan, Peng and Wang, Hongtao and Xie, Donghui and Wang, Xiaoyan and Yan, Guangjian}, 25 | title = {An Easy-to-Use Airborne LiDAR Data Filtering Method Based on Cloth Simulation}, 26 | journal = {Remote Sensing}, 27 | volume = {8}, 28 | year = {2016}, 29 | number = {6}, 30 | article-number = {501}, 31 | url = {https://www.mdpi.com/2072-4292/8/6/501}, 32 | issn = {2072-4292}, 33 | doi = {10.3390/rs8060501} 34 | } 35 | 36 | @article{su:2022, 37 | author = {Su, Zhonghua and Zhou, Guiyun and Luo, Fulin and Li, Shihua and Ma, Kai-Kuang}, 38 | title = {Semantic Segmentation of 3D Point Clouds Based on High Precision Range Search Network}, 39 | journal = {Remote Sensing}, 40 | volume = {14}, 41 | year = {2022}, 42 | number = {22}, 43 | article-number = {5649}, 44 | url = {https://www.mdpi.com/2072-4292/14/22/5649}, 45 | issn = {2072-4292}, 46 | doi = {10.3390/rs14225649} 47 | } 48 | 49 | @article{shevrin:2020, 50 | author = {Shervin Minaee and 51 | Yuri Boykov and 52 | Fatih Porikli and 53 | Antonio Plaza and 54 | Nasser Kehtarnavaz and 55 | Demetri Terzopoulos}, 56 | title = {Image Segmentation Using Deep Learning: {A} Survey}, 57 | journal = {CoRR}, 58 | volume = {abs/2001.05566}, 59 | year = {2020}, 60 | url = {https://arxiv.org/abs/2001.05566}, 61 | eprinttype = {arXiv}, 62 | eprint = {2001.05566}, 63 | timestamp = {Fri, 17 Jan 2020 14:07:30 +0100}, 64 | biburl = {https://dblp.org/rec/journals/corr/abs-2001-05566.bib}, 65 | bibsource = {dblp computer science bibliography, https://dblp.org} 66 | } 67 | 68 | @article{chenfeng:2021, 69 | author = {Chenfeng Xu and 70 | Shijia Yang and 71 | Bohan Zhai and 72 | Bichen Wu and 73 | Xiangyu Yue and 74 | Wei Zhan and 75 | Peter Vajda and 76 | Kurt Keutzer and 77 | Masayoshi Tomizuka}, 78 | title = {Image2Point: 3D Point-Cloud Understanding with Pretrained 2D ConvNets}, 79 | journal = {CoRR}, 80 | volume = {abs/2106.04180}, 81 | year = {2021}, 82 | url = {https://arxiv.org/abs/2106.04180}, 83 | eprinttype = {arXiv}, 84 | eprint = {2106.04180}, 85 | timestamp = {Fri, 11 Jun 2021 11:04:16 +0200}, 86 | biburl = {https://dblp.org/rec/journals/corr/abs-2106-04180.bib}, 87 | bibsource = {dblp computer science bibliography, https://dblp.org} 88 | } 89 | 90 | @article{liu:2023, 91 | title={Grounding 
dino: Marrying dino with grounded pre-training for open-set object detection}, 92 | author={Liu, Shilong and Zeng, Zhaoyang and Ren, Tianhe and Li, Feng and Zhang, Hao and Yang, Jie and Li, Chunyuan and Yang, Jianwei and Su, Hang and Zhu, Jun and others}, 93 | journal={arXiv preprint arXiv:2303.05499}, 94 | year={2023} 95 | } 96 | 97 | @misc{idea:2023, 98 | author = {{IDEA Research}}, 99 | title = {Grounded-Segment-Anything}, 100 | year = {2023}, 101 | howpublished = {\url{https://github.com/IDEA-Research/Grounded-Segment-Anything}}, 102 | } 103 | 104 | @misc{zhao:2023, 105 | title={Fast Segment Anything}, 106 | author={Xu Zhao and Wenchao Ding and Yongqi An and Yinglong Du and Tao Yu and Min Li and Ming Tang and Jinqiao Wang}, 107 | year={2023}, 108 | eprint={2306.12156}, 109 | archivePrefix={arXiv}, 110 | primaryClass={cs.CV} 111 | } 112 | 113 | @misc{yulan:2020, 114 | title={Deep Learning for 3D Point Clouds: A Survey}, 115 | author={Yulan Guo and Hanyun Wang and Qingyong Hu and Hao Liu and Li Liu and Mohammed Bennamoun}, 116 | year={2020}, 117 | eprint={1912.12033}, 118 | archivePrefix={arXiv}, 119 | primaryClass={cs.CV} 120 | } 121 | 122 | @article{david:2019, 123 | author = {Boas, David and Poltaretskyi, Sergii and Ramel, Jean-Yves and Chaoui, Jean and Berhouet, Julien and Slimane, Mohamed}, 124 | year = {2019}, 125 | month = {07}, 126 | title = {A Benchmark Dataset for RGB-D Sphere Based Calibration} 127 | } 128 | 129 | @incollection{jerome:2022, 130 | title = {11 - Vision-based sensing for assessing and monitoring civil infrastructures}, 131 | editor = {Jerome P. Lynch and Hoon Sohn and Ming L. Wang}, 132 | booktitle = {Sensor Technologies for Civil Infrastructures (Second Edition)}, 133 | publisher = {Woodhead Publishing}, 134 | edition = {Second Edition}, 135 | pages = {309-333}, 136 | year = {2022}, 137 | series = {Woodhead Publishing Series in Civil and Structural Engineering}, 138 | isbn = {978-0-08-102696-0}, 139 | doi = {https://doi.org/10.1016/B978-0-08-102696-0.00016-6}, 140 | url = {https://www.sciencedirect.com/science/article/pii/B9780081026960000166}, 141 | author = {Y.F. Ji}, 142 | keywords = {Computer vision, Displacement measurement, Photogrammetry, Videogrammetry}, 143 | } -------------------------------------------------------------------------------- /paper/paper.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: 'Segment-Lidar: Automatic Unsupervised LiDAR-Segmentation using Segment-Anything Model (SAM)' 3 | tags: 4 | - Python 5 | - Segment-Anything Model 6 | - LiDAR 7 | - Machine Learning 8 | - Unsupervised segmentation 9 | authors: 10 | - name: Anass Yarroudh 11 | orcid: 0000-0003-1387-8288 12 | corresponding: true 13 | affiliation: 1 14 | - name: Abderrazzaq Kharroubi 15 | orcid: 0000-0001-7712-6208 16 | affiliation: 1 17 | - name: Roland Billen 18 | orcid: 0000-0002-3101-8057 19 | affiliation: 1 20 | affiliations: 21 | - name: Geomatics Unit, University of Liège, Allée du six Août 19, 4000 Liège, Belgium 22 | index: 1 23 | date: 31 August 2023 24 | bibliography: paper.bib 25 | --- 26 | 27 | # Summary 28 | 29 | `Segment-Lidar` is a Python package for automatic unsupervised segmentation of aerial LiDAR data. It proposes an image-based approach for segmenting aerial point clouds using `Segment-Anything Model (SAM)` package from [Meta AI](https://github.com/facebookresearch). 
30 | 31 | The API provides functions and classes to define the segmentation model and its parameters, and also to handle the transformation between 3D point clouds and images. The package also relies on another dependency that makes use of `SAM`, namely the `Segment-Geospatial` package from [Open Geospatial Solutions](https://github.com/opengeos) for segmenting geospatial data. It also makes use of `Grounding DINO` from [The International Digital Economy Academy Research (IDEA-Research)](https://github.com/IDEA-Research) to detect and segment the 3D point cloud with text prompts. 32 | 33 | # Statement of need 34 | 35 | The swift advancement of data acquisition technologies like LiDAR sensors and depth cameras has led to the widespread use of 3D point cloud data, which, in turn, has sparked a growing interest among researchers in the field of 3D scene comprehension. However, the comprehension of such unstructured, disordered and sparse point clouds yields technical challenges [@su:2022]. 36 | 37 | One approach for point cloud learning is to use deep neural networks. Deep learning has been successfully used to solve various 2D vision problems. In fact, due to the effective use of deep learning models in computer vision applications, many works focused on developing image segmentation approaches using deep learning models [@shevrin:2020]. Recently, deep learning applied to point clouds has also become very popular, and many methods have been suggested to solve various problems in this field [@yulan:2020]. Some of these methods process the point cloud as a structured image to leverage the performance of 2D deep learning models. These models are also known for requiring fewer computational and data resources for finetuning in order to obtain competitive performance on downstream tasks [@chenfeng:2021]. 38 | 39 | The Segment Anything Model (SAM) is one of the most popular image segmentation models released recently. The model was introduced by @kirillov:2023 as part of the Segment Anything (SA) project and has three components: an image encoder, a flexible prompt encoder and a fast mask decoder. It was trained on the SA-1B dataset, which consists of 11 million licensed and privacy-respecting images and 1.1 billion high-quality segmentation masks. 40 | 41 | Due to its zero-shot performance, SAM served as the foundation for the development of many other packages. `Segment-Geospatial` is one of these packages; it was designed to segment geospatial data using the Segment Anything Model [@wu:2023]. `Grounded-SAM` is another package that combines `Grounding DINO` [@liu:2023] with the `Segment Anything Model` to automatically detect and segment images using text prompts. 42 | 43 | Regarding our work, we propose `Segment-Lidar` as an open-source Python package for automatic unsupervised 3D LiDAR segmentation using the Segment Anything Model (SAM) and other dependencies that make use of it. 44 | 45 | # Overview of the method 46 | 47 | The idea behind using `SAM` is to automatically identify and separate different instances in 3D LiDAR data through automated image segmentation. The process can be divided into four main steps: 48 | 49 | ## Step 1: Ground filtering using Cloth Simulation Filter [Optional] 50 | 51 | Ground filtering is optional but preferred for aerial LiDAR data with a top viewpoint. It serves two primary purposes that significantly enhance the accuracy and reliability of object detection and segmentation.
First, ground filtering helps improve the detection of objects within the image by eliminating the interference of ground points. This is especially vital for identifying objects such as buildings, vehicles, and infrastructure, as it allows for a clearer focus on target objects against a clutter-free background. Second, ground filtering prevents the projection of segmentation results onto ground points, especially for tall structures like trees and poles. 52 | 53 | Our package uses the Cloth Simulation Filter (CSF) to separate the ground points from non-ground points. The algorithm was proposed by @zhang:2016 as an implementation of the Cloth Simulation algorithm used in 3D computer graphics to simulate fabric attached to an object. 54 | 55 | ## Step 2: Projection of the 3D point cloud into a two-dimensional image 56 | 57 | This projection is based on two possible views: top view and pinhole camera view. 58 | 59 | 1. **Top Projection**: 60 | 61 | In a top view projection, the 3D coordinates (X, Y, Z) are projected onto the 2D coordinates (u, v) on the image plane (\autoref{fig:views}). 62 | 63 | The projection of each point is given by: 64 | 65 | $$u = \frac{x - x_{min}}{d}$$ 66 | $$v = \frac{y_{max} - y}{d}$$ 67 | $$P_{uv} = \begin{bmatrix} 68 | r \\ 69 | g \\ 70 | b 71 | \end{bmatrix} 72 | $$ 73 | 74 | `u` represents the horizontal axis in the image. 75 | 76 | `v` represents the vertical axis in the image. 77 | 78 | `x, y and z` are the point coordinates. 79 | 80 | `r, g and b` are the RGB colors of the point. 81 | 82 | `d` represents the pixel resolution. 83 | 84 | 85 | 2. **Pinhole Camera View**: 86 | 87 | This transformation is modeled by a pinhole camera, which mimics the perspective effects of capturing scenes through a small aperture. The camera maps points from a three-dimensional space to a two-dimensional image plane by combining intrinsic and extrinsic parameters. 88 | 89 | The camera has intrinsic parameters that define its internal properties. As shown in \autoref{fig:intrinsic}, these parameters include the focal length $f$ and the principal point $(u_{0}, v_{0})$, which are combined in a $3\times3$ camera matrix $K$: 90 | 91 | $$ 92 | K = \begin{bmatrix} 93 | f_{x} & 0 & u_{0} \\ 94 | 0 & f_{y} & v_{0} \\ 95 | 0 & 0 & 1 96 | \end{bmatrix} 97 | $$ 98 | 99 | ![Camera intrinsic parameters [@david:2019].\label{fig:intrinsic}](figures/intrinsic.jpg) 100 | 101 | In addition to the internal parameters, the position and orientation of the camera in 3D space are described by extrinsic parameters. These include the $3\times3$ rotation matrix $R$ and the $3\times1$ translation vector $T$ (\autoref{fig:extrinsic}). Therefore, the camera projection matrix is defined as follows: 102 | 103 | $$P = K[R|T]$$ 104 | 105 | ![Pinhole camera model [@jerome:2022].\label{fig:extrinsic}](figures/extrinsic.jpg) 106 | 107 | The projection matrix $P$ is then used to calculate the pixel coordinates $(u, v)$ of a 3D point by: first, converting the world-coordinates $(x_{w}, y_{w}, z_{w})$ of the point to the camera-coordinate system $(x_{c}, y_{c}, z_{c})$, then the camera-coordinates to image-coordinates $(u, v)$.
108 | 109 | $$ X_{C} = R \cdot (X_{W} - C_{0})$$ 110 | 111 | $C_{0}$ is the camera center in the world-coordinate system: $C_{0} = -R^T \cdot T$ 112 | 113 | $$ x = K \cdot X_{C}$$ 114 | 115 | where $x = [u', v', w]$ 116 | 117 | $(u', v')$ are normalized by the third coordinate $w$ to obtain the image-coordinates as follows: 118 | 119 | $$u = \frac{u'}{w}$$ 120 | 121 | $$v = \frac{v'}{w}$$ 122 | 123 | ![Different viewpoints provided by SamLidar.\label{fig:views}](figures/views.png) 124 | 125 | ## Step 3: Inference on the generated image 126 | 127 | The Segment-Anything Model (SAM) is used to generate masks for all objects in the resulting image [@kirillov:2023]. Additionally, Segment-Geospatial [@wu:2023] is implemented to leverage SAM for geospatial analysis by enabling users to achieve results with minimal parameter tuning. The results for sample data, for both top view and pinhole camera view, are illustrated in \autoref{fig:top_inference} and \autoref{fig:pinhole_inference} respectively. 128 | 129 | ![Inference results using SAM and SamGeo (Top View).\label{fig:top_inference}](figures/top_inference.png) 130 | 131 | ![Inference results using SAM (Pinhole Camera View).\label{fig:pinhole_inference}](figures/pinhole_inference.png){ width=65% } 132 | 133 | ## Step 4: Reprojection of results onto the 3D point cloud 134 | 135 | In the final step of our methodology, we seamlessly reproject the segmentation results onto the original point cloud (\autoref{fig:top_results} and \autoref{fig:pinhole_results}). This associates each point in the cloud with its corresponding segment label obtained from the 2D image segmentation. Mathematically, this process involves identifying the 2D image coordinates for each point in the point cloud, which is achieved by inverting the projection used (top view or pinhole camera view). Once the corresponding 2D image coordinates are identified, we assign the segment label from the segmentation map to the corresponding point in the cloud. 136 | 137 | ![Top view results.\label{fig:top_results}](figures/top_results.png) 138 | 139 | ![Pinhole Camera view results.\label{fig:pinhole_results}](figures/pinhole_results.png){ width=60% } 140 | 141 | # Use of the package 142 | 143 | The package is available as a Python library and can be installed directly from [PyPI](https://pypi.org/project/segment-lidar/). We recommend using `Python>=3.9`. It is also required to install [PyTorch](https://pytorch.org/) before installing `segment-lidar`. 144 | 145 | The usage of `Segment-Lidar` is comprehensively detailed in the accompanying documentation, which includes tutorials providing step-by-step instructions. 146 | 147 | The package also offers an API comprising classes and functions to ensure interoperability with other libraries for numerical computing, image processing and machine learning.
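As an illustration, a minimal sketch of the workflow with this API is given below. It is based on the classes and methods documented for the package (`SamLidar`, `TopView`, `read`, `csf`, `segment`, `write`); the file paths and the SAM checkpoint name are placeholders, and exact argument names and return values may differ slightly between versions and should be checked against the package documentation.

```python
from segment_lidar import samlidar, view

# Choose a projection of the point cloud (a PinholeView class is also provided)
viewpoint = view.TopView()

# Instantiate the model with a SAM checkpoint (placeholder path)
model = samlidar.SamLidar(ckpt_path="sam_vit_h_4b8939.pth")

# Read the input point cloud (LAS/LAZ), placeholder file name
points = model.read("pointcloud.las")

# Optional ground filtering with the Cloth Simulation Filter (Step 1)
cloud, non_ground, ground = model.csf(points)

# Project to an image, run SAM on it, and map the labels back to 3D (Steps 2-4)
labels, *_ = model.segment(points=cloud, view=viewpoint,
                           image_path="raster.tif", labels_path="labeled.tif")

# Save the segmented point cloud with one segment ID per point
model.write(points=points, non_ground=non_ground, ground=ground,
            segment_ids=labels, save_path="segmented.las")
```

This sketch mirrors the four steps described above: ground filtering, projection, inference on the generated image, and reprojection of the labels onto the point cloud.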
148 | 149 | # References -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pycocotools==2.0.6 2 | click==8.1.3 3 | opencv-python==4.7.0.72 4 | numpy==1.24.3 5 | supervision==0.8.0 6 | segment-geospatial==0.8.1 7 | rasterio==1.3.7 8 | laspy==2.4.1 9 | laszip==0.2.3 10 | cloth-simulation-filter==1.1.4 11 | open3d==0.17.0 -------------------------------------------------------------------------------- /segment_lidar/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yarroudh/segment-lidar/1867175f4fa8fd4fbae3182a23d6b217df5b9b77/segment_lidar/__init__.py -------------------------------------------------------------------------------- /segment_lidar/view.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2023 - University of Liège 2 | # Author : Anass Yarroudh (ayarroudh@uliege.be), Geomatics Unit of ULiege 3 | # This file is distributed under the BSD-3 licence. See LICENSE file for complete text of the license. 4 | 5 | import numpy as np 6 | from typing import Tuple 7 | import cv2 8 | import open3d as o3d 9 | import matplotlib.pyplot as plt 10 | import json 11 | import os 12 | import time 13 | 14 | 15 | class TopView: 16 | """ 17 | The TopView class converts a point cloud to a top view image and vice versa. 18 | """ 19 | def __init__(self) -> None: 20 | """ 21 | Initializes a new instance of the CubicView class. 22 | """ 23 | pass 24 | 25 | def cloud_to_image(self, points: np.ndarray, resolution: float) -> np.ndarray: 26 | """ 27 | Converts a point cloud to a planar image. 28 | 29 | :param points: An array of points in the cloud, where each row represents a point. 30 | The array shape can be (N, 3) or (N, 6). 31 | If the shape is (N, 3), each point is assumed to have white color (255, 255, 255). 32 | If the shape is (N, 6), the last three columns represent the RGB color values for each point. 33 | :type points: ndarray 34 | :param minx: The minimum x-coordinate value of the cloud bounding box. 35 | :type minx: float 36 | :param maxx: The maximum x-coordinate value of the cloud bounding box. 37 | :type maxx: float 38 | :param miny: The minimum y-coordinate value of the cloud bounding box. 39 | :type miny: float 40 | :param maxy: The maximum y-coordinate value of the cloud bounding box. 41 | :type maxy: float 42 | :param resolution: The resolution of the image in units per pixel. 43 | :type resolution: float 44 | :return: An image array representing the point cloud, where each pixel contains the RGB color values 45 | of the corresponding point in the cloud. 46 | :rtype: ndarray 47 | :raises ValueError: If the shape of the points array is not valid or if any parameter is invalid. 
48 | """ 49 | minx, maxx = np.min(points[:, 0]), np.max(points[:, 0]) 50 | miny, maxy = np.min(points[:, 1]), np.max(points[:, 1]) 51 | minz, maxz = np.min(points[:, 2]), np.max(points[:, 2]) 52 | 53 | width = int((maxx - minx) / resolution) + 1 54 | height = int((maxy - miny) / resolution) + 1 55 | 56 | image = np.zeros((height, width, 3), dtype=np.uint8) 57 | for i, point in enumerate(points): 58 | if points.shape[1] == 3: 59 | x, y, z, *_ = point 60 | r, g, b = np.array([255, 255, 255]) 61 | else: 62 | x, y, z, r, g, b = point 63 | 64 | pixel_x = int((x - minx) / resolution) 65 | pixel_y = int((maxy - y) / resolution) 66 | criterion = z 67 | 68 | closest_criterion = np.zeros((height, width), dtype=np.float32) 69 | closest_criterion[pixel_y, pixel_x] = criterion 70 | 71 | if criterion >= closest_criterion[pixel_y, pixel_x]: 72 | image[pixel_y, pixel_x] = np.array([r, g, b]) 73 | 74 | return image 75 | 76 | 77 | def image_to_cloud(self, points: np.ndarray, image: np.ndarray, resolution: float) -> np.ndarray: 78 | """ 79 | Converts an image to a point cloud. 80 | 81 | :param points: An array of points in the cloud, where each row represents a point. 82 | The array shape can be (N, 3) or (N, 6). 83 | If the shape is (N, 3), each point is assumed to have white color (255, 255, 255). 84 | If the shape is (N, 6), the last three columns represent the RGB color values for each point. 85 | :type points: ndarray 86 | :param image: An image array representing the point cloud, where each pixel contains the RGB color values of the corresponding point in the cloud. 87 | :type image: ndarray 88 | :param resolution: The resolution of the image in units per pixel. 89 | :type resolution: float 90 | :return: An array of segments' IDs in the cloud, where each row represents the segment's ID of a point. 91 | :rtype: ndarray 92 | :raises ValueError: If the shape of the points array is not valid or if any parameter is invalid. 93 | """ 94 | minx, maxx = np.min(points[:, 0]), np.max(points[:, 0]) 95 | miny, maxy = np.min(points[:, 1]), np.max(points[:, 1]) 96 | minz, maxz = np.min(points[:, 2]), np.max(points[:, 2]) 97 | 98 | segment_ids = [] 99 | unique_values = {} 100 | image = np.asarray(image) 101 | 102 | for i, point in enumerate(points): 103 | x, y, z, *_ = point 104 | 105 | pixel_x = int((x - minx) / resolution) 106 | pixel_y = int((maxy - y) / resolution) 107 | 108 | if not (0 <= pixel_x < image.shape[1]) or not (0 <= pixel_y < image.shape[0]): 109 | segment_ids.append(-1) 110 | continue 111 | 112 | rgb = image[pixel_y, pixel_x] 113 | 114 | if rgb not in unique_values: 115 | unique_values[rgb] = len(unique_values) 116 | 117 | id = unique_values[rgb] 118 | segment_ids.append(id) 119 | 120 | return segment_ids 121 | 122 | 123 | class PinholeView: 124 | """ 125 | The PinholeView class converts a point cloud to a pinhole camera view image and vice versa. 126 | """ 127 | def __init__(self, interactive: bool = True) -> None: 128 | """ 129 | Initializes a new instance of the CustomCameraView class. 130 | """ 131 | self.interactive = interactive 132 | pass 133 | 134 | def cloud_to_image(self, points: np.ndarray, resolution: float = 0.1, rotation: np.ndarray = None, translation: np.ndarray = None, intrinsics: np.ndarray = None, distance_threshold: float = None) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: 135 | """ 136 | Converts a point cloud to an image. 137 | 138 | :param extrinsics: The extrinsics matrix of the camera. 
139 | :type extrinsics: ndarray (4x4) 140 | :param intrinsics: The intrinsics matrix of the camera. 141 | :type intrinsics: ndarray (width, height, fx, fy, cx, cy) (6x1) 142 | :param points: An array of points in the cloud, where each row represents a point. 143 | The array shape can be (N, 3) or (N, 6). 144 | If the shape is (N, 3), each point is assumed to have white color (255, 255, 255). 145 | If the shape is (N, 6), the last three columns represent the RGB color values for each point. 146 | :type points: ndarray 147 | :param resolution: The resolution of the image in units per pixel. 148 | :type resolution: float 149 | :param distance_threshold: An optional distance threshold. Points with distances greater than this threshold are ignored. 150 | :type distance_threshold: float 151 | :return: A tuple containing: 152 | - An image array representing the point cloud, where each pixel contains the RGB color values of the corresponding point in the cloud. 153 | - An array of pixel x-coordinates in the image. 154 | - An array of pixel y-coordinates in the image. 155 | :rtype: tuple of ndarrays 156 | :raises ValueError: If the shape of the points array is not valid or if any parameter is invalid. 157 | """ 158 | 159 | # Calculate the width and height of the image 160 | minx, maxx = np.min(points[:, 0]), np.max(points[:, 0]) 161 | miny, maxy = np.min(points[:, 1]), np.max(points[:, 1]) 162 | 163 | width = int((maxx - minx) / resolution) + 1 164 | height = int((maxy - miny) / resolution) + 1 165 | 166 | if not self.interactive: 167 | # Create 4x4 extrinsics matrix 168 | extrinsics = np.eye(4) 169 | extrinsics[:3, :3] = rotation 170 | extrinsics[:3, 3] = translation 171 | 172 | # Separate the points into 3D coordinates and color values 173 | coords = points[:, :3] 174 | colors = points[:, 3:6] 175 | 176 | # Camera center 177 | C = -np.dot(np.linalg.inv(rotation), translation) 178 | 179 | # Calculate points distance from the camera center 180 | distances = np.linalg.norm(coords - C, axis=1) 181 | 182 | # Filter points based on the distance threshold 183 | if distance_threshold is not None: 184 | coords = coords[distances <= distance_threshold] 185 | colors = colors[distances <= distance_threshold] 186 | 187 | # Project 3D points to 2D image using projectPoints 188 | if coords.shape[0] > 0: 189 | points_2d, _ = cv2.projectPoints(coords, rotation, translation, intrinsics, None) 190 | else: 191 | image = np.zeros((height, width, 3), dtype=np.uint8) 192 | print("WARNING: No points were projected to the image.") 193 | print("This can happen if the distance threshold is too small.") 194 | return image, intrinsics, extrinsics 195 | 196 | # Create an empty image 197 | image = np.zeros((height, width, 3), dtype=np.uint8) 198 | 199 | # Create an empty depth map 200 | depth_map = np.full((height, width), np.inf, dtype=np.float32) 201 | 202 | # Fill the image with the color values and make sure that the points are within the image boundaries 203 | # Also take the point with the smallest distance to the camera center 204 | for i, point in enumerate(points_2d): 205 | x, y = point[0] 206 | if 0 <= x < width and 0 <= y < height: 207 | if image[int(y), int(x)].any(): 208 | existing_dist = depth_map[int(y), int(x)] 209 | curr_dist = distances[i] 210 | if curr_dist < existing_dist: 211 | image[int(y), int(x)] = colors[i] 212 | depth_map[int(y), int(x)] = curr_dist 213 | else: 214 | image[int(y), int(x)] = colors[i] 215 | depth_map[int(y), int(x)] = distances[i] 216 | 217 | return image, intrinsics, extrinsics 218 | 
222 |         else:
223 |             # Define the point cloud
224 |             pcd = o3d.geometry.PointCloud()
225 |             pcd.points = o3d.utility.Vector3dVector(points[:, :3])
226 |             # Points without color (shape (N, 3)) are assumed to be white, as documented above
227 |             colors = points[:, 3:6] if points.shape[1] >= 6 else np.full((points.shape[0], 3), 255)
228 |             pcd.colors = o3d.utility.Vector3dVector(colors / 255)
229 | 
230 |             # Visualize the point cloud and save the image
231 |             vis = o3d.visualization.VisualizerWithKeyCallback()
232 |             vis.create_window(visible=True, width=1000, height=700)
233 |             vis.add_geometry(pcd)
234 |             render = vis.get_render_option()
235 |             render.point_size = 1
236 |             render.background_color = np.asarray([0, 0, 0])
237 |             vis.run()
238 |             vis.destroy_window()
239 | 
240 |             # Give the viewer time to finish writing the capture files to disk
241 |             time.sleep(5)
242 | 
243 |             # Get the screen capture (.png) and camera parameters (.json) written by the Open3D viewer (file names start with 'Screen')
244 |             dir_path = os.getcwd()
245 |             files = os.listdir(dir_path)
246 |             files = [file for file in files if file.startswith('Screen')]
247 |             image_path = [file for file in files if file.endswith('.png')]
248 |             image_path = sorted(image_path, reverse=True)[0]
249 |             camera_path = [file for file in files if file.endswith('.json')]
250 |             camera_path = sorted(camera_path, reverse=True)[0]
251 | 
252 |             # Load the image
253 |             image = cv2.imread(image_path)
254 |             image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
255 | 
256 |             # Load the camera parameters
257 |             with open(camera_path) as file:
258 |                 camera = json.load(file)
259 | 
260 |             # The camera matrices are stored in column-major order in the JSON file, hence the transpose
261 |             intrinsics = np.asarray(camera['intrinsic']['intrinsic_matrix']).reshape((3, 3)).T
262 |             extrinsics = np.asarray(camera['extrinsic']).reshape((4, 4)).T
263 | 
264 |             # Delete the files
265 |             for file in files:
266 |                 os.remove(file)
267 | 
268 |             return image, intrinsics, extrinsics
269 | 
270 | 
271 |     def image_to_cloud(self, points: np.ndarray, image: np.ndarray, intrinsics: np.ndarray, extrinsics: np.ndarray) -> np.ndarray:
272 |         """
273 |         Converts an image to a point cloud.
274 | 
275 |         :param points: An array of points in the cloud, where each row represents a point.
276 |         :type points: ndarray
277 |         :param image: The segmented image, where each pixel's RGB value identifies the segment its corresponding point belongs to.
278 |         :type image: ndarray
279 |         :param intrinsics: The intrinsics matrix of the camera.
280 |         :type intrinsics: ndarray (3x3)
281 |         :param extrinsics: The extrinsics matrix of the camera.
282 |         :type extrinsics: ndarray (4x4)
283 |         :return: An array of segments' IDs in the cloud, where each row represents the segment's ID of a point.
284 |         :rtype: ndarray
285 |         """
286 | 
287 |         # Split the extrinsics matrix into rotation and translation
288 |         rotation = extrinsics[:3, :3]
289 |         translation = extrinsics[:3, 3]
290 | 
291 |         # Separate the points into 3D coordinates and color values
292 |         coords = points[:, :3]
293 |         colors = points[:, 3:6]
294 | 
295 |         # Project 3D points to 2D image using projectPoints
296 |         points_2d, _ = cv2.projectPoints(coords, rotation, translation, intrinsics, None)
297 | 
298 |         # Extract segment IDs from the image
299 |         # Points falling outside the image boundaries are mapped to the background color (0, 0, 0)
300 |         segment_ids = []
301 |         unique_values = {}
302 |         image = np.asarray(image)
303 | 
304 |         for i, point in enumerate(points_2d):
305 |             x, y = point[0]
306 |             if 0 <= x < image.shape[1] and 0 <= y < image.shape[0]:
307 |                 # Use a tuple so the pixel color can be used as a dictionary key
308 |                 rgb = tuple(image[int(y), int(x)])
309 |             else:
310 |                 rgb = (0, 0, 0)
311 | 
312 |             if rgb not in unique_values:
313 |                 unique_values[rgb] = len(unique_values)
314 | 
315 |             segment_id = unique_values[rgb]
316 |             segment_ids.append(segment_id)
317 | 
318 |         return np.asarray(segment_ids)
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 | from setuptools.command.install import install
3 | import subprocess
4 | 
5 | with open("requirements.txt", "r") as file:
6 |     requirements = file.read().splitlines()
7 | 
8 | class CustomInstallCommand(install):
9 |     def run(self):
10 |         subprocess.call(['pip', 'install', '-r', 'requirements.txt'])
11 |         install.run(self)
12 | 
13 | setup(
14 |     name="segment-lidar",
15 |     version='0.2.1',
16 |     description="A package for segmenting LiDAR data using Segment-Anything Model (SAM) from Meta AI Research.",
17 |     long_description=open('README.md', encoding='utf-8').read(),
18 |     long_description_content_type='text/markdown',
19 |     license='BSD 3-Clause "New" or "Revised" License',
20 |     author='Anass Yarroudh',
21 |     author_email='ayarroudh@uliege.be',
22 |     url='https://github.com/Yarroudh/segment-lidar',
23 |     packages=find_packages(),
24 |     install_requires=requirements,
25 |     classifiers=[
26 |         "Programming Language :: Python :: 3",
27 |         "License :: OSI Approved :: BSD License",
28 |         "Operating System :: OS Independent",
29 |     ],
30 |     cmdclass={
31 |         'install': CustomInstallCommand,
32 |     }
33 | )
34 | 
--------------------------------------------------------------------------------
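
The two view.py methods shown above form a round trip: the first projects a colored point cloud onto a 2D image using the camera's rotation, translation and intrinsics, and image_to_cloud maps the pixel colors of the (segmented) image back to one segment ID per point. The following standalone sketch is not part of the repository; it only mirrors that projection logic on a tiny synthetic cloud, and the camera pose, intrinsics values and image size are arbitrary choices made for illustration.

# Illustrative sketch (not part of the repository): reproduces the projection logic of
# segment_lidar/view.py on a tiny synthetic cloud. Rotation, translation, intrinsics
# and the image size are made-up values chosen only for this example.
import cv2
import numpy as np

# Synthetic cloud: (N, 6) array of XYZ coordinates followed by RGB colors
points = np.array([
    [0.0, 0.0, 5.0, 255, 0, 0],
    [0.5, 0.2, 6.0, 0, 255, 0],
    [-0.3, 0.4, 4.0, 0, 0, 255],
], dtype=np.float64)

coords = points[:, :3]
colors = points[:, 3:6]

# Identity rotation and zero translation: camera at the origin, looking along +Z
rotation = np.eye(3)
translation = np.zeros(3)

# 3x3 pinhole intrinsics matrix (fx, fy, cx, cy chosen arbitrarily)
fx = fy = 100.0
cx = cy = 50.0
intrinsics = np.array([[fx, 0.0, cx],
                       [0.0, fy, cy],
                       [0.0, 0.0, 1.0]], dtype=np.float64)

# Forward projection, as in the method above: 3D points -> 2D pixel coordinates
points_2d, _ = cv2.projectPoints(coords, rotation, translation, intrinsics, None)

# Rasterize the projected points into an image, one pixel per point
image = np.zeros((100, 100, 3), dtype=np.uint8)
for i, point in enumerate(points_2d):
    x, y = point[0]
    if 0 <= x < image.shape[1] and 0 <= y < image.shape[0]:
        image[int(y), int(x)] = colors[i]

# Backward mapping, as in image_to_cloud: pixel colors -> per-point segment IDs
unique_values = {}
segment_ids = []
for point in points_2d:
    x, y = point[0]
    if 0 <= x < image.shape[1] and 0 <= y < image.shape[0]:
        rgb = tuple(image[int(y), int(x)])
    else:
        rgb = (0, 0, 0)
    segment_ids.append(unique_values.setdefault(rgb, len(unique_values)))

print(points_2d.reshape(-1, 2))  # pixel coordinates of each point
print(segment_ids)               # one ID per point, e.g. [0, 1, 2]

On real data the pose and intrinsics would instead come from user-supplied camera parameters or, in interactive mode, from the Open3D screen and camera captures handled in the code above.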