├── .gitattributes ├── .gitignore ├── .vscode ├── launch.json └── settings.json ├── BenchmarkTreeTool.ipynb ├── LICENSE ├── QuickDemo.ipynb ├── README.md ├── environment.yml ├── resultsFGIElipse.npz ├── scripts └── pip_upload.sh ├── setup.py └── treetool ├── __init__.py ├── seg_tree.py ├── tree_tool.py └── utils.py /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | data/ 131 | metrics* 132 | results/myresults.csv 133 | Downsampling_old.ipynb 134 | *.pdf 135 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "name": "Python: Current File", 9 | "type": "python", 10 | "request": "launch", 11 | "program": "${file}", 12 | "console": "integratedTerminal", 13 | "justMyCode": false 14 | } 15 | ] 16 | } -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.formatting.provider": "black" 3 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 porteratzo 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /QuickDemo.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "\"\"\"\n", 8 | "MIT License\n", 9 | "\n", 10 | "Copyright (c) 2021 porteratzo\n", 11 | "\n", 12 | "Permission is hereby granted, free of charge, to any person obtaining a copy\n", 13 | "of this software and associated documentation files (the \"Software\"), to deal\n", 14 | "in the Software without restriction, including without limitation the rights\n", 15 | "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n", 16 | "copies of the Software, and to permit persons to whom the Software is\n", 17 | "furnished to do so, subject to the following conditions:\n", 18 | "\n", 19 | "The above copyright notice and this permission notice shall be included in all\n", 20 | "copies or substantial portions of the Software.\n", 21 | "\n", 22 | "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n", 23 | "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n", 24 | "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n", 25 | "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n", 26 | "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", 27 | "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n", 28 | "SOFTWARE.\n", 29 | "\"\"\"" 30 | ] 31 | }, 32 | { 33 | "cell_type": "markdown", 34 | "metadata": {}, 35 | "source": [ 36 | "Introduction\n", 37 | "\n", 38 | "This document serves as a tutorial for treetool, a software package for detecting trees in a point cloud and measuring their diameter at breast height (1.3 m). It demonstrates the operation of treetool, whether used as a stand-alone application or integrated as a package into other applications.\n", 39 | "\n", 40 | "Usage guide\n", 41 | "\n", 42 | "Below we describe our demo notebook, contained in the QuickDemo.ipynb file. This notebook illustrates the operation and use of our software, from loading a point cloud and viewing it to processing it with our algorithm and saving the results.\n" 43 | ] 44 | }, 45 | { 46 | "cell_type": "markdown", 47 | "metadata": {}, 48 | "source": [ 49 | "Load the libraries that we will use and have previously installed" 50 | ] 51 | }, 52 | { 53 | "cell_type": "code", 54 | "execution_count": 1, 55 | "metadata": {}, 56 | "outputs": [], 57 | "source": [ 58 | "import os\n", 59 | "\n", 60 | "os.environ[\"OPEN3D_DISABLE_WEB_VISUALIZER\"] = \"true\"\n", 61 | "import open3d as o3d\n", 62 | "import numpy as np\n", 63 | "import treetool.seg_tree as seg_tree\n", 64 | "import treetool.utils as utils\n", 65 | "import treetool.tree_tool as tree_tool\n", 66 | "import pandas as pd\n", 67 | "from scipy.optimize import linear_sum_assignment\n", 68 | "import matplotlib.pyplot as plt\n", 69 | "from porteratzo3D.visualization.open3d_vis import open3dpaint\n", 70 | "from porteratzo3D.visualization.open3d_pointset_class import O3dPointSetClass" 71 | ] 72 | }, 73 | { 74 | "cell_type": "markdown", 75 | "metadata": {}, 76 | "source": [ 77 | "Load the point cloud from a .pcd file using open3d. We use our seg_tree module, which contains many helper functions such as voxelize to downsample our point cloud, and visualize it with the open3dpaint function built on open3d.
\n" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": 2, 83 | "metadata": {}, 84 | "outputs": [ 85 | { 86 | "name": "stdout", 87 | "output_type": "stream", 88 | "text": [ 89 | "FEngine (64 bits) created at 0x64d490eaea90 (threading is enabled)\n", 90 | "FEngine resolved backend: OpenGL\n" 91 | ] 92 | } 93 | ], 94 | "source": [ 95 | "file_directory = r\"data/downsampledlesscloudEURO3.pcd\"\n", 96 | "pc_original = o3d.io.read_point_cloud(file_directory)\n", 97 | "pc_downsampled = seg_tree.voxelize(pc_original, 0.06)\n", 98 | "open3dpaint(np.asarray(pc_downsampled.points), pointsize=2)" 99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": 3, 104 | "metadata": {}, 105 | "outputs": [ 106 | { 107 | "data": { 108 | "text/plain": [ 109 | "(PointCloud with 7343999 points., PointCloud with 3301660 points.)" 110 | ] 111 | }, 112 | "execution_count": 3, 113 | "metadata": {}, 114 | "output_type": "execute_result" 115 | } 116 | ], 117 | "source": [ 118 | "pc_original, pc_downsampled" 119 | ] 120 | }, 121 | { 122 | "cell_type": "markdown", 123 | "metadata": {}, 124 | "source": [ 125 | "Tree tool is our main class that contains the routines for tree detection and DBH extraction" 126 | ] 127 | }, 128 | { 129 | "cell_type": "code", 130 | "execution_count": 4, 131 | "metadata": {}, 132 | "outputs": [], 133 | "source": [ 134 | "My_treetool = tree_tool.treetool(pc_downsampled)" 135 | ] 136 | }, 137 | { 138 | "cell_type": "markdown", 139 | "metadata": {}, 140 | "source": [ 141 | "Our tree top object has a series of functions that are performed to obtain DBH and tree detection." 142 | ] 143 | }, 144 | { 145 | "cell_type": "code", 146 | "execution_count": null, 147 | "metadata": {}, 148 | "outputs": [ 149 | { 150 | "name": "stderr", 151 | "output_type": "stream", 152 | "text": [ 153 | "imDefLkup.c,419: The application disposed a key event with 444 serial.\n" 154 | ] 155 | } 156 | ], 157 | "source": [ 158 | "My_treetool.step_1_remove_floor()\n", 159 | "\n", 160 | "# Obtained attributes:\n", 161 | "# non_ground_cloud: All points in the point cloud that don't belong to the ground\n", 162 | "# ground_cloud: All points in the point cloud that belong to the ground\n", 163 | "open3dpaint(\n", 164 | " [np.asarray(My_treetool.non_ground_cloud.points), np.asarray(My_treetool.ground_cloud.points)],\n", 165 | " voxel_size=0.1,\n", 166 | ")" 167 | ] 168 | }, 169 | { 170 | "cell_type": "markdown", 171 | "metadata": {}, 172 | "source": [ 173 | "Set Algorithm Parameters" 174 | ] 175 | }, 176 | { 177 | "cell_type": "markdown", 178 | "metadata": {}, 179 | "source": [ 180 | "Run main process" 181 | ] 182 | }, 183 | { 184 | "cell_type": "code", 185 | "execution_count": 24, 186 | "metadata": {}, 187 | "outputs": [ 188 | { 189 | "name": "stdout", 190 | "output_type": "stream", 191 | "text": [ 192 | "1 1.7964000107895117e-05\n", 193 | "1 0.01770327200028987\n", 194 | "2 4.818900015379768e-05\n", 195 | "3 2.5470104289997835\n", 196 | "4 2.448223266000241\n", 197 | "5 6.0723999922629446e-05\n", 198 | "6 2.296078980999482\n", 199 | "7 0.02760200300053839\n", 200 | "2 0.0003022959999725572\n", 201 | "3 0.0573318790002304\n", 202 | "4 0.011528658000315772\n", 203 | "5 0.026435220000166737\n" 204 | ] 205 | } 206 | ], 207 | "source": [ 208 | "# Get point normals for filtering\n", 209 | "\n", 210 | "# Obtained attributes:\n", 211 | "# non_filtered_points: Same as non_ground_cloud\n", 212 | "# non_filtered_normals: Normals of points in non_filtered_points\n", 213 | "# filtered_points: Points that 
pass the normal filter\n", 214 | "# filtered_normals: Normals of points that pass the normal filter\n", 215 | "from tictoc import timer\n", 216 | "\n", 217 | "My_treetool.step_2_normal_filtering(\n", 218 | " verticality_threshold=0.08, curvature_threshold=0.08, search_radius=0.08\n", 219 | ")\n", 220 | "open3dpaint(\n", 221 | " [\n", 222 | " np.asarray(My_treetool.non_ground_cloud.points),\n", 223 | " np.asarray(My_treetool.filtered_points),\n", 224 | " ],\n", 225 | " voxel_size=0.1\n", 226 | ")\n" 227 | ] 228 | }, 229 | { 230 | "cell_type": "code", 231 | "execution_count": 20, 232 | "metadata": {}, 233 | "outputs": [], 234 | "source": [ 235 | "curv_bool = My_treetool.curvature < 0.5\n", 236 | "ver_bool = My_treetool.verticality < 0.5\n", 237 | "\n", 238 | "show_points_curv = np.asarray(My_treetool.non_filtered_points)[curv_bool,:]\n", 239 | "show_colors_curv = My_treetool.curvature[curv_bool]\n", 240 | "\n", 241 | "show_points_ver = np.asarray(My_treetool.non_filtered_points)[ver_bool.ravel(),:]\n", 242 | "show_colors_ver = My_treetool.verticality[ver_bool]\n" 243 | ] 244 | }, 245 | { 246 | "cell_type": "code", 247 | "execution_count": 22, 248 | "metadata": {}, 249 | "outputs": [ 250 | { 251 | "data": { 252 | "image/png": "[base64 PNG image data omitted]", 253 | "text/plain": [ 254 | "
" 255 | ] 256 | }, 257 | "metadata": {}, 258 | "output_type": "display_data" 259 | } 260 | ], 261 | "source": [ 262 | "filter_points = np.asarray(My_treetool.filtered_points)\n", 263 | "open3dpaint(\n", 264 | " [\n", 265 | " O3dPointSetClass(\n", 266 | " show_points_curv,\n", 267 | " show_colors_curv,\n", 268 | " name=\"curvature\",show_color_bars=True\n", 269 | " ),\n", 270 | " O3dPointSetClass(\n", 271 | " show_points_ver,\n", 272 | " show_colors_ver,\n", 273 | " name=\"verticality\",show_color_bars=True\n", 274 | " ),\n", 275 | " ],\n", 276 | " voxel_size=0.1,\n", 277 | " pointsize=2,show_color_bars=True\n", 278 | ")" 279 | ] 280 | }, 281 | { 282 | "cell_type": "code", 283 | "execution_count": 25, 284 | "metadata": {}, 285 | "outputs": [], 286 | "source": [ 287 | "My_treetool.step_3_dbscan_clustering(eps=0.4, min_cluster_size=20)\n", 288 | "\n", 289 | "# Obtained attributes:\n", 290 | "# cluster_list: List of all clusters obtained with Euclidean Clustering\n", 291 | "\n", 292 | "open3dpaint(My_treetool.cluster_list, voxel_size=0.1)" 293 | ] 294 | }, 295 | { 296 | "cell_type": "code", 297 | "execution_count": 26, 298 | "metadata": {}, 299 | "outputs": [], 300 | "source": [ 301 | "# Group stem segments\n", 302 | "My_treetool.step_4_group_stems(max_distance=0.4)\n", 303 | "\n", 304 | "# Obtained attributes:\n", 305 | "# complete_Stems: List of all complete stems obtained by joining clusters belonging to the same tree\n", 306 | "\n", 307 | "open3dpaint(My_treetool.complete_Stems, voxel_size=0.1)" 308 | ] 309 | }, 310 | { 311 | "cell_type": "code", 312 | "execution_count": 27, 313 | "metadata": {}, 314 | "outputs": [], 315 | "source": [ 316 | "My_treetool.step_5_get_ground_level_trees(lowstems_height=5, cutstems_height=5)\n", 317 | "\n", 318 | "# Obtained attributes:\n", 319 | "# low_stems: List of all stems truncated to the specified height\n", 320 | "\n", 321 | "open3dpaint(My_treetool.low_stems, voxel_size=0.1)" 322 | ] 323 | }, 324 | { 325 | "cell_type": "code", 326 | "execution_count": 28, 327 | "metadata": {}, 328 | "outputs": [], 329 | "source": [ 330 | "My_treetool.step_6_get_cylinder_tree_models(search_radius=0.1)\n", 331 | "\n", 332 | "# Obtained attributes:\n", 333 | "# finalstems: List of Dictionaries with two keys 'tree' which contains the points used to fit the cylinder model and 'model' which contains the cylinder model parameters\n", 334 | "# visualization_cylinders: List of the pointclouds that represent the tree modeled with a cylinder\n", 335 | "\n", 336 | "open3dpaint(\n", 337 | " [i[\"tree\"] for i in My_treetool.finalstems] + My_treetool.visualization_cylinders,\n", 338 | " voxel_size=0.1,\n", 339 | ")" 340 | ] 341 | }, 342 | { 343 | "cell_type": "code", 344 | "execution_count": 29, 345 | "metadata": {}, 346 | "outputs": [], 347 | "source": [ 348 | "My_treetool.step_7_ellipse_fit()\n", 349 | "\n", 350 | "# Obtained attributes:\n", 351 | "# Three new keys in our finalstems dictionaries:\n", 352 | "# final_diameter: Final DBH of every tree\n", 353 | "# cylinder_diameter: DBH obtained with cylinder fitting\n", 354 | "# ellipse_diameter;DBH obtained with Ellipse fitting" 355 | ] 356 | }, 357 | { 358 | "cell_type": "markdown", 359 | "metadata": {}, 360 | "source": [ 361 | "Finally, we save the extracted trees and their DBH" 362 | ] 363 | }, 364 | { 365 | "cell_type": "code", 366 | "execution_count": null, 367 | "metadata": {}, 368 | "outputs": [], 369 | "source": [ 370 | "My_treetool.save_results(save_location=\"results/myresults.csv\")" 371 | ] 372 | }, 373 | { 374 | 
"cell_type": "markdown", 375 | "metadata": {}, 376 | "source": [ 377 | "Load Cloud and visualize" 378 | ] 379 | }, 380 | { 381 | "cell_type": "markdown", 382 | "metadata": {}, 383 | "source": [ 384 | "WARNING: Please adjust the path before executing" 385 | ] 386 | }, 387 | { 388 | "cell_type": "code", 389 | "execution_count": null, 390 | "metadata": {}, 391 | "outputs": [], 392 | "source": [ 393 | "file_directory = r\"data/downsampledlesscloudEURO3.pcd\"\n", 394 | "pc_original = o3d.io.read_point_cloud(file_directory)\n", 395 | "pc_downsampled = seg_tree.voxelize(pc_original, 0.06)" 396 | ] 397 | }, 398 | { 399 | "cell_type": "code", 400 | "execution_count": null, 401 | "metadata": {}, 402 | "outputs": [], 403 | "source": [ 404 | "My_treetool.point_cloud = tree_tool.set_point_cloud(pc_downsampled)" 405 | ] 406 | }, 407 | { 408 | "cell_type": "code", 409 | "execution_count": null, 410 | "metadata": {}, 411 | "outputs": [], 412 | "source": [ 413 | "My_treetool.full_process(\n", 414 | " verticality_threshold=0.1,\n", 415 | " curvature_threshold=0.1,\n", 416 | " dbscan_eps=0.4,\n", 417 | " min_cluster_size=20,\n", 418 | " group_stems_max_distance=0.4,\n", 419 | " lowstems_height=5,\n", 420 | " cutstems_height=5,\n", 421 | " search_radius=0.1,\n", 422 | ")\n", 423 | "\n", 424 | "cloud_match = [i[\"tree\"] for i in My_treetool.finalstems] + [\n", 425 | " i for i in My_treetool.visualization_cylinders\n", 426 | "]\n", 427 | "open3dpaint(cloud_match + [np.asarray(pc_downsampled.points)], voxel_size=0.1)" 428 | ] 429 | }, 430 | { 431 | "cell_type": "markdown", 432 | "metadata": {}, 433 | "source": [ 434 | "WARNING: Please adjust the path before executing" 435 | ] 436 | }, 437 | { 438 | "cell_type": "code", 439 | "execution_count": null, 440 | "metadata": {}, 441 | "outputs": [], 442 | "source": [ 443 | "#####################################################\n", 444 | "# Get ground truth\n", 445 | "tree_data = pd.read_csv(\n", 446 | " \"data/TLS_Benchmarking_Plot_3_LHD.txt\", sep=\"\\t\", names=[\"x\", \"y\", \"height\", \"DBH\"]\n", 447 | ")\n", 448 | "Xcor, Ycor, diam = tree_data.iloc[0, [0, 1, 3]]\n", 449 | "cylinders_from_GT = [\n", 450 | " utils.makecylinder(model=[Xcor, Ycor, 0, 0, 0, 1, diam / 2], height=10, density=20)\n", 451 | "]\n", 452 | "TreeDict = [np.array([Xcor, Ycor, diam])]\n", 453 | "for i, rows in tree_data.iloc[1:].iterrows():\n", 454 | " Xcor, Ycor, diam = rows.iloc[[0, 1, 3]]\n", 455 | " if not np.any(np.isnan([Xcor, Ycor, diam])):\n", 456 | " cylinders_from_GT.append(\n", 457 | " utils.makecylinder(model=[Xcor, Ycor, 0, 0, 0, 1, diam / 2], height=10, density=10)\n", 458 | " )\n", 459 | " TreeDict.append(np.array([Xcor, Ycor, diam]))\n", 460 | "cloud_of_cylinders_from_GT = [p for i in cylinders_from_GT for p in i]\n", 461 | "\n", 462 | "# DataBase\n", 463 | "# Found trees\n", 464 | "# Hungarian Algorithm assignment\n", 465 | "CostMat = np.ones([len(TreeDict), len(My_treetool.visualization_cylinders)])\n", 466 | "for X, datatree in enumerate(TreeDict):\n", 467 | " for Y, foundtree in enumerate(My_treetool.finalstems):\n", 468 | " CostMat[X, Y] = np.linalg.norm([datatree[0:2] - foundtree[\"model\"][0:2]])\n", 469 | "\n", 470 | "dataindex, foundindex = linear_sum_assignment(CostMat, maximize=False)\n", 471 | "\n", 472 | "# Get metrics\n", 473 | "locationerror = []\n", 474 | "correctlocationerror = []\n", 475 | "diametererror = []\n", 476 | "diametererrorElipse = []\n", 477 | "diametererrorComb = []\n", 478 | "cloud_match = []\n", 479 | "for i, j in zip(dataindex, foundindex):\n", 
480 | " locationerror.append(\n", 481 | " np.linalg.norm((My_treetool.finalstems[j][\"model\"][0:2] - TreeDict[i][0:2]))\n", 482 | " )\n", 483 | " if locationerror[-1] < 0.6:\n", 484 | " if My_treetool.finalstems[j][\"cylinder_diameter\"] is not None:\n", 485 | " diametererror.append(\n", 486 | " abs(My_treetool.finalstems[j][\"cylinder_diameter\"] - TreeDict[i][2])\n", 487 | " )\n", 488 | " diametererrorElipse.append(\n", 489 | " abs(My_treetool.finalstems[j][\"ellipse_diameter\"] - TreeDict[i][2])\n", 490 | " )\n", 491 | " mindi = max(\n", 492 | " My_treetool.finalstems[j][\"cylinder_diameter\"],\n", 493 | " My_treetool.finalstems[j][\"ellipse_diameter\"],\n", 494 | " )\n", 495 | " mendi = np.mean(\n", 496 | " [\n", 497 | " My_treetool.finalstems[j][\"cylinder_diameter\"],\n", 498 | " My_treetool.finalstems[j][\"ellipse_diameter\"],\n", 499 | " ]\n", 500 | " )\n", 501 | " diametererrorComb.append(abs(mindi - TreeDict[i][2]))\n", 502 | " correctlocationerror.append(\n", 503 | " np.linalg.norm((My_treetool.finalstems[j][\"model\"][0:2] - TreeDict[i][0:2]))\n", 504 | " )\n", 505 | " cloud_match.append(\n", 506 | " np.vstack(\n", 507 | " [\n", 508 | " cylinders_from_GT[i],\n", 509 | " My_treetool.finalstems[j][\"tree\"],\n", 510 | " My_treetool.visualization_cylinders[j],\n", 511 | " ]\n", 512 | " )\n", 513 | " )" 514 | ] 515 | }, 516 | { 517 | "cell_type": "code", 518 | "execution_count": null, 519 | "metadata": {}, 520 | "outputs": [], 521 | "source": [ 522 | "n_ref = len(TreeDict)\n", 523 | "n_match = len(diametererror)\n", 524 | "n_extr = len(locationerror) - n_match\n", 525 | "\n", 526 | "Completeness = n_match / n_ref\n", 527 | "Correctness = n_match / (n_extr + n_match)" 528 | ] 529 | }, 530 | { 531 | "cell_type": "code", 532 | "execution_count": null, 533 | "metadata": {}, 534 | "outputs": [], 535 | "source": [ 536 | "plt.figure(figsize=(20, 6))\n", 537 | "plt.subplot(1, 3, 1)\n", 538 | "plt.hist(diametererror, 50)\n", 539 | "plt.title(\"Cylinder DBH error\")\n", 540 | "\n", 541 | "plt.subplot(1, 3, 2)\n", 542 | "plt.hist(diametererrorComb, 50)\n", 543 | "plt.title(\"Final DBH error\")\n", 544 | "\n", 545 | "plt.subplot(1, 3, 3)\n", 546 | "plt.hist(correctlocationerror, 50)\n", 547 | "plt.title(\"Location error\")\n", 548 | "\n", 549 | "print(\"Total number of trees in Ground Truth: \", n_ref)\n", 550 | "print(\"Total number of trees matched with Ground Truth: \", n_match)\n", 551 | "print(\"Total number of trees extra trees found: \", n_extr)\n", 552 | "\n", 553 | "print(\"Percentage of matched trees: \", round(Completeness * 100), \"%\")\n", 554 | "print(\"Percentage of correctly matched trees: \", round(Correctness * 100), \"%\")\n", 555 | "\n", 556 | "print(\n", 557 | " \"Cylinder DBH mean Error: \",\n", 558 | " np.mean(diametererror),\n", 559 | ")\n", 560 | "print(\"Ellipse DBH mean Error: \", np.mean(diametererrorElipse))\n", 561 | "print(\"Final DBH mean Error: \", np.mean(diametererrorComb))" 562 | ] 563 | }, 564 | { 565 | "cell_type": "code", 566 | "execution_count": null, 567 | "metadata": {}, 568 | "outputs": [], 569 | "source": [] 570 | } 571 | ], 572 | "metadata": { 573 | "kernelspec": { 574 | "display_name": "treetool", 575 | "language": "python", 576 | "name": "python3" 577 | }, 578 | "language_info": { 579 | "codemirror_mode": { 580 | "name": "ipython", 581 | "version": 3 582 | }, 583 | "file_extension": ".py", 584 | "mimetype": "text/x-python", 585 | "name": "python", 586 | "nbconvert_exporter": "python", 587 | "pygments_lexer": "ipython3", 588 | "version": "3.10.16" 
589 | } 590 | }, 591 | "nbformat": 4, 592 | "nbformat_minor": 4 593 | } 594 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # treetool 2 | 3 | 4 | The main objective of our work is to estimate the carbon content of trees in a forest plot. With this in mind, we have implemented the algorithm of Liang et al. [1] to detect trees in the wild and measure their diameters at breast height (1.3 m) from point clouds. We usually obtain our point clouds either from photogrammetry techniques, where cameras are used, or from direct 3D measurements with LiDAR or TLS sensors. This document describes the requirements, installation, and how to run our code to detect trees and measure their diameter at breast height. 5 | 6 | # Software description 7 | 8 | treetool is made up of some sample notebooks and three modules: seg_tree, tree_tool and utils. seg_tree contains several useful functions that allow us to quickly perform operations on point clouds. tree_tool contains our main class; when called, it performs the complete process of tree detection and extraction of diameters at breast height. Finally, utils contains special functions required by the internal processes and functions for displaying point clouds. 9 | 10 | # Hardware requirements 11 | The requirements depend mainly on the size of the point clouds to be processed, since a cloud can range from hundreds of thousands to hundreds of millions of points. The recommended requirements are designed to process clouds of around ten million points smoothly. 12 | 13 | Recommended technical requirements 14 | • Ubuntu 15 | • Processor: 2 GHz or more, 2 or more cores 16 | • RAM: 16 GB 17 | • Graphics: Dedicated video card with 4 GB of VRAM 18 | 19 | 20 | # Install Anaconda 21 | Anaconda is a package and environment management system that may make your life easier during setup. If you do not have it installed on your system, you can download and install it from https://www.anaconda.com/products/individual/get-started 22 | 23 | 24 | # Create a Virtual Environment 25 | 26 | This is useful to isolate the package installation from your current system setup. 27 | 28 | Create a virtual environment using our environment.yml 29 | 30 | ``` 31 | conda env create 32 | ``` 33 | 34 | To activate the virtual environment run 35 | ``` 36 | conda activate treetool 37 | ``` 38 | 39 | 40 | Finally, you can download the point clouds for the TLS tests at this address. 41 | [https://drive.google.com/drive/folders/15aW3Npr9lOdxGrswWrsN9wN0g2Q9pBGo?usp=sharing](https://drive.google.com/drive/folders/1AhYd8pwCrTAJCV4OIc9MWot8tjNgIJSx?usp=sharing) 42 | 43 | The original databases and the original publication can be found on this page. 44 | https://laserscanning.fi/results-available-for-international-benchmarking-of-terrestrial-laser-scanning-methods/ 45 | 46 | 47 | [1] Liang, X., Litkey, P., Hyyppa, J., Kaartinen, H., Vastaranta, M., & Holopainen, M. (2011). Automatic stem mapping using single-scan terrestrial laser scanning. IEEE Transactions on Geoscience and Remote Sensing, 50(2), 661-670.
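# Quick usage example

A minimal sketch of the programmatic workflow, mirroring QuickDemo.ipynb. The .pcd path is a placeholder and the parameter values are the demo defaults, not tuned recommendations:

```
import open3d as o3d
import treetool.seg_tree as seg_tree
import treetool.tree_tool as tree_tool

# Load a point cloud and downsample it with a voxel grid (placeholder path)
pc_original = o3d.io.read_point_cloud("data/your_cloud.pcd")
pc_downsampled = seg_tree.voxelize(pc_original, 0.06)

# Run the full detection and DBH-extraction pipeline
my_treetool = tree_tool.treetool(pc_downsampled)
my_treetool.full_process(
    verticality_threshold=0.1,
    curvature_threshold=0.1,
    dbscan_eps=0.4,
    min_cluster_size=20,
    group_stems_max_distance=0.4,
    lowstems_height=5,
    cutstems_height=5,
    search_radius=0.1,
)

# Save the detected trees and their DBH
my_treetool.save_results(save_location="results/myresults.csv")
```

Each stage can also be run individually (step_1_remove_floor through step_7_ellipse_fit), as shown in the demo notebook.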
48 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | # run: conda env create --file environment.yml 2 | name: treetool 3 | channels: 4 | - conda-forge 5 | dependencies: 6 | - python=3.10 7 | - python-pdal 8 | - pip 9 | - open3d 10 | - matplotlib 11 | - pandas 12 | - numpy>=1.18.0,<2.0.0 13 | - scipy 14 | - jupyter 15 | - pip: 16 | - git+https://github.com/porteratzo/porteratzo3D.git 17 | - lsq-ellipse 18 | -------------------------------------------------------------------------------- /resultsFGIElipse.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/porteratzo/TreeTool/e38ef44c16ced3612bf8c617e9d8dfbec6eb26c1/resultsFGIElipse.npz -------------------------------------------------------------------------------- /scripts/pip_upload.sh: -------------------------------------------------------------------------------- 1 | conda activate build 2 | rm -rf dist/* 3 | python setup.py sdist bdist_wheel 4 | twine upload dist/* -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | from setuptools import setup 3 | 4 | setup( 5 | name="treetool", 6 | version="1.0.1", 7 | description="Python package for tree detection, segmentation and extraction of DBH", 8 | url="https://github.com/porteratzo/TreeTool", 9 | author="Omar Montoya", 10 | author_email="omar.alfonso.montoya@hotmail.com", 11 | license="MIT License", 12 | packages=setuptools.find_packages(), 13 | install_requires=[ 14 | "open3d", 15 | "lsq-ellipse", 16 | "matplotlib", 17 | "pandas", 18 | "numpy>=1.18.0,<2.0.0", 19 | "scipy", 20 | "porteratzo3D @ git+https://github.com/porteratzo/porteratzo3D.git", 21 | ], 22 | classifiers=[], 23 | ) 24 | -------------------------------------------------------------------------------- /treetool/__init__.py: -------------------------------------------------------------------------------- 1 | name = 'treetool' -------------------------------------------------------------------------------- /treetool/seg_tree.py: -------------------------------------------------------------------------------- 1 | """ 2 | MIT License 3 | 4 | Copyright (c) 2021 porteratzo 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in all 14 | copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | SOFTWARE. 
23 | """ 24 | 25 | import open3d as o3d 26 | import numpy as np 27 | import pdal 28 | import os 29 | import random 30 | from typing import Union, Tuple, List, Optional 31 | 32 | 33 | def voxelize(points: np.ndarray, leaf: float = 0.1) -> np.ndarray: 34 | """ 35 | Use voxelgrid to subsample a pointcloud 36 | 37 | Args: 38 | points : np.ndarray 39 | (n,3) point cloud 40 | 41 | leaf: float 42 | Voxelsize 43 | 44 | Returns: 45 | VFmm: np.narray (n,3) 46 | (n,3) subsampled Pointcloud 47 | 48 | """ 49 | return_same = O3dPointsReturnSame(points) 50 | downpcd = return_same.get().voxel_down_sample(voxel_size=leaf) 51 | return return_same.get(downpcd) 52 | 53 | 54 | class O3dPointsReturnSame: 55 | def __init__(self, points: Union[np.ndarray, o3d.geometry.PointCloud]) -> None: 56 | """ 57 | Initializes the O3dPointsReturnSame object. 58 | 59 | Args: 60 | points: np.ndarray | o3d.geometry.PointCloud 61 | The input points, either as a numpy array of shape (n, 3) or an 62 | open3d.geometry.PointCloud object. 63 | """ 64 | if isinstance(points, o3d.geometry.PointCloud): 65 | self.is_cloud = True 66 | self.pointcloud = points 67 | else: 68 | self.is_cloud = False 69 | self.pointcloud = o3d.geometry.PointCloud() 70 | self.pointcloud.points = o3d.utility.Vector3dVector(points) 71 | 72 | def get( 73 | self, pointcloud: Union[np.ndarray, o3d.geometry.PointCloud] = None 74 | ) -> Union[np.ndarray, o3d.geometry.PointCloud]: 75 | """ 76 | Retrieves the current point cloud or points. 77 | 78 | Args: 79 | pointcloud: np.ndarray | o3d.geometry.PointCloud, optional 80 | The point cloud to retrieve. If None, uses the internally stored 81 | point cloud. 82 | 83 | Returns: 84 | np.ndarray | o3d.geometry.PointCloud: The point cloud or points. 85 | Returns the input type (numpy array or open3d.geometry.PointCloud). 86 | """ 87 | if pointcloud is not None: 88 | cur_cloud = pointcloud 89 | else: 90 | cur_cloud = self.pointcloud 91 | if self.is_cloud: 92 | return cur_cloud 93 | else: 94 | return np.asarray(cur_cloud.points) 95 | 96 | def get_cloud(self) -> o3d.geometry.PointCloud: 97 | """ 98 | Retrieves the internally stored point cloud. 99 | 100 | Returns: 101 | o3d.geometry.PointCloud: The internally stored point cloud. 102 | """ 103 | return self.pointcloud 104 | 105 | def get_points(self) -> np.ndarray: 106 | """ 107 | Retrieves the points of the internally stored point cloud. 108 | 109 | Returns: 110 | np.ndarray: The points of the internally stored point cloud as a numpy array. 111 | """ 112 | return np.asarray(self.pointcloud.points) 113 | 114 | 115 | def floor_remove( 116 | points: np.ndarray, 117 | set_max_window_size: int = 20, 118 | set_slope: float = 1.0, 119 | set_initial_distance: float = 0.5, 120 | set_max_distance: float = 3.0, 121 | cell_size: int = 1, 122 | ) -> Tuple[np.ndarray, np.ndarray]: 123 | """ 124 | Takes a point cloud and returns 2 pointclouds, the first for non ground points and the second 125 | for ground points 126 | 127 | Args: 128 | points : np.ndarray 129 | (n,3) point cloud 130 | 131 | set_max_window_size: int 132 | Set the maximum window size to be used in filtering ground returns. 133 | 134 | set_slope: float 135 | Set the slope value to be used in computing the height threshold. 136 | 137 | set_initial_distance: float 138 | Set the initial height above the parameterized ground surface to be considered a ground 139 | return. 140 | 141 | set_max_distance: float 142 | Set the maximum height above the parameterized ground surface to be considered a ground 143 | return. 
144 | 145 | Returns: 146 | non_ground_points.xyz : np.narray (n,3) 147 | 3d point cloud of only non ground points 148 | 149 | ground.xyz : np.narray (n,3) 150 | 3d point cloud of only ground points 151 | 152 | """ 153 | return_same = O3dPointsReturnSame(points) 154 | o3d.io.write_point_cloud("floorseg_temp_file.pcd", return_same.get()) 155 | json = f""" 156 | [ 157 | "floorseg_temp_file.pcd", 158 | {{ 159 | "type":"filters.smrf", 160 | "cell":{cell_size}, 161 | "scalar":{set_max_distance}, 162 | "slope":{set_slope}, 163 | "threshold":{set_initial_distance}, 164 | "window":{set_max_window_size} 165 | }} 166 | ] 167 | """ 168 | pipeline = pdal.Pipeline(json) 169 | pipeline.execute() 170 | arrays = pipeline.arrays 171 | points1 = arrays[0][arrays[0]["Classification"] == 1] 172 | points2 = arrays[0][arrays[0]["Classification"] == 2] 173 | Nogroundpoints = np.array(points1[["X", "Y", "Z"]].tolist()) 174 | ground = np.array(points2[["X", "Y", "Z"]].tolist()) 175 | os.remove("floorseg_temp_file.pcd") 176 | 177 | return return_same.get(Nogroundpoints), return_same.get(ground) 178 | 179 | 180 | def radius_outlier_removal( 181 | points: np.ndarray, min_n: int = 6, radius: float = 0.4, organized: bool = True 182 | ) -> np.ndarray: 183 | """ 184 | Takes a point cloud and removes points that have less than minn neigbors in a certain radius 185 | 186 | Args: 187 | points : np.ndarray 188 | (n,3) point cloud 189 | 190 | min_n: int 191 | Neighbor threshold to keep a point 192 | 193 | radius: float 194 | Radius of the sphere a point can be in to be considered a neighbor of our sample point 195 | 196 | organized: bool 197 | If true outlier points are set to nan instead of removing the points from the cloud 198 | 199 | 200 | Returns: 201 | filtered_point_cloud.xyz : np.narray (n,3) 202 | (n,3) Pointcloud with outliers removed 203 | 204 | """ 205 | 206 | return_same = O3dPointsReturnSame(points) 207 | ror_filter = return_same.get() 208 | cl, ind = ror_filter.remove_radius_outlier(nb_points=min_n, radius=radius) 209 | if organized: 210 | na_idx = np.delete(np.arange(len(ror_filter.points)), ind) 211 | return_points = np.asanyarray(ror_filter.points) 212 | return_points[na_idx] = np.nan 213 | _return_same = O3dPointsReturnSame(return_points) 214 | cl = return_same.get(_return_same.get()) 215 | return cl 216 | 217 | 218 | def compute_eigenvalues(chunk_of_matrices: List[np.ndarray]) -> List[float]: 219 | _eigenvalues_list = [] 220 | for matrix in chunk_of_matrices: 221 | h1, h2, h3 = np.linalg.eigvals(matrix) 222 | _eigenvalues_list.append(h3 / (h1 + h2 + h3)) 223 | return _eigenvalues_list 224 | 225 | 226 | def extract_normals( 227 | points: np.ndarray, search_radius: float = 0.1 228 | ) -> Tuple[np.ndarray, np.ndarray]: 229 | """ 230 | Takes a point cloud and approximates their normals using PCA 231 | 232 | Args: 233 | points : np.ndarray 234 | (n,3) point cloud 235 | 236 | search_radius: float 237 | Radius of the sphere a point can be in to be considered in the calculation of a sample 238 | points' normal 239 | 240 | Returns: 241 | normals : np.narray (n,3) 242 | (n,3) Normal vectors corresponding to the points in the input point cloud 243 | 244 | """ 245 | return_same = O3dPointsReturnSame(points) 246 | PointCloudV = return_same.get_cloud() 247 | kd_tree = o3d.geometry.KDTreeSearchParamRadius(search_radius) 248 | PointCloudV.estimate_normals(kd_tree) 249 | PointCloudV.estimate_covariances(kd_tree) 250 | 251 | cov = np.asarray(PointCloudV.covariances) 252 | eigenvalues = np.linalg.eigvals(cov) 253 | result = 
eigenvalues[:, 0] / np.sum(eigenvalues, axis=1) 254 | 255 | return np.asarray(PointCloudV.normals), np.asarray(result) 256 | 257 | 258 | def dbscan_cluster_extract( 259 | points: np.ndarray, eps: float = 2, min_points: int = 20 260 | ) -> List[np.ndarray]: 261 | """ 262 | Takes a point cloud and clusters the points with euclidean clustering 263 | 264 | Args: 265 | points : np.ndarray 266 | (n,3) point cloud 267 | 268 | tolerance: int 269 | Maximum distance a point can be to a cluster to added to that cluster 270 | 271 | min_cluster_size: int 272 | Minimum number of points a cluster must have to be returned 273 | 274 | max_cluster_size: int 275 | Maximum number of points a cluster must have to be returned 276 | 277 | 278 | Returns: 279 | cluster_list : list 280 | List of (n,3) Pointclouds representing each cluster 281 | 282 | """ 283 | return_same = O3dPointsReturnSame(points) 284 | PointCloudV = return_same.get_cloud() 285 | labels = PointCloudV.cluster_dbscan( 286 | eps=eps, 287 | min_points=min_points, 288 | ) 289 | cluster_list = [ 290 | return_same.get_points()[np.asarray(labels) == label] 291 | for label in set(labels) 292 | if label != -1 293 | ] 294 | return cluster_list 295 | 296 | 297 | # Function to fit a cylinder model using RANSAC 298 | def fit_cylinder_ransac( 299 | points: np.ndarray, 300 | max_iterations: int = 1000, 301 | distance_threshold: float = 0.01, 302 | rlim: List[Optional[float]] = [None, None], 303 | ) -> Tuple[np.ndarray, np.ndarray]: 304 | 305 | def compute_inliers(points, axis_point1, axis_point2, radius, distance_threshold): 306 | axis_vector = axis_point2 - axis_point1 307 | axis_length = np.linalg.norm(axis_vector) 308 | axis_unit_vector = axis_vector / axis_length 309 | 310 | point_vectors = points - axis_point1 311 | projection_lengths = np.dot(point_vectors, axis_unit_vector) 312 | projection_points = axis_point1 + np.outer(projection_lengths, axis_unit_vector) 313 | 314 | distances_to_axis = np.linalg.norm(projection_points - points, axis=1) 315 | surface_distances = np.abs(distances_to_axis - radius) 316 | 317 | return np.where(surface_distances < distance_threshold)[0] 318 | 319 | best_cylinder = None 320 | best_inliers = [] 321 | 322 | num_points = points.shape[0] 323 | 324 | for _ in range(max_iterations): 325 | # Randomly sample 3 points to define a cylinder (axis and radius) 326 | sample_indices = random.sample(range(num_points), 3) 327 | sample_points = points[sample_indices] 328 | 329 | # Define the cylinder axis (from first two points) and radius (from third point) 330 | axis_point1, axis_point2, sample_point3 = sample_points 331 | axis_vector = axis_point2 - axis_point1 332 | radius = np.linalg.norm( 333 | np.cross(sample_point3 - axis_point1, axis_vector) 334 | ) / np.linalg.norm(axis_vector) 335 | 336 | # Measure inliers by calculating distances of all points to the cylinder 337 | 338 | if rlim[0] is None or radius > rlim[0]: 339 | if rlim[1] is None or radius < rlim[1]: 340 | inliers = compute_inliers( 341 | points, axis_point1, axis_point2, radius, distance_threshold 342 | ) 343 | # Track the best model (with the most inliers) 344 | if len(inliers) > len(best_inliers): 345 | best_inliers = inliers 346 | best_cylinder = np.concatenate( 347 | [axis_point1, axis_point2, np.array(radius)[None]] 348 | ) 349 | best_cylinder[3:6] = best_cylinder[3:6] - best_cylinder[0:3] 350 | best_cylinder[3:6] = best_cylinder[3:6] / np.linalg.norm(best_cylinder[3:6]) 351 | return best_inliers, best_cylinder 352 | 353 | 354 | # Function to fit a stick model using 
RANSAC 355 | def fit_stick_ransac( 356 | point_cloud: o3d.geometry.PointCloud, 357 | max_iterations: int = 1000, 358 | distance_threshold: float = 0.01, 359 | ) -> Tuple[Tuple[np.ndarray, np.ndarray], List[int]]: 360 | def point_to_line_distance(point, line_point1, line_point2): 361 | # Vector from line_point1 to line_point2 362 | line_vector = line_point2 - line_point1 363 | point_vector = point - line_point1 364 | 365 | # Projection of point onto the line 366 | projection = np.dot(point_vector, line_vector) / np.linalg.norm(line_vector) 367 | projection_point = line_point1 + (projection / np.linalg.norm(line_vector)) * line_vector 368 | 369 | # Perpendicular distance from point to the line 370 | distance = np.linalg.norm(point - projection_point) 371 | 372 | return distance 373 | 374 | best_stick = None 375 | best_inliers = [] 376 | 377 | points = np.asarray(point_cloud.points) 378 | num_points = points.shape[0] 379 | 380 | for _ in range(max_iterations): 381 | # Randomly sample 2 points to define a line (stick axis) 382 | sample_indices = random.sample(range(num_points), 2) 383 | sample_points = points[sample_indices] 384 | 385 | # Define the stick axis (line) using two points 386 | line_point1, line_point2 = sample_points 387 | 388 | # Measure inliers by calculating distances of all points to the stick axis (line) 389 | inliers = [] 390 | for i, point in enumerate(points): 391 | dist = point_to_line_distance(point, line_point1, line_point2) 392 | if dist < distance_threshold: 393 | inliers.append(i) 394 | 395 | # Track the best model (with the most inliers) 396 | if len(inliers) > len(best_inliers): 397 | best_inliers = inliers 398 | best_stick = (line_point1, line_point2) 399 | 400 | return best_stick, best_inliers 401 | 402 | 403 | if False: 404 | 405 | def region_growing( 406 | Points: np.ndarray, 407 | ksearch: int = 30, 408 | minc: int = 20, 409 | maxc: int = 100000, 410 | nn: int = 30, 411 | smoothness: float = 30.0, 412 | curvature: float = 1.0, 413 | ) -> List[np.ndarray]: 414 | """ 415 | Takes a point cloud and clusters the points with region growing 416 | 417 | Args: 418 | points : np.ndarray 419 | (n,3) point cloud 420 | 421 | Ksearch: int 422 | Number of points used to estimate a points normal 423 | 424 | minc: int 425 | Minimum number of points a cluster must have to be returned 426 | 427 | maxc: int 428 | Maximum number of points a cluster must have to be returned 429 | 430 | nn: int 431 | Number of nearest neighbors used by the region growing algorithm 432 | 433 | smoothness: 434 | Smoothness threshold used in region growing 435 | 436 | curvature: 437 | Curvature threshold used in region growing 438 | 439 | Returns: 440 | region_growing_clusters: list 441 | list of (n,3) Pointclouds representing each cluster 442 | 443 | """ 444 | pointcloud = pclpy.pcl.PointCloud.PointXYZ(Points) 445 | pointcloud_normals = pclpy.pcl.features.NormalEstimation.PointXYZ_Normal() 446 | tree = pclpy.pcl.search.KdTree.PointXYZ() 447 | 448 | pointcloud_normals.setInputCloud(pointcloud) 449 | pointcloud_normals.setSearchMethod(tree) 450 | pointcloud_normals.setKSearch(ksearch) 451 | normals = pclpy.pcl.PointCloud.Normal() 452 | pointcloud_normals.compute(normals) 453 | 454 | region_growing_clusterer = pclpy.pcl.segmentation.RegionGrowing.PointXYZ_Normal() 455 | region_growing_clusterer.setInputCloud(pointcloud) 456 | region_growing_clusterer.setInputNormals(normals) 457 | region_growing_clusterer.setMinClusterSize(minc) 458 | region_growing_clusterer.setMaxClusterSize(maxc) 459 | 
region_growing_clusterer.setSearchMethod(tree) 460 | region_growing_clusterer.setNumberOfNeighbours(nn) 461 | region_growing_clusterer.setSmoothnessThreshold(smoothness / 180.0 * np.pi) 462 | region_growing_clusterer.setCurvatureThreshold(curvature) 463 | 464 | clusters = pclpy.pcl.vectors.PointIndices() 465 | region_growing_clusterer.extract(clusters) 466 | 467 | region_growing_clusters = [pointcloud.xyz[i2.indices] for i2 in clusters] 468 | return region_growing_clusters 469 | 470 | def findstemsLiDAR(pointsXYZ: np.ndarray) -> Tuple[List[np.ndarray], List[np.ndarray]]: 471 | """ 472 | Takes a point cloud from a Cylindrical LiDAR and extract stems and their models 473 | 474 | Args: 475 | points : np.ndarray 476 | (n,3) point cloud 477 | 478 | Returns: 479 | stemsR : list(np.narray (n,3)) 480 | List of (n,3) Pointclouds belonging to each stem 481 | 482 | models : list(np.narray (n)) 483 | List of model coefficients corresponding to each extracted stem 484 | 485 | """ 486 | non_ground_points, ground = floor_remove(pointsXYZ) 487 | flatpoints = np.hstack( 488 | [ 489 | non_ground_points[:, 0:2], 490 | np.zeros_like(non_ground_points)[:, 0:1], 491 | ] 492 | ) 493 | 494 | filtered_points = radius_outlier_removal(flatpoints) 495 | notgoodpoints = non_ground_points[np.isnan(filtered_points[:, 0])] 496 | goodpoints = non_ground_points[np.bitwise_not(np.isnan(filtered_points[:, 0]))] 497 | 498 | cluster_list = dbscan_cluster_extract(goodpoints) 499 | rg_clusters = [] 500 | for i in cluster_list: 501 | rg_clusters.append(region_growing(i)) 502 | 503 | models = [] 504 | stem_clouds = [] 505 | for i in rg_clusters: 506 | for p in i: 507 | indices, model = segment_normals(p) 508 | prop = len(p[indices]) / len(p) 509 | if ( 510 | len(indices) > 1 511 | and prop > 0.0 512 | and np.arccos(np.dot([0, 0, 1], model[3:6])) < 0.6 513 | ): 514 | points = p[indices] 515 | PC, _, _ = Plane.getPrincipalComponents(points) 516 | if PC[0] / PC[1] > 10: 517 | stem_clouds.append(points) 518 | models.append(model) 519 | return stem_clouds, models 520 | -------------------------------------------------------------------------------- /treetool/tree_tool.py: -------------------------------------------------------------------------------- 1 | """ 2 | MIT License 3 | 4 | Copyright (c) 2021 porteratzo 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in all 14 | copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | SOFTWARE. 
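tree_tool.py: defines the treetool class, which processes a raw forest point cloud into a list of tree stem locations and DBH estimates.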
23 | """ 24 | 25 | import numpy as np 26 | import pandas as pd 27 | import treetool.seg_tree as seg_tree 28 | import treetool.utils as utils 29 | from ellipse import LsqEllipse 30 | import os 31 | import open3d as o3d 32 | from typing import Union 33 | 34 | 35 | def set_point_cloud( 36 | input_point_cloud: Union[np.ndarray, o3d.geometry.PointCloud] 37 | ) -> o3d.geometry.PointCloud: 38 | """ 39 | Resets the point cloud that treetool will process 40 | 41 | Args: 42 | point_cloud : np.narray 43 | The 3d point cloud of the forest that treetool will process, if it's a numpy array it 44 | should be shape (n,3) 45 | 46 | Returns: 47 | None 48 | """ 49 | if input_point_cloud is not None: 50 | assert isinstance(input_point_cloud, o3d.geometry.PointCloud) or ( 51 | type(input_point_cloud) is np.ndarray 52 | ), "Not valid point_cloud" 53 | if type(input_point_cloud) is np.ndarray: 54 | point_cloud = o3d.geometry.PointCloud() 55 | point_cloud.points = o3d.utility.Vector3dVector(input_point_cloud) 56 | else: 57 | point_cloud = input_point_cloud 58 | return point_cloud 59 | 60 | 61 | class treetool: 62 | """ 63 | Our main class that holds all necessary methods to process a raw point into a list of all tree 64 | stem locations and DBHs 65 | """ 66 | 67 | def __init__(self, point_cloud: Union[np.ndarray, o3d.geometry.PointCloud] = None) -> None: 68 | """ 69 | Parameters 70 | ---------- 71 | point_cloud : np.narray | o3d.geometry.PointCloud 72 | The 3d point cloud of the forest that treetool will process, if it's a numpy array it 73 | should be shape (n,3) 74 | """ 75 | if point_cloud is not None: 76 | self.point_cloud = set_point_cloud(point_cloud) 77 | 78 | def step_1_remove_floor( 79 | self, 80 | set_max_window_size: int = 20, 81 | set_slope: float = 1.0, 82 | set_initial_distance: float = 0.5, 83 | set_max_distance: float = 0.5, 84 | set_cell_size: float = 1.0, 85 | ) -> None: 86 | """ 87 | Applies ApproximateProgressiveMorphologicalFilter to point_cloud to separate the it's 88 | points into non_ground and ground points and assigns them to the non_ground_cloud and 89 | ground_cloud attributes 90 | 91 | Args: 92 | None 93 | 94 | Returns: 95 | None 96 | """ 97 | no_ground_points, ground = seg_tree.floor_remove( 98 | self.point_cloud, 99 | set_max_window_size=set_max_window_size, 100 | set_slope=set_slope, 101 | set_initial_distance=set_initial_distance, 102 | set_max_distance=set_max_distance, 103 | cell_size=set_cell_size, 104 | ) 105 | self.non_ground_cloud: o3d.geometry.PointCloud = set_point_cloud(no_ground_points) 106 | self.ground_cloud: o3d.geometry.PointCloud = set_point_cloud(ground) 107 | 108 | def step_2_normal_filtering( 109 | self, 110 | search_radius: float = 0.08, 111 | verticality_threshold: float = 0.08, 112 | curvature_threshold: float = 0.12, 113 | min_points: int = 0, 114 | ) -> None: 115 | """ 116 | Filters non_ground_cloud by approximating its normals and removing points with a high 117 | curvature and a non near horizontal normal 118 | the points that remained are assigned to 119 | 120 | Args: 121 | search_radius : float 122 | Maximum distance of the points to a sample point that will be used to approximate a 123 | the sample point's normal 124 | 125 | verticality_threshold: float 126 | Threshold in radians for filtering the verticality of each point, we determine 127 | obtaining the dot product of each points normal by a vertical vector [0,0,1] 128 | 129 | curvature_threshold: float 130 | Threshold [0-1] for filtering the curvature of each point, the curvature is given 131 | by 
lambda_0/(lambda_0 + lambda_1 + lambda_2) where lambda_j is the 132 | j-th eigenvalue of the covariance matrix of radius of points around each query 133 | point and lambda_0 < lambda_1 < lambda_2 134 | 135 | Returns: 136 | None 137 | """ 138 | # get point normals 139 | if min_points > 0: 140 | subject_cloud = seg_tree.radius_outlier_removal( 141 | self.non_ground_cloud.points, 142 | min_points, 143 | search_radius, 144 | organized=False, 145 | ) 146 | else: 147 | subject_cloud = self.non_ground_cloud.points 148 | non_ground_normals, non_ground_curvature = seg_tree.extract_normals( 149 | subject_cloud, search_radius 150 | ) 151 | # remove Nan points 152 | non_nan_mask = np.bitwise_not(np.isnan(non_ground_normals[:, 0])) 153 | non_nan_cloud = np.asarray(subject_cloud)[non_nan_mask] 154 | non_nan_normals = non_ground_normals[non_nan_mask] 155 | non_nan_curvature = non_ground_curvature[non_nan_mask] 156 | 157 | # get mask by filtering verticality and curvature 158 | verticality = np.abs(np.dot(non_nan_normals, [[0], [0], [1]])) 159 | verticality_mask = (verticality < verticality_threshold) & ( 160 | -verticality_threshold < verticality 161 | ) 162 | curvature_mask = non_nan_curvature < curvature_threshold 163 | verticality_curvature_mask = verticality_mask.ravel() & curvature_mask.ravel() 164 | 165 | only_horizontal_points = non_nan_cloud[verticality_curvature_mask] 166 | only_horizontal_normals = non_nan_normals[verticality_curvature_mask] 167 | 168 | self.curvature = non_nan_curvature 169 | self.verticality = verticality 170 | 171 | # set filtered and non filtered points 172 | self.non_ground_normals = non_ground_normals 173 | self.non_filtered_normals = non_nan_normals 174 | self.non_filtered_points = non_nan_cloud 175 | self.filtered_points = only_horizontal_points 176 | self.filtered_normals = only_horizontal_normals 177 | 178 | def step_3_dbscan_clustering(self, eps: float = 0.1, min_cluster_size: int = 40) -> None: 179 | """ 180 | Clusters filtered_points with euclidean clustering and assigns them to attribute 181 | cluster_list 182 | 183 | Args: 184 | tolerance : float 185 | Maximum distance a point can be from a cluster for that point to be included in the 186 | cluster. 
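This tolerance is exposed as the eps argument of this method and forwarded to seg_tree.dbscan_cluster_extract.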
187 | 188 | min_cluster_size: int 189 | Minimum number of points a cluster must have to not be discarded 190 | 191 | max_cluster_size: int 192 | Maximum number of points a cluster must have to not be discarded 193 | 194 | Returns: 195 | None 196 | """ 197 | self.cluster_list = seg_tree.dbscan_cluster_extract( 198 | self.filtered_points, 199 | eps=eps, 200 | min_points=min_cluster_size, 201 | ) 202 | 203 | def step_4_group_stems(self, max_distance: float = 0.4) -> None: 204 | """ 205 | For each cluster in attribute cluster_list, test if its centroid is near the line formed by 206 | the first principal vector of another cluster parting from the centroid of that cluster 207 | and if so, join the two clusters 208 | 209 | Args: 210 | max_distance : float 211 | Maximum distance a point can be from the line formed by the first principal vector 212 | of another cluster parting from the centroid of that cluster 213 | 214 | Returns: 215 | None 216 | """ 217 | # Get required info from clusters 218 | stem_groups = [] 219 | for n, p in enumerate(self.cluster_list): 220 | Centroid = np.mean(p, axis=0) 221 | vT, S = utils.getPrincipalVectors(p - Centroid) 222 | straightness = S[0] / (S[0] + S[1] + S[2]) 223 | 224 | clusters_dict = {} 225 | clusters_dict["cloud"] = p 226 | clusters_dict["straightness"] = straightness 227 | clusters_dict["center"] = Centroid 228 | clusters_dict["direction"] = vT 229 | stem_groups.append(clusters_dict) 230 | 231 | # For each cluster, test if its centroid is near the line formed by the first principal 232 | # vector of another cluster parting from the centroid of that cluster 233 | # if so, join the two clusters 234 | temp_stems = [i["cloud"] for i in stem_groups] 235 | for treenumber1 in reversed(range(0, len(temp_stems))): 236 | for treenumber2 in reversed(range(0, treenumber1)): 237 | center1 = stem_groups[treenumber1]["center"] 238 | center2 = stem_groups[treenumber2]["center"] 239 | if np.linalg.norm(center1[:2] - center2[:2]) < 2: 240 | vector1 = stem_groups[treenumber1]["direction"][0] 241 | vector2 = stem_groups[treenumber2]["direction"][0] 242 | dist1 = utils.DistPoint2Line(center2, vector1 + center1, center1) 243 | dist2 = utils.DistPoint2Line(center1, vector2 + center2, center2) 244 | if (dist1 < max_distance) | (dist2 < max_distance): 245 | temp_stems[treenumber2] = np.vstack( 246 | [ 247 | temp_stems[treenumber2], 248 | temp_stems.pop(treenumber1), 249 | ] 250 | ) 251 | break 252 | 253 | self.complete_Stems = temp_stems 254 | self.stem_groups = stem_groups 255 | 256 | def step_5_get_ground_level_trees( 257 | self, 258 | lowstems_height: int = 5, 259 | cutstems_height: int = 5, 260 | use_sampling: bool = False, 261 | dont_cut: bool = False, 262 | ) -> None: 263 | """ 264 | Filters stems to only keep those near the ground and crops them up to a certain height 265 | 266 | Args: 267 | lowstems_height: int 268 | Minimum number of points a cluster must have to not be discarded 269 | 270 | cutstems_height: int 271 | Maximum number of points a cluster must have to not be discarded 272 | 273 | Returns: 274 | None 275 | """ 276 | # Generate a bivariate quadratic equation to model the ground 277 | ground_points = np.asarray(self.ground_cloud.points) 278 | if not use_sampling: 279 | A = np.c_[ 280 | np.ones(ground_points.shape[0]), 281 | ground_points[:, :2], 282 | np.prod(ground_points[:, :2], axis=1), 283 | ground_points[:, :2] ** 2, 284 | ] 285 | self.ground_model_c, _, _, _ = np.linalg.lstsq(A, ground_points[:, 2], rcond=None) 286 | 287 | # Obtain a ground point for 
each stem by taking the XY component of the centroid 288 | # and obtaining the coresponding Z coordinate from our quadratic ground model 289 | self.stems_with_ground = [] 290 | for i in self.complete_Stems: 291 | center = np.mean(i, 0) 292 | X, Y = center[:2] 293 | if not use_sampling: 294 | Z = np.dot( 295 | np.c_[np.ones(X.shape), X, Y, X * Y, X**2, Y**2], 296 | self.ground_model_c, 297 | ) 298 | else: 299 | _size = 0.5 300 | while True: 301 | sub_pcd = o3d.geometry.crop_point_cloud( 302 | self.ground_cloud, 303 | np.hstack([X - _size, Y - _size, -100, 1]), 304 | np.hstack([X + _size, Y + _size, 100, 1]), 305 | ) 306 | if len(np.asarray(sub_pcd.points)) > 5: 307 | Z = [np.mean(np.asarray(sub_pcd.points)[:, 2])] 308 | break 309 | _size += 0.25 310 | 311 | self.stems_with_ground.append([i, [X, Y, Z[0]]]) 312 | 313 | # Filter stems that do not have points below our lowstems_height threshold 314 | low_stems = [ 315 | i 316 | for i in self.stems_with_ground 317 | if np.min(i[0], axis=0)[2] < (lowstems_height + i[1][2]) 318 | ] 319 | # Crop points above cutstems_height threshold 320 | if not dont_cut: 321 | cut_stems = [[i[0][i[0][:, 2] < (cutstems_height + i[1][2])], i[1]] for i in low_stems] 322 | else: 323 | cut_stems = low_stems 324 | 325 | self.cut_stems = cut_stems 326 | self.low_stems = [i[0] for i in cut_stems] 327 | 328 | def step_6_get_cylinder_tree_models( 329 | self, search_radius: float = 0.1, distance_threshold: float = 0.08 330 | ) -> None: 331 | """ 332 | For each cut stem we use ransac to extract a cylinder model 333 | 334 | Args: 335 | search_radius : float 336 | Maximum distance of the points to a sample point that will be used to approximate a 337 | the sample point's normal 338 | 339 | Returns: 340 | None 341 | """ 342 | final_stems = [] 343 | visualization_cylinders = [] 344 | for p in self.cut_stems: 345 | # Segment to cylinders 346 | stem_points = p[0] 347 | if len(stem_points) <= 1: 348 | continue 349 | indices, model = seg_tree.fit_cylinder_ransac( 350 | stem_points, 351 | max_iterations=1000, 352 | distance_threshold=distance_threshold, 353 | rlim=[0, search_radius], 354 | ) 355 | # If the model has more than 10 points 356 | if len(indices) > 10: 357 | # If the model finds an upright cylinder 358 | if abs(np.dot(model[3:6], [0, 0, 1]) / np.linalg.norm(model[3:6])) > 0.5: 359 | # Get centroid 360 | model = np.array(model) 361 | Z = 1.3 + p[1][2] 362 | Y = model[1] + model[4] * (Z - model[2]) / model[5] 363 | X = model[0] + model[3] * (Z - model[2]) / model[5] 364 | model[0:3] = np.array([X, Y, Z]) 365 | # make sure the vector is pointing upward 366 | model[3:6] = utils.similarize(model[3:6], [0, 0, 1]) 367 | final_stems.append( 368 | { 369 | "tree": stem_points[indices], 370 | "model": model, 371 | "ground": p[1][2], 372 | } 373 | ) 374 | visualization_cylinders.append( 375 | utils.makecylinder(model=model, height=7, density=60) 376 | ) 377 | 378 | self.finalstems = final_stems 379 | self.visualization_cylinders = visualization_cylinders 380 | 381 | def step_7_ellipse_fit(self, height_ll: float = -1, height_ul: float = -1) -> None: 382 | """ 383 | Extract the cylinder and ellipse diameter of each stem 384 | 385 | Args: 386 | None 387 | 388 | Returns: 389 | None 390 | """ 391 | for i in self.finalstems: 392 | # if the tree points has enough points to fit a ellipse 393 | if len(i["tree"]) > 5: 394 | # find a matrix that rotates the stem to be colinear to the z axis 395 | R = utils.rotation_matrix_from_vectors(i["model"][3:6], [0, 0, 1]) 396 | # we center the stem to the 
origen then rotate it 397 | centeredtree = i["tree"] - i["model"][0:3] 398 | correctedcyl = (R @ centeredtree.T).T 399 | # fit an ellipse using only the xy coordinates 400 | try: 401 | if height_ll != -1: 402 | correctedcyl = correctedcyl[:, 2] > height_ll 403 | if height_ul != -1: 404 | correctedcyl = correctedcyl[:, 2] < height_ul 405 | reg = LsqEllipse().fit(correctedcyl[:, 0:2]) 406 | center, a, b, phi = reg.as_parameters() 407 | 408 | ellipse_diameter = 3 * (a + b) - np.sqrt((3 * a + b) * (a + 3 * b)) 409 | except np.linalg.LinAlgError: 410 | ellipse_diameter = i["model"][6] * 2 411 | except IndexError: 412 | ellipse_diameter = i["model"][6] * 2 413 | cylinder_diameter = i["model"][6] * 2 414 | i["cylinder_diameter"] = cylinder_diameter 415 | i["ellipse_diameter"] = ellipse_diameter 416 | i["final_diameter"] = max(ellipse_diameter, cylinder_diameter) 417 | n_model = i["model"] 418 | n_model[6] = i["final_diameter"] 419 | i["vis_cyl"] = utils.makecylinder(model=n_model, height=7, density=60) 420 | else: 421 | i["cylinder_diameter"] = None 422 | i["ellipse_diameter"] = None 423 | i["final_diameter"] = None 424 | i["vis_cyl"] = None 425 | 426 | def full_process( 427 | self, 428 | search_radius: float = 0.1, 429 | verticality_threshold: float = 0.06, 430 | curvature_threshold: float = 0.1, 431 | dbscan_eps: float = 0.1, 432 | min_cluster_size: int = 40, 433 | group_stems_max_distance: float = 0.4, 434 | lowstems_height: int = 5, 435 | cutstems_height: int = 5, 436 | searchRadius_cylinder: float = 0.1, 437 | ) -> None: 438 | """ 439 | Clusters filtered_points with euclidean clustering and assigns them to attribute 440 | cluster_list 441 | 442 | Args: 443 | search_radius : float 444 | Maximum distance of the points to a sample point that will be used to approximate a 445 | the sample point's normal 446 | 447 | verticality_threshold: float 448 | Threshold in radians for filtering the verticality of each point, we determine 449 | obtaining the dot product of each points normal by a vertical vector [0,0,1] 450 | 451 | curvature_threshold: float 452 | Threshold [0-1] for filtering the curvature of each point, the curvature is given 453 | by lambda_0/(lambda_0 + lambda_1 + lambda_2) where lambda_j is the 454 | j-th eigenvalue of the covariance matrix of radius of points around each query 455 | point and lambda_0 < lambda_1 < lambda_2 456 | 457 | tolerance : float 458 | Maximum distance a point can be from a cluster for that point to be included in the 459 | cluster. 
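This tolerance is exposed as the dbscan_eps argument and forwarded to step_3_dbscan_clustering.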
460 | 461 | min_cluster_size: int 462 | Minimum number of points a cluster must have to not be discarded 463 | 464 | max_cluster_size: int 465 | Maximum number of points a cluster must have to not be discarded 466 | 467 | max_distance : float 468 | Maximum distance a point can be from the line formed by the first principal vector 469 | of another cluster parting from the centroid of that cluster 470 | 471 | lowstems_height: int 472 | Minimum number of points a cluster must have to not be discarded 473 | 474 | cutstems_height: int 475 | Maximum number of points a cluster must have to not be discarded 476 | 477 | searchRadius_cylinder : float 478 | Maximum distance of the points to a sample point that will be used to approximate a 479 | the sample point's normal 480 | 481 | 482 | Returns: 483 | None 484 | minimum number of points a cluster must have to not be discarded 485 | 486 | """ 487 | print("step_1_Remove_Floor") 488 | self.step_1_remove_floor() 489 | print("step_2_normal_filtering") 490 | self.step_2_normal_filtering(search_radius, verticality_threshold, curvature_threshold) 491 | print("step_3_euclidean_clustering") 492 | self.step_3_dbscan_clustering(dbscan_eps, min_cluster_size) 493 | print("step_4_Group_Stems") 494 | self.step_4_group_stems(group_stems_max_distance) 495 | print("step_5_Get_Ground_Level_Trees") 496 | self.step_5_get_ground_level_trees(lowstems_height, cutstems_height) 497 | print("step_6_Get_Cylinder_Tree_Models") 498 | self.step_6_get_cylinder_tree_models(searchRadius_cylinder) 499 | print("step_7_Ellipse_fit") 500 | self.step_7_ellipse_fit() 501 | print("Done") 502 | 503 | def save_results(self, save_location: str = "results/myresults.csv") -> None: 504 | """ 505 | Save a csv with XYZ and DBH of each detected tree 506 | 507 | Args: 508 | savelocation : str 509 | path to save file 510 | 511 | Returns: 512 | None 513 | """ 514 | tree_model_info = [i["model"] for i in self.finalstems] 515 | tree_diameter_info = [i["final_diameter"] for i in self.finalstems] 516 | 517 | data = {"X": [], "Y": [], "Z": [], "DBH": []} 518 | for i, j in zip(tree_model_info, tree_diameter_info): 519 | data["X"].append(i[0]) 520 | data["Y"].append(i[1]) 521 | data["Z"].append(i[2]) 522 | data["DBH"].append(j) 523 | 524 | os.makedirs(os.path.dirname(save_location), exist_ok=True) 525 | 526 | pd.DataFrame.from_dict(data).to_csv(save_location) 527 | -------------------------------------------------------------------------------- /treetool/utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | MIT License 3 | 4 | Copyright (c) 2021 porteratzo 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in all 14 | copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | SOFTWARE. 23 | """ 24 | 25 | import numpy as np 26 | import open3d 27 | 28 | 29 | def rotation_matrix_from_vectors(vector1: np.ndarray, vector2: np.ndarray) -> np.ndarray: 30 | """ 31 | Finds a rotation matrix that can rotate vector1 to align with vector 2 32 | 33 | Args: 34 | vector1: np.narray (3) 35 | Vector we would apply the rotation to 36 | 37 | vector2: np.narray (3) 38 | Vector that will be aligned to 39 | 40 | Returns: 41 | rotation_matrix: np.narray (3,3) 42 | Rotation matrix that when applied to vector1 will turn it to the same direction as vector2 43 | """ 44 | if all(np.abs(vector1) == np.abs(vector2)): 45 | return np.eye(3) 46 | a, b = (vector1 / np.linalg.norm(vector1)).reshape(3), ( 47 | vector2 / np.linalg.norm(vector2) 48 | ).reshape(3) 49 | v = np.cross(a, b) 50 | c = np.dot(a, b) 51 | s = np.linalg.norm(v) 52 | matrix = np.array([[0, -v[2], v[1]], [v[2], 0, -v[0]], [-v[1], v[0], 0]]) 53 | rotation_matrix = np.eye(3) + matrix + matrix.dot(matrix) * ((1 - c) / (s**2)) 54 | return rotation_matrix 55 | 56 | 57 | def angle_between_vectors(vector1: np.ndarray, vector2: np.ndarray) -> float: 58 | """ 59 | Finds the angle between 2 vectors 60 | 61 | Args: 62 | vec1: np.narray (3) 63 | First vector to measure angle from 64 | 65 | vec2: np.narray (3) 66 | Second vector to measure angle to 67 | 68 | Returns: 69 | None 70 | """ 71 | value = np.sum(np.multiply(vector1, vector2)) / ( 72 | np.linalg.norm(vector1) * np.linalg.norm(vector2) 73 | ) 74 | if (value < -1) | (value > 1): 75 | value = np.sign(value) 76 | angle = np.arccos(value) 77 | return angle 78 | 79 | 80 | def makecylinder(model: np.ndarray = [0, 0, 0, 1, 0, 0, 1], height: float = 1, density: int = 10) -> np.ndarray: 81 | """ 82 | Makes a point cloud of a cylinder given a (7) parameter cylinder model and a length and density 83 | 84 | Args: 85 | model: np.narray (7) 86 | 7 parameter cylinder model 87 | 88 | height: float 89 | Desired height of the generated cylinder 90 | 91 | density: int 92 | Desired density of the generated cylinder, 93 | this density is determines the amount of points on each ring that composes the cylinder and on how many rings the cylinder will have 94 | 95 | Returns: 96 | rotated_cylinder: np.narray (n,3) 97 | 3d point cloud of the desired cylinder 98 | """ 99 | # extract info from cylinder model 100 | radius = model[6] 101 | X, Y, Z = model[:3] 102 | # get 3d points to make an upright cylinder centered to the origin 103 | n = np.arange(0, 360, int(360 / density)) 104 | height = np.arange(0, height, height / density) 105 | n = np.deg2rad(n) 106 | x, z = np.meshgrid(n, height) 107 | x = x.flatten() 108 | z = z.flatten() 109 | cyl = np.vstack([np.cos(x) * radius, np.sin(x) * radius, z]).T 110 | # rotate and translate the cylinder to fit the model 111 | rotation = rotation_matrix_from_vectors([0, 0, 1], model[3:6]) 112 | rotated_cylinder = np.matmul(rotation, cyl.T).T + np.array([X, Y, Z]) 113 | return rotated_cylinder 114 | 115 | 116 | def DistPoint2Line(point: np.ndarray, line_point1: np.ndarray, line_point2: np.ndarray = np.array([0, 0, 0])) -> float: 117 | """ 118 | Get minimum distance from a point to a line composed by 2 points 119 | 120 | Args: 121 | point: np.narray (3) 122 | XYZ coordinates of the 3d point 123 | 124 | 
line_point1: np.narray (3) 125 | XYZ coordinates of the first 3d point that composes the line if line_point2 is not given, line_point2 defaults to 0,0,0 126 | 127 | line_point2: np.narray (3) 128 | XYZ coordinates of the second 3d point that composes the line, if not given defaults to 0,0,0 129 | 130 | Returns: 131 | distance: float 132 | Shortest distance from point to the line composed by line_point1 line_point2 133 | """ 134 | return np.linalg.norm(np.cross((point - line_point2), (point - line_point1))) / np.linalg.norm( 135 | line_point1 - line_point2 136 | ) 137 | 138 | 139 | def getPrincipalVectors(A: np.ndarray) -> tuple[np.ndarray, np.ndarray]: # 140 | """ 141 | Get principal vectors and values of a matrix centered around (0,0,0) 142 | 143 | Args: 144 | A: np.narray (n,m) 145 | Matrix to extract principal vectors from 146 | 147 | Returns: 148 | Vectors: np.narray (m,m) 149 | The principal vectors from A 150 | Values: np.narray (m,m) 151 | The principal values from A 152 | """ 153 | VT = np.linalg.eig(np.matmul(A.T, A)) 154 | sort = sorted(zip(VT[0], VT[1].T.tolist()), reverse=True) 155 | values, vectors = zip(*sort) 156 | return vectors, values 157 | 158 | 159 | def convertcloud(points: np.ndarray) -> open3d.geometry.PointCloud: 160 | """ 161 | Turns a numpy (n,3) point cloud to a open3d pointcloud 162 | 163 | Args: 164 | points: np.narray (n,3) 165 | A 3d numpy point cloud 166 | 167 | Returns: 168 | pcd: open3d.geometry.PointCloud 169 | An open 3d point cloud 170 | """ 171 | pcd = open3d.geometry.PointCloud() 172 | pcd.points = open3d.utility.Vector3dVector(points) 173 | return pcd 174 | 175 | 176 | def makesphere(centroid: list[float] = [0, 0, 0], radius: float = 1, dense: int = 90) -> np.ndarray: 177 | n = np.arange(0, 360, int(360 / dense)) 178 | n = np.deg2rad(n) 179 | x, y = np.meshgrid(n, n) 180 | x = x.flatten() 181 | y = y.flatten() 182 | sphere = np.vstack( 183 | [ 184 | centroid[0] + np.sin(x) * np.cos(y) * radius, 185 | centroid[1] + np.sin(x) * np.sin(y) * radius, 186 | centroid[2] + np.cos(x) * radius, 187 | ] 188 | ).T 189 | return sphere 190 | 191 | 192 | def similarize(test: np.ndarray, target: np.ndarray) -> np.ndarray: 193 | """ 194 | Test a vectors angle to another vector and mirror its direction if it is greater than pi/2 195 | 196 | Args: 197 | test: np.narray (3) 198 | 3d vector to test 199 | 200 | target: np.narray (3) 201 | 3d vector to which test has to have an angle smaller than pi/2 202 | 203 | Returns: 204 | test: np.narray (3) 205 | 3d vectors whos angle is below pi/2 with respect to the target vector 206 | """ 207 | test = np.array(test) 208 | assert len(test) == 3, "vector must be dim 3" 209 | angle = angle_between_vectors(test, target) 210 | if angle > np.pi / 2: 211 | test = -test 212 | return test 213 | 214 | 215 | def Iscaled_dimensions(las_file, new_data: dict) -> np.ndarray: 216 | 217 | x_dimension = np.array(new_data["X"]) 218 | scale = las_file.header.scales[0] 219 | offset = las_file.header.offsets[0] 220 | x = x_dimension + offset 221 | 222 | y_dimension = np.array(new_data["Y"]) 223 | offset = las_file.header.offsets[1] 224 | y = y_dimension + offset 225 | 226 | z_dimension = np.array(new_data["Z"]) 227 | offset = las_file.header.offsets[2] 228 | z = z_dimension + offset 229 | return np.vstack([x, y, z]).T 230 | 231 | 232 | def scaled_dimensions( 233 | las_file, 234 | ) -> np.ndarray: 235 | xyz = las_file.xyz 236 | x_dimension = xyz[:, 0] 237 | offset = las_file.header.offsets[0] 238 | x = x_dimension - offset 239 | 240 | y_dimension = 
xyz[:, 1] 241 | offset = las_file.header.offsets[1] 242 | y = y_dimension - offset 243 | 244 | z_dimension = xyz[:, 2] 245 | offset = las_file.header.offsets[2] 246 | z = z_dimension - offset 247 | return np.vstack([x, y, z]).T 248 | --------------------------------------------------------------------------------
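End-to-end usage sketch. The snippet below is a minimal, illustrative example of how the modules above fit together; it is not part of the package. The input file name my_forest_scan.npy is hypothetical and stands in for any (n,3) XYZ array, the voxel size of 0.06 is an arbitrary illustrative value, and all other parameters are the defaults defined in tree_tool.py.

import numpy as np
import treetool.seg_tree as seg_tree
import treetool.tree_tool as tree_tool

# Hypothetical input: an (n,3) numpy array of forest LiDAR points.
points = np.load("my_forest_scan.npy")

# Optionally thin the cloud with the voxel-grid downsampler before processing.
points = seg_tree.voxelize(points, leaf=0.06)

# Run the full 7-step pipeline with its default parameters.
my_treetool = tree_tool.treetool(points)
my_treetool.full_process()

# Each detected stem carries a 7-parameter cylinder model and a DBH estimate
# (in the same units as the input cloud).
for stem in my_treetool.finalstems:
    if stem["final_diameter"] is None:
        continue
    x, y = stem["model"][:2]
    print(f"stem at ({x:.2f}, {y:.2f}), DBH = {stem['final_diameter']:.3f}")

# Write X, Y, Z and DBH of every detected tree to a csv.
my_treetool.save_results("results/myresults.csv")

Each step_N method can also be called individually with custom parameters (as full_process does internally) when finer control over ground removal, filtering, clustering, or cylinder fitting is needed.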