├── .gitignore
├── LICENSE
├── README.md
├── clean.sh
├── clean_test.sh
├── install.sh
├── setup.py
├── test.sh
├── tests
│   ├── marching_cubes.py
│   ├── models
│   │   ├── Arma.obj
│   │   ├── block.obj
│   │   ├── cube.obj
│   │   ├── dinosaur.obj
│   │   ├── fandisk.obj
│   │   ├── rocker.obj
│   │   ├── sphere-42.obj
│   │   └── sphere-642.obj
│   ├── normal.py
│   ├── normalize.py
│   ├── sign.py
│   ├── sign_check.py
│   ├── speed.py
│   └── value.py
├── torchsdf
│   ├── __init__.py
│   ├── csrc
│   │   ├── bindings.cpp
│   │   ├── check.h
│   │   ├── unbatched_triangle_distance.cpp
│   │   ├── unbatched_triangle_distance.h
│   │   ├── unbatched_triangle_distance_cuda.cu
│   │   └── utils.h
│   └── sdf.py
└── version.txt

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | artifacts
2 | cuda_*.exe
3 | miniconda3.exe
4 | vs_buildtools.exe
5 |
6 |
7 | # all folders starting with _ are local
8 | _*/
9 |
10 | # generated folders
11 | torchsdf/version.py
12 | build/
13 | tests/outputs
14 | torchsdf.egg-info
15 | *.so
16 | *.egg
17 | .eggs/
18 |
19 | # byte-compiled python files
20 | *.py[cod]
21 |
22 | # pip cache
23 | .cache
24 | __pycache__
25 |
26 | # editor specific
27 | *~
28 | .idea
29 | .vscode/c_cpp_properties.json
30 | .vscode/launch.json
31 | .vscode/settings.json
32 | .vscode/tasks.json
33 | *.sw*
34 |
35 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship.
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # TorchSDF
2 |
3 | This is a custom version of the signed distance field (SDF) computation from the [Kaolin library](https://github.com/NVIDIAGameWorks/kaolin). It supports SDF computation for **manifold** meshes with PyTorch on the GPU.
4 |
5 | ## Purpose
6 |
7 | Why not use the original Kaolin API?
8 |
9 | - I only need SDF computation. Kaolin is a large, redundant dependency, and I want a lighter package.
10 | - With Kaolin, SDF computation requires both `kaolin.metrics.trianglemesh.point_to_mesh_distance` and `kaolin.ops.mesh.check_sign`. With a simpler (but less precise) sign definition, the value can be computed in a single CUDA kernel, which is a potential acceleration.
11 | - I want to learn about the C++ interface of PyTorch.
12 |
13 | ## Installation
14 |
15 | Requires PyTorch to be installed.
16 |
17 | ```bash
18 | bash install.sh
19 | ```
20 |
21 | ## Usage
22 |
23 | The code provides two functions (a usage sketch follows below):
24 |
25 | - `compute_sdf(pointclouds, face_vertices)`
26 |   - input
27 |     - unbatched points with shape (num_points, 3)
28 |     - unbatched face_vertices with shape (num_faces, 3, 3)
29 |   - returns
30 |     - squared distances
31 |     - distance signs (-1 inside, +1 outside)
32 |     - normals defined by the gradient
33 |     - closest points
34 | - `index_vertices_by_faces(vertices_features, faces)`: returns the face_verts required by `compute_sdf(pointclouds, face_vertices)`.
35 |
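36 | A minimal sketch of the intended call sequence (the tetrahedron here is a toy placeholder for a real watertight mesh, and a CUDA device is assumed; `compute_sdf` returns *squared* distances, so take a square root for metric values):
37 |
38 | ```python
39 | import torch
40 | from torchsdf import index_vertices_by_faces, compute_sdf
41 |
42 | device = "cuda"  # the kernels only run on CUDA
43 | # Toy watertight mesh: a tetrahedron with outward-facing (CCW) triangles.
44 | verts = torch.tensor([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [0., 0., 1.]], device=device)
45 | faces = torch.tensor([[0, 2, 1], [0, 1, 3], [0, 3, 2], [1, 2, 3]], dtype=torch.long, device=device)
46 | face_verts = index_vertices_by_faces(verts, faces)  # (num_faces, 3, 3)
47 |
48 | points = torch.rand(1000, 3, device=device)  # query points in [0, 1)^3
49 | dist_sq, signs, normals, closest = compute_sdf(points, face_verts)
50 | sdf = dist_sq.sqrt() * signs  # signed distance per query point
51 | ```
52 |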
53 | ## Note
54 |
55 | - The sign is defined by `sign((p - closest_point).dot(face_normal))`, so **make sure your mesh has correct normal information**.
56 | - **This definition sometimes causes wrong results**, for example where two faces meet at an acute angle (see the sketch below).
57 | - So the sign computation is not fully precise yet.
58 | - The returned normal is defined by `(p - closest_point).normalized()`, or equivalently $\frac{\partial d}{\partial p}$, not the face normal.
59 | - The code only runs on CUDA.
60 | - Scripts in `tests` cannot run independently (they require the Kaolin API).
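61 |
62 | For reference, here is a plain-PyTorch sketch of that sign rule for a single query point and a single triangle (an illustration only — the package computes this inside its CUDA kernel, and `sign_of` is not part of the API):
63 |
64 | ```python
65 | import torch
66 |
67 | def sign_of(p, a, b, c, closest):
68 |     """Illustrative helper: sign of query p w.r.t. triangle (a, b, c) with CCW winding."""
69 |     face_normal = torch.cross(b - a, c - a, dim=-1)  # unnormalized outward normal
70 |     return torch.sign(torch.dot(p - closest, face_normal))
71 | ```
72 |
73 | When the closest point lies on an edge or vertex shared by two faces meeting at an acute angle, the two faces' normals can disagree about this sign, which is the failure mode noted above.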
74 |
--------------------------------------------------------------------------------
/clean.sh:
--------------------------------------------------------------------------------
1 | pip uninstall torchsdf -y
2 | rm -rf build
3 | rm -rf torchsdf.egg-info
4 | rm -f torchsdf/_C.so
--------------------------------------------------------------------------------
/clean_test.sh:
--------------------------------------------------------------------------------
1 | rm -rf tests/outputs
--------------------------------------------------------------------------------
/install.sh:
--------------------------------------------------------------------------------
1 | if [ -d "build" ]; then
2 |     bash clean.sh
3 | fi
4 |
5 | python setup.py develop
6 | python -c "import torchsdf; print(torchsdf.__version__)"
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | # some useful environment variables:
2 | #
3 | # TORCH_CUDA_ARCH_LIST
4 | #   specify which CUDA architectures to build for
5 | #
6 | # IGNORE_TORCH_VER
7 | #   ignore version requirements for PyTorch
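8 | #
9 | # FORCE_CUDA
10 | #   force building the CUDA extension when no GPU is visible (cross-compiling);
11 | #   example: FORCE_CUDA=1 TORCH_CUDA_ARCH_LIST="8.6" python setup.py develop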
12 |
13 | import os
14 | from setuptools import setup, find_packages, dist
15 | import importlib
16 | from pkg_resources import parse_version
17 | import subprocess
18 | import warnings
19 |
20 | TORCH_MIN_VER = '1.5.0'
21 | TORCH_MAX_VER = '1.12.1'
22 | IGNORE_TORCH_VER = True
23 |
24 | # Module required before installation
25 | # trying to install it ahead turned out to be too unstable.
26 | torch_spec = importlib.util.find_spec("torch")
27 | if torch_spec is None:
28 |     raise ImportError(
29 |         f"TorchSDF requires PyTorch >={TORCH_MIN_VER}, <={TORCH_MAX_VER}, "
30 |         "but couldn't find the module installed."
31 |     )
32 | else:
33 |     import torch
34 |     torch_ver = parse_version(torch.__version__)
35 |     if (torch_ver < parse_version(TORCH_MIN_VER) or
36 |             torch_ver > parse_version(TORCH_MAX_VER)):
37 |         if IGNORE_TORCH_VER:
38 |             warnings.warn(
39 |                 f'TorchSDF is compatible with PyTorch >={TORCH_MIN_VER}, <={TORCH_MAX_VER}, '
40 |                 f'but found version {torch.__version__}. Continuing with the installed '
41 |                 'version as IGNORE_TORCH_VER is set.'
42 |             )
43 |         else:
44 |             raise ImportError(
45 |                 f'TorchSDF requires PyTorch >={TORCH_MIN_VER}, <={TORCH_MAX_VER}, '
46 |                 f'but found version {torch.__version__} instead. '
47 |                 'If you wish to install with this specific version set IGNORE_TORCH_VER=1.'
48 |             )
49 |
50 | import os
51 | import sys
52 | import logging
53 | import glob
54 |
55 | import torch
56 | from torch.utils.cpp_extension import BuildExtension, CppExtension, CUDAExtension, CUDA_HOME
57 |
58 | cwd = os.path.dirname(os.path.abspath(__file__))
59 |
60 | logger = logging.getLogger()
61 | logging.basicConfig(format='%(levelname)s - %(message)s')
62 |
63 | def get_cuda_bare_metal_version(cuda_dir):
64 |     raw_output = subprocess.check_output([cuda_dir + "/bin/nvcc", "-V"], universal_newlines=True)
65 |     output = raw_output.split()
66 |     release_idx = output.index("release") + 1
67 |     release = output[release_idx].split(".")
68 |     bare_metal_major = release[0]
69 |     bare_metal_minor = release[1][0]
70 |
71 |     return raw_output, bare_metal_major, bare_metal_minor
72 |
73 | if not torch.cuda.is_available():
74 |     if os.getenv('FORCE_CUDA', '0') == '1':
75 |         # From: https://github.com/NVIDIA/apex/blob/c4e85f7bf144cb0e368da96d339a6cbd9882cea5/setup.py
76 |         # Extension builds after https://github.com/pytorch/pytorch/pull/23408 attempt to query torch.cuda.get_device_capability(),
77 |         # which will fail if you are compiling in an environment without visible GPUs (e.g. during an nvidia-docker build command).
78 |         logging.warning(
79 |             "Torch did not find available GPUs on this system.\n"
80 |             "If your intention is to cross-compile, this is not an error.\n"
81 |             "By default, TorchSDF will cross-compile for Pascal (compute capabilities 6.0, 6.1, 6.2),\n"
82 |             "Volta (compute capability 7.0), Turing (compute capability 7.5),\n"
83 |             "and, if the CUDA version is >= 11.0, Ampere (compute capability 8.0).\n"
84 |             "If you wish to cross-compile for a single specific architecture,\n"
85 |             'export TORCH_CUDA_ARCH_LIST="compute capability" before running setup.py.\n'
86 |         )
87 |         if os.getenv("TORCH_CUDA_ARCH_LIST", None) is None:
88 |             _, bare_metal_major, bare_metal_minor = get_cuda_bare_metal_version(CUDA_HOME)
89 |             if int(bare_metal_major) == 11:
90 |                 if int(bare_metal_minor) == 0:
91 |                     os.environ["TORCH_CUDA_ARCH_LIST"] = "6.0;6.1;6.2;7.0;7.5;8.0"
92 |                 else:
93 |                     os.environ["TORCH_CUDA_ARCH_LIST"] = "6.0;6.1;6.2;7.0;7.5;8.0;8.6"
94 |             else:
95 |                 os.environ["TORCH_CUDA_ARCH_LIST"] = "6.0;6.1;6.2;7.0;7.5"
96 |     else:
97 |         logging.warning(
98 |             "Torch did not find available GPUs on this system.\n"
99 |             "TorchSDF will install only with CPU support and will have very limited features.\n"
100 |             'If you wish to cross-compile for GPU, `export FORCE_CUDA=1` before running setup.py.\n'
101 |             "By default, TorchSDF will cross-compile for Pascal (compute capabilities 6.0, 6.1, 6.2),\n"
102 |             "Volta (compute capability 7.0), Turing (compute capability 7.5),\n"
103 |             "and, if the CUDA version is >= 11.0, Ampere (compute capability 8.0).\n"
104 |             "If you wish to cross-compile for a single specific architecture,\n"
105 |             'export TORCH_CUDA_ARCH_LIST="compute capability" before running setup.py.\n'
106 |         )
107 |
108 | PACKAGE_NAME = 'torchsdf'
109 | LICENSE = 'Apache License 2.0'
110 |
111 | version_txt = os.path.join(cwd, 'version.txt')
112 | with open(version_txt) as f:
113 |     version = f.readline().strip()
114 |
115 | def write_version_file():
116 |     version_path = os.path.join(cwd, 'torchsdf', 'version.py')
117 |     with open(version_path, 'w') as f:
118 |         f.write("__version__ = '{}'\n".format(version))
119 |
120 |
121 | write_version_file()
122 |
123 |
124 | def get_extensions():
125 |     extra_compile_args = {'cxx': ['-O3']}
126 |     define_macros = []
127 |     include_dirs = []
128 |     sources = glob.glob('torchsdf/csrc/**/*.cpp', recursive=True)
129 |     # FORCE_CUDA is for cross-compilation in docker build
130 |     if torch.cuda.is_available() or os.getenv('FORCE_CUDA', '0') == '1':
131 |         with_cuda = True
132 |         define_macros += [("WITH_CUDA", None), ("THRUST_IGNORE_CUB_VERSION_CHECK", None)]
133 |         sources += glob.glob('torchsdf/csrc/**/*.cu', recursive=True)
134 |         extension = CUDAExtension
135 |         extra_compile_args.update({'nvcc': [
136 |             '-O3',
137 |             '-DWITH_CUDA',
138 |             '-DTHRUST_IGNORE_CUB_VERSION_CHECK'
139 |         ]})
140 |         include_dirs = get_include_dirs()
141 |     else:
142 |         extension = CppExtension
143 |         with_cuda = False
144 |     extensions = []
145 |     extensions.append(
146 |         extension(
147 |             name='torchsdf._C',
148 |             sources=sources,
149 |             define_macros=define_macros,
150 |             extra_compile_args=extra_compile_args,
151 |             include_dirs=include_dirs
152 |         )
153 |     )
154 |
155 |     # use cudart_static instead
156 |     for extension in extensions:
157 |         extension.libraries = ['cudart_static' if x == 'cudart' else x
158 |                                for x in extension.libraries]
159 |
160 |     return extensions
161 |
162 | def get_include_dirs():
163 |     include_dirs = []
164 |     if torch.cuda.is_available() or os.getenv('FORCE_CUDA', '0') == '1':
165 |         # NOTE: the nvcc version is queried, but no extra include dirs are added here.
166 |         _, bare_metal_major, _ = get_cuda_bare_metal_version(CUDA_HOME)
167 |
168 |     return include_dirs
169 |
170 |
171 | if __name__ == '__main__':
172 |     setup(
173 |         # Metadata
174 |         name=PACKAGE_NAME,
175 |         version=version,
176 |         license=LICENSE,
177 |
178 |         # Package info
179 |         # packages=find_packages(exclude=('tests')),
180 |         # include_package_data=True,
181 |         zip_safe=False,
182 |         ext_modules=get_extensions(),
183 |         cmdclass={
184 |             'build_ext': BuildExtension.with_options(no_python_abi_suffix=True)
185 |         }
186 |     )
187 |
--------------------------------------------------------------------------------
/test.sh:
--------------------------------------------------------------------------------
1 | python tests/value.py
2 | python tests/sign.py
3 | python tests/normal.py
--------------------------------------------------------------------------------
/tests/marching_cubes.py:
--------------------------------------------------------------------------------
1 | import trimesh
2 | import kaolin
3 | from torchsdf import index_vertices_by_faces, compute_sdf
4 | import os
5 | import torch
6 | import numpy as np
7 | import skimage
8 |
9 | os.environ["CUDA_VISIBLE_DEVICES"] = "1"
10 | device = "cuda"
11 |
12 | os.makedirs("tests/outputs", exist_ok=True)
13 |
14 | # Ns = N * N * N
15 | N = 256
16 |
17 | voxel_origin = [-1, -1, -1]
18 | voxel_size = 2.0 / (N - 1)
19 | overall_index = torch.arange(0, N**3, 1, dtype=torch.long)
20 | samples = torch.zeros((N**3, 3))
21 |
22 | samples[:, 2] = overall_index % N
23 | samples[:, 1] = (overall_index // N) % N  # floor division: true division would yield fractional grid indices
24 | samples[:, 0] = ((overall_index // N) // N) % N
25 |
26 | samples[:, 0] = (samples[:, 0] * voxel_size) + voxel_origin[0]
27 | samples[:, 1] = (samples[:, 1] * voxel_size) + voxel_origin[1]
28 | samples[:, 2] = (samples[:, 2] * voxel_size) + voxel_origin[2]
29 |
30 | # (Ns, 3)
31 | samples = samples.to(device)
32 | values = None
33 | # api = "Kaolin"
34 | api = "TorchSDF"
35 |
36 | print("====Marching cube test====")
37 | for model in os.listdir("tests/models"):
38 |     print("Test:", model[:-4])
39 |     model_path = os.path.join("tests/models", model)
40 |     mesh = trimesh.load(model_path, force="mesh", process=False)
41 |     # (Ns, 3)
42 |     x = samples.clone().requires_grad_()
43 |     # (Nv, 3)
44 |     verts = torch.Tensor(mesh.vertices.copy()).to(device)
45 |     # (Nf, 3)
46 |     faces = 
torch.Tensor(mesh.faces.copy()).long().to(device) 47 | # (1, Nf, 3, 3) 48 | face_verts = kaolin.ops.mesh.index_vertices_by_faces( 49 | verts.unsqueeze(0), faces) 50 | face_verts_ts = index_vertices_by_faces(verts, faces) 51 | 52 | distances, signs = None, None 53 | # Kaolin 54 | if api == "Kaolin": 55 | # (1, Ns) 56 | distances, face_indexes, types = kaolin.metrics.trianglemesh.point_to_mesh_distance( 57 | x.unsqueeze(0), face_verts) 58 | # (1, Ns) 59 | signs_ = kaolin.ops.mesh.check_sign( 60 | verts.unsqueeze(0), faces, x.unsqueeze(0)) 61 | # (1, Ns) 62 | signs = torch.where(signs_, -torch.ones_like( 63 | signs_).int(), torch.ones_like(signs_).int()) 64 | # TorchSDF 65 | elif api == "TorchSDF": 66 | distances, signs, normals_ts, clst_points_ts = compute_sdf( 67 | x, face_verts_ts) 68 | 69 | values = distances.sqrt() * signs 70 | values = values.detach().cpu().numpy().reshape(N, N, N) 71 | verts, faces, _, _ = skimage.measure.marching_cubes( 72 | values, level=0.0, spacing=[voxel_size] * 3) 73 | verts[:, 0] += voxel_origin[0] 74 | verts[:, 1] += voxel_origin[1] 75 | verts[:, 2] += voxel_origin[2] 76 | 77 | trimesh.Trimesh(vertices=verts, faces=faces, 78 | process=False).export(os.path.join("tests/outputs", model)) 79 | 80 | print("====Done====") 81 | -------------------------------------------------------------------------------- /tests/models/cube.obj: -------------------------------------------------------------------------------- 1 | # https://github.com/mikedh/trimesh 2 | v -0.56053424 -0.56053424 -0.56053424 3 | v -0.56053424 -0.56053424 0.56053424 4 | v -0.56053424 0.56053424 -0.56053424 5 | v -0.56053424 0.56053424 0.56053424 6 | v 0.56053424 -0.56053424 -0.56053424 7 | v 0.56053424 -0.56053424 0.56053424 8 | v 0.56053424 0.56053424 -0.56053424 9 | v 0.56053424 0.56053424 0.56053424 10 | f 2 3 1 11 | f 4 7 3 12 | f 8 5 7 13 | f 6 1 5 14 | f 7 1 3 15 | f 4 6 8 16 | f 2 4 3 17 | f 4 8 7 18 | f 8 6 5 19 | f 6 2 1 20 | f 7 5 1 21 | f 4 2 6 -------------------------------------------------------------------------------- /tests/models/sphere-42.obj: -------------------------------------------------------------------------------- 1 | # https://github.com/mikedh/trimesh 2 | v 0.00000000 0.00000000 -0.97087331 3 | v 0.70253073 -0.51041237 -0.43419396 4 | v -0.26833773 -0.82587241 -0.43419396 5 | v -0.86837434 0.00000000 -0.43419008 6 | v -0.26833773 0.82587241 -0.43419396 7 | v 0.70253073 0.51041237 -0.43419396 8 | v 0.26833773 -0.82587241 0.43419396 9 | v -0.70253073 -0.51041237 0.43419396 10 | v -0.70253073 0.51041237 0.43419396 11 | v 0.26833773 0.82587241 0.43419396 12 | v 0.86837434 0.00000000 0.43419008 13 | v 0.00000000 0.00000000 0.97087331 14 | v -0.15772420 -0.48543180 -0.82587727 15 | v 0.41293475 -0.30001053 -0.82587727 16 | v 0.25521250 -0.78544816 -0.51042499 17 | v 0.82587144 0.00000000 -0.51042305 18 | v 0.41293475 0.30001053 -0.82587727 19 | v -0.51041723 0.00000000 -0.82587533 20 | v -0.66814434 -0.48543374 -0.51042305 21 | v -0.15772420 0.48543180 -0.82587727 22 | v -0.66814434 0.48543374 -0.51042305 23 | v 0.25521250 0.78544816 -0.51042499 24 | v 0.92335683 -0.30001248 0.00000000 25 | v 0.92335683 0.30001248 0.00000000 26 | v 0.00000000 -0.97087331 0.00000000 27 | v 0.57066574 -0.78545302 0.00000000 28 | v -0.92335683 -0.30001248 0.00000000 29 | v -0.57066574 -0.78545302 0.00000000 30 | v -0.57066574 0.78545302 0.00000000 31 | v -0.92335683 0.30001248 0.00000000 32 | v 0.57066574 0.78545302 0.00000000 33 | v 0.00000000 0.97087331 0.00000000 34 | v 0.66814434 -0.48543374 
0.51042305 35 | v -0.25521250 -0.78544816 0.51042499 36 | v -0.82587144 0.00000000 0.51042305 37 | v -0.25521250 0.78544816 0.51042499 38 | v 0.66814434 0.48543374 0.51042305 39 | v 0.15772420 -0.48543180 0.82587727 40 | v 0.51041723 0.00000000 0.82587533 41 | v -0.41293475 -0.30001053 0.82587727 42 | v -0.41293475 0.30001053 0.82587727 43 | v 0.15772420 0.48543180 0.82587727 44 | f 1 14 13 45 | f 2 14 16 46 | f 1 13 18 47 | f 1 18 20 48 | f 1 20 17 49 | f 2 16 23 50 | f 3 15 25 51 | f 4 19 27 52 | f 5 21 29 53 | f 6 22 31 54 | f 2 23 26 55 | f 3 25 28 56 | f 4 27 30 57 | f 5 29 32 58 | f 6 31 24 59 | f 7 33 38 60 | f 8 34 40 61 | f 9 35 41 62 | f 10 36 42 63 | f 11 37 39 64 | f 39 42 12 65 | f 39 37 42 66 | f 37 10 42 67 | f 42 41 12 68 | f 42 36 41 69 | f 36 9 41 70 | f 41 40 12 71 | f 41 35 40 72 | f 35 8 40 73 | f 40 38 12 74 | f 40 34 38 75 | f 34 7 38 76 | f 38 39 12 77 | f 38 33 39 78 | f 33 11 39 79 | f 24 37 11 80 | f 24 31 37 81 | f 31 10 37 82 | f 32 36 10 83 | f 32 29 36 84 | f 29 9 36 85 | f 30 35 9 86 | f 30 27 35 87 | f 27 8 35 88 | f 28 34 8 89 | f 28 25 34 90 | f 25 7 34 91 | f 26 33 7 92 | f 26 23 33 93 | f 23 11 33 94 | f 31 32 10 95 | f 31 22 32 96 | f 22 5 32 97 | f 29 30 9 98 | f 29 21 30 99 | f 21 4 30 100 | f 27 28 8 101 | f 27 19 28 102 | f 19 3 28 103 | f 25 26 7 104 | f 25 15 26 105 | f 15 2 26 106 | f 23 24 11 107 | f 23 16 24 108 | f 16 6 24 109 | f 17 22 6 110 | f 17 20 22 111 | f 20 5 22 112 | f 20 21 5 113 | f 20 18 21 114 | f 18 4 21 115 | f 18 19 4 116 | f 18 13 19 117 | f 13 3 19 118 | f 16 17 6 119 | f 16 14 17 120 | f 14 1 17 121 | f 13 15 3 122 | f 13 14 15 123 | f 14 2 15 -------------------------------------------------------------------------------- /tests/models/sphere-642.obj: -------------------------------------------------------------------------------- 1 | # https://github.com/mikedh/trimesh 2 | v 0.00000000 0.00000000 -0.97087315 3 | v 0.70253060 -0.51041228 -0.43419389 4 | v -0.26833769 -0.82587227 -0.43419389 5 | v -0.86837418 0.00000000 -0.43419001 6 | v -0.26833769 0.82587227 -0.43419389 7 | v 0.70253060 0.51041228 -0.43419389 8 | v 0.26833769 -0.82587227 0.43419389 9 | v -0.70253060 -0.51041228 0.43419389 10 | v -0.70253060 0.51041228 0.43419389 11 | v 0.26833769 0.82587227 0.43419389 12 | v 0.86837418 0.00000000 0.43419001 13 | v 0.00000000 0.00000000 0.97087315 14 | v -0.25042411 -0.77073736 -0.53464528 15 | v -0.22604063 -0.69569177 -0.63836754 16 | v -0.19484259 -0.59967533 -0.73825485 17 | v -0.15772417 -0.48543172 -0.82587713 18 | v -0.11690575 -0.35980365 -0.89413048 19 | v -0.07534655 -0.23189596 -0.93975666 20 | v -0.03577473 -0.11010478 -0.96394597 21 | v 0.09366110 -0.06804753 -0.96394597 22 | v 0.19726298 -0.14331835 -0.93975666 23 | v 0.30606873 -0.22236879 -0.89413048 24 | v 0.41293468 -0.30001048 -0.82587713 25 | v 0.51011617 -0.37061626 -0.73825485 26 | v 0.59179281 -0.42995700 -0.63836754 27 | v 0.65563064 -0.47633755 -0.53464528 28 | v 0.61985590 -0.58644427 -0.46309387 29 | v 0.51644723 -0.66185587 -0.48767152 30 | v 0.39321139 -0.73042476 -0.50443850 31 | v 0.25521245 -0.78544803 -0.51042491 32 | v 0.11122711 -0.82204897 -0.50443850 33 | v -0.02877571 -0.83901304 -0.48767152 34 | v -0.15676203 -0.83878683 -0.46309484 35 | v 0.74929174 0.40829002 -0.46309387 36 | v 0.78905676 0.28663865 -0.48767055 37 | v 0.81618684 0.14824650 -0.50443753 38 | v 0.82587130 -0.00000000 -0.51042296 39 | v 0.81618684 -0.14824650 -0.50443753 40 | v 0.78905676 -0.28663865 -0.48767055 41 | v 0.74929174 -0.40829002 -0.46309387 42 | v 
0.09366110 0.06804753 -0.96394597 43 | v 0.19726298 0.14331835 -0.93975666 44 | v 0.30606873 0.22236879 -0.89413048 45 | v 0.41293468 0.30001048 -0.82587713 46 | v 0.51011617 0.37061626 -0.73825485 47 | v 0.59179281 0.42995700 -0.63836754 48 | v 0.65563064 0.47633755 -0.53464528 49 | v -0.81040335 0.00000000 -0.53464140 50 | v -0.73149661 0.00000000 -0.63836366 51 | v -0.63053939 0.00000000 -0.73825097 52 | v -0.51041714 0.00000000 -0.82587518 53 | v -0.37832305 0.00000000 -0.89412854 54 | v -0.24383188 0.00000000 -0.93975569 55 | v -0.11577177 0.00000000 -0.96394597 56 | v -0.36619685 -0.77073833 -0.46309484 57 | v -0.46987445 -0.69569372 -0.48767152 58 | v -0.57317050 -0.59967727 -0.50443850 59 | v -0.66814422 -0.48543366 -0.51042296 60 | v -0.74744805 -0.35980559 -0.50443656 61 | v -0.80684510 -0.23189596 -0.48766861 62 | v -0.84617905 -0.11010478 -0.46309096 63 | v -0.25042411 0.77073736 -0.53464528 64 | v -0.22604063 0.69569177 -0.63836754 65 | v -0.19484259 0.59967533 -0.73825485 66 | v -0.15772417 0.48543172 -0.82587713 67 | v -0.11690575 0.35980365 -0.89413048 68 | v -0.07534655 0.23189596 -0.93975666 69 | v -0.03577473 0.11010478 -0.96394597 70 | v -0.84617905 0.11010478 -0.46309096 71 | v -0.80684510 0.23189596 -0.48766861 72 | v -0.74744805 0.35980559 -0.50443656 73 | v -0.66814422 0.48543366 -0.51042296 74 | v -0.57317050 0.59967727 -0.50443850 75 | v -0.46987445 0.69569372 -0.48767152 76 | v -0.36619685 0.77073833 -0.46309484 77 | v -0.15676203 0.83878683 -0.46309484 78 | v -0.02877571 0.83901304 -0.48767152 79 | v 0.11122711 0.82204897 -0.50443947 80 | v 0.25521245 0.78544803 -0.51042491 81 | v 0.39321139 0.73042476 -0.50443850 82 | v 0.51644723 0.66185587 -0.48767152 83 | v 0.61985687 0.58644427 -0.46309387 84 | v 0.90406542 -0.06804753 0.34731822 85 | v 0.92876249 -0.14331835 0.24383382 86 | v 0.93661200 -0.22236976 0.12610963 87 | v 0.92335667 -0.30001242 -0.00000000 88 | v 0.88844407 -0.37061820 -0.12610963 89 | v 0.83562858 -0.42995894 -0.24383576 90 | v 0.77140435 -0.47633852 -0.34732113 91 | v 0.77140435 0.47633852 -0.34732113 92 | v 0.83562858 0.42995894 -0.24383576 93 | v 0.88844407 0.37061820 -0.12610963 94 | v 0.92335667 0.30001242 0.00000000 95 | v 0.93661200 0.22236976 0.12610963 96 | v 0.92876249 0.14331835 0.24383382 97 | v 0.90406542 0.06804753 0.34731822 98 | v 0.21464937 -0.88084505 0.34732113 99 | v 0.15069408 -0.92759356 0.24383673 100 | v 0.07793781 -0.95948772 0.12611060 101 | v -0.00000000 -0.97087315 -0.00000000 102 | v -0.07793781 -0.95948772 -0.12611060 103 | v -0.15069408 -0.92759356 -0.24383673 104 | v -0.21464937 -0.88084505 -0.34732113 105 | v 0.69140731 -0.58644524 -0.34732113 106 | v 0.66714422 -0.66185879 -0.24383673 107 | v 0.62702774 -0.73042962 -0.12611060 108 | v 0.57066564 -0.78545288 0.00000000 109 | v 0.50091812 -0.82205383 0.12611060 110 | v 0.42330749 -0.83901692 0.24383673 111 | v 0.34408618 -0.83878877 0.34732210 112 | v -0.77140435 -0.47633852 0.34732113 113 | v -0.83562858 -0.42995894 0.24383576 114 | v -0.88844407 -0.37061820 0.12610963 115 | v -0.92335667 -0.30001242 -0.00000000 116 | v -0.93661200 -0.22236976 -0.12610963 117 | v -0.92876249 -0.14331835 -0.24383382 118 | v -0.90406542 -0.06804753 -0.34731822 119 | v -0.34408618 -0.83878877 -0.34732210 120 | v -0.42330749 -0.83901692 -0.24383673 121 | v -0.50091812 -0.82205383 -0.12611060 122 | v -0.57066564 -0.78545288 0.00000000 123 | v -0.62702774 -0.73042864 0.12611060 124 | v -0.66714422 -0.66185879 0.24383673 125 | v -0.69140731 -0.58644524 0.34732113 126 | v -0.69140731 
0.58644524 0.34732113 127 | v -0.66714422 0.66185879 0.24383673 128 | v -0.62702774 0.73042962 0.12611060 129 | v -0.57066564 0.78545288 -0.00000000 130 | v -0.50091812 0.82205383 -0.12611060 131 | v -0.42330749 0.83901692 -0.24383673 132 | v -0.34408618 0.83878877 -0.34732210 133 | v -0.90406542 0.06804753 -0.34731822 134 | v -0.92876249 0.14331835 -0.24383382 135 | v -0.93661200 0.22236976 -0.12610963 136 | v -0.92335667 0.30001242 0.00000000 137 | v -0.88844407 0.37061820 0.12610963 138 | v -0.83562858 0.42995894 0.24383576 139 | v -0.77140435 0.47633852 0.34732113 140 | v 0.34408618 0.83878877 0.34732210 141 | v 0.42330749 0.83901692 0.24383673 142 | v 0.50091812 0.82205383 0.12611060 143 | v 0.57066564 0.78545288 -0.00000000 144 | v 0.62702774 0.73042864 -0.12611060 145 | v 0.66714422 0.66185879 -0.24383673 146 | v 0.69140731 0.58644524 -0.34732113 147 | v -0.21464937 0.88084505 -0.34732113 148 | v -0.15069408 0.92759356 -0.24383673 149 | v -0.07793781 0.95948772 -0.12611060 150 | v 0.00000000 0.97087315 0.00000000 151 | v 0.07793781 0.95948772 0.12611060 152 | v 0.15069408 0.92759356 0.24383673 153 | v 0.21464937 0.88084505 0.34732113 154 | v 0.84617905 -0.11010478 0.46309096 155 | v 0.80684510 -0.23189596 0.48766861 156 | v 0.74744805 -0.35980559 0.50443656 157 | v 0.66814422 -0.48543366 0.51042296 158 | v 0.57317050 -0.59967727 0.50443850 159 | v 0.46987445 -0.69569372 0.48767152 160 | v 0.36619685 -0.77073833 0.46309484 161 | v 0.15676203 -0.83878683 0.46309484 162 | v 0.02877571 -0.83901304 0.48767152 163 | v -0.11122711 -0.82204897 0.50443947 164 | v -0.25521245 -0.78544803 0.51042491 165 | v -0.39321139 -0.73042476 0.50443850 166 | v -0.51644723 -0.66185587 0.48767152 167 | v -0.61985687 -0.58644427 0.46309387 168 | v -0.74929174 -0.40829002 0.46309387 169 | v -0.78905676 -0.28663865 0.48767055 170 | v -0.81618684 -0.14824650 0.50443753 171 | v -0.82587130 0.00000000 0.51042296 172 | v -0.81618684 0.14824650 0.50443753 173 | v -0.78905676 0.28663865 0.48767055 174 | v -0.74929174 0.40829002 0.46309387 175 | v -0.61985590 0.58644427 0.46309387 176 | v -0.51644723 0.66185587 0.48767152 177 | v -0.39321139 0.73042476 0.50443850 178 | v -0.25521245 0.78544803 0.51042491 179 | v -0.11122711 0.82204897 0.50443850 180 | v 0.02877571 0.83901304 0.48767152 181 | v 0.15676203 0.83878683 0.46309484 182 | v 0.36619685 0.77073833 0.46309484 183 | v 0.46987445 0.69569372 0.48767152 184 | v 0.57317050 0.59967727 0.50443850 185 | v 0.66814422 0.48543366 0.51042296 186 | v 0.74744805 0.35980559 0.50443656 187 | v 0.80684510 0.23189596 0.48766861 188 | v 0.84617905 0.11010478 0.46309096 189 | v 0.03577473 -0.11010478 0.96394597 190 | v 0.07534655 -0.23189596 0.93975666 191 | v 0.11690575 -0.35980365 0.89413048 192 | v 0.15772417 -0.48543172 0.82587713 193 | v 0.19484259 -0.59967533 0.73825485 194 | v 0.22604063 -0.69569177 0.63836754 195 | v 0.25042411 -0.77073736 0.53464528 196 | v 0.81040335 0.00000000 0.53464140 197 | v 0.73149661 0.00000000 0.63836366 198 | v 0.63053939 0.00000000 0.73825097 199 | v 0.51041714 0.00000000 0.82587518 200 | v 0.37832305 0.00000000 0.89412854 201 | v 0.24383188 0.00000000 0.93975569 202 | v 0.11577177 0.00000000 0.96394597 203 | v -0.09366110 -0.06804753 0.96394597 204 | v -0.19726298 -0.14331835 0.93975666 205 | v -0.30606873 -0.22236879 0.89413048 206 | v -0.41293468 -0.30001048 0.82587713 207 | v -0.51011617 -0.37061626 0.73825485 208 | v -0.59179281 -0.42995700 0.63836754 209 | v -0.65563064 -0.47633755 0.53464528 210 | v -0.09366110 0.06804753 0.96394597 
211 | v -0.19726298 0.14331835 0.93975666 212 | v -0.30606873 0.22236879 0.89413048 213 | v -0.41293468 0.30001048 0.82587713 214 | v -0.51011617 0.37061626 0.73825485 215 | v -0.59179281 0.42995700 0.63836754 216 | v -0.65563064 0.47633755 0.53464528 217 | v 0.03577473 0.11010478 0.96394597 218 | v 0.07534655 0.23189596 0.93975666 219 | v 0.11690575 0.35980365 0.89413048 220 | v 0.15772417 0.48543172 0.82587713 221 | v 0.19484259 0.59967533 0.73825485 222 | v 0.22604063 0.69569177 0.63836754 223 | v 0.25042411 0.77073736 0.53464528 224 | v 0.16135718 0.11723196 0.95016636 225 | v 0.29822019 0.12283293 0.91573532 226 | v 0.20897559 0.24566586 0.91573629 227 | v 0.43822787 0.12595234 0.85713924 228 | v 0.35126190 0.25520663 0.86837710 229 | v 0.25520566 0.37785606 0.85714021 230 | v 0.57066273 0.12595234 0.77528978 231 | v 0.49196958 0.25864352 0.79603054 232 | v 0.39801042 0.38796479 0.79603151 233 | v 0.29612990 0.50380938 0.77529269 234 | v 0.68568693 0.12283390 0.67626946 235 | v 0.61960542 0.25520760 0.70253352 236 | v 0.53398800 0.38796576 0.71199273 237 | v 0.43418321 0.51041520 0.70253546 238 | v 0.32870755 0.61416756 0.67627237 239 | v 0.77769075 0.11723293 0.56925302 240 | v 0.72559758 0.24566780 0.59644815 241 | v 0.65251122 0.37785800 0.61159377 242 | v 0.56099963 0.50381132 0.61159474 243 | v 0.45786281 0.61416853 0.59645009 244 | v 0.35181142 0.70339954 0.56925594 245 | v -0.06163394 0.18968531 0.95016636 246 | v -0.02466794 0.32157940 0.91573629 247 | v -0.16906591 0.27466098 0.91573629 248 | v 0.01562912 0.45569776 0.85714021 249 | v -0.13417176 0.41293080 0.86837807 250 | v -0.28050079 0.35947743 0.85714021 251 | v 0.05655336 0.58165107 0.77529269 252 | v -0.09396013 0.54781323 0.79603249 253 | v -0.24598625 0.49841618 0.79603249 254 | v -0.38764246 0.43732010 0.77529269 255 | v 0.09506304 0.69008207 0.67627237 256 | v -0.05125239 0.66814034 0.70253546 257 | v -0.20396880 0.62773842 0.71199468 258 | v -0.35126579 0.57065788 0.70253546 259 | v -0.48253269 0.50240647 0.67627237 260 | v 0.12881933 0.77585289 0.56925594 261 | v -0.00942524 0.76599755 0.59645106 262 | v -0.15773096 0.73733738 0.61159571 263 | v -0.30579689 0.68922673 0.61159571 264 | v -0.44262301 0.62524036 0.59645009 265 | v -0.56025788 0.55195401 0.56925594 266 | v -0.19944841 0.00000000 0.95016539 267 | v -0.31346387 0.07591451 0.91573532 268 | v -0.31346387 -0.07591451 0.91573532 269 | v -0.42856574 0.15568339 0.85714021 270 | v -0.43418418 -0.00000000 0.86837710 271 | v -0.42856574 -0.15568339 0.85714021 272 | v -0.53570741 0.23352606 0.77529172 273 | v -0.55003750 0.07992422 0.79603054 274 | v -0.55003750 -0.07992422 0.79603054 275 | v -0.53570741 -0.23352606 0.77529172 276 | v -0.62693163 0.30365805 0.67627237 277 | v -0.65127821 0.15772514 0.70253449 278 | v -0.66004617 -0.00000000 0.71199273 279 | v -0.65127821 -0.15772514 0.70253449 280 | v -0.62693163 -0.30365805 0.67627140 281 | v -0.69807236 0.36226675 0.56925594 282 | v -0.73141991 0.22774354 0.59644912 283 | v -0.74999174 0.07784170 0.61159377 284 | v -0.74999174 -0.07784170 0.61159377 285 | v -0.73141991 -0.22774354 0.59644912 286 | v -0.69807236 -0.36226675 0.56925594 287 | v -0.06163394 -0.18968531 0.95016636 288 | v -0.16906591 -0.27466098 0.91573629 289 | v -0.02466794 -0.32157940 0.91573629 290 | v -0.28050079 -0.35947743 0.85714021 291 | v -0.13417176 -0.41293080 0.86837807 292 | v 0.01562912 -0.45569776 0.85714021 293 | v -0.38764246 -0.43732010 0.77529269 294 | v -0.24598625 -0.49841618 0.79603249 295 | v -0.09396013 -0.54781323 
0.79603249 296 | v 0.05655336 -0.58165107 0.77529269 297 | v -0.48253269 -0.50240647 0.67627237 298 | v -0.35126579 -0.57065788 0.70253546 299 | v -0.20396880 -0.62773842 0.71199468 300 | v -0.05125239 -0.66814034 0.70253546 301 | v 0.09506304 -0.69008207 0.67627237 302 | v -0.56025788 -0.55195401 0.56925594 303 | v -0.44262301 -0.62524036 0.59645106 304 | v -0.30579689 -0.68922673 0.61159571 305 | v -0.15773096 -0.73733738 0.61159571 306 | v -0.00942524 -0.76599755 0.59645106 307 | v 0.12881933 -0.77585289 0.56925691 308 | v 0.16135718 -0.11723196 0.95016636 309 | v 0.20897559 -0.24566586 0.91573629 310 | v 0.29822019 -0.12283293 0.91573532 311 | v 0.25520566 -0.37785606 0.85714021 312 | v 0.35126190 -0.25520663 0.86837710 313 | v 0.43822787 -0.12595234 0.85713924 314 | v 0.29612990 -0.50380938 0.77529269 315 | v 0.39801042 -0.38796479 0.79603151 316 | v 0.49196958 -0.25864352 0.79603054 317 | v 0.57066273 -0.12595234 0.77528978 318 | v 0.32870755 -0.61416756 0.67627237 319 | v 0.43418321 -0.51041520 0.70253546 320 | v 0.53398800 -0.38796576 0.71199273 321 | v 0.61960542 -0.25520760 0.70253352 322 | v 0.68568693 -0.12283390 0.67626946 323 | v 0.35181142 -0.70339954 0.56925594 324 | v 0.45786281 -0.61416853 0.59645009 325 | v 0.56099963 -0.50381132 0.61159474 326 | v 0.65251122 -0.37785800 0.61159377 327 | v 0.72559758 -0.24566780 0.59644815 328 | v 0.77769075 -0.11723293 0.56925302 329 | v 0.87741690 0.18968531 0.36980267 330 | v 0.89466737 0.27466195 0.25831342 331 | v 0.83008877 0.32158134 0.38746965 332 | v 0.89209262 0.35947937 0.13243681 333 | v 0.83670236 0.41293371 0.26834545 334 | v 0.75965581 0.45569970 0.39730945 335 | v 0.86680040 0.43732301 -0.00000000 336 | v 0.82199946 0.49841909 0.13597952 337 | v 0.75400921 0.54781614 0.27195904 338 | v 0.66814519 0.58165399 0.39731042 339 | v 0.82066936 0.50240841 -0.12915720 340 | v 0.78545482 0.57066176 0.00000000 341 | v 0.72804030 0.62774328 0.13597952 342 | v 0.65128210 0.66814422 0.26834643 343 | v 0.56235497 0.69008498 0.38747159 344 | v 0.75970921 0.55195595 -0.24653673 345 | v 0.73142476 0.62524425 -0.12915720 346 | v 0.68377916 0.68923158 0.00000000 347 | v 0.61756076 0.73734223 0.13243778 348 | v 0.53768897 0.76600144 0.25831536 349 | v 0.45153854 0.77585580 0.36980558 350 | v 0.09072907 0.89308873 0.36980558 351 | v 0.01524271 0.93575375 0.25831439 352 | v -0.04933589 0.88883437 0.38747159 353 | v -0.06621840 0.95951587 0.13243681 354 | v -0.13417273 0.92335376 0.26834643 355 | v -0.19865521 0.86329361 0.39731042 356 | v -0.14806689 0.95951587 -0.00000000 357 | v -0.22001733 0.93578676 0.13597952 358 | v -0.28800758 0.88638874 0.27195904 359 | v -0.34672210 0.81518296 0.39731042 360 | v -0.22422121 0.93575569 -0.12915817 361 | v -0.30001534 0.92335570 -0.00000000 362 | v -0.37204539 0.88638971 0.13597952 363 | v -0.43418903 0.82587227 0.26834643 364 | v -0.48253560 0.74807718 0.38747159 365 | v -0.29017942 0.89308970 -0.24653770 366 | v -0.36862112 0.88883728 -0.12915817 367 | v -0.44420068 0.86329652 -0.00000000 368 | v -0.51041908 0.81518587 0.13243681 369 | v -0.56235788 0.74807912 0.25831536 370 | v -0.59835106 0.66918985 0.36980558 371 | v -0.82134121 0.36226772 0.36980461 372 | v -0.88524602 0.30365902 0.25831342 373 | v -0.86057807 0.22774451 0.38747062 374 | v -0.93301686 0.23352703 0.13243681 375 | v -0.91962367 0.15772611 0.26834545 376 | v -0.88242854 0.07784267 0.39730945 377 | v -0.95830908 0.15568436 0.00000000 378 | v -0.95797510 0.07992422 0.13597952 379 | v -0.93200521 -0.00000000 0.27195807 380 | v 
-0.88242854 -0.07784267 0.39730945 381 | v -0.95924500 0.07591451 -0.12915623 382 | v -0.97087315 -0.00000000 0.00000097 383 | v -0.95797510 -0.07992519 0.13597952 384 | v -0.91962367 -0.15772611 0.26834643 385 | v -0.86057710 -0.22774451 0.38747062 386 | v -0.93904987 -0.00000000 -0.24653479 387 | v -0.95924500 -0.07591548 -0.12915525 388 | v -0.95830908 -0.15568533 0.00000097 389 | v -0.93301686 -0.23352800 0.13243778 390 | v -0.88524505 -0.30365902 0.25831536 391 | v -0.82134121 -0.36226772 0.36980558 392 | v -0.59835106 -0.66918985 0.36980558 393 | v -0.56235788 -0.74807815 0.25831439 394 | v -0.48253560 -0.74807718 0.38747159 395 | v -0.51041908 -0.81518490 0.13243681 396 | v -0.43419001 -0.82587227 0.26834643 397 | v -0.34672210 -0.81518296 0.39731042 398 | v -0.44420068 -0.86329652 -0.00000097 399 | v -0.37204636 -0.88638971 0.13597952 400 | v -0.28800855 -0.88638874 0.27195904 401 | v -0.19865618 -0.86329361 0.39731042 402 | v -0.36862209 -0.88883631 -0.12915817 403 | v -0.30001631 -0.92335473 -0.00000097 404 | v -0.22001830 -0.93578676 0.13597952 405 | v -0.13417370 -0.92335376 0.26834643 406 | v -0.04933686 -0.88883437 0.38747159 407 | v -0.29017942 -0.89308970 -0.24653770 408 | v -0.22422121 -0.93575472 -0.12915817 409 | v -0.14806689 -0.95951587 -0.00000000 410 | v -0.06621937 -0.95951587 0.13243681 411 | v 0.01524174 -0.93575375 0.25831439 412 | v 0.09072907 -0.89308873 0.36980558 413 | v 0.45153854 -0.77585580 0.36980558 414 | v 0.53768897 -0.76600241 0.25831439 415 | v 0.56235497 -0.69008498 0.38747159 416 | v 0.61756076 -0.73734223 0.13243681 417 | v 0.65128112 -0.66814519 0.26834545 418 | v 0.66814519 -0.58165399 0.39730945 419 | v 0.68377916 -0.68923158 -0.00000097 420 | v 0.72804030 -0.62774328 0.13597855 421 | v 0.75400921 -0.54781711 0.27195710 422 | v 0.75965581 -0.45570067 0.39730848 423 | v 0.73142476 -0.62524425 -0.12915914 424 | v 0.78545482 -0.57066273 -0.00000194 425 | v 0.82199849 -0.49842006 0.13597758 426 | v 0.83670236 -0.41293468 0.26834351 427 | v 0.83008877 -0.32158231 0.38746868 428 | v 0.75970921 -0.55195595 -0.24653770 429 | v 0.82066839 -0.50240938 -0.12915914 430 | v 0.86680040 -0.43732398 -0.00000194 431 | v 0.89209262 -0.35948132 0.13243389 432 | v 0.89466737 -0.27466390 0.25831148 433 | v 0.87741690 -0.18968628 0.36980170 434 | v 0.29018039 0.89308970 0.24653770 435 | v 0.36862209 0.88883631 0.12915817 436 | v 0.22422121 0.93575472 0.12915817 437 | v 0.44420068 0.86329652 -0.00000000 438 | v 0.30001631 0.92335473 -0.00000000 439 | v 0.14806689 0.95951587 0.00000000 440 | v 0.51041908 0.81518490 -0.13243778 441 | v 0.37204539 0.88638971 -0.13598049 442 | v 0.22001830 0.93578676 -0.13598049 443 | v 0.06621840 0.95951587 -0.13243681 444 | v 0.56235788 0.74807815 -0.25831536 445 | v 0.43419001 0.82587130 -0.26834740 446 | v 0.28800758 0.88638777 -0.27196001 447 | v 0.13417370 0.92335376 -0.26834740 448 | v -0.01524174 0.93575375 -0.25831536 449 | v 0.59835106 0.66918888 -0.36980558 450 | v 0.48253560 0.74807718 -0.38747256 451 | v 0.34672210 0.81518296 -0.39731139 452 | v 0.19865521 0.86329263 -0.39731139 453 | v 0.04933686 0.88883437 -0.38747256 454 | v -0.09072907 0.89308776 -0.36980558 455 | v -0.75970921 0.55195595 0.24653770 456 | v -0.73142476 0.62524425 0.12915720 457 | v -0.82066936 0.50240938 0.12915720 458 | v -0.68377916 0.68923256 -0.00000000 459 | v -0.78545385 0.57066273 -0.00000000 460 | v -0.86680040 0.43732398 0.00000000 461 | v -0.61755979 0.73734320 -0.13243778 462 | v -0.72803933 0.62774425 -0.13597952 463 | v -0.82199849 
0.49842103 -0.13597952 464 | v -0.89209262 0.35948035 -0.13243681 465 | v -0.53768800 0.76600241 -0.25831536 466 | v -0.65128015 0.66814616 -0.26834643 467 | v -0.75400727 0.54781809 -0.27195904 468 | v -0.83670139 0.41293565 -0.26834545 469 | v -0.89466640 0.27466390 -0.25831342 470 | v -0.45153757 0.77585580 -0.36980558 471 | v -0.56235303 0.69008595 -0.38747159 472 | v -0.66814422 0.58165593 -0.39731042 473 | v -0.75965484 0.45570261 -0.39730945 474 | v -0.83008780 0.32158328 -0.38746965 475 | v -0.87741593 0.18968725 -0.36980267 476 | v -0.75970921 -0.55195595 0.24653770 477 | v -0.82066936 -0.50240938 0.12915720 478 | v -0.73142476 -0.62524425 0.12915720 479 | v -0.86680040 -0.43732398 -0.00000000 480 | v -0.78545385 -0.57066273 0.00000000 481 | v -0.68377916 -0.68923256 0.00000000 482 | v -0.89209262 -0.35948035 -0.13243681 483 | v -0.82199849 -0.49842103 -0.13597952 484 | v -0.72803933 -0.62774425 -0.13597952 485 | v -0.61755979 -0.73734320 -0.13243778 486 | v -0.89466737 -0.27466390 -0.25831342 487 | v -0.83670139 -0.41293565 -0.26834545 488 | v -0.75400824 -0.54781809 -0.27195904 489 | v -0.65128015 -0.66814616 -0.26834643 490 | v -0.53768800 -0.76600241 -0.25831536 491 | v -0.87741593 -0.18968725 -0.36980267 492 | v -0.83008780 -0.32158328 -0.38746965 493 | v -0.75965484 -0.45570261 -0.39730945 494 | v -0.66814422 -0.58165593 -0.39731042 495 | v -0.56235303 -0.69008595 -0.38747159 496 | v -0.45153757 -0.77585580 -0.36980558 497 | v 0.29018039 -0.89308970 0.24653770 498 | v 0.22422121 -0.93575472 0.12915720 499 | v 0.36862209 -0.88883631 0.12915817 500 | v 0.14806689 -0.95951587 -0.00000000 501 | v 0.30001631 -0.92335473 0.00000000 502 | v 0.44420068 -0.86329652 0.00000000 503 | v 0.06621937 -0.95951587 -0.13243681 504 | v 0.22001830 -0.93578676 -0.13598049 505 | v 0.37204539 -0.88638971 -0.13598049 506 | v 0.51041908 -0.81518490 -0.13243778 507 | v -0.01524174 -0.93575375 -0.25831536 508 | v 0.13417370 -0.92335376 -0.26834643 509 | v 0.28800758 -0.88638777 -0.27196001 510 | v 0.43419001 -0.82587130 -0.26834740 511 | v 0.56235788 -0.74807815 -0.25831536 512 | v -0.09072907 -0.89308873 -0.36980558 513 | v 0.04933686 -0.88883437 -0.38747256 514 | v 0.19865521 -0.86329263 -0.39731042 515 | v 0.34672210 -0.81518296 -0.39731042 516 | v 0.48253560 -0.74807718 -0.38747256 517 | v 0.59835106 -0.66918985 -0.36980558 518 | v 0.93904987 -0.00000000 0.24653479 519 | v 0.95924500 -0.07591451 0.12915623 520 | v 0.95924500 0.07591451 0.12915623 521 | v 0.95830908 -0.15568436 -0.00000000 522 | v 0.97087315 -0.00000000 0.00000000 523 | v 0.95830908 0.15568436 0.00000000 524 | v 0.93301686 -0.23352703 -0.13243681 525 | v 0.95797510 -0.07992422 -0.13597952 526 | v 0.95797510 0.07992422 -0.13597952 527 | v 0.93301686 0.23352703 -0.13243681 528 | v 0.88524602 -0.30365902 -0.25831439 529 | v 0.91962367 -0.15772611 -0.26834545 530 | v 0.93200521 -0.00000000 -0.27195807 531 | v 0.91962367 0.15772611 -0.26834545 532 | v 0.88524602 0.30365902 -0.25831439 533 | v 0.82134121 -0.36226772 -0.36980461 534 | v 0.86057807 -0.22774354 -0.38747062 535 | v 0.88242854 -0.07784267 -0.39730945 536 | v 0.88242854 0.07784267 -0.39730945 537 | v 0.86057807 0.22774354 -0.38747062 538 | v 0.82134121 0.36226772 -0.36980461 539 | v 0.56025788 0.55195401 -0.56925594 540 | v 0.48253269 0.50240647 -0.67627237 541 | v 0.44262301 0.62524134 -0.59645009 542 | v 0.38764246 0.43732107 -0.77529269 543 | v 0.35126579 0.57065885 -0.70253546 544 | v 0.30579689 0.68922770 -0.61159474 545 | v 0.28050079 0.35947840 -0.85714021 546 | v 
0.24598625 0.49841715 -0.79603151 547 | v 0.20396783 0.62773939 -0.71199371 548 | v 0.15773096 0.73733835 -0.61159474 549 | v 0.16906591 0.27466195 -0.91573629 550 | v 0.13417176 0.41293274 -0.86837710 551 | v 0.09396013 0.54781517 -0.79603054 552 | v 0.05125142 0.66814131 -0.70253449 553 | v 0.00942524 0.76599852 -0.59645009 554 | v 0.06163297 0.18968628 -0.95016539 555 | v 0.02466697 0.32158134 -0.91573532 556 | v -0.01563009 0.45569970 -0.85713924 557 | v -0.05655336 0.58165301 -0.77529075 558 | v -0.09506304 0.69008304 -0.67627140 559 | v -0.12881933 0.77585289 -0.56925594 560 | v -0.35181239 0.70339857 -0.56925594 561 | v -0.32870755 0.61416756 -0.67627237 562 | v -0.45786475 0.61416756 -0.59645009 563 | v -0.29612990 0.50380841 -0.77529269 564 | v -0.43418515 0.51041423 -0.70253546 565 | v -0.56100157 0.50380938 -0.61159474 566 | v -0.25520566 0.37785606 -0.85714021 567 | v -0.39801139 0.38796382 -0.79603151 568 | v -0.53398994 0.38796382 -0.71199273 569 | v -0.65251219 0.37785606 -0.61159377 570 | v -0.20897559 0.24566586 -0.91573629 571 | v -0.35126288 0.25520663 -0.86837710 572 | v -0.49197055 0.25864255 -0.79603054 573 | v -0.61960639 0.25520663 -0.70253255 574 | v -0.72559855 0.24566586 -0.59644718 575 | v -0.16135718 0.11723196 -0.95016539 576 | v -0.29822116 0.12283293 -0.91573532 577 | v -0.43822884 0.12595137 -0.85713827 578 | v -0.57066370 0.12595137 -0.77528978 579 | v -0.68568693 0.12283293 -0.67626849 580 | v -0.77769075 0.11723196 -0.56925205 581 | v -0.77769075 -0.11723293 -0.56925205 582 | v -0.68568693 -0.12283293 -0.67626946 583 | v -0.72559855 -0.24566683 -0.59644718 584 | v -0.57066370 -0.12595234 -0.77528978 585 | v -0.61960639 -0.25520760 -0.70253255 586 | v -0.65251219 -0.37785703 -0.61159280 587 | v -0.43822787 -0.12595234 -0.85713827 588 | v -0.49197055 -0.25864352 -0.79602957 589 | v -0.53398897 -0.38796576 -0.71199176 590 | v -0.56100157 -0.50381035 -0.61159377 591 | v -0.29822116 -0.12283390 -0.91573532 592 | v -0.35126288 -0.25520760 -0.86837613 593 | v -0.39801139 -0.38796576 -0.79603054 594 | v -0.43418515 -0.51041617 -0.70253352 595 | v -0.45786475 -0.61416853 -0.59644912 596 | v -0.16135718 -0.11723293 -0.95016539 597 | v -0.20897656 -0.24566780 -0.91573532 598 | v -0.25520663 -0.37785800 -0.85713924 599 | v -0.29613087 -0.50381035 -0.77529075 600 | v -0.32870755 -0.61416853 -0.67627140 601 | v -0.35181239 -0.70339954 -0.56925496 602 | v 0.69807236 0.36226675 -0.56925594 603 | v 0.73141991 0.22774354 -0.59644912 604 | v 0.62693163 0.30365805 -0.67627140 605 | v 0.74999271 0.07784170 -0.61159377 606 | v 0.65127821 0.15772514 -0.70253449 607 | v 0.53570838 0.23352606 -0.77529172 608 | v 0.74999271 -0.07784267 -0.61159377 609 | v 0.66004714 -0.00000000 -0.71199273 610 | v 0.55003847 0.07992422 -0.79603054 611 | v 0.42856574 0.15568339 -0.85713924 612 | v 0.73142088 -0.22774354 -0.59644912 613 | v 0.65127918 -0.15772611 -0.70253352 614 | v 0.55003944 -0.07992519 -0.79602957 615 | v 0.43418515 -0.00000097 -0.86837613 616 | v 0.31346484 0.07591354 -0.91573532 617 | v 0.69807333 -0.36226772 -0.56925496 618 | v 0.62693260 -0.30365902 -0.67627043 619 | v 0.53570936 -0.23352703 -0.77529075 620 | v 0.42856671 -0.15568533 -0.85713924 621 | v 0.31346581 -0.07591548 -0.91573532 622 | v 0.19944841 -0.00000097 -0.95016539 623 | v -0.12881933 -0.77585289 -0.56925594 624 | v -0.09506304 -0.69008207 -0.67627237 625 | v 0.00942621 -0.76599755 -0.59645009 626 | v -0.05655239 -0.58165107 -0.77529269 627 | v 0.05125239 -0.66814034 -0.70253546 628 | v 0.15773096 -0.73733738 
-0.61159571 629 | v -0.01562815 -0.45569873 -0.85714021 630 | v 0.09396110 -0.54781420 -0.79603151 631 | v 0.20396977 -0.62773939 -0.71199371 632 | v 0.30579689 -0.68922770 -0.61159474 633 | v 0.02466892 -0.32158037 -0.91573629 634 | v 0.13417370 -0.41293274 -0.86837710 635 | v 0.24598819 -0.49841715 -0.79603054 636 | v 0.35126676 -0.57065885 -0.70253449 637 | v 0.44262398 -0.62524134 -0.59645009 638 | v 0.06163491 -0.18968628 -0.95016539 639 | v 0.16906785 -0.27466195 -0.91573532 640 | v 0.28050273 -0.35947937 -0.85713924 641 | v 0.38764440 -0.43732107 -0.77529075 642 | v 0.48253463 -0.50240744 -0.67627140 643 | v 0.56025885 -0.55195401 -0.56925496 644 | f 1 20 19 645 | f 2 26 40 646 | f 1 19 54 647 | f 1 54 68 648 | f 1 68 41 649 | f 2 40 89 650 | f 3 33 103 651 | f 4 61 117 652 | f 5 75 131 653 | f 6 82 145 654 | f 2 89 104 655 | f 3 103 118 656 | f 4 117 132 657 | f 5 131 146 658 | f 6 145 90 659 | f 7 159 194 660 | f 8 166 208 661 | f 9 173 215 662 | f 10 180 222 663 | f 11 187 195 664 | f 201 216 12 665 | f 200 223 201 666 | f 199 224 200 667 | f 198 226 199 668 | f 197 229 198 669 | f 196 233 197 670 | f 195 238 196 671 | f 201 223 216 672 | f 223 217 216 673 | f 200 224 223 674 | f 224 225 223 675 | f 223 225 217 676 | f 225 218 217 677 | f 199 226 224 678 | f 226 227 224 679 | f 224 227 225 680 | f 227 228 225 681 | f 225 228 218 682 | f 228 219 218 683 | f 198 229 226 684 | f 229 230 226 685 | f 226 230 227 686 | f 230 231 227 687 | f 227 231 228 688 | f 231 232 228 689 | f 228 232 219 690 | f 232 220 219 691 | f 197 233 229 692 | f 233 234 229 693 | f 229 234 230 694 | f 234 235 230 695 | f 230 235 231 696 | f 235 236 231 697 | f 231 236 232 698 | f 236 237 232 699 | f 232 237 220 700 | f 237 221 220 701 | f 196 238 233 702 | f 238 239 233 703 | f 233 239 234 704 | f 239 240 234 705 | f 234 240 235 706 | f 240 241 235 707 | f 235 241 236 708 | f 241 242 236 709 | f 236 242 237 710 | f 242 243 237 711 | f 237 243 221 712 | f 243 222 221 713 | f 195 187 238 714 | f 187 186 238 715 | f 238 186 239 716 | f 186 185 239 717 | f 239 185 240 718 | f 185 184 240 719 | f 240 184 241 720 | f 184 183 241 721 | f 241 183 242 722 | f 183 182 242 723 | f 242 182 243 724 | f 182 181 243 725 | f 243 181 222 726 | f 181 10 222 727 | f 216 209 12 728 | f 217 244 216 729 | f 218 245 217 730 | f 219 247 218 731 | f 220 250 219 732 | f 221 254 220 733 | f 222 259 221 734 | f 216 244 209 735 | f 244 210 209 736 | f 217 245 244 737 | f 245 246 244 738 | f 244 246 210 739 | f 246 211 210 740 | f 218 247 245 741 | f 247 248 245 742 | f 245 248 246 743 | f 248 249 246 744 | f 246 249 211 745 | f 249 212 211 746 | f 219 250 247 747 | f 250 251 247 748 | f 247 251 248 749 | f 251 252 248 750 | f 248 252 249 751 | f 252 253 249 752 | f 249 253 212 753 | f 253 213 212 754 | f 220 254 250 755 | f 254 255 250 756 | f 250 255 251 757 | f 255 256 251 758 | f 251 256 252 759 | f 256 257 252 760 | f 252 257 253 761 | f 257 258 253 762 | f 253 258 213 763 | f 258 214 213 764 | f 221 259 254 765 | f 259 260 254 766 | f 254 260 255 767 | f 260 261 255 768 | f 255 261 256 769 | f 261 262 256 770 | f 256 262 257 771 | f 262 263 257 772 | f 257 263 258 773 | f 263 264 258 774 | f 258 264 214 775 | f 264 215 214 776 | f 222 180 259 777 | f 180 179 259 778 | f 259 179 260 779 | f 179 178 260 780 | f 260 178 261 781 | f 178 177 261 782 | f 261 177 262 783 | f 177 176 262 784 | f 262 176 263 785 | f 176 175 263 786 | f 263 175 264 787 | f 175 174 264 788 | f 264 174 215 789 | f 174 9 215 790 | f 209 202 12 791 | f 210 265 
209 792 | f 211 266 210 793 | f 212 268 211 794 | f 213 271 212 795 | f 214 275 213 796 | f 215 280 214 797 | f 209 265 202 798 | f 265 203 202 799 | f 210 266 265 800 | f 266 267 265 801 | f 265 267 203 802 | f 267 204 203 803 | f 211 268 266 804 | f 268 269 266 805 | f 266 269 267 806 | f 269 270 267 807 | f 267 270 204 808 | f 270 205 204 809 | f 212 271 268 810 | f 271 272 268 811 | f 268 272 269 812 | f 272 273 269 813 | f 269 273 270 814 | f 273 274 270 815 | f 270 274 205 816 | f 274 206 205 817 | f 213 275 271 818 | f 275 276 271 819 | f 271 276 272 820 | f 276 277 272 821 | f 272 277 273 822 | f 277 278 273 823 | f 273 278 274 824 | f 278 279 274 825 | f 274 279 206 826 | f 279 207 206 827 | f 214 280 275 828 | f 280 281 275 829 | f 275 281 276 830 | f 281 282 276 831 | f 276 282 277 832 | f 282 283 277 833 | f 277 283 278 834 | f 283 284 278 835 | f 278 284 279 836 | f 284 285 279 837 | f 279 285 207 838 | f 285 208 207 839 | f 215 173 280 840 | f 173 172 280 841 | f 280 172 281 842 | f 172 171 281 843 | f 281 171 282 844 | f 171 170 282 845 | f 282 170 283 846 | f 170 169 283 847 | f 283 169 284 848 | f 169 168 284 849 | f 284 168 285 850 | f 168 167 285 851 | f 285 167 208 852 | f 167 8 208 853 | f 202 188 12 854 | f 203 286 202 855 | f 204 287 203 856 | f 205 289 204 857 | f 206 292 205 858 | f 207 296 206 859 | f 208 301 207 860 | f 202 286 188 861 | f 286 189 188 862 | f 203 287 286 863 | f 287 288 286 864 | f 286 288 189 865 | f 288 190 189 866 | f 204 289 287 867 | f 289 290 287 868 | f 287 290 288 869 | f 290 291 288 870 | f 288 291 190 871 | f 291 191 190 872 | f 205 292 289 873 | f 292 293 289 874 | f 289 293 290 875 | f 293 294 290 876 | f 290 294 291 877 | f 294 295 291 878 | f 291 295 191 879 | f 295 192 191 880 | f 206 296 292 881 | f 296 297 292 882 | f 292 297 293 883 | f 297 298 293 884 | f 293 298 294 885 | f 298 299 294 886 | f 294 299 295 887 | f 299 300 295 888 | f 295 300 192 889 | f 300 193 192 890 | f 207 301 296 891 | f 301 302 296 892 | f 296 302 297 893 | f 302 303 297 894 | f 297 303 298 895 | f 303 304 298 896 | f 298 304 299 897 | f 304 305 299 898 | f 299 305 300 899 | f 305 306 300 900 | f 300 306 193 901 | f 306 194 193 902 | f 208 166 301 903 | f 166 165 301 904 | f 301 165 302 905 | f 165 164 302 906 | f 302 164 303 907 | f 164 163 303 908 | f 303 163 304 909 | f 163 162 304 910 | f 304 162 305 911 | f 162 161 305 912 | f 305 161 306 913 | f 161 160 306 914 | f 306 160 194 915 | f 160 7 194 916 | f 188 201 12 917 | f 189 307 188 918 | f 190 308 189 919 | f 191 310 190 920 | f 192 313 191 921 | f 193 317 192 922 | f 194 322 193 923 | f 188 307 201 924 | f 307 200 201 925 | f 189 308 307 926 | f 308 309 307 927 | f 307 309 200 928 | f 309 199 200 929 | f 190 310 308 930 | f 310 311 308 931 | f 308 311 309 932 | f 311 312 309 933 | f 309 312 199 934 | f 312 198 199 935 | f 191 313 310 936 | f 313 314 310 937 | f 310 314 311 938 | f 314 315 311 939 | f 311 315 312 940 | f 315 316 312 941 | f 312 316 198 942 | f 316 197 198 943 | f 192 317 313 944 | f 317 318 313 945 | f 313 318 314 946 | f 318 319 314 947 | f 314 319 315 948 | f 319 320 315 949 | f 315 320 316 950 | f 320 321 316 951 | f 316 321 197 952 | f 321 196 197 953 | f 193 322 317 954 | f 322 323 317 955 | f 317 323 318 956 | f 323 324 318 957 | f 318 324 319 958 | f 324 325 319 959 | f 319 325 320 960 | f 325 326 320 961 | f 320 326 321 962 | f 326 327 321 963 | f 321 327 196 964 | f 327 195 196 965 | f 194 159 322 966 | f 159 158 322 967 | f 322 158 323 968 | f 158 157 323 969 | f 323 157 
324 970 | f 157 156 324 971 | f 324 156 325 972 | f 156 155 325 973 | f 325 155 326 974 | f 155 154 326 975 | f 326 154 327 976 | f 154 153 327 977 | f 327 153 195 978 | f 153 11 195 979 | f 96 187 11 980 | f 95 328 96 981 | f 94 329 95 982 | f 93 331 94 983 | f 92 334 93 984 | f 91 338 92 985 | f 90 343 91 986 | f 96 328 187 987 | f 328 186 187 988 | f 95 329 328 989 | f 329 330 328 990 | f 328 330 186 991 | f 330 185 186 992 | f 94 331 329 993 | f 331 332 329 994 | f 329 332 330 995 | f 332 333 330 996 | f 330 333 185 997 | f 333 184 185 998 | f 93 334 331 999 | f 334 335 331 1000 | f 331 335 332 1001 | f 335 336 332 1002 | f 332 336 333 1003 | f 336 337 333 1004 | f 333 337 184 1005 | f 337 183 184 1006 | f 92 338 334 1007 | f 338 339 334 1008 | f 334 339 335 1009 | f 339 340 335 1010 | f 335 340 336 1011 | f 340 341 336 1012 | f 336 341 337 1013 | f 341 342 337 1014 | f 337 342 183 1015 | f 342 182 183 1016 | f 91 343 338 1017 | f 343 344 338 1018 | f 338 344 339 1019 | f 344 345 339 1020 | f 339 345 340 1021 | f 345 346 340 1022 | f 340 346 341 1023 | f 346 347 341 1024 | f 341 347 342 1025 | f 347 348 342 1026 | f 342 348 182 1027 | f 348 181 182 1028 | f 90 145 343 1029 | f 145 144 343 1030 | f 343 144 344 1031 | f 144 143 344 1032 | f 344 143 345 1033 | f 143 142 345 1034 | f 345 142 346 1035 | f 142 141 346 1036 | f 346 141 347 1037 | f 141 140 347 1038 | f 347 140 348 1039 | f 140 139 348 1040 | f 348 139 181 1041 | f 139 10 181 1042 | f 152 180 10 1043 | f 151 349 152 1044 | f 150 350 151 1045 | f 149 352 150 1046 | f 148 355 149 1047 | f 147 359 148 1048 | f 146 364 147 1049 | f 152 349 180 1050 | f 349 179 180 1051 | f 151 350 349 1052 | f 350 351 349 1053 | f 349 351 179 1054 | f 351 178 179 1055 | f 150 352 350 1056 | f 352 353 350 1057 | f 350 353 351 1058 | f 353 354 351 1059 | f 351 354 178 1060 | f 354 177 178 1061 | f 149 355 352 1062 | f 355 356 352 1063 | f 352 356 353 1064 | f 356 357 353 1065 | f 353 357 354 1066 | f 357 358 354 1067 | f 354 358 177 1068 | f 358 176 177 1069 | f 148 359 355 1070 | f 359 360 355 1071 | f 355 360 356 1072 | f 360 361 356 1073 | f 356 361 357 1074 | f 361 362 357 1075 | f 357 362 358 1076 | f 362 363 358 1077 | f 358 363 176 1078 | f 363 175 176 1079 | f 147 364 359 1080 | f 364 365 359 1081 | f 359 365 360 1082 | f 365 366 360 1083 | f 360 366 361 1084 | f 366 367 361 1085 | f 361 367 362 1086 | f 367 368 362 1087 | f 362 368 363 1088 | f 368 369 363 1089 | f 363 369 175 1090 | f 369 174 175 1091 | f 146 131 364 1092 | f 131 130 364 1093 | f 364 130 365 1094 | f 130 129 365 1095 | f 365 129 366 1096 | f 129 128 366 1097 | f 366 128 367 1098 | f 128 127 367 1099 | f 367 127 368 1100 | f 127 126 368 1101 | f 368 126 369 1102 | f 126 125 369 1103 | f 369 125 174 1104 | f 125 9 174 1105 | f 138 173 9 1106 | f 137 370 138 1107 | f 136 371 137 1108 | f 135 373 136 1109 | f 134 376 135 1110 | f 133 380 134 1111 | f 132 385 133 1112 | f 138 370 173 1113 | f 370 172 173 1114 | f 137 371 370 1115 | f 371 372 370 1116 | f 370 372 172 1117 | f 372 171 172 1118 | f 136 373 371 1119 | f 373 374 371 1120 | f 371 374 372 1121 | f 374 375 372 1122 | f 372 375 171 1123 | f 375 170 171 1124 | f 135 376 373 1125 | f 376 377 373 1126 | f 373 377 374 1127 | f 377 378 374 1128 | f 374 378 375 1129 | f 378 379 375 1130 | f 375 379 170 1131 | f 379 169 170 1132 | f 134 380 376 1133 | f 380 381 376 1134 | f 376 381 377 1135 | f 381 382 377 1136 | f 377 382 378 1137 | f 382 383 378 1138 | f 378 383 379 1139 | f 383 384 379 1140 | f 379 384 169 1141 | f 384 168 
169 1142 | f 133 385 380 1143 | f 385 386 380 1144 | f 380 386 381 1145 | f 386 387 381 1146 | f 381 387 382 1147 | f 387 388 382 1148 | f 382 388 383 1149 | f 388 389 383 1150 | f 383 389 384 1151 | f 389 390 384 1152 | f 384 390 168 1153 | f 390 167 168 1154 | f 132 117 385 1155 | f 117 116 385 1156 | f 385 116 386 1157 | f 116 115 386 1158 | f 386 115 387 1159 | f 115 114 387 1160 | f 387 114 388 1161 | f 114 113 388 1162 | f 388 113 389 1163 | f 113 112 389 1164 | f 389 112 390 1165 | f 112 111 390 1166 | f 390 111 167 1167 | f 111 8 167 1168 | f 124 166 8 1169 | f 123 391 124 1170 | f 122 392 123 1171 | f 121 394 122 1172 | f 120 397 121 1173 | f 119 401 120 1174 | f 118 406 119 1175 | f 124 391 166 1176 | f 391 165 166 1177 | f 123 392 391 1178 | f 392 393 391 1179 | f 391 393 165 1180 | f 393 164 165 1181 | f 122 394 392 1182 | f 394 395 392 1183 | f 392 395 393 1184 | f 395 396 393 1185 | f 393 396 164 1186 | f 396 163 164 1187 | f 121 397 394 1188 | f 397 398 394 1189 | f 394 398 395 1190 | f 398 399 395 1191 | f 395 399 396 1192 | f 399 400 396 1193 | f 396 400 163 1194 | f 400 162 163 1195 | f 120 401 397 1196 | f 401 402 397 1197 | f 397 402 398 1198 | f 402 403 398 1199 | f 398 403 399 1200 | f 403 404 399 1201 | f 399 404 400 1202 | f 404 405 400 1203 | f 400 405 162 1204 | f 405 161 162 1205 | f 119 406 401 1206 | f 406 407 401 1207 | f 401 407 402 1208 | f 407 408 402 1209 | f 402 408 403 1210 | f 408 409 403 1211 | f 403 409 404 1212 | f 409 410 404 1213 | f 404 410 405 1214 | f 410 411 405 1215 | f 405 411 161 1216 | f 411 160 161 1217 | f 118 103 406 1218 | f 103 102 406 1219 | f 406 102 407 1220 | f 102 101 407 1221 | f 407 101 408 1222 | f 101 100 408 1223 | f 408 100 409 1224 | f 100 99 409 1225 | f 409 99 410 1226 | f 99 98 410 1227 | f 410 98 411 1228 | f 98 97 411 1229 | f 411 97 160 1230 | f 97 7 160 1231 | f 110 159 7 1232 | f 109 412 110 1233 | f 108 413 109 1234 | f 107 415 108 1235 | f 106 418 107 1236 | f 105 422 106 1237 | f 104 427 105 1238 | f 110 412 159 1239 | f 412 158 159 1240 | f 109 413 412 1241 | f 413 414 412 1242 | f 412 414 158 1243 | f 414 157 158 1244 | f 108 415 413 1245 | f 415 416 413 1246 | f 413 416 414 1247 | f 416 417 414 1248 | f 414 417 157 1249 | f 417 156 157 1250 | f 107 418 415 1251 | f 418 419 415 1252 | f 415 419 416 1253 | f 419 420 416 1254 | f 416 420 417 1255 | f 420 421 417 1256 | f 417 421 156 1257 | f 421 155 156 1258 | f 106 422 418 1259 | f 422 423 418 1260 | f 418 423 419 1261 | f 423 424 419 1262 | f 419 424 420 1263 | f 424 425 420 1264 | f 420 425 421 1265 | f 425 426 421 1266 | f 421 426 155 1267 | f 426 154 155 1268 | f 105 427 422 1269 | f 427 428 422 1270 | f 422 428 423 1271 | f 428 429 423 1272 | f 423 429 424 1273 | f 429 430 424 1274 | f 424 430 425 1275 | f 430 431 425 1276 | f 425 431 426 1277 | f 431 432 426 1278 | f 426 432 154 1279 | f 432 153 154 1280 | f 104 89 427 1281 | f 89 88 427 1282 | f 427 88 428 1283 | f 88 87 428 1284 | f 428 87 429 1285 | f 87 86 429 1286 | f 429 86 430 1287 | f 86 85 430 1288 | f 430 85 431 1289 | f 85 84 431 1290 | f 431 84 432 1291 | f 84 83 432 1292 | f 432 83 153 1293 | f 83 11 153 1294 | f 139 152 10 1295 | f 140 433 139 1296 | f 141 434 140 1297 | f 142 436 141 1298 | f 143 439 142 1299 | f 144 443 143 1300 | f 145 448 144 1301 | f 139 433 152 1302 | f 433 151 152 1303 | f 140 434 433 1304 | f 434 435 433 1305 | f 433 435 151 1306 | f 435 150 151 1307 | f 141 436 434 1308 | f 436 437 434 1309 | f 434 437 435 1310 | f 437 438 435 1311 | f 435 438 150 1312 | f 438 149 
150 1313 | f 142 439 436 1314 | f 439 440 436 1315 | f 436 440 437 1316 | f 440 441 437 1317 | f 437 441 438 1318 | f 441 442 438 1319 | f 438 442 149 1320 | f 442 148 149 1321 | f 143 443 439 1322 | f 443 444 439 1323 | f 439 444 440 1324 | f 444 445 440 1325 | f 440 445 441 1326 | f 445 446 441 1327 | f 441 446 442 1328 | f 446 447 442 1329 | f 442 447 148 1330 | f 447 147 148 1331 | f 144 448 443 1332 | f 448 449 443 1333 | f 443 449 444 1334 | f 449 450 444 1335 | f 444 450 445 1336 | f 450 451 445 1337 | f 445 451 446 1338 | f 451 452 446 1339 | f 446 452 447 1340 | f 452 453 447 1341 | f 447 453 147 1342 | f 453 146 147 1343 | f 145 82 448 1344 | f 82 81 448 1345 | f 448 81 449 1346 | f 81 80 449 1347 | f 449 80 450 1348 | f 80 79 450 1349 | f 450 79 451 1350 | f 79 78 451 1351 | f 451 78 452 1352 | f 78 77 452 1353 | f 452 77 453 1354 | f 77 76 453 1355 | f 453 76 146 1356 | f 76 5 146 1357 | f 125 138 9 1358 | f 126 454 125 1359 | f 127 455 126 1360 | f 128 457 127 1361 | f 129 460 128 1362 | f 130 464 129 1363 | f 131 469 130 1364 | f 125 454 138 1365 | f 454 137 138 1366 | f 126 455 454 1367 | f 455 456 454 1368 | f 454 456 137 1369 | f 456 136 137 1370 | f 127 457 455 1371 | f 457 458 455 1372 | f 455 458 456 1373 | f 458 459 456 1374 | f 456 459 136 1375 | f 459 135 136 1376 | f 128 460 457 1377 | f 460 461 457 1378 | f 457 461 458 1379 | f 461 462 458 1380 | f 458 462 459 1381 | f 462 463 459 1382 | f 459 463 135 1383 | f 463 134 135 1384 | f 129 464 460 1385 | f 464 465 460 1386 | f 460 465 461 1387 | f 465 466 461 1388 | f 461 466 462 1389 | f 466 467 462 1390 | f 462 467 463 1391 | f 467 468 463 1392 | f 463 468 134 1393 | f 468 133 134 1394 | f 130 469 464 1395 | f 469 470 464 1396 | f 464 470 465 1397 | f 470 471 465 1398 | f 465 471 466 1399 | f 471 472 466 1400 | f 466 472 467 1401 | f 472 473 467 1402 | f 467 473 468 1403 | f 473 474 468 1404 | f 468 474 133 1405 | f 474 132 133 1406 | f 131 75 469 1407 | f 75 74 469 1408 | f 469 74 470 1409 | f 74 73 470 1410 | f 470 73 471 1411 | f 73 72 471 1412 | f 471 72 472 1413 | f 72 71 472 1414 | f 472 71 473 1415 | f 71 70 473 1416 | f 473 70 474 1417 | f 70 69 474 1418 | f 474 69 132 1419 | f 69 4 132 1420 | f 111 124 8 1421 | f 112 475 111 1422 | f 113 476 112 1423 | f 114 478 113 1424 | f 115 481 114 1425 | f 116 485 115 1426 | f 117 490 116 1427 | f 111 475 124 1428 | f 475 123 124 1429 | f 112 476 475 1430 | f 476 477 475 1431 | f 475 477 123 1432 | f 477 122 123 1433 | f 113 478 476 1434 | f 478 479 476 1435 | f 476 479 477 1436 | f 479 480 477 1437 | f 477 480 122 1438 | f 480 121 122 1439 | f 114 481 478 1440 | f 481 482 478 1441 | f 478 482 479 1442 | f 482 483 479 1443 | f 479 483 480 1444 | f 483 484 480 1445 | f 480 484 121 1446 | f 484 120 121 1447 | f 115 485 481 1448 | f 485 486 481 1449 | f 481 486 482 1450 | f 486 487 482 1451 | f 482 487 483 1452 | f 487 488 483 1453 | f 483 488 484 1454 | f 488 489 484 1455 | f 484 489 120 1456 | f 489 119 120 1457 | f 116 490 485 1458 | f 490 491 485 1459 | f 485 491 486 1460 | f 491 492 486 1461 | f 486 492 487 1462 | f 492 493 487 1463 | f 487 493 488 1464 | f 493 494 488 1465 | f 488 494 489 1466 | f 494 495 489 1467 | f 489 495 119 1468 | f 495 118 119 1469 | f 117 61 490 1470 | f 61 60 490 1471 | f 490 60 491 1472 | f 60 59 491 1473 | f 491 59 492 1474 | f 59 58 492 1475 | f 492 58 493 1476 | f 58 57 493 1477 | f 493 57 494 1478 | f 57 56 494 1479 | f 494 56 495 1480 | f 56 55 495 1481 | f 495 55 118 1482 | f 55 3 118 1483 | f 97 110 7 1484 | f 98 496 97 1485 | f 99 
497 98 1486 | f 100 499 99 1487 | f 101 502 100 1488 | f 102 506 101 1489 | f 103 511 102 1490 | f 97 496 110 1491 | f 496 109 110 1492 | f 98 497 496 1493 | f 497 498 496 1494 | f 496 498 109 1495 | f 498 108 109 1496 | f 99 499 497 1497 | f 499 500 497 1498 | f 497 500 498 1499 | f 500 501 498 1500 | f 498 501 108 1501 | f 501 107 108 1502 | f 100 502 499 1503 | f 502 503 499 1504 | f 499 503 500 1505 | f 503 504 500 1506 | f 500 504 501 1507 | f 504 505 501 1508 | f 501 505 107 1509 | f 505 106 107 1510 | f 101 506 502 1511 | f 506 507 502 1512 | f 502 507 503 1513 | f 507 508 503 1514 | f 503 508 504 1515 | f 508 509 504 1516 | f 504 509 505 1517 | f 509 510 505 1518 | f 505 510 106 1519 | f 510 105 106 1520 | f 102 511 506 1521 | f 511 512 506 1522 | f 506 512 507 1523 | f 512 513 507 1524 | f 507 513 508 1525 | f 513 514 508 1526 | f 508 514 509 1527 | f 514 515 509 1528 | f 509 515 510 1529 | f 515 516 510 1530 | f 510 516 105 1531 | f 516 104 105 1532 | f 103 33 511 1533 | f 33 32 511 1534 | f 511 32 512 1535 | f 32 31 512 1536 | f 512 31 513 1537 | f 31 30 513 1538 | f 513 30 514 1539 | f 30 29 514 1540 | f 514 29 515 1541 | f 29 28 515 1542 | f 515 28 516 1543 | f 28 27 516 1544 | f 516 27 104 1545 | f 27 2 104 1546 | f 83 96 11 1547 | f 84 517 83 1548 | f 85 518 84 1549 | f 86 520 85 1550 | f 87 523 86 1551 | f 88 527 87 1552 | f 89 532 88 1553 | f 83 517 96 1554 | f 517 95 96 1555 | f 84 518 517 1556 | f 518 519 517 1557 | f 517 519 95 1558 | f 519 94 95 1559 | f 85 520 518 1560 | f 520 521 518 1561 | f 518 521 519 1562 | f 521 522 519 1563 | f 519 522 94 1564 | f 522 93 94 1565 | f 86 523 520 1566 | f 523 524 520 1567 | f 520 524 521 1568 | f 524 525 521 1569 | f 521 525 522 1570 | f 525 526 522 1571 | f 522 526 93 1572 | f 526 92 93 1573 | f 87 527 523 1574 | f 527 528 523 1575 | f 523 528 524 1576 | f 528 529 524 1577 | f 524 529 525 1578 | f 529 530 525 1579 | f 525 530 526 1580 | f 530 531 526 1581 | f 526 531 92 1582 | f 531 91 92 1583 | f 88 532 527 1584 | f 532 533 527 1585 | f 527 533 528 1586 | f 533 534 528 1587 | f 528 534 529 1588 | f 534 535 529 1589 | f 529 535 530 1590 | f 535 536 530 1591 | f 530 536 531 1592 | f 536 537 531 1593 | f 531 537 91 1594 | f 537 90 91 1595 | f 89 40 532 1596 | f 40 39 532 1597 | f 532 39 533 1598 | f 39 38 533 1599 | f 533 38 534 1600 | f 38 37 534 1601 | f 534 37 535 1602 | f 37 36 535 1603 | f 535 36 536 1604 | f 36 35 536 1605 | f 536 35 537 1606 | f 35 34 537 1607 | f 537 34 90 1608 | f 34 6 90 1609 | f 47 82 6 1610 | f 46 538 47 1611 | f 45 539 46 1612 | f 44 541 45 1613 | f 43 544 44 1614 | f 42 548 43 1615 | f 41 553 42 1616 | f 47 538 82 1617 | f 538 81 82 1618 | f 46 539 538 1619 | f 539 540 538 1620 | f 538 540 81 1621 | f 540 80 81 1622 | f 45 541 539 1623 | f 541 542 539 1624 | f 539 542 540 1625 | f 542 543 540 1626 | f 540 543 80 1627 | f 543 79 80 1628 | f 44 544 541 1629 | f 544 545 541 1630 | f 541 545 542 1631 | f 545 546 542 1632 | f 542 546 543 1633 | f 546 547 543 1634 | f 543 547 79 1635 | f 547 78 79 1636 | f 43 548 544 1637 | f 548 549 544 1638 | f 544 549 545 1639 | f 549 550 545 1640 | f 545 550 546 1641 | f 550 551 546 1642 | f 546 551 547 1643 | f 551 552 547 1644 | f 547 552 78 1645 | f 552 77 78 1646 | f 42 553 548 1647 | f 553 554 548 1648 | f 548 554 549 1649 | f 554 555 549 1650 | f 549 555 550 1651 | f 555 556 550 1652 | f 550 556 551 1653 | f 556 557 551 1654 | f 551 557 552 1655 | f 557 558 552 1656 | f 552 558 77 1657 | f 558 76 77 1658 | f 41 68 553 1659 | f 68 67 553 1660 | f 553 67 554 1661 | 
f 67 66 554 1662 | f 554 66 555 1663 | f 66 65 555 1664 | f 555 65 556 1665 | f 65 64 556 1666 | f 556 64 557 1667 | f 64 63 557 1668 | f 557 63 558 1669 | f 63 62 558 1670 | f 558 62 76 1671 | f 62 5 76 1672 | f 62 75 5 1673 | f 63 559 62 1674 | f 64 560 63 1675 | f 65 562 64 1676 | f 66 565 65 1677 | f 67 569 66 1678 | f 68 574 67 1679 | f 62 559 75 1680 | f 559 74 75 1681 | f 63 560 559 1682 | f 560 561 559 1683 | f 559 561 74 1684 | f 561 73 74 1685 | f 64 562 560 1686 | f 562 563 560 1687 | f 560 563 561 1688 | f 563 564 561 1689 | f 561 564 73 1690 | f 564 72 73 1691 | f 65 565 562 1692 | f 565 566 562 1693 | f 562 566 563 1694 | f 566 567 563 1695 | f 563 567 564 1696 | f 567 568 564 1697 | f 564 568 72 1698 | f 568 71 72 1699 | f 66 569 565 1700 | f 569 570 565 1701 | f 565 570 566 1702 | f 570 571 566 1703 | f 566 571 567 1704 | f 571 572 567 1705 | f 567 572 568 1706 | f 572 573 568 1707 | f 568 573 71 1708 | f 573 70 71 1709 | f 67 574 569 1710 | f 574 575 569 1711 | f 569 575 570 1712 | f 575 576 570 1713 | f 570 576 571 1714 | f 576 577 571 1715 | f 571 577 572 1716 | f 577 578 572 1717 | f 572 578 573 1718 | f 578 579 573 1719 | f 573 579 70 1720 | f 579 69 70 1721 | f 68 54 574 1722 | f 54 53 574 1723 | f 574 53 575 1724 | f 53 52 575 1725 | f 575 52 576 1726 | f 52 51 576 1727 | f 576 51 577 1728 | f 51 50 577 1729 | f 577 50 578 1730 | f 50 49 578 1731 | f 578 49 579 1732 | f 49 48 579 1733 | f 579 48 69 1734 | f 48 4 69 1735 | f 48 61 4 1736 | f 49 580 48 1737 | f 50 581 49 1738 | f 51 583 50 1739 | f 52 586 51 1740 | f 53 590 52 1741 | f 54 595 53 1742 | f 48 580 61 1743 | f 580 60 61 1744 | f 49 581 580 1745 | f 581 582 580 1746 | f 580 582 60 1747 | f 582 59 60 1748 | f 50 583 581 1749 | f 583 584 581 1750 | f 581 584 582 1751 | f 584 585 582 1752 | f 582 585 59 1753 | f 585 58 59 1754 | f 51 586 583 1755 | f 586 587 583 1756 | f 583 587 584 1757 | f 587 588 584 1758 | f 584 588 585 1759 | f 588 589 585 1760 | f 585 589 58 1761 | f 589 57 58 1762 | f 52 590 586 1763 | f 590 591 586 1764 | f 586 591 587 1765 | f 591 592 587 1766 | f 587 592 588 1767 | f 592 593 588 1768 | f 588 593 589 1769 | f 593 594 589 1770 | f 589 594 57 1771 | f 594 56 57 1772 | f 53 595 590 1773 | f 595 596 590 1774 | f 590 596 591 1775 | f 596 597 591 1776 | f 591 597 592 1777 | f 597 598 592 1778 | f 592 598 593 1779 | f 598 599 593 1780 | f 593 599 594 1781 | f 599 600 594 1782 | f 594 600 56 1783 | f 600 55 56 1784 | f 54 19 595 1785 | f 19 18 595 1786 | f 595 18 596 1787 | f 18 17 596 1788 | f 596 17 597 1789 | f 17 16 597 1790 | f 597 16 598 1791 | f 16 15 598 1792 | f 598 15 599 1793 | f 15 14 599 1794 | f 599 14 600 1795 | f 14 13 600 1796 | f 600 13 55 1797 | f 13 3 55 1798 | f 34 47 6 1799 | f 35 601 34 1800 | f 36 602 35 1801 | f 37 604 36 1802 | f 38 607 37 1803 | f 39 611 38 1804 | f 40 616 39 1805 | f 34 601 47 1806 | f 601 46 47 1807 | f 35 602 601 1808 | f 602 603 601 1809 | f 601 603 46 1810 | f 603 45 46 1811 | f 36 604 602 1812 | f 604 605 602 1813 | f 602 605 603 1814 | f 605 606 603 1815 | f 603 606 45 1816 | f 606 44 45 1817 | f 37 607 604 1818 | f 607 608 604 1819 | f 604 608 605 1820 | f 608 609 605 1821 | f 605 609 606 1822 | f 609 610 606 1823 | f 606 610 44 1824 | f 610 43 44 1825 | f 38 611 607 1826 | f 611 612 607 1827 | f 607 612 608 1828 | f 612 613 608 1829 | f 608 613 609 1830 | f 613 614 609 1831 | f 609 614 610 1832 | f 614 615 610 1833 | f 610 615 43 1834 | f 615 42 43 1835 | f 39 616 611 1836 | f 616 617 611 1837 | f 611 617 612 1838 | f 617 618 612 1839 | f 
612 618 613 1840 | f 618 619 613 1841 | f 613 619 614 1842 | f 619 620 614 1843 | f 614 620 615 1844 | f 620 621 615 1845 | f 615 621 42 1846 | f 621 41 42 1847 | f 40 26 616 1848 | f 26 25 616 1849 | f 616 25 617 1850 | f 25 24 617 1851 | f 617 24 618 1852 | f 24 23 618 1853 | f 618 23 619 1854 | f 23 22 619 1855 | f 619 22 620 1856 | f 22 21 620 1857 | f 620 21 621 1858 | f 21 20 621 1859 | f 621 20 41 1860 | f 20 1 41 1861 | f 13 33 3 1862 | f 14 622 13 1863 | f 15 623 14 1864 | f 16 625 15 1865 | f 17 628 16 1866 | f 18 632 17 1867 | f 19 637 18 1868 | f 13 622 33 1869 | f 622 32 33 1870 | f 14 623 622 1871 | f 623 624 622 1872 | f 622 624 32 1873 | f 624 31 32 1874 | f 15 625 623 1875 | f 625 626 623 1876 | f 623 626 624 1877 | f 626 627 624 1878 | f 624 627 31 1879 | f 627 30 31 1880 | f 16 628 625 1881 | f 628 629 625 1882 | f 625 629 626 1883 | f 629 630 626 1884 | f 626 630 627 1885 | f 630 631 627 1886 | f 627 631 30 1887 | f 631 29 30 1888 | f 17 632 628 1889 | f 632 633 628 1890 | f 628 633 629 1891 | f 633 634 629 1892 | f 629 634 630 1893 | f 634 635 630 1894 | f 630 635 631 1895 | f 635 636 631 1896 | f 631 636 29 1897 | f 636 28 29 1898 | f 18 637 632 1899 | f 637 638 632 1900 | f 632 638 633 1901 | f 638 639 633 1902 | f 633 639 634 1903 | f 639 640 634 1904 | f 634 640 635 1905 | f 640 641 635 1906 | f 635 641 636 1907 | f 641 642 636 1908 | f 636 642 28 1909 | f 642 27 28 1910 | f 19 20 637 1911 | f 20 21 637 1912 | f 637 21 638 1913 | f 21 22 638 1914 | f 638 22 639 1915 | f 22 23 639 1916 | f 639 23 640 1917 | f 23 24 640 1918 | f 640 24 641 1919 | f 24 25 641 1920 | f 641 25 642 1921 | f 25 26 642 1922 | f 642 26 27 1923 | f 26 2 27 -------------------------------------------------------------------------------- /tests/normal.py: -------------------------------------------------------------------------------- 1 | import trimesh 2 | from torchsdf import index_vertices_by_faces, compute_sdf 3 | import os 4 | import torch 5 | from time import time 6 | 7 | os.environ["CUDA_VISIBLE_DEVICES"] = "1" 8 | device = "cuda" 9 | # Ns 10 | num_sample = 1000000 11 | samples = torch.rand((num_sample, 3)).to(device).detach() 12 | samples = samples * 2 - 1 13 | 14 | all_pass = True 15 | 16 | print("====Normal test====") 17 | for model in os.listdir("tests/models"): 18 | print("Test:", model[:-4], end=" ") 19 | model_path = os.path.join("tests/models", model) 20 | mesh = trimesh.load(model_path, force="mesh", process=False) 21 | # (Ns, 3) 22 | x = samples.clone().requires_grad_() 23 | # (Nv, 3) 24 | verts = torch.Tensor(mesh.vertices.copy()).to(device) 25 | # (Nf, 3) 26 | faces = torch.Tensor(mesh.faces.copy()).long().to(device) 27 | # (Nf, 3, 3) 28 | face_verts = index_vertices_by_faces(verts, faces) 29 | 30 | # TorchSDF 31 | # (Ns) 32 | distances, dist_sign, normals, clst_points = compute_sdf(x, face_verts) 33 | gradient = torch.autograd.grad([distances.sum()], [x], create_graph=True, 34 | retain_graph=True)[0] 35 | 36 | normal_direct = normals * 2 * distances.unsqueeze(1).sqrt() 37 | normal_from_grad = torch.autograd.grad([distances.sum()], [x], create_graph=True, 38 | retain_graph=True)[0] 39 | normal_fit = torch.allclose(normal_direct, normal_from_grad, atol=5e-7) 40 | if normal_fit: 41 | print("\x1B[32mPass\x1B[0m") 42 | else: 43 | all_pass = False 44 | print("\x1B[31mNormal wrong!\x1B[0m") 45 | print("Max abs:", (normal_direct - normal_from_grad).abs().max().item()) 46 | 47 | if (all_pass): 48 | print("====\x1B[32mAll pass\x1B[0m====") 49 | else: 50 | 
print("====\x1B[31mWrong\x1B[0m====") 51 | -------------------------------------------------------------------------------- /tests/normalize.py: -------------------------------------------------------------------------------- 1 | import trimesh 2 | import os 3 | import numpy as np 4 | 5 | for model in os.listdir("tests/models"): 6 | model_path = os.path.join("tests/models", model) 7 | mesh = trimesh.load(model_path, force="mesh", process=False) 8 | # print(mesh.is_watertight) 9 | # verts = np.array(mesh.vertices) 10 | # xcenter = (np.max(verts[:, 0]) + np.min(verts[:, 0])) / 2 11 | # ycenter = (np.max(verts[:, 1]) + np.min(verts[:, 1])) / 2 12 | # zcenter = (np.max(verts[:, 2]) + np.min(verts[:, 2])) / 2 13 | # verts_ = verts - np.array([xcenter, ycenter, zcenter]) 14 | # dmax = np.max(np.sqrt(np.sum(np.square(verts_), axis=1))) * 1.03 15 | # verts_ /= dmax 16 | # mesh_ = trimesh.Trimesh(vertices=verts_, faces=mesh.faces, process=False) 17 | # mesh_.export(model_path) 18 | print(mesh.is_watertight) 19 | -------------------------------------------------------------------------------- /tests/sign.py: -------------------------------------------------------------------------------- 1 | import trimesh 2 | import kaolin 3 | from torchsdf import index_vertices_by_faces, compute_sdf 4 | import os 5 | import torch 6 | from time import time 7 | 8 | os.environ["CUDA_VISIBLE_DEVICES"] = "1" 9 | device = "cuda" 10 | # Ns 11 | num_sample = 1000000 12 | samples = torch.rand((num_sample, 3)).to(device).detach() 13 | samples = samples * 2 - 1 14 | 15 | all_pass = True 16 | 17 | print("====Sign test====") 18 | for model in os.listdir("tests/models"): 19 | print("Test:", model[:-4], end=" ") 20 | model_path = os.path.join("tests/models", model) 21 | mesh = trimesh.load(model_path, force="mesh", process=False) 22 | # (Ns, 3) 23 | x = samples.clone().requires_grad_() 24 | # (Nv, 3) 25 | verts = torch.Tensor(mesh.vertices.copy()).to(device) 26 | # (Nf, 3) 27 | faces = torch.Tensor(mesh.faces.copy()).long().to(device) 28 | # (1, Nf, 3, 3) 29 | face_verts = kaolin.ops.mesh.index_vertices_by_faces( 30 | verts.unsqueeze(0), faces) 31 | # (Nf, 3, 3) 32 | face_verts_ts = index_vertices_by_faces(verts, faces) 33 | 34 | # Kaolin 35 | # (1, Ns) 36 | signs = kaolin.ops.mesh.check_sign( 37 | verts.unsqueeze(0), faces, x.unsqueeze(0)) 38 | signs = torch.where(signs, -1*torch.ones_like(signs, dtype=torch.int32), 39 | torch.ones_like(signs, dtype=torch.int32)) 40 | 41 | # TorchSDF 42 | # (Ns) 43 | distances_ts, signs_ts, normals_ts, clst_points_ts = compute_sdf( 44 | x, face_verts_ts) 45 | equal_num = (signs_ts == signs).sum().item() 46 | equal_ratio = equal_num/num_sample 47 | sign_fit = (equal_ratio > 0.98) 48 | if (sign_fit): 49 | print("\x1B[32mPass\x1B[0m") 50 | else: 51 | all_pass = False 52 | print("\x1B[31mSign wrong!\x1B[0m") 53 | print(f"Ratio: {equal_ratio:.3f} ({equal_num:d}/{num_sample:d})") 54 | 55 | if (all_pass): 56 | print("====\x1B[32mAll pass\x1B[0m====") 57 | else: 58 | print("====\x1B[31mWrong\x1B[0m====") 59 | -------------------------------------------------------------------------------- /tests/sign_check.py: -------------------------------------------------------------------------------- 1 | import trimesh 2 | import kaolin 3 | from torchsdf import index_vertices_by_faces, compute_sdf 4 | import os 5 | import torch 6 | from time import time 7 | 8 | os.environ["CUDA_VISIBLE_DEVICES"] = "1" 9 | device = "cuda" 10 | 11 | os.makedirs("tests/outputs", exist_ok=True) 12 | 13 | # Ns 14 | num_sample = 
1000000 15 | samples = torch.rand((num_sample, 3)).to(device).detach() 16 | samples = samples * 2 - 1 17 | 18 | all_pass = True 19 | 20 | 21 | def write_points(filename, points, color): 22 | point_count = points.shape[0] 23 | ply_file = open(filename, 'w') 24 | ply_file.write("ply\n") 25 | ply_file.write("format ascii 1.0\n") 26 | ply_file.write("element vertex " + str(point_count) + "\n") 27 | ply_file.write("property float x\n") 28 | ply_file.write("property float y\n") 29 | ply_file.write("property float z\n") 30 | ply_file.write("property uchar red\n") 31 | ply_file.write("property uchar green\n") 32 | ply_file.write("property uchar blue\n") 33 | ply_file.write("end_header\n") 34 | 35 | for i in range(point_count): 36 | ply_file.write(str(points[i, 0]) + " " + 37 | str(points[i, 1]) + " " + 38 | str(points[i, 2])) 39 | ply_file.write(" "+str(int(color[i, 0])) + " " + 40 | str(int(color[i, 1])) + " " + 41 | str(int(color[i, 2])) + " ") 42 | ply_file.write("\n") 43 | ply_file.close() 44 | 45 | 46 | print("====Sign check====") 47 | for model in os.listdir("tests/models"): 48 | print("Test:", model[:-4]) 49 | model_path = os.path.join("tests/models", model) 50 | mesh = trimesh.load(model_path, force="mesh", process=False) 51 | # (Ns, 3) 52 | x = samples.clone().requires_grad_() 53 | # (Nv, 3) 54 | verts = torch.Tensor(mesh.vertices.copy()).to(device) 55 | # (Nf, 3) 56 | faces = torch.Tensor(mesh.faces.copy()).long().to(device) 57 | # (1, Nf, 3, 3) 58 | face_verts = kaolin.ops.mesh.index_vertices_by_faces( 59 | verts.unsqueeze(0), faces) 60 | # (Nf, 3, 3) 61 | face_verts_ts = index_vertices_by_faces(verts, faces) 62 | 63 | # Kaolin 64 | # (1, Ns) 65 | signs = kaolin.ops.mesh.check_sign( 66 | verts.unsqueeze(0), faces, x.unsqueeze(0)) 67 | signs = torch.where(signs, -1*torch.ones_like(signs, dtype=torch.int32), 68 | torch.ones_like(signs, dtype=torch.int32)) 69 | 70 | # TorchSDF 71 | # (Ns) 72 | distances_ts, signs_ts, normals_ts, clst_points_ts = compute_sdf( 73 | x, face_verts_ts) 74 | # (1, Ns) 75 | dif = signs_ts != signs 76 | dif = dif.reshape(-1) 77 | miss_points = x[dif, :] 78 | color = torch.zeros_like(miss_points).int() 79 | color[:, 0] = 255 80 | write_points(os.path.join("tests/outputs", 81 | model[:-4]+".ply"), points=miss_points.detach().cpu().numpy(), color=color) 82 | -------------------------------------------------------------------------------- /tests/speed.py: -------------------------------------------------------------------------------- 1 | import trimesh 2 | import kaolin 3 | from torchsdf import index_vertices_by_faces, compute_sdf 4 | import os 5 | import torch 6 | from time import time 7 | 8 | os.environ["CUDA_VISIBLE_DEVICES"] = "1" 9 | device = "cuda" 10 | # Ns 11 | num_sample = 10000000 12 | samples = torch.rand((num_sample, 3)).to(device).detach() 13 | samples = samples * 2 - 1 14 | 15 | all_pass = True 16 | 17 | print("====Speed test====") 18 | for model in os.listdir("tests/models"): 19 | print("Test:", model[:-4], end=" ") 20 | model_path = os.path.join("tests/models", model) 21 | mesh = trimesh.load(model_path, force="mesh", process=False) 22 | # (Ns, 3) 23 | x = samples.clone().requires_grad_() 24 | # (Nv, 3) 25 | verts = torch.Tensor(mesh.vertices.copy()).to(device) 26 | # (Nf, 3) 27 | faces = torch.Tensor(mesh.faces.copy()).long().to(device) 28 | # (1, Nf, 3, 3) 29 | face_verts = kaolin.ops.mesh.index_vertices_by_faces( 30 | verts.unsqueeze(0), faces) 31 | # (Nf, 3, 3) 32 | face_verts_ts = index_vertices_by_faces(verts, faces) 33 | 34 | # Kaolin 35 | #
(1, Ns) 36 | torch.cuda.synchronize() 37 | tmp = time() 38 | distances, face_indexes, types = kaolin.metrics.trianglemesh.point_to_mesh_distance( 39 | x.unsqueeze(0), face_verts) 40 | signs_ = kaolin.ops.mesh.check_sign( 41 | verts.unsqueeze(0), faces, x.unsqueeze(0)) 42 | signs = torch.where(signs_, -torch.ones_like( 43 | signs_).int(), torch.ones_like(signs_).int()) 44 | sdf = distances.sqrt() * signs 45 | torch.cuda.synchronize() 46 | time_kaolin = time() - tmp 47 | 48 | # TorchSDF 49 | # (Ns) 50 | torch.cuda.synchronize() 51 | tmp = time() 52 | distances_ts, dist_sign_ts, normals_ts, clst_points_ts = compute_sdf( 53 | x, face_verts_ts) 54 | sdf_ts = distances_ts.sqrt() * dist_sign_ts 55 | torch.cuda.synchronize() 56 | time_ts = time() - tmp 57 | 58 | equal_num = (dist_sign_ts == signs).sum().item() 59 | equal_ratio = equal_num/num_sample 60 | sign_fit = (equal_ratio > 0.98) 61 | dis_fit = torch.allclose(distances, distances_ts) 62 | if (dis_fit and sign_fit): 63 | print("\x1B[32mPass\x1B[0m") 64 | else: 65 | all_pass = False 66 | if (not dis_fit): 67 | print("\x1B[31mDistance wrong!\x1B[0m") 68 | if (not sign_fit): 69 | print("\x1B[31mSign wrong!\x1B[0m") 70 | print("Max abs:", (distances.sqrt() - distances_ts.sqrt()).abs().max().item()) 71 | print(f"Ratio: {equal_ratio:.3f} ({equal_num:d}/{num_sample:d})") 72 | print("TorchSDF/Kaolin time:", time_ts/time_kaolin) 73 | 74 | if (all_pass): 75 | print("====\x1B[32mAll pass\x1B[0m====") 76 | else: 77 | print("====\x1B[31mWrong\x1B[0m====") 78 | -------------------------------------------------------------------------------- /tests/value.py: -------------------------------------------------------------------------------- 1 | import trimesh 2 | import kaolin 3 | from torchsdf import index_vertices_by_faces, compute_sdf 4 | import os 5 | import torch 6 | from time import time 7 | 8 | os.environ["CUDA_VISIBLE_DEVICES"] = "1" 9 | device = "cuda" 10 | # Ns 11 | num_sample = 10000000 12 | samples = torch.rand((num_sample, 3)).to(device).detach() 13 | samples = samples * 2 - 1 14 | 15 | all_pass = True 16 | 17 | print("====Value test====") 18 | for model in os.listdir("tests/models"): 19 | print("Test:", model[:-4], end=" ") 20 | model_path = os.path.join("tests/models", model) 21 | mesh = trimesh.load(model_path, force="mesh", process=False) 22 | # (Ns, 3) 23 | x = samples.clone().requires_grad_() 24 | # (Nv, 3) 25 | verts = torch.Tensor(mesh.vertices.copy()).to(device) 26 | # (Nf, 3) 27 | faces = torch.Tensor(mesh.faces.copy()).long().to(device) 28 | # (1, Nf, 3, 3) 29 | face_verts = kaolin.ops.mesh.index_vertices_by_faces( 30 | verts.unsqueeze(0), faces) 31 | # (Nf, 3, 3) 32 | face_verts_ts = index_vertices_by_faces(verts, faces) 33 | 34 | # Kaolin 35 | # (1, Ns) 36 | torch.cuda.synchronize() 37 | tmp = time() 38 | distances, face_indexes, types = kaolin.metrics.trianglemesh.point_to_mesh_distance( 39 | x.unsqueeze(0), face_verts) 40 | gradient = torch.autograd.grad([distances.sum()], [x], create_graph=True, 41 | retain_graph=True)[0] 42 | torch.cuda.synchronize() 43 | time_kaolin = time() - tmp 44 | 45 | # TorchSDF 46 | # (Ns) 47 | torch.cuda.synchronize() 48 | tmp = time() 49 | distances_ts, dist_sign_ts, normals_ts, clst_points_ts = compute_sdf( 50 | x, face_verts_ts) 51 | gradient_ts = torch.autograd.grad([distances_ts.sum()], [x], create_graph=True, 52 | retain_graph=True)[0] 53 | torch.cuda.synchronize() 54 | time_ts = time() - tmp 55 | 56 | dis_fit = torch.allclose(distances, distances_ts) 57 | grad_fit = torch.allclose(gradient, gradient_ts, atol=5e-7) 58 | if (dis_fit and grad_fit): 59 |
print("\x1B[32mPass\x1B[0m") 60 | else: 61 | all_pass = False 62 | if (not dis_fit): 63 | print("\x1B[31mDistance wrong!\x1B[0m") 64 | if (not grad_fit): 65 | print("\x1B[31mGradient wrong!\x1B[0m") 66 | print("Max abs:", (gradient - gradient_ts).abs().max().item()) 67 | print("TorchSDF/Kaolin time:", time_ts/time_kaolin) 68 | 69 | if (all_pass): 70 | print("====\x1B[32mAll pass\x1B[0m====") 71 | else: 72 | print("====\x1B[31mWrong\x1B[0m====") 73 | -------------------------------------------------------------------------------- /torchsdf/__init__.py: -------------------------------------------------------------------------------- 1 | from torchsdf.sdf import * 2 | try: 3 | from .version import __version__ # noqa: F401 4 | except ImportError: 5 | pass 6 | -------------------------------------------------------------------------------- /torchsdf/csrc/bindings.cpp: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2020,21-22 NVIDIA CORPORATION & AFFILIATES. 2 | // All rights reserved. 3 | 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 6 | // You may obtain a copy of the License at 7 | 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 15 | 16 | #include 17 | 18 | #include "unbatched_triangle_distance.h" 19 | 20 | namespace kaolin { 21 | 22 | PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) { 23 | m.def("unbatched_triangle_distance_forward_cuda", 24 | &unbatched_triangle_distance_forward_cuda); 25 | m.def("unbatched_triangle_distance_backward_cuda", 26 | &unbatched_triangle_distance_backward_cuda); 27 | } 28 | 29 | } // namespace kaolin 30 | -------------------------------------------------------------------------------- /torchsdf/csrc/check.h: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2019,20-21 NVIDIA CORPORATION & AFFILIATES. 2 | // All rights reserved. 3 | 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 6 | // You may obtain a copy of the License at 7 | 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 
15 | 16 | #ifndef KAOLIN_CHECK_H_ 17 | #define KAOLIN_CHECK_H_ 18 | 19 | #include 20 | #include 21 | 22 | #define CHECK_CUDA(x) TORCH_CHECK(x.device().is_cuda(), #x " must be a CUDA tensor") 23 | #define CHECK_CPU(x) TORCH_CHECK(x.device().is_cpu(), #x " must be a cpu tensor") 24 | #define CHECK_CONTIGUOUS(x) TORCH_CHECK(x.is_contiguous(), #x " must be contiguous") 25 | 26 | #define CHECK_HALF(x) TORCH_CHECK(x.scalar_type() == at::ScalarType::Half, #x " must be half") 27 | #define CHECK_FLOAT(x) TORCH_CHECK(x.scalar_type() == at::ScalarType::Float, #x " must be float") 28 | #define CHECK_DOUBLE(x) TORCH_CHECK(x.scalar_type() == at::ScalarType::Double, #x " must be double") 29 | #define CHECK_BOOL(x) TORCH_CHECK(x.scalar_type() == at::ScalarType::Bool, #x " must be bool") 30 | #define CHECK_BYTE(x) TORCH_CHECK(x.scalar_type() == at::ScalarType::Byte, #x " must be byte") 31 | #define CHECK_SHORT(x) TORCH_CHECK(x.scalar_type() == at::ScalarType::Short, #x " must be short") 32 | #define CHECK_INT(x) TORCH_CHECK(x.scalar_type() == at::ScalarType::Int, #x " must be int") 33 | #define CHECK_LONG(x) TORCH_CHECK(x.scalar_type() == at::ScalarType::Long, #x " must be long") 34 | 35 | #define CHECK_DIMS(x, d) TORCH_CHECK(x.dim() == d, #x " must have " #d " dims") 36 | #define CHECK_SIZE(x, d, s) \ 37 | TORCH_CHECK(x.size(d) == s, #x " must have dim " #d " of size " #s) 38 | #define CHECK_SIZES(x, ...) \ 39 | TORCH_CHECK(x.sizes() == std::vector<int64_t>({__VA_ARGS__}), \ 40 | #x " must be of size {" #__VA_ARGS__ "}") 41 | 42 | #define KAOLIN_NO_CUDA_ERROR(func_name) \ 43 | AT_ERROR("In ", func_name, ": Kaolin built without CUDA, " \ 44 | "cannot run with GPU tensors") 45 | 46 | #endif  // KAOLIN_CHECK_H_ 47 | -------------------------------------------------------------------------------- /torchsdf/csrc/unbatched_triangle_distance.cpp: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2021 NVIDIA CORPORATION & AFFILIATES. 2 | // All rights reserved. 3 | // 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 6 | // You may obtain a copy of the License at 7 | // 8 | //     http://www.apache.org/licenses/LICENSE-2.0 9 | // 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License.
15 | 16 | #include 17 | 18 | #include "check.h" 19 | 20 | namespace kaolin { 21 | 22 | #ifdef WITH_CUDA 23 | 24 | void unbatched_triangle_distance_forward_cuda_impl( 25 | at::Tensor points, 26 | at::Tensor face_vertices, 27 | at::Tensor dist, 28 | at::Tensor dist_sign, 29 | at::Tensor normals, 30 | at::Tensor clst_points); 31 | 32 | void unbatched_triangle_distance_backward_cuda_impl( 33 | at::Tensor grad_dist, 34 | at::Tensor points, 35 | at::Tensor clst_points, 36 | at::Tensor grad_points); 37 | 38 | #endif // WITH_CUDA 39 | 40 | 41 | void unbatched_triangle_distance_forward_cuda( 42 | at::Tensor points, 43 | at::Tensor face_vertices, 44 | at::Tensor dist, 45 | at::Tensor dist_sign, 46 | at::Tensor normals, 47 | at::Tensor clst_points) { 48 | CHECK_CUDA(points); 49 | CHECK_CUDA(face_vertices); 50 | CHECK_CUDA(dist); 51 | CHECK_CUDA(dist_sign); 52 | CHECK_CUDA(normals); 53 | CHECK_CUDA(clst_points); 54 | CHECK_CONTIGUOUS(points); 55 | CHECK_CONTIGUOUS(face_vertices); 56 | CHECK_CONTIGUOUS(dist); 57 | CHECK_CONTIGUOUS(dist_sign); 58 | CHECK_CONTIGUOUS(normals); 59 | CHECK_CONTIGUOUS(clst_points); 60 | const int num_points = points.size(0); 61 | const int num_faces = face_vertices.size(0); 62 | CHECK_SIZES(points, num_points, 3); 63 | CHECK_SIZES(face_vertices, num_faces, 3, 3); 64 | CHECK_SIZES(dist, num_points); 65 | CHECK_SIZES(dist_sign, num_points); 66 | CHECK_SIZES(normals, num_points, 3); 67 | CHECK_SIZES(clst_points, num_points, 3); 68 | #if WITH_CUDA 69 | unbatched_triangle_distance_forward_cuda_impl( 70 | points, face_vertices, dist, dist_sign, normals, clst_points); 71 | #else 72 | AT_ERROR("unbatched_triangle_distance not built with CUDA"); 73 | #endif 74 | } 75 | 76 | void unbatched_triangle_distance_backward_cuda( 77 | at::Tensor grad_dist, 78 | at::Tensor points, 79 | at::Tensor clst_points, 80 | at::Tensor grad_points) { 81 | CHECK_CUDA(grad_dist); 82 | CHECK_CUDA(points); 83 | CHECK_CUDA(clst_points); 84 | CHECK_CUDA(grad_points); 85 | CHECK_CONTIGUOUS(grad_dist); 86 | CHECK_CONTIGUOUS(points); 87 | CHECK_CONTIGUOUS(clst_points); 88 | CHECK_CONTIGUOUS(grad_points); 89 | 90 | const int num_points = points.size(0); 91 | CHECK_SIZES(grad_dist, num_points); 92 | CHECK_SIZES(points, num_points, 3); 93 | CHECK_SIZES(clst_points, num_points, 3); 94 | CHECK_SIZES(grad_points, num_points, 3); 95 | 96 | #if WITH_CUDA 97 | unbatched_triangle_distance_backward_cuda_impl( 98 | grad_dist, points, clst_points, grad_points); 99 | #else 100 | AT_ERROR("unbatched_triangle_distance_backward not built with CUDA"); 101 | #endif 102 | } 103 | 104 | } // namespace kaolin 105 | -------------------------------------------------------------------------------- /torchsdf/csrc/unbatched_triangle_distance.h: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2021 NVIDIA CORPORATION & AFFILIATES. 2 | // All rights reserved. 3 | 4 | // Licensed under the Apache License, Version 2.0 (the "License"); 5 | // you may not use this file except in compliance with the License. 6 | // You may obtain a copy of the License at 7 | 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 
15 | 16 | #ifndef KAOLIN_METRICS_UNBATCHED_TRIANGLE_DISTANCE_H_ 17 | #define KAOLIN_METRICS_UNBATCHED_TRIANGLE_DISTANCE_H_ 18 | 19 | #include 20 | 21 | namespace kaolin { 22 | 23 | void unbatched_triangle_distance_forward_cuda( 24 | at::Tensor points, 25 | at::Tensor face_vertices, 26 | at::Tensor dist, 27 | at::Tensor dist_sign, 28 | at::Tensor normals, 29 | at::Tensor clst_points); 30 | 31 | void unbatched_triangle_distance_backward_cuda( 32 | at::Tensor grad_dist, 33 | at::Tensor points, 34 | at::Tensor clst_points, 35 | at::Tensor grad_points); 36 | 37 | } // namespace kaolin 38 | 39 | #endif // KAOLIN_METRICS_UNBATCHED_TRIANGLE_DISTANCE_H_ 40 | -------------------------------------------------------------------------------- /torchsdf/csrc/unbatched_triangle_distance_cuda.cu: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2021 NVIDIA CORPORATION & AFFILIATES. 2 | // All rights reserved. 3 | // 4 | // Licensed under the Apache License, Version 2.0 (the "License") 5 | // you may not use this file except in compliance with the License. 6 | // You may obtain a copy of the License at 7 | // 8 | // http://www.apache.org/licenses/LICENSE-2.0 9 | // 10 | // Unless required by applicable law or agreed to in writing, software 11 | // distributed under the License is distributed on an "AS IS" BASIS, 12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | // See the License for the specific language governing permissions and 14 | // limitations under the License. 15 | 16 | #include 17 | 18 | #include 19 | #include 20 | #include 21 | 22 | #include "utils.h" 23 | 24 | #define PRIVATE_CASE_TYPE_AND_VAL(ENUM_TYPE, TYPE, TYPE_NAME, VAL, ...) \ 25 | case ENUM_TYPE: { \ 26 | using TYPE_NAME = TYPE; \ 27 | const int num_threads = VAL; \ 28 | return __VA_ARGS__(); \ 29 | } 30 | 31 | 32 | #define DISPATCH_INPUT_TYPES(TYPE, TYPE_NAME, SCOPE_NAME, ...) 
\ 33 | [&] { \ 34 | switch(TYPE) \ 35 | { \ 36 | PRIVATE_CASE_TYPE_AND_VAL(at::ScalarType::Float, float, TYPE_NAME, 1024, __VA_ARGS__) \ 37 | PRIVATE_CASE_TYPE_AND_VAL(at::ScalarType::Double, double, TYPE_NAME, 512, __VA_ARGS__) \ 38 | default: \ 39 | AT_ERROR(#SCOPE_NAME, " not implemented for '", toString(TYPE), "'"); \ 40 | } \ 41 | }() 42 | 43 | namespace kaolin { 44 | 45 | template <typename T> 46 | struct ScalarTypeToVec3 { using type = void; }; 47 | template <> struct ScalarTypeToVec3<float> { using type = float3; }; 48 | template <> struct ScalarTypeToVec3<double> { using type = double3; }; 49 | 50 | template <typename T> 51 | struct Vec3TypeToScalar { using type = void; }; 52 | template <> struct Vec3TypeToScalar<float3> { using type = float; }; 53 | template <> struct Vec3TypeToScalar<double3> { using type = double; }; 54 | 55 | __device__ __forceinline__ float3 make_vector(float x, float y, float z) { 56 | return make_float3(x, y, z); 57 | } 58 | 59 | __device__ __forceinline__ double3 make_vector(double x, double y, double z) { 60 | return make_double3(x, y, z); 61 | } 62 | 63 | template<typename vector_t> 64 | __device__ __forceinline__ typename Vec3TypeToScalar<vector_t>::type dot(vector_t a, vector_t b) { 65 | return a.x * b.x + a.y * b.y + a.z * b.z ; 66 | } 67 | 68 | template<typename scalar_t, typename vector_t> 69 | __device__ __forceinline__ scalar_t dot2(vector_t v) { 70 | return dot(v, v); 71 | } 72 | 73 | template<typename scalar_t> 74 | __device__ __forceinline__ scalar_t clamp(scalar_t x, scalar_t a, scalar_t b) { 75 | return max(a, min(b, x)); 76 | } 77 | 78 | template<typename vector_t> 79 | __device__ __forceinline__ vector_t cross(vector_t a, vector_t b) { 80 | return make_vector(a.y * b.z - a.z * b.y, 81 | a.z * b.x - a.x * b.z, 82 | a.x * b.y - a.y * b.x); 83 | } 84 | 85 | template<typename scalar_t> 86 | __device__ __forceinline__ int sign(scalar_t a) { 87 | if (a <= 0) {return -1;} 88 | else {return 1;} 89 | } 90 | 91 | template<typename vector_t, typename scalar_t> 92 | __device__ __forceinline__ vector_t operator* (vector_t a, scalar_t b) { 93 | return make_vector(a.x * b, a.y * b, a.z * b); 94 | } 95 | 96 | template<typename vector_t> 97 | __device__ __forceinline__ vector_t operator* (vector_t a, vector_t b) { 98 | return make_vector(a.x * b.x, a.y * b.y, a.z * b.z); 99 | } 100 | 101 | template<typename vector_t, typename scalar_t> 102 | __device__ __forceinline__ vector_t operator+ (vector_t a, scalar_t b) { 103 | return make_vector(a.x + b, a.y + b, a.z + b); 104 | } 105 | 106 | template<typename vector_t> 107 | __device__ __forceinline__ vector_t operator+ (vector_t a, vector_t b) { 108 | return make_vector(a.x + b.x, a.y + b.y, a.z + b.z); 109 | } 110 | 111 | template<typename vector_t, typename scalar_t> 112 | __device__ __forceinline__ vector_t operator- (vector_t a, scalar_t b) { 113 | return make_vector(a.x - b, a.y - b, a.z - b); 114 | } 115 | 116 | template<typename vector_t> 117 | __device__ __forceinline__ vector_t operator- (vector_t a, vector_t b) { 118 | return make_vector(a.x - b.x, a.y - b.y, a.z - b.z); 119 | } 120 | 121 | template<typename vector_t, typename scalar_t> 122 | __device__ __forceinline__ vector_t operator/ (vector_t a, scalar_t b) { 123 | return make_vector(a.x / b, a.y / b, a.z / b); 124 | } 125 | 126 | template<typename vector_t> 127 | __device__ __forceinline__ vector_t operator/ (vector_t a, vector_t b) { 128 | return make_vector(a.x / b.x, a.y / b.y, a.z / b.z); 129 | } 130 | 131 | template<typename vector_t> 132 | __device__ __forceinline__ typename Vec3TypeToScalar<vector_t>::type project_edge( 133 | vector_t vertex, vector_t edge, vector_t point) { 134 | typedef typename Vec3TypeToScalar<vector_t>::type scalar_t; 135 | vector_t point_vec = point - vertex; 136 | scalar_t length = dot(edge, edge); 137 | return dot(point_vec, edge) / length; 138 | } 139 | 140 | template<typename vector_t> 141 | __device__ __forceinline__ vector_t project_plane(vector_t vertex, vector_t
normal, vector_t point) { 142 | typedef typename Vec3TypeToScalar<vector_t>::type scalar_t; 143 | scalar_t inv_len = rsqrt(dot(normal, normal)); 144 | vector_t unit_normal = normal * inv_len; 145 | scalar_t dist = (point.x - vertex.x) * unit_normal.x + \ 146 | (point.y - vertex.y) * unit_normal.y + \ 147 | (point.z - vertex.z) * unit_normal.z; 148 | return point - (unit_normal * dist); 149 | } 150 | 151 | template<typename scalar_t> 152 | __device__ __forceinline__ bool in_range(scalar_t a) { 153 | return (a <= 1 && a >= 0); 154 | } 155 | 156 | template<typename vector_t> 157 | __device__ __forceinline__ bool is_above(vector_t vertex, vector_t edge, vector_t normal, vector_t point) { 158 | vector_t edge_normal = cross(normal, edge); 159 | return dot(edge_normal, point - vertex) > 0; 160 | } 161 | 162 | template<typename vector_t> 163 | __device__ __forceinline__ bool is_not_above(vector_t vertex, vector_t edge, vector_t normal, 164 | vector_t point) { 165 | vector_t edge_normal = cross(normal, edge); 166 | return dot(edge_normal, point - vertex) <= 0; 167 | } 168 | 169 | 170 | template<typename vector_t, typename scalar_t> 171 | __device__ __forceinline__ vector_t point_at(vector_t vertex, vector_t edge, scalar_t t) { 172 | return vertex + (edge * t); 173 | } 174 | 175 | 176 | template<typename scalar_t, typename vector_t, int BLOCK_SIZE> 177 | __global__ void unbatched_triangle_distance_forward_cuda_kernel( 178 | const vector_t* points, 179 | const vector_t* vertices, 180 | int num_points, 181 | int num_faces, 182 | scalar_t* out_dist, 183 | int* out_dist_sign, 184 | vector_t* out_normals, 185 | vector_t* clst_points) { 186 | __shared__ vector_t shm[BLOCK_SIZE * 3]; 187 | 188 | for (int start_face_idx = 0; start_face_idx < num_faces; start_face_idx += BLOCK_SIZE) { 189 | int num_faces_iter = min(num_faces - start_face_idx, BLOCK_SIZE); 190 | for (int j = threadIdx.x; j < num_faces_iter * 3; j += blockDim.x) { 191 | shm[j] = vertices[start_face_idx * 3 + j]; 192 | } 193 | __syncthreads(); 194 | for (int point_idx = threadIdx.x + blockDim.x * blockIdx.x; point_idx < num_points; 195 | point_idx += blockDim.x * gridDim.x) { 196 | vector_t p = points[point_idx]; 197 | scalar_t best_dist = INFINITY; 198 | int best_dist_sign = 0; 199 | vector_t best_normal; 200 | vector_t best_clst_point; 201 | for (int sub_face_idx = 0; sub_face_idx < num_faces_iter; sub_face_idx++) { 202 | vector_t closest_point; 203 | 204 | vector_t v1 = shm[sub_face_idx * 3]; 205 | vector_t v2 = shm[sub_face_idx * 3 + 1]; 206 | vector_t v3 = shm[sub_face_idx * 3 + 2]; 207 | 208 | vector_t e12 = v2 - v1; 209 | vector_t e23 = v3 - v2; 210 | vector_t e31 = v1 - v3; 211 | vector_t normal = cross(v1 - v2, e31); 212 | scalar_t uab = project_edge(v1, e12, p); 213 | scalar_t uca = project_edge(v3, e31, p); 214 | if (uca > 1 && uab < 0) { 215 | closest_point = v1; 216 | } else { 217 | scalar_t ubc = project_edge(v2, e23, p); 218 | if (uab > 1 && ubc < 0) { 219 | closest_point = v2; 220 | } else if (ubc > 1 && uca < 0) { 221 | closest_point = v3; 222 | } else { 223 | if (in_range(uab) && (is_not_above(v1, e12, normal, p))) { 224 | closest_point = point_at(v1, e12, uab); 225 | } else if (in_range(ubc) && (is_not_above(v2, e23, normal, p))) { 226 | closest_point = point_at(v2, e23, ubc); 227 | } else if (in_range(uca) && (is_not_above(v3, e31, normal, p))) { 228 | closest_point = point_at(v3, e31, uca); 229 | } else { 230 | closest_point = project_plane(v1, normal, p); 231 | } 232 | } 233 | } 234 | vector_t dist_vec = p - closest_point; 235 | vector_t grad_normal = dist_vec * rsqrt(1e-16f + dot(dist_vec, dist_vec)); 236 | int dist_sign = (dot(dist_vec, normal)>=0)?
1 : -1; 237 | scalar_t dist = dot(dist_vec, dist_vec); 238 | if (sub_face_idx == 0 || best_dist > dist) { 239 | best_dist = dist; 240 | best_dist_sign = dist_sign; 241 | best_normal = grad_normal; 242 | best_clst_point = closest_point; 243 | } 244 | } 245 | if (start_face_idx == 0 || out_dist[point_idx] > best_dist) { 246 | out_dist[point_idx] = best_dist; 247 | out_dist_sign[point_idx] = best_dist_sign; 248 | out_normals[point_idx] = best_normal; 249 | clst_points[point_idx] = best_clst_point; 250 | } 251 | } 252 | __syncthreads(); 253 | } 254 | } 255 | 256 | template<typename scalar_t, typename vector_t> 257 | __global__ void unbatched_triangle_distance_backward_cuda_kernel( 258 | const scalar_t* grad_dist, 259 | const vector_t* points, 260 | const vector_t* clst_points, 261 | int num_points, 262 | vector_t* grad_points) { 263 | for (int point_id = threadIdx.x + blockIdx.x * blockDim.x; point_id < num_points; 264 | point_id += blockDim.x * gridDim.x) { 265 | scalar_t grad_out = 2. * grad_dist[point_id]; 266 | vector_t dist_vec = points[point_id] - clst_points[point_id]; 267 | dist_vec = dist_vec * grad_out; 268 | grad_points[point_id] = dist_vec; 269 | } 270 | } 271 | 272 | void unbatched_triangle_distance_forward_cuda_impl( 273 | at::Tensor points, 274 | at::Tensor face_vertices, 275 | at::Tensor dist, 276 | at::Tensor dist_sign, 277 | at::Tensor normals, 278 | at::Tensor clst_points) { 279 | const int num_threads = 512; 280 | const int num_points = points.size(0); 281 | const int num_blocks = (num_points + num_threads - 1) / num_threads; 282 | AT_DISPATCH_FLOATING_TYPES(points.scalar_type(), 283 | "unbatched_triangle_distance_forward_cuda", [&] { 284 | using vector_t = ScalarTypeToVec3<scalar_t>::type; 285 | const at::cuda::OptionalCUDAGuard device_guard(at::device_of(points)); 286 | auto stream = at::cuda::getCurrentCUDAStream(); 287 | unbatched_triangle_distance_forward_cuda_kernel<scalar_t, vector_t, 512><<< 288 | num_blocks, num_threads, 0, stream>>>( 289 | reinterpret_cast<vector_t*>(points.data_ptr<scalar_t>()), 290 | reinterpret_cast<vector_t*>(face_vertices.data_ptr<scalar_t>()), 291 | points.size(0), 292 | face_vertices.size(0), 293 | dist.data_ptr<scalar_t>(), 294 | dist_sign.data_ptr<int>(), 295 | reinterpret_cast<vector_t*>(normals.data_ptr<scalar_t>()), 296 | reinterpret_cast<vector_t*>(clst_points.data_ptr<scalar_t>())); 297 | CUDA_CHECK(cudaGetLastError()); 298 | }); 299 | } 300 | 301 | void unbatched_triangle_distance_backward_cuda_impl( 302 | at::Tensor grad_dist, 303 | at::Tensor points, 304 | at::Tensor clst_points, 305 | at::Tensor grad_points) { 306 | 307 | DISPATCH_INPUT_TYPES(points.scalar_type(), scalar_t, 308 | "unbatched_triangle_distance_backward_cuda", [&] { 309 | const int num_points = points.size(0); 310 | const int num_blocks = (num_points + num_threads - 1) / num_threads; 311 | using vector_t = ScalarTypeToVec3<scalar_t>::type; 312 | const at::cuda::OptionalCUDAGuard device_guard(at::device_of(points)); 313 | auto stream = at::cuda::getCurrentCUDAStream(); 314 | unbatched_triangle_distance_backward_cuda_kernel<scalar_t, vector_t><<< 315 | num_blocks, num_threads, 0, stream>>>( 316 | grad_dist.data_ptr<scalar_t>(), 317 | reinterpret_cast<vector_t*>(points.data_ptr<scalar_t>()), 318 | reinterpret_cast<vector_t*>(clst_points.data_ptr<scalar_t>()), 319 | points.size(0), 320 | reinterpret_cast<vector_t*>(grad_points.data_ptr<scalar_t>())); 321 | CUDA_CHECK(cudaGetLastError()); 322 | }); 323 | } 324 | 325 | }  // namespace kaolin 326 | 327 | #undef PRIVATE_CASE_TYPE_AND_VAL 328 | #undef DISPATCH_INPUT_TYPES -------------------------------------------------------------------------------- /torchsdf/csrc/utils.h: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2019-2020, NVIDIA
2 | 
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | 
7 | //     http://www.apache.org/licenses/LICENSE-2.0
8 | 
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 | 
15 | #ifndef KAOLIN_UTILS_H_
16 | #define KAOLIN_UTILS_H_
17 | 
18 | #include <ATen/ATen.h>
19 | #include <cuda.h>
20 | #include <cuda_runtime.h>
21 | 
22 | #define CUDA_CHECK(condition) \
23 |   /* Code block avoids redefinition of cudaError_t error */ \
24 |   do { \
25 |     cudaError_t error = condition; \
26 |     if (error != cudaSuccess) { \
27 |       AT_ERROR("CUDA error: ", cudaGetErrorString(error)); \
28 |     } \
29 |   } while (0)
30 | 
31 | #define PRIVATE_CASE_TYPE(ENUM_TYPE, TYPE, TYPE_NAME, ...) \
32 |   case ENUM_TYPE: { \
33 |     using TYPE_NAME = TYPE; \
34 |     return __VA_ARGS__(); \
35 |   }
36 | 
37 | #define PRIVATE_CASE_INOUT_TYPES(CONST_IN_TYPE, CONST_OUT_TYPE, ENUM_IN_TYPE, ENUM_OUT_TYPE, \
38 |                                  IN_TYPE, OUT_TYPE, IN_TYPE_NAME, OUT_TYPE_NAME, ...) \
39 |   if (CONST_IN_TYPE == ENUM_IN_TYPE && CONST_OUT_TYPE == ENUM_OUT_TYPE) { \
40 |     using IN_TYPE_NAME = IN_TYPE; \
41 |     using OUT_TYPE_NAME = OUT_TYPE; \
42 |     return __VA_ARGS__(); \
43 |   } else \
44 | 
45 | #define PRIVATE_CASE_INOUT_DEDUCED_TYPES(ENUM_TYPE, IN_TYPE, OUT_TYPE, \
46 |                                          IN_TYPE_NAME, OUT_TYPE_NAME, ...) \
47 |   case ENUM_TYPE: { \
48 |     using IN_TYPE_NAME = IN_TYPE; \
49 |     using OUT_TYPE_NAME = OUT_TYPE; \
50 |     return __VA_ARGS__(); \
51 |   }
52 | 
53 | #define PRIVATE_CASE_INT(CONST_INT, VAR_NAME, ...) \
54 |   case CONST_INT: { \
55 |     const int VAR_NAME = CONST_INT; \
56 |     return __VA_ARGS__(); \
57 |   }
58 | 
59 | #define DISPATCH_NUM_TYPES(TYPE, TYPE_NAME, SCOPE_NAME, ...) \
60 |   [&] { \
61 |     switch(TYPE) \
62 |     { \
63 |       PRIVATE_CASE_TYPE(at::ScalarType::Byte, uint8_t, TYPE_NAME, __VA_ARGS__) \
64 |       PRIVATE_CASE_TYPE(at::ScalarType::Short, int16_t, TYPE_NAME, __VA_ARGS__) \
65 |       PRIVATE_CASE_TYPE(at::ScalarType::Int, int, TYPE_NAME, __VA_ARGS__) \
66 |       PRIVATE_CASE_TYPE(at::ScalarType::Long, int64_t, TYPE_NAME, __VA_ARGS__) \
67 |       PRIVATE_CASE_TYPE(at::ScalarType::Half, at::Half, TYPE_NAME, __VA_ARGS__) \
68 |       PRIVATE_CASE_TYPE(at::ScalarType::Float, float, TYPE_NAME, __VA_ARGS__) \
69 |       PRIVATE_CASE_TYPE(at::ScalarType::Double, double, TYPE_NAME, __VA_ARGS__) \
70 |       default: \
71 |         AT_ERROR(#SCOPE_NAME, " not implemented for '", toString(TYPE), "'"); \
72 |     } \
73 |   }()
74 | 
75 | 
76 | #define DISPATCH_INTEGER_TYPES(TYPE, TYPE_NAME, SCOPE_NAME, ...) \
77 |   [&] { \
78 |     switch(TYPE) \
79 |     { \
80 |       PRIVATE_CASE_TYPE(at::ScalarType::Byte, uint8_t, TYPE_NAME, __VA_ARGS__) \
81 |       PRIVATE_CASE_TYPE(at::ScalarType::Short, int16_t, TYPE_NAME, __VA_ARGS__) \
82 |       PRIVATE_CASE_TYPE(at::ScalarType::Int, int, TYPE_NAME, __VA_ARGS__) \
83 |       PRIVATE_CASE_TYPE(at::ScalarType::Long, int64_t, TYPE_NAME, __VA_ARGS__) \
84 |       default: \
85 |         AT_ERROR(#SCOPE_NAME, " not implemented for '", toString(TYPE), "'"); \
86 |     } \
87 |   }()
88 | 
89 | #define DISPATCH_FLOAT_TYPES(TYPE, TYPE_NAME, SCOPE_NAME, ...) \
90 |   [&] { \
91 |     switch(TYPE) \
92 |     { \
93 |       PRIVATE_CASE_TYPE(at::ScalarType::Half, at::Half, TYPE_NAME, __VA_ARGS__) \
94 |       PRIVATE_CASE_TYPE(at::ScalarType::Float, float, TYPE_NAME, __VA_ARGS__) \
95 |       PRIVATE_CASE_TYPE(at::ScalarType::Double, double, TYPE_NAME, __VA_ARGS__) \
96 |       default: \
97 |         AT_ERROR(#SCOPE_NAME, " not implemented for '", toString(TYPE), "'"); \
98 |     } \
99 |   }()
100 | 
101 | #endif  // KAOLIN_UTILS_H_
102 | 
--------------------------------------------------------------------------------
/torchsdf/sdf.py:
--------------------------------------------------------------------------------
1 | import torch
2 | from torchsdf import _C
3 | 
4 | 
5 | def index_vertices_by_faces(vertices_features, faces):
6 |     r"""Index vertex features to convert a per-vertex tensor into a per-face, per-vertex tensor.
7 | 
8 |     Args:
9 |         vertices_features (torch.FloatTensor):
10 |             vertex features, of shape :math:`(\text{num_points}, \text{knum})`,
11 |             where ``knum`` is the feature dimension; the features can be xyz positions,
12 |             rgb colors, or even neural-network features.
13 |         faces (torch.LongTensor):
14 |             face indices, of shape :math:`(\text{num_faces}, \text{num_vertices})`.
15 |     Returns:
16 |         (torch.FloatTensor):
17 |             the face features, of shape
18 |             :math:`(\text{num_faces}, \text{num_vertices}, \text{knum})`.
19 |     """
20 |     assert vertices_features.ndim == 2, \
21 |         "vertices_features must have 2 dimensions of shape (num_points, knum)"
22 |     assert faces.ndim == 2, "faces must have 2 dimensions of shape (num_faces, num_vertices)"
23 |     # Batched variant, kept for reference:
24 |     # input = vertices_features.unsqueeze(2).expand(-1, -1, faces.shape[-1], -1)
25 |     # indices = faces[None, ..., None].expand(
26 |     #     vertices_features.shape[0], -1, -1, vertices_features.shape[-1])
27 |     # return torch.gather(input=input, index=indices, dim=1)
28 |     # Expand the features to (num_points, num_vertices, knum), then gather row
29 |     # faces[f, v] for every face corner.
30 |     input = vertices_features.unsqueeze(1).expand(-1, faces.shape[-1], -1)
31 |     indices = faces[..., None].expand(-1, -1, vertices_features.shape[-1])
32 |     return torch.gather(input=input, index=indices, dim=0)
33 | 
34 | 
35 | def compute_sdf(pointclouds, face_vertices):
36 |     """For each query point, return the squared distance to the closest triangle,
37 |     the sign of the distance (+1 on the side the face normal points to, -1 otherwise),
38 |     the normalized direction from the closest point to the query point, and the
39 |     closest point itself."""
40 |     return _UnbatchedTriangleDistanceCuda.apply(pointclouds, face_vertices)
41 | 
42 | 
43 | class _UnbatchedTriangleDistanceCuda(torch.autograd.Function):
44 |     @staticmethod
45 |     def forward(ctx, points, face_vertices):
46 |         num_points = points.shape[0]
47 |         min_dist = torch.zeros(
48 |             (num_points,), device=points.device, dtype=points.dtype)
49 |         dist_sign = torch.zeros(
50 |             (num_points,), device=points.device, dtype=torch.int32)
51 |         normals = torch.zeros(
52 |             (num_points, 3), device=points.device, dtype=points.dtype)
53 |         clst_points = torch.zeros(
54 |             (num_points, 3), device=points.device, dtype=points.dtype)
55 |         _C.unbatched_triangle_distance_forward_cuda(
56 |             points, face_vertices, min_dist, dist_sign, normals, clst_points)
57 |         ctx.save_for_backward(points.contiguous(), clst_points)
58 |         ctx.mark_non_differentiable(dist_sign, normals, clst_points)
59 |         return min_dist, dist_sign, normals, clst_points
60 | 
61 |     @staticmethod
62 |     def backward(ctx, grad_dist, grad_dist_sign, grad_normals, grad_clst_points):
63 |         points, clst_points = ctx.saved_tensors
64 |         grad_dist = grad_dist.contiguous()
65 |         grad_points = torch.zeros_like(points)
66 |         grad_face_vertices = None  # gradients w.r.t. the mesh are not implemented
67 |         _C.unbatched_triangle_distance_backward_cuda(
68 |             grad_dist, points, clst_points, grad_points)
69 |         return grad_points, grad_face_vertices
70 | 
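
A minimal end-to-end sketch of the Python API above, not part of the repository: it assumes the extension has been built and a CUDA device is available, and the tetrahedron data is purely illustrative.

import torch
from torchsdf.sdf import index_vertices_by_faces, compute_sdf

# A tiny tetrahedron as the query mesh (4 vertices, 4 triangular faces).
vertices = torch.tensor([[0., 0., 0.], [1., 0., 0.],
                         [0., 1., 0.], [0., 0., 1.]], device="cuda")
faces = torch.tensor([[0, 2, 1], [0, 1, 3], [0, 3, 2], [1, 2, 3]], device="cuda")

# (num_faces, 3, 3): per-face vertex positions, the layout the kernel expects.
face_vertices = index_vertices_by_faces(vertices, faces)

points = torch.rand(1024, 3, device="cuda", requires_grad=True)
dist2, sign, normals, closest = compute_sdf(points, face_vertices)

sdf = sign * dist2.sqrt()   # signed distance from squared distance and sign
dist2.sum().backward()      # points.grad == 2 * (points - closest)
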
--------------------------------------------------------------------------------
/version.txt:
--------------------------------------------------------------------------------
1 | 0.1.0
2 | 
--------------------------------------------------------------------------------
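
A closing note on the backward pass: the backward kernel treats the closest point c as locally constant, so for the squared distance d(p) = ||p - c||^2 it returns 2 * (p - c). A finite-difference check of that gradient, again only a sketch: it assumes the built extension, and that the backward dispatcher handles float64 like the forward path's AT_DISPATCH_FLOATING_TYPES does; with random data the query points almost surely avoid the non-smooth loci where the nearest triangle changes.

import torch
from torchsdf.sdf import compute_sdf

def total_sq_dist(pts, tris):
    # Sum of per-point squared distances returned by the forward kernel.
    return compute_sdf(pts, tris)[0].sum()

torch.manual_seed(0)
tris = torch.rand(8, 3, 3, device="cuda", dtype=torch.double)
pts = torch.rand(16, 3, device="cuda", dtype=torch.double, requires_grad=True)

analytic = torch.autograd.grad(total_sq_dist(pts, tris), pts)[0]

# Central differences, one coordinate at a time.
eps = 1e-6
numeric = torch.zeros_like(pts)
with torch.no_grad():
    for i in range(pts.shape[0]):
        for j in range(3):
            step = torch.zeros_like(pts)
            step[i, j] = eps
            numeric[i, j] = (total_sq_dist(pts + step, tris)
                             - total_sq_dist(pts - step, tris)) / (2 * eps)

print("max abs deviation:", (analytic - numeric).abs().max().item())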