├── .github └── workflows │ ├── build_aarch64.yml │ └── build_wheel.yml ├── .gitignore ├── .gitmodules ├── Dockerfile ├── LICENSE ├── Makefile ├── README.md ├── build-macos-arm.sh ├── example.py ├── pyproject.toml ├── pytest.ini ├── setup-macos-arm.py ├── setup.py ├── src ├── DracoPy.h ├── DracoPy.pxd └── DracoPy.pyx ├── testdata_files ├── bunny.drc ├── bunny.obj ├── bunny_normals.drc ├── point_cloud_bunny.drc └── point_cloud_bunny_test.drc └── tests.py /.github/workflows/build_aarch64.yml: -------------------------------------------------------------------------------- 1 | name: Build Wheels 2 | 3 | on: [push, workflow_dispatch] 4 | 5 | env: 6 | CIBW_SKIP: cp36-* cp37* cp38* pp* *-musllinux* 7 | 8 | jobs: 9 | build_wheels: 10 | name: Build wheels on ${{matrix.arch}} for ${{ matrix.os }} 11 | runs-on: ${{ matrix.os }} 12 | strategy: 13 | matrix: 14 | os: [macos-latest] 15 | include: 16 | - os: ubuntu-20.04 17 | arch: aarch64 18 | 19 | steps: 20 | - uses: actions/checkout@v2 21 | 22 | - name: Set up QEMU 23 | if: ${{ matrix.arch == 'aarch64' }} 24 | uses: docker/setup-qemu-action@v2 25 | 26 | - name: Install OpenSSL 27 | if: ${{ matrix.arch == 'aarch64' }} 28 | run: sudo apt-get update && sudo apt-get install -y libssl-dev cmake 29 | 30 | - name: Set OpenSSL path 31 | run: echo "OPENSSL_ROOT_DIR=/usr" >> $GITHUB_ENV 32 | 33 | - name: Setup cmake 34 | uses: jwlawson/actions-setup-cmake@v2 35 | # with: 36 | # cmake-version: '3.16.x' 37 | 38 | - name: Build wheels 39 | uses: pypa/cibuildwheel@v2.22.0 40 | # to supply options, put them in 'env', like: 41 | env: 42 | CIBW_BEFORE_BUILD: git submodule init && git submodule update && pip install "cmake>=3.30.2" && pip install cython numpy scikit-build 43 | CPPFLAGS: -I/usr/local/opt/zlib/include 44 | LDFLAGS: -L/usr/local/opt/zlib/lib 45 | CIBW_ARCHS_MACOS: "arm64" 46 | CIBW_ARCHS_LINUX: "aarch64" 47 | 48 | - name: Upload built wheels 49 | uses: actions/upload-artifact@v4 50 | with: 51 | name: built-wheels-${{ matrix.os }}-${{ matrix.arch }} 52 | path: ./wheelhouse/*.whl 53 | if-no-files-found: warn 54 | -------------------------------------------------------------------------------- /.github/workflows/build_wheel.yml: -------------------------------------------------------------------------------- 1 | name: Build Wheels 2 | 3 | on: [push, workflow_dispatch] 4 | 5 | env: 6 | CIBW_SKIP: cp36-* cp37* cp38* pp* *-musllinux* 7 | 8 | jobs: 9 | build_wheels: 10 | name: Build wheels on ${{matrix.arch}} for ${{ matrix.os }} 11 | runs-on: ${{ matrix.os }} 12 | strategy: 13 | matrix: 14 | os: [ubuntu-latest, macos-latest, windows-2019] 15 | arch: [auto] 16 | 17 | steps: 18 | - uses: actions/checkout@v2 19 | 20 | - name: Set up QEMU 21 | if: ${{ matrix.arch == 'aarch64' }} 22 | uses: docker/setup-qemu-action@v2 23 | 24 | 25 | - name: Setup cmake 26 | uses: jwlawson/actions-setup-cmake@v2 27 | # with: 28 | # cmake-version: '3.16.x' 29 | 30 | - name: Build wheels 31 | uses: pypa/cibuildwheel@v2.22.0 32 | # to supply options, put them in 'env', like: 33 | env: 34 | CIBW_BEFORE_BUILD: git submodule init && git submodule update && pip install "cmake>=3.30.2" && pip install cython numpy scikit-build setuptools wheel 35 | CPPFLAGS: -I/usr/local/opt/zlib/include 36 | LDFLAGS: -L/usr/local/opt/zlib/lib 37 | CIBW_ARCHS_MACOS: "x86_64" 38 | CIBW_ARCHS_LINUX: "auto" 39 | 40 | - name: Upload built wheels 41 | uses: actions/upload-artifact@v4 42 | with: 43 | name: built-wheels-${{ matrix.os }}-${{ matrix.arch }} 44 | path: ./wheelhouse/*.whl 45 | if-no-files-found: warn 46 
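Both workflows drive pypa/cibuildwheel: `CIBW_BEFORE_BUILD` pulls the draco submodule and the build dependencies, and the finished wheels are uploaded from `./wheelhouse`. A minimal post-install smoke test for one of those wheels could look like the sketch below (the script name and array values are illustrative, not part of the repository):

```python
# smoke_test.py (hypothetical): round-trip a one-triangle mesh through DracoPy
import numpy as np
import DracoPy

points = np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0]], dtype=np.float32)
faces = np.array([[0, 1, 2]], dtype=np.uint32)

binary = DracoPy.encode(points, faces)   # bytes in Draco format
mesh = DracoPy.decode(binary)            # DracoMesh with .points and .faces

assert mesh.points.shape == (3, 3)
assert mesh.faces.shape == (1, 3)
print("wheel looks functional")
```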
| -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | src/DracoPy.cpp 2 | DracoPy.egg-info/ 3 | MANIFEST* 4 | _skbuild/ 5 | *.so 6 | .vscode/ 7 | dist/ 8 | bunny_test.drc 9 | *secret.json 10 | __pycache__/ 11 | .eggs/ 12 | src/DracoPy.cpp 13 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "draco"] 2 | path = draco 3 | url = https://github.com/google/draco 4 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM quay.io/pypa/manylinux2014_aarch64 2 | MAINTAINER William Silversmith 3 | 4 | COPY . /DracoPy 5 | 6 | WORKDIR "/DracoPy" 7 | 8 | ENV GCC "gcc" 9 | 10 | RUN rm -rf *.so _skbuild __pycache__ dist DracoPy.egg-info 11 | 12 | RUN yum update && yum install -y openssl-devel cmake 13 | 14 | RUN /opt/python/cp38-cp38/bin/pip3.8 install pip --upgrade 15 | RUN /opt/python/cp39-cp39/bin/pip3.9 install pip --upgrade 16 | RUN /opt/python/cp310-cp310/bin/pip3.10 install pip --upgrade 17 | RUN /opt/python/cp311-cp311/bin/pip3.11 install pip --upgrade 18 | RUN /opt/python/cp312-cp312/bin/pip3.12 install pip --upgrade 19 | 20 | RUN /opt/python/cp38-cp38/bin/pip3.8 install scikit-build twine numpy cython pytest -U 21 | RUN /opt/python/cp39-cp39/bin/pip3.9 install scikit-build twine numpy cython pytest -U 22 | RUN /opt/python/cp310-cp310/bin/pip3.10 install scikit-build twine numpy cython pytest -U 23 | RUN /opt/python/cp311-cp311/bin/pip3.11 install scikit-build twine numpy cython pytest -U 24 | RUN /opt/python/cp312-cp312/bin/pip3.12 install scikit-build twine numpy cython pytest -U 25 | 26 | RUN touch src/DracoPy.pyx && /opt/python/cp38-cp38/bin/python3.8 setup.py develop 27 | RUN touch src/DracoPy.pyx && /opt/python/cp39-cp39/bin/python3.9 setup.py develop 28 | RUN touch src/DracoPy.pyx && /opt/python/cp310-cp310/bin/python3.10 setup.py develop 29 | RUN touch src/DracoPy.pyx && /opt/python/cp311-cp311/bin/python3.11 setup.py develop 30 | RUN touch src/DracoPy.pyx && /opt/python/cp312-cp312/bin/python3.12 setup.py develop 31 | 32 | RUN /opt/python/cp38-cp38/bin/python3.8 -m pytest -v -x tests.py 33 | RUN /opt/python/cp39-cp39/bin/python3.9 -m pytest -v -x tests.py 34 | RUN /opt/python/cp310-cp310/bin/python3.10 -m pytest -v -x tests.py 35 | RUN /opt/python/cp311-cp311/bin/python3.11 -m pytest -v -x tests.py 36 | RUN /opt/python/cp312-cp312/bin/python3.12 -m pytest -v -x tests.py 37 | 38 | RUN touch src/DracoPy.pyx && /opt/python/cp38-cp38/bin/python3.8 setup.py bdist_wheel 39 | RUN touch src/DracoPy.pyx && /opt/python/cp39-cp39/bin/python3.9 setup.py bdist_wheel 40 | RUN touch src/DracoPy.pyx && /opt/python/cp310-cp310/bin/python3.10 setup.py bdist_wheel 41 | RUN touch src/DracoPy.pyx && /opt/python/cp311-cp311/bin/python3.11 setup.py bdist_wheel 42 | RUN touch src/DracoPy.pyx && /opt/python/cp312-cp312/bin/python3.12 setup.py bdist_wheel 43 | 44 | RUN for whl in `ls dist/*.whl`; do auditwheel repair --plat manylinux2014_aarch64 $whl; done 45 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 
180 | 
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 | 
190 | Copyright [yyyy] [name of copyright owner]
191 | 
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 | 
196 | http://www.apache.org/licenses/LICENSE-2.0
197 | 
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | # This makefile is for building libdraco.a as a fat binary
2 | # that can be used to produce universal2 wheels.
3 | 
4 | ARMDIR = draco/build_arm
5 | X86DIR = draco/build_x86
6 | 
7 | all: staticlib
8 | 
9 | staticlib: arm64 x86
10 | 	lipo -create -output ./libdraco.a $(ARMDIR)/libdraco.a $(X86DIR)/libdraco.a
11 | 
12 | arm64:
13 | 	mkdir -p $(ARMDIR)
14 | 	cmake -B $(ARMDIR) -S draco -DCMAKE_OSX_ARCHITECTURES=arm64 -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0 -DBUILD_SHARED_LIBS=OFF
15 | 	cd $(ARMDIR) && make
16 | x86:
17 | 	mkdir -p $(X86DIR)
18 | 	cmake -B $(X86DIR) -S draco -DCMAKE_OSX_ARCHITECTURES=x86_64 -DCMAKE_OSX_DEPLOYMENT_TARGET=10.9 -DBUILD_SHARED_LIBS=OFF
19 | 	cd $(X86DIR) && make
20 | 
21 | clean:
22 | 	rm -rf draco/build_arm draco/build_x86
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [![PyPI version](https://badge.fury.io/py/DracoPy.svg)](https://badge.fury.io/py/DracoPy)
2 | 
3 | # DracoPy
4 | 
5 | ```python
6 | import os
7 | import DracoPy
8 | 
9 | with open('bunny.drc', 'rb') as draco_file:
10 |     mesh = DracoPy.decode(draco_file.read())
11 | 
12 | print(f"number of points: {len(mesh.points)}")
13 | print(f"number of faces: {len(mesh.faces)}")
14 | print(f"number of normals: {len(mesh.normals)}")
15 | 
16 | # Note: If mesh.points is an integer numpy array,
17 | # it will be encoded as an integer attribute. Otherwise,
18 | # it will be encoded as floating point.
19 | binary = DracoPy.encode(mesh.points, mesh.faces)
20 | with open('bunny_test.drc', 'wb') as test_file:
21 |     test_file.write(binary)
22 | 
23 | # If faces is omitted, DracoPy will encode a point cloud
24 | binary = DracoPy.encode(mesh.points)
25 | 
26 | # Options for encoding:
27 | binary = DracoPy.encode(
28 |     mesh.points, faces=mesh.faces,
29 |     quantization_bits=14, compression_level=1,
30 |     quantization_range=-1, quantization_origin=None,
31 |     create_metadata=False, preserve_order=False,
32 |     colors=mesh.colors
33 | )
34 | 
35 | ```
36 | 
37 | DracoPy is a Python wrapper for Google's Draco mesh compression library.
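Decoded meshes also expose any color, texture coordinate, and normal attributes stored in the file, and `preserve_order=True` keeps the input vertex/face order at the cost of compression ratio. A short sketch (same `bunny.drc` as above; which attributes are present depends on the file):

```python
import DracoPy

with open('bunny.drc', 'rb') as draco_file:
    mesh = DracoPy.decode(draco_file.read())

# Optional attributes (None or empty when absent from the file)
print(mesh.colors)     # (N, K) uint8 array or None
print(mesh.tex_coord)  # (N, 2) float array or None
print(mesh.normals)    # (N, 3) float array (empty if not stored)

# Keep vertex/face order at the expense of compression ratio
binary = DracoPy.encode(
    mesh.points, faces=mesh.faces,
    preserve_order=True, compression_level=10,
)
```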
38 | 39 | ## Installation 40 | 41 | Binary wheels are available for users with Python >= 3.6 and pip >= 20. 42 | 43 | Installation from source requires Python >= 3.6, pip >= 10, and a C++ compiler that is fully compatible with C++11. 44 | 45 | It supports Linux, OS X, and Windows. Numpy is required. 46 | 47 | ```bash 48 | pip install DracoPy 49 | ``` 50 | 51 | ## Acknowledgements 52 | 53 | We graciously thank The Stanford 3D Scanning Repository for providing the Stanford Bunny test model. 54 | 55 | https://graphics.stanford.edu/data/3Dscanrep/ -------------------------------------------------------------------------------- /build-macos-arm.sh: -------------------------------------------------------------------------------- 1 | #!/bin/zsh 2 | 3 | source /usr/local/bin/virtualenvwrapper.sh 4 | 5 | export ARCHFLAGS="-arch arm64" 6 | 7 | function build { 8 | pip install numpy cython -U 9 | rm src/DracoPy.cpp 10 | cython -3 src/DracoPy.pyx 11 | python setup-macos-arm.py develop 12 | python -m pytest -v -x tests.py 13 | python setup-macos-arm.py bdist_wheel 14 | } 15 | 16 | rm -r .eggs build *.so 17 | make -j 7 18 | 19 | for venv in dracopy38 dracopy39 dracopy310 dracopy311 dracopy312; 20 | do 21 | echo $venv 22 | workon $venv 23 | build 24 | done 25 | 26 | workon dracopy -------------------------------------------------------------------------------- /example.py: -------------------------------------------------------------------------------- 1 | import os 2 | import DracoPy 3 | 4 | with open('bunny.drc', 'rb') as draco_file: 5 | file_content = draco_file.read() 6 | mesh_object = DracoPy.decode_buffer_to_mesh(file_content) 7 | print('number of points in original file: {0}'.format(len(mesh_object.points))) 8 | print('number of faces in original file: {0}'.format(len(mesh_object.faces))) 9 | encoding_test = DracoPy.encode_mesh_to_buffer(mesh_object.points, mesh_object.faces) 10 | with open('bunny_test.drc', 'wb') as test_file: 11 | test_file.write(encoding_test) 12 | 13 | with open('bunny_test.drc', 'rb') as test_file: 14 | file_content = test_file.read() 15 | mesh_object = DracoPy.decode_buffer_to_mesh(file_content) 16 | print('number of points in test file: {0}'.format(len(mesh_object.points))) 17 | print('number of faces in test file: {0}'.format(len(mesh_object.faces))) 18 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | "setuptools", 4 | "scikit-build >= 0.9.0", 5 | "wheel", 6 | "numpy" 7 | ] -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | norecursedirs = draco/third_party 3 | python_files = tests.py 4 | -------------------------------------------------------------------------------- /setup-macos-arm.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | import os 3 | import platform 4 | import sys 5 | import subprocess 6 | 7 | import numpy as np 8 | 9 | def read(fname): 10 | with open(os.path.join(os.path.dirname(__file__), fname), 'rt') as f: 11 | return f.read() 12 | 13 | src_dir = './src' 14 | 15 | setuptools.setup( 16 | name='DracoPy', 17 | version='1.4.1', 18 | description = 'Python wrapper for Google\'s Draco Mesh Compression Library', 19 | author = 'Manuel Castro, William Silversmith :: Contributors :: Fatih Erol, Faru 
Nuri Sonmez, Zeyu Zhao, Denis Riviere', 20 | author_email = 'macastro@princeton.edu, ws9@princeton.edu', 21 | url = 'https://github.com/seung-lab/DracoPy', 22 | long_description=read('README.md'), 23 | long_description_content_type="text/markdown", 24 | license = "License :: OSI Approved :: Apache Software License", 25 | setup_requires=['cython'], 26 | install_requires=['numpy'], 27 | ext_modules=[ 28 | setuptools.Extension( 29 | 'DracoPy', 30 | sources=[ os.path.join(src_dir, 'DracoPy.pyx') ], 31 | depends=[ os.path.join(src_dir, 'DracoPy.h') ], 32 | language='c++', 33 | include_dirs = [ 34 | './draco/src/', 35 | np.get_include(), 36 | ], 37 | extra_compile_args=[ 38 | '-std=c++17','-O3', '-stdlib=libc++' 39 | ], 40 | extra_link_args=[ "-ldraco" ], 41 | ) 42 | ], 43 | classifiers=[ 44 | "Intended Audience :: Developers", 45 | "Development Status :: 5 - Production/Stable", 46 | "License :: OSI Approved :: Apache Software License", 47 | "Programming Language :: Python", 48 | "Programming Language :: Python :: 3", 49 | "Programming Language :: Python :: 3.8", 50 | "Programming Language :: Python :: 3.9", 51 | "Programming Language :: Python :: 3.10", 52 | "Programming Language :: Python :: 3.11", 53 | "Programming Language :: Python :: 3.12", 54 | "Topic :: Scientific/Engineering", 55 | "Operating System :: POSIX", 56 | "Operating System :: MacOS", 57 | "Operating System :: Microsoft :: Windows :: Windows 10", 58 | "Topic :: Utilities", 59 | ] 60 | ) 61 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | import os 3 | import platform 4 | import sys 5 | import shutil 6 | 7 | from skbuild import setup 8 | from skbuild.constants import CMAKE_INSTALL_DIR, skbuild_plat_name 9 | from skbuild.exceptions import SKBuildError 10 | from skbuild.cmaker import get_cmake_version 11 | 12 | import multiprocessing as mp 13 | 14 | class NumpyImport: 15 | def __repr__(self): 16 | import numpy as np 17 | 18 | return np.get_include() 19 | 20 | __fspath__ = __repr__ 21 | 22 | 23 | if not "CMAKE_BUILD_PARALLEL_LEVEL" in os.environ: 24 | os.environ["CMAKE_BUILD_PARALLEL_LEVEL"] = str(mp.cpu_count()) 25 | 26 | def read(fname): 27 | with open(os.path.join(os.path.dirname(__file__), fname), 'rt') as f: 28 | return f.read() 29 | 30 | # Add CMake as a build requirement if cmake is not installed or is too low a version 31 | setup_requires = ['cython'] 32 | setup_requires.append('cmake') 33 | 34 | # If you want to re-build the cython cpp file (DracoPy.cpp), run: 35 | # cython --cplus -3 -I./_skbuild/linux-x86_64-3.6/cmake-install/include/draco/ ./src/DracoPy.pyx 36 | # Replace "linux-x86_64-3.6" with the directory under _skbuild in your system 37 | # Draco must already be built/setup.py already be run before running the above command 38 | 39 | src_dir = './src' 40 | lib_dirs = [os.path.abspath(os.path.join(CMAKE_INSTALL_DIR(), 'lib/')), 41 | os.path.abspath(os.path.join(CMAKE_INSTALL_DIR(), 'lib64/'))] 42 | cmake_args = [] 43 | 44 | operating_system = platform.system().lower() 45 | 46 | is_macos = sys.platform == 'darwin' or operating_system == "darwin" 47 | is_windows = sys.platform == 'win32' or operating_system == "windows" 48 | 49 | if is_macos: 50 | plat_name = skbuild_plat_name() 51 | sep = [pos for pos, char in enumerate(plat_name) if char == '-'] 52 | assert len(sep) == 2 53 | cmake_args = [ 54 | '-DCMAKE_OSX_DEPLOYMENT_TARGET:STRING='+plat_name[sep[0]+1:sep[1]], 55 | 
'-DCMAKE_OSX_ARCHITECTURES:STRING='+plat_name[sep[1]+1:] 56 | ] 57 | library_link_args = [ 58 | f'-l{lib}' for lib in ('draco',) 59 | ] 60 | elif is_windows: 61 | library_link_args = [ 62 | lib for lib in ('draco.lib',) 63 | ] 64 | else: # linux 65 | library_link_args = [ 66 | f'-l:{lib}' for lib in ('libdraco.a',) 67 | ] 68 | 69 | cmake_args.append("-DCMAKE_POSITION_INDEPENDENT_CODE=ON") # make -fPIC code 70 | 71 | if is_windows: 72 | extra_link_args = ['/LIBPATH:{0}'.format(lib_dir) for lib_dir in lib_dirs] + library_link_args 73 | extra_compile_args = [ 74 | '/std:c++17', '/O2', 75 | ] 76 | else: 77 | extra_link_args = ['-L{0}'.format(lib_dir) for lib_dir in lib_dirs] + library_link_args 78 | extra_compile_args = [ 79 | '-std=c++11','-O3' 80 | ] 81 | 82 | if os.path.exists(".eggs"): 83 | shutil.rmtree(".eggs") 84 | 85 | setup( 86 | name='DracoPy', 87 | version='1.5.0', 88 | description = 'Python wrapper for Google\'s Draco Mesh Compression Library', 89 | author = 'Manuel Castro, William Silversmith :: Contributors :: Fatih Erol, Faru Nuri Sonmez, Forrest Collman, Zeyu Zhao, Denis Riviere, Brett Tully, Hanseul Jun, Afshawn Lotfi', 90 | author_email = 'macastro@princeton.edu, ws9@princeton.edu', 91 | url = 'https://github.com/seung-lab/DracoPy', 92 | long_description=read('README.md'), 93 | long_description_content_type="text/markdown", 94 | license = "License :: OSI Approved :: Apache Software License", 95 | cmake_source_dir='./draco', 96 | cmake_args=cmake_args, 97 | build_args=['--verbose'], 98 | setup_requires=setup_requires, 99 | ext_modules=[ 100 | setuptools.Extension( 101 | 'DracoPy', 102 | sources=[ os.path.join(src_dir, 'DracoPy.pyx') ], 103 | depends=[ os.path.join(src_dir, 'DracoPy.h') ], 104 | language='c++', 105 | include_dirs = [ 106 | str(NumpyImport()), 107 | os.path.join(CMAKE_INSTALL_DIR(), 'include/'), 108 | ], 109 | extra_compile_args=extra_compile_args, 110 | extra_link_args=extra_link_args 111 | ) 112 | ], 113 | classifiers=[ 114 | "Intended Audience :: Developers", 115 | "Development Status :: 5 - Production/Stable", 116 | "License :: OSI Approved :: Apache Software License", 117 | "Programming Language :: Python", 118 | "Programming Language :: Python :: 3", 119 | "Programming Language :: Python :: 3.8", 120 | "Programming Language :: Python :: 3.9", 121 | "Programming Language :: Python :: 3.10", 122 | "Programming Language :: Python :: 3.11", 123 | "Programming Language :: Python :: 3.12", 124 | "Programming Language :: Python :: 3.13", 125 | "Topic :: Scientific/Engineering", 126 | "Operating System :: POSIX", 127 | "Operating System :: MacOS", 128 | "Operating System :: Microsoft :: Windows :: Windows 10", 129 | "Topic :: Utilities", 130 | ] 131 | ) 132 | -------------------------------------------------------------------------------- /src/DracoPy.h: -------------------------------------------------------------------------------- 1 | #ifndef __DRACOPY_H__ 2 | #define __DRACOPY_H__ 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include "draco/compression/decode.h" 9 | #include "draco/compression/encode.h" 10 | #include "draco/compression/config/compression_shared.h" 11 | #include "draco/core/status_or.h" 12 | #include "draco/core/encoder_buffer.h" 13 | #include "draco/core/vector_d.h" 14 | #include "draco/mesh/triangle_soup_mesh_builder.h" 15 | #include "draco/point_cloud/point_cloud_builder.h" 16 | 17 | namespace DracoFunctions { 18 | 19 | enum decoding_status { 20 | successful, 21 | not_draco_encoded, 22 | no_position_attribute, 23 | 
no_tex_coord_attribute, 24 | no_normal_coord_attribute, 25 | failed_during_decoding 26 | }; 27 | enum encoding_status { 28 | successful_encoding, 29 | failed_during_encoding 30 | }; 31 | 32 | struct PointCloudObject { 33 | std::vector points; 34 | 35 | // Encoding options stored in metadata 36 | bool encoding_options_set; 37 | bool colors_set; 38 | int quantization_bits; 39 | double quantization_range; 40 | std::vector quantization_origin; 41 | 42 | decoding_status decode_status; 43 | std::vector colors; 44 | }; 45 | 46 | struct MeshObject : PointCloudObject { 47 | std::vector normals; 48 | std::vector faces; 49 | std::vector tex_coord; 50 | }; 51 | 52 | struct EncodedObject { 53 | std::vector buffer; 54 | encoding_status encode_status; 55 | }; 56 | 57 | 58 | #define CHECK_STATUS(statusor, obj) \ 59 | if (!(statusor).ok()) {\ 60 | std::string status_string = (statusor).status().error_msg_string(); \ 61 | if (\ 62 | status_string.compare("Not a Draco file.") \ 63 | || status_string.compare("Failed to parse Draco header.")) {\ 64 | \ 65 | (obj).decode_status = not_draco_encoded;\ 66 | }\ 67 | else {\ 68 | (obj).decode_status = failed_during_decoding;\ 69 | }\ 70 | return (obj);\ 71 | } 72 | 73 | MeshObject decode_buffer(const char *buffer, std::size_t buffer_len) { 74 | MeshObject meshObject; 75 | draco::DecoderBuffer decoderBuffer; 76 | decoderBuffer.Init(buffer, buffer_len); 77 | 78 | auto type_statusor = draco::Decoder::GetEncodedGeometryType(&decoderBuffer); 79 | CHECK_STATUS(type_statusor, meshObject) 80 | draco::EncodedGeometryType geotype = std::move(type_statusor).value(); 81 | 82 | if (geotype == draco::EncodedGeometryType::INVALID_GEOMETRY_TYPE) { 83 | meshObject.decode_status = not_draco_encoded; 84 | return meshObject; 85 | } 86 | 87 | draco::Decoder decoder; 88 | std::unique_ptr in_mesh; 89 | std::unique_ptr in_pointcloud; 90 | draco::Mesh *mesh; 91 | 92 | if (geotype == draco::EncodedGeometryType::POINT_CLOUD) { 93 | auto statusor = decoder.DecodePointCloudFromBuffer(&decoderBuffer); 94 | CHECK_STATUS(statusor, meshObject) 95 | in_pointcloud = std::move(statusor).value(); 96 | // This is okay because draco::Mesh is a subclass of 97 | // draco::PointCloud 98 | mesh = static_cast(in_pointcloud.get()); 99 | } 100 | else if (geotype == draco::EncodedGeometryType::TRIANGULAR_MESH) { 101 | auto statusor = decoder.DecodeMeshFromBuffer(&decoderBuffer); 102 | CHECK_STATUS(statusor, meshObject) 103 | in_mesh = std::move(statusor).value(); 104 | mesh = in_mesh.get(); 105 | } 106 | else { 107 | throw std::runtime_error("Should never be reached."); 108 | } 109 | 110 | const int pos_att_id = mesh->GetNamedAttributeId(draco::GeometryAttribute::POSITION); 111 | if (pos_att_id < 0) { 112 | meshObject.decode_status = no_position_attribute; 113 | return meshObject; 114 | } 115 | 116 | meshObject.points.reserve(3 * mesh->num_points()); 117 | const auto *const pos_att = mesh->attribute(pos_att_id); 118 | std::array pos_val; 119 | for (draco::PointIndex v(0); v < mesh->num_points(); ++v) { 120 | if (!pos_att->ConvertValue(pos_att->mapped_index(v), &pos_val[0])) { 121 | meshObject.decode_status = no_position_attribute; 122 | return meshObject; 123 | } 124 | meshObject.points.push_back(pos_val[0]); 125 | meshObject.points.push_back(pos_val[1]); 126 | meshObject.points.push_back(pos_val[2]); 127 | } 128 | 129 | const int color_att_id = mesh->GetNamedAttributeId(draco::GeometryAttribute::COLOR); 130 | if (color_att_id >= 0) { 131 | meshObject.colors_set = true; 132 | const auto *const color_att = 
mesh->attribute(color_att_id); 133 | const int colors_channel = color_att->num_components(); 134 | meshObject.colors.reserve(colors_channel * mesh->num_points()); 135 | uint8_t* color_val = new uint8_t[colors_channel]; 136 | for (draco::PointIndex v(0); v < mesh->num_points(); ++v) { 137 | if (!color_att->ConvertValue(color_att->mapped_index(v), colors_channel, color_val)) { 138 | meshObject.colors_set = false; // color decoding failed! 139 | break; // it already failed 140 | } else { 141 | for (int i = 0; i < colors_channel; ++i) { 142 | meshObject.colors.push_back(color_val[i]); 143 | } 144 | } 145 | } 146 | delete [] color_val; 147 | } else { 148 | meshObject.colors_set = false; 149 | } 150 | 151 | const int tex_att_id = mesh->GetNamedAttributeId(draco::GeometryAttribute::TEX_COORD); 152 | if (tex_att_id >= 0) { 153 | const auto *const tex_att = mesh->attribute(tex_att_id); 154 | const int tex_channel = tex_att->num_components(); 155 | meshObject.tex_coord.reserve(tex_channel * mesh->num_points()); 156 | float* tex_val = new float[tex_channel]; 157 | for (draco::PointIndex v(0); v < mesh->num_points(); ++v) { 158 | if (!tex_att->ConvertValue(tex_att->mapped_index(v), tex_channel, tex_val)) { 159 | meshObject.decode_status = no_tex_coord_attribute; 160 | break; 161 | } else { 162 | for (int i = 0; i < tex_channel; ++i) { 163 | meshObject.tex_coord.push_back(tex_val[i]); 164 | } 165 | } 166 | } 167 | delete [] tex_val; 168 | } 169 | 170 | const draco::GeometryMetadata *metadata = mesh->GetMetadata(); 171 | meshObject.encoding_options_set = false; 172 | if (metadata) { 173 | metadata->GetEntryInt("quantization_bits", &(meshObject.quantization_bits)); 174 | if (metadata->GetEntryDouble("quantization_range", &(meshObject.quantization_range)) && 175 | metadata->GetEntryDoubleArray("quantization_origin", &(meshObject.quantization_origin))) { 176 | meshObject.encoding_options_set = true; 177 | } 178 | } 179 | 180 | if (geotype == draco::EncodedGeometryType::POINT_CLOUD) { 181 | meshObject.decode_status = successful; 182 | return meshObject; 183 | } 184 | 185 | meshObject.faces.reserve(3 * mesh->num_faces()); 186 | for (draco::FaceIndex i(0); i < mesh->num_faces(); ++i) { 187 | const auto &f = mesh->face(i); 188 | meshObject.faces.push_back(*(reinterpret_cast(&(f[0])))); 189 | meshObject.faces.push_back(*(reinterpret_cast(&(f[1])))); 190 | meshObject.faces.push_back(*(reinterpret_cast(&(f[2])))); 191 | } 192 | 193 | const int normal_att_id = mesh->GetNamedAttributeId(draco::GeometryAttribute::NORMAL); 194 | if (normal_att_id < 0) { // No normal values are present. 
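    // Normals are optional: a mesh without a NORMAL attribute is still a
    // successful decode, so return what has been gathered so far.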
195 | meshObject.decode_status = successful; 196 | return meshObject; 197 | } 198 | 199 | const auto *const normal_att = mesh->attribute(normal_att_id); 200 | meshObject.normals.reserve(3 * normal_att->size()); 201 | 202 | std::array normal_val; 203 | for (draco::PointIndex v(0); v < normal_att->size(); ++v){ 204 | if (!normal_att->ConvertValue(normal_att->mapped_index(v), &normal_val[0])){ 205 | meshObject.decode_status = no_normal_coord_attribute; 206 | } 207 | meshObject.normals.push_back(normal_val[0]); 208 | meshObject.normals.push_back(normal_val[1]); 209 | meshObject.normals.push_back(normal_val[2]); 210 | } 211 | 212 | meshObject.decode_status = successful; 213 | return meshObject; 214 | } 215 | 216 | void setup_encoder_and_metadata(draco::PointCloud *point_cloud_or_mesh, draco::Encoder &encoder, int compression_level, int quantization_bits, float quantization_range, const float *quantization_origin, bool create_metadata) { 217 | int speed = 10 - compression_level; 218 | encoder.SetSpeedOptions(speed, speed); 219 | std::unique_ptr metadata = std::unique_ptr(new draco::GeometryMetadata()); 220 | if (quantization_origin == NULL || quantization_range <= 0.f) { 221 | // @zeruniverse All quantization_range <= 0.f is useless, see 222 | // https://github.com/google/draco/blob/master/src/draco/attributes/attribute_quantization_transform.cc#L160-L170 223 | encoder.SetAttributeQuantization(draco::GeometryAttribute::POSITION, quantization_bits); 224 | } 225 | else { 226 | encoder.SetAttributeExplicitQuantization(draco::GeometryAttribute::POSITION, quantization_bits, 3, quantization_origin, quantization_range); 227 | if (create_metadata) { 228 | metadata->AddEntryDouble("quantization_range", quantization_range); 229 | std::vector quantization_origin_vec; 230 | for (int i = 0; i < 3; i++) { 231 | quantization_origin_vec.push_back(quantization_origin[i]); 232 | } 233 | metadata->AddEntryDoubleArray("quantization_origin", quantization_origin_vec); 234 | } 235 | } 236 | if (create_metadata) { 237 | metadata->AddEntryInt("quantization_bits", quantization_bits); 238 | point_cloud_or_mesh->AddMetadata(std::move(metadata)); 239 | } 240 | } 241 | 242 | EncodedObject encode_mesh( 243 | const std::vector &points, 244 | const std::vector &faces, 245 | const int quantization_bits, 246 | const int compression_level, 247 | const float quantization_range, 248 | const float *quantization_origin, 249 | const bool preserve_order, 250 | const bool create_metadata, 251 | const int integer_mark, 252 | const std::vector &colors, 253 | const uint8_t colors_channel, 254 | const std::vector &tex_coord, 255 | const uint8_t tex_coord_channel, 256 | const std::vector &normals, 257 | const uint8_t has_normals 258 | ) { 259 | // @zeruniverse TriangleSoupMeshBuilder will cause problems when 260 | // preserve_order=True due to vertices merging. 261 | // In order to support preserve_order, we need to build mesh 262 | // manually. 
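  // Overview of the manual build below: initialize a POSITION attribute whose
  // data type follows integer_mark (int32 / uint32 / float32), optionally add
  // COLOR, TEX_COORD and NORMAL attributes, copy one value per point, add the
  // faces, and finally deduplicate attribute values and point ids unless
  // preserve_order was requested.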
263 | draco::Mesh mesh; //Initialize a draco mesh 264 | 265 | // Process vertices 266 | const size_t num_pts = points.size() / 3; 267 | mesh.set_num_points(num_pts); 268 | draco::GeometryAttribute positions_attr; 269 | if (integer_mark == 1) { 270 | positions_attr.Init(draco::GeometryAttribute::POSITION, // Attribute type 271 | nullptr, // data buffer 272 | 3, // number of components 273 | draco::DT_INT32, // data type 274 | false, // normalized 275 | sizeof(int32_t) * 3, // byte stride 276 | 0); // byte offset 277 | } else if (integer_mark == 2) { 278 | positions_attr.Init(draco::GeometryAttribute::POSITION, // Attribute type 279 | nullptr, // data buffer 280 | 3, // number of components 281 | draco::DT_UINT32, // data type 282 | false, // normalized 283 | sizeof(uint32_t) * 3, // byte stride 284 | 0); // byte offset 285 | } else { 286 | positions_attr.Init(draco::GeometryAttribute::POSITION, // Attribute type 287 | nullptr, // data buffer 288 | 3, // number of components 289 | draco::DT_FLOAT32, // data type 290 | false, // normalized 291 | sizeof(float) * 3, // byte stride 292 | 0); // byte offset 293 | } 294 | int color_att_id = -1; 295 | if(colors_channel) { 296 | draco::GeometryAttribute colors_attr; 297 | colors_attr.Init(draco::GeometryAttribute::COLOR, // Attribute type 298 | nullptr, // data buffer 299 | colors_channel, // number of components 300 | draco::DT_UINT8, // data type 301 | true, // normalized 302 | sizeof(uint8_t) * colors_channel, // byte stride 303 | 0); // byte offset 304 | color_att_id = mesh.AddAttribute(colors_attr, true, num_pts); 305 | } 306 | int tex_coord_att_id = -1; 307 | if(tex_coord_channel) { 308 | draco::GeometryAttribute tex_coord_attr; 309 | tex_coord_attr.Init(draco::GeometryAttribute::TEX_COORD, // Attribute type 310 | nullptr, // data buffer 311 | tex_coord_channel, // number of components 312 | draco::DT_FLOAT32, // data type 313 | true, // normalized 314 | sizeof(float) * tex_coord_channel, // byte stride 315 | 0); // byte offset 316 | tex_coord_att_id = mesh.AddAttribute(tex_coord_attr, true, num_pts); 317 | } 318 | 319 | int normal_att_id = -1; 320 | if(has_normals) { 321 | draco::GeometryAttribute normal_attr; 322 | normal_attr.Init(draco::GeometryAttribute::NORMAL, // Attribute type 323 | nullptr, // data buffer 324 | 3, // number of components (normals are 3D vectors) 325 | draco::DT_FLOAT32, // data type 326 | false, // normalized 327 | sizeof(float) * 3, // byte stride 328 | 0); // byte offset 329 | normal_att_id = mesh.AddAttribute(normal_attr, true, num_pts); 330 | } 331 | 332 | 333 | const int pos_att_id = mesh.AddAttribute(positions_attr, true, num_pts); 334 | std::vector pts_int32; 335 | std::vector pts_uint32; 336 | if (integer_mark == 1) { 337 | pts_int32.reserve(points.size()); 338 | std::transform(points.begin(), points.end(), std::back_inserter(pts_int32), [](float x) { 339 | return lrint(x); 340 | }); 341 | } else if (integer_mark == 2) { 342 | pts_uint32.reserve(points.size()); 343 | std::transform(points.begin(), points.end(), std::back_inserter(pts_uint32), [](float x) { 344 | return (x <= 0.f)? 
0: (uint32_t)(x + 0.5); 345 | }); 346 | } 347 | 348 | 349 | for (size_t i = 0; i < num_pts; ++i) { 350 | if (integer_mark == 1) { 351 | mesh.attribute(pos_att_id)->SetAttributeValue(draco::AttributeValueIndex(i), &pts_int32[i * 3ul]); 352 | } else if (integer_mark == 2) { 353 | mesh.attribute(pos_att_id)->SetAttributeValue(draco::AttributeValueIndex(i), &pts_uint32[i * 3ul]); 354 | } else { 355 | mesh.attribute(pos_att_id)->SetAttributeValue(draco::AttributeValueIndex(i), &points[i * 3ul]); 356 | } 357 | 358 | if(colors_channel){ 359 | mesh.attribute(color_att_id)->SetAttributeValue(draco::AttributeValueIndex(i), &colors[i * colors_channel]); 360 | } 361 | if(tex_coord_channel){ 362 | mesh.attribute(tex_coord_att_id)->SetAttributeValue(draco::AttributeValueIndex(i), &tex_coord[i * tex_coord_channel]); 363 | } 364 | if(has_normals){ 365 | mesh.attribute(normal_att_id)->SetAttributeValue(draco::AttributeValueIndex(i), &normals[i * 3]); 366 | } 367 | } 368 | 369 | 370 | // Process faces 371 | const size_t num_faces = faces.size() / 3; 372 | for (size_t i = 0; i < num_faces; ++i) { 373 | mesh.AddFace( 374 | draco::Mesh::Face{draco::PointIndex(faces[3 * i]), 375 | draco::PointIndex(faces[3 * i + 1]), 376 | draco::PointIndex(faces[3 * i + 2])}); 377 | } 378 | 379 | // deduplicate 380 | if (!preserve_order && mesh.DeduplicateAttributeValues()) { 381 | mesh.DeduplicatePointIds(); 382 | } 383 | 384 | draco::Encoder encoder; 385 | setup_encoder_and_metadata( 386 | &mesh, encoder, compression_level, 387 | quantization_bits, quantization_range, 388 | quantization_origin, create_metadata 389 | ); 390 | if (preserve_order) { 391 | encoder.SetEncodingMethod(draco::MESH_SEQUENTIAL_ENCODING); 392 | } 393 | 394 | draco::EncoderBuffer buffer; 395 | const draco::Status status = encoder.EncodeMeshToBuffer(mesh, &buffer); 396 | EncodedObject encodedMeshObject; 397 | encodedMeshObject.buffer = *((std::vector *)buffer.buffer()); 398 | 399 | if (status.ok()) { 400 | encodedMeshObject.encode_status = successful_encoding; 401 | } 402 | else { 403 | std::cerr << "Draco encoding error: " << status.error_msg_string() << std::endl; 404 | encodedMeshObject.encode_status = failed_during_encoding; 405 | } 406 | 407 | return encodedMeshObject; 408 | } 409 | 410 | EncodedObject encode_point_cloud( 411 | const std::vector &points, const int quantization_bits, 412 | const int compression_level, const float quantization_range, 413 | const float *quantization_origin, const bool preserve_order, 414 | const bool create_metadata, const int integer_mark, 415 | const std::vector &colors, 416 | const uint8_t colors_channel 417 | ) { 418 | int num_points = points.size() / 3; 419 | draco::PointCloudBuilder pcb; 420 | pcb.Start(num_points); 421 | 422 | auto dtype = (integer_mark == 1) 423 | ? draco::DataType::DT_INT32 424 | : ( 425 | (integer_mark == 2) 426 | ? 
draco::DataType::DT_UINT32 427 | : draco::DataType::DT_FLOAT32 428 | ); 429 | 430 | const int pos_att_id = pcb.AddAttribute( 431 | draco::GeometryAttribute::POSITION, 3, dtype 432 | ); 433 | 434 | if(colors_channel){ 435 | const int color_att_id = pcb.AddAttribute( 436 | draco::GeometryAttribute::COLOR, colors_channel, draco::DataType::DT_UINT8 437 | ); 438 | for (draco::PointIndex i(0); i < num_points; i++) { 439 | pcb.SetAttributeValueForPoint(pos_att_id, i, points.data() + 3 * i.value()); 440 | pcb.SetAttributeValueForPoint(color_att_id, i, colors.data() + colors_channel * i.value()); 441 | } 442 | } else { 443 | for (draco::PointIndex i(0); i < num_points; i++) { 444 | pcb.SetAttributeValueForPoint(pos_att_id, i, points.data() + 3 * i.value()); 445 | } 446 | } 447 | 448 | std::unique_ptr ptr_point_cloud = pcb.Finalize(!preserve_order); 449 | draco::PointCloud *point_cloud = ptr_point_cloud.get(); 450 | draco::Encoder encoder; 451 | setup_encoder_and_metadata(point_cloud, encoder, compression_level, quantization_bits, quantization_range, quantization_origin, create_metadata); 452 | if (preserve_order) { 453 | encoder.SetEncodingMethod(draco::POINT_CLOUD_SEQUENTIAL_ENCODING); 454 | } 455 | 456 | draco::EncoderBuffer buffer; 457 | const draco::Status status = encoder.EncodePointCloudToBuffer(*point_cloud, &buffer); 458 | 459 | EncodedObject encodedPointCloudObject; 460 | encodedPointCloudObject.buffer = *((std::vector *)buffer.buffer()); 461 | if (status.ok()) { 462 | encodedPointCloudObject.encode_status = successful_encoding; 463 | } 464 | else { 465 | std::cerr << "Draco encoding error: " << status.error_msg_string() << std::endl; 466 | encodedPointCloudObject.encode_status = failed_during_encoding; 467 | } 468 | 469 | return encodedPointCloudObject; 470 | } 471 | 472 | }; 473 | 474 | #undef CHECK_STATUS 475 | #endif 476 | -------------------------------------------------------------------------------- /src/DracoPy.pxd: -------------------------------------------------------------------------------- 1 | #cython: language_level=3 2 | from libcpp.vector cimport vector 3 | from libc.stdint cimport uint8_t, uint32_t 4 | from libcpp cimport bool 5 | 6 | cimport numpy as cnp 7 | import numpy as np 8 | 9 | cnp.import_array() 10 | 11 | cdef extern from "DracoPy.h" namespace "DracoFunctions": 12 | 13 | cdef enum decoding_status: 14 | successful, not_draco_encoded, no_position_attribute, 15 | failed_during_decoding 16 | 17 | cdef enum encoding_status: 18 | successful_encoding, failed_during_encoding 19 | 20 | cdef struct PointCloudObject: 21 | vector[float] points 22 | 23 | # Encoding options 24 | bool encoding_options_set 25 | bool colors_set 26 | int quantization_bits 27 | double quantization_range 28 | vector[double] quantization_origin 29 | 30 | # Represents the decoding success or error message 31 | decoding_status decode_status 32 | vector[uint8_t] colors 33 | 34 | cdef struct MeshObject: 35 | vector[float] points 36 | vector[unsigned int] faces 37 | 38 | vector[float] normals 39 | vector[float] tex_coord 40 | 41 | # Encoding options 42 | bool encoding_options_set 43 | bool colors_set 44 | int quantization_bits 45 | double quantization_range 46 | vector[double] quantization_origin 47 | 48 | # Represents the decoding success or error message 49 | decoding_status decode_status 50 | vector[uint8_t] colors 51 | 52 | cdef struct EncodedObject: 53 | vector[unsigned char] buffer 54 | encoding_status encode_status 55 | 56 | MeshObject decode_buffer(const char *buffer, size_t buffer_len) except + 
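    # 'except +' lets Cython translate C++ exceptions thrown inside these
    # functions into Python exceptions instead of terminating the interpreter.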
57 | 58 | EncodedObject encode_mesh( 59 | const vector[float] points, 60 | const vector[uint32_t] faces, 61 | const int quantization_bits, 62 | const int compression_level, 63 | const float quantization_range, 64 | const float *quantization_origin, 65 | const bool preserve_order, 66 | const bool create_metadata, 67 | const int integer_mark, 68 | const vector[uint8_t] colors, 69 | const uint8_t colors_channel, 70 | const vector[float] tex_coord, 71 | const uint8_t tex_coord_channel, 72 | const vector[float] normals, 73 | const uint8_t has_normals 74 | ) except + 75 | 76 | EncodedObject encode_point_cloud( 77 | const vector[float] points, 78 | const int quantization_bits, 79 | const int compression_level, 80 | const float quantization_range, 81 | const float *quantization_origin, 82 | const bool preserve_order, 83 | const bool create_metadata, 84 | const int integer_mark, 85 | const vector[uint8_t] colors, 86 | const uint8_t colors_channel 87 | ) except + 88 | -------------------------------------------------------------------------------- /src/DracoPy.pyx: -------------------------------------------------------------------------------- 1 | # distutils: language = c++ 2 | from typing import Union, cast 3 | 4 | from cpython.mem cimport PyMem_Malloc, PyMem_Free 5 | cimport DracoPy 6 | import struct 7 | from math import floor 8 | from libc.string cimport memcmp 9 | from libc.stdint cimport ( 10 | int8_t, int16_t, int32_t, int64_t, 11 | uint8_t, uint16_t, uint32_t, uint64_t, 12 | ) 13 | 14 | cimport numpy as cnp 15 | cnp.import_array() 16 | 17 | import numpy as np 18 | 19 | class DracoPointCloud: 20 | def __init__(self, data_struct): 21 | self.data_struct = data_struct 22 | if data_struct['encoding_options_set']: 23 | self.encoding_options = EncodingOptions(data_struct['quantization_bits'], 24 | data_struct['quantization_range'], data_struct['quantization_origin']) 25 | else: 26 | self.encoding_options = None 27 | 28 | def get_encoded_coordinate(self, value, axis): 29 | if self.encoding_options is not None: 30 | return self.encoding_options.get_encoded_coordinate(value, axis) 31 | 32 | def get_encoded_point(self, point): 33 | if self.encoding_options is not None: 34 | return self.encoding_options.get_encoded_point(point) 35 | 36 | @property 37 | def num_axes(self): 38 | return 3 39 | 40 | @property 41 | def points(self): 42 | points_ = self.data_struct['points'] 43 | N = len(points_) // 3 44 | return np.array(points_).reshape((N, 3)) 45 | 46 | @property 47 | def colors(self): 48 | if self.data_struct['colors_set']: 49 | colors_ = self.data_struct['colors'] 50 | N = len(self.data_struct['points']) // 3 51 | return np.array(colors_).reshape((N, -1)) 52 | else: 53 | return None 54 | 55 | class DracoMesh(DracoPointCloud): 56 | @property 57 | def faces(self): 58 | faces_ = self.data_struct['faces'] 59 | N = len(faces_) // 3 60 | return np.array(faces_).reshape((N, 3)) 61 | 62 | @property 63 | def normals(self): 64 | normals_ = self.data_struct['normals'] 65 | N = len(normals_) // 3 66 | return np.array(normals_).reshape((N,3)) 67 | 68 | @property 69 | def tex_coord(self): 70 | tex_coord_ = self.data_struct['tex_coord'] 71 | if len(tex_coord_) == 0: 72 | return None 73 | N = len(self.data_struct['points']) // 3 74 | NC = len(tex_coord_) // N 75 | return np.array(tex_coord_).reshape((N, NC)) 76 | 77 | class EncodingOptions(object): 78 | def __init__(self, quantization_bits, quantization_range, quantization_origin): 79 | self.quantization_bits = quantization_bits 80 | self.quantization_range = 
quantization_range 81 | self.quantization_origin = quantization_origin 82 | self.inverse_alpha = quantization_range / ((2 ** quantization_bits) - 1) 83 | 84 | def get_encoded_coordinate(self, value, axis): 85 | if value < self.quantization_origin[axis] or value > (self.quantization_origin[axis] + self.quantization_range): 86 | raise ValueError('Specified value out of encoded range') 87 | difference = value - self.quantization_origin[axis] 88 | quantized_index = floor((difference / self.inverse_alpha) + 0.5) 89 | return self.quantization_origin[axis] + (quantized_index * self.inverse_alpha) 90 | 91 | def get_encoded_point(self, point): 92 | encoded_point = [] 93 | for axis in range(self.num_axes): 94 | encoded_point.append(self.get_encoded_coordinate(point[axis], axis)) 95 | return encoded_point 96 | 97 | @property 98 | def num_axes(self): 99 | return 3 100 | 101 | class FileTypeException(Exception): 102 | pass 103 | 104 | class EncodingFailedException(Exception): 105 | pass 106 | 107 | def format_array(arr, col=3): 108 | if arr is None: 109 | return None 110 | 111 | if not isinstance(arr, np.ndarray): 112 | arr = np.array(arr) 113 | if arr.ndim == 1: 114 | arr = arr.reshape((len(arr) // col, col)) 115 | return arr 116 | 117 | def encode( 118 | points, faces=None, 119 | quantization_bits=14, compression_level=1, 120 | quantization_range=-1, quantization_origin=None, 121 | create_metadata=False, preserve_order=False, 122 | colors=None, tex_coord=None, normals=None 123 | ) -> bytes: 124 | """ 125 | bytes encode( 126 | points, faces=None, 127 | quantization_bits=11, compression_level=1, 128 | quantization_range=-1, quantization_origin=None, 129 | create_metadata=False, preserve_order=False, 130 | colors=None, tex_coord=None, normals=None 131 | ) 132 | 133 | Encode a list or numpy array of points/vertices (float) and faces 134 | (unsigned int) to a draco buffer. If faces is None, then a point 135 | cloud file will be generated, otherwise a mesh file. 136 | 137 | Quantization bits should be an integer between 1 and 30 138 | Compression level should be an integer between 0 and 10 139 | Quantization_range is a float representing the size of the 140 | bounding cube for the mesh. By default it is the range 141 | of the dimension of the input vertices with greatest range. 142 | Set a negative value to use the default behavior. 143 | Quantization_origin is the point in space where the bounding box begins. 144 | By default it is a point where each coordinate is the minimum of 145 | that coordinate among the input vertices. 146 | Preserve_order controls whether the order of points / faces should be 147 | preserved after compression. Setting it to True will reduce compression 148 | ratio (greatly) but guarantees the result points / faces are in same 149 | order as the input. 150 | Colors is a numpy array of colors (uint8) with shape (N, K). N is the number of 151 | vertices. K must be >= 1. Use None if mesh does not have colors 152 | Tex coord is a numpy array of texture coordinates (float) with shape (N, 2). N is the number of 153 | vertices. Use None if mesh does not have texture coordinates. 154 | Normals is a numpy array of normal vectors (float) with shape (N, 3). N is the number of 155 | vertices. Use None if mesh does not have normal vectors. 
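    Example (illustrative; the array contents are arbitrary):

        import numpy as np
        import DracoPy

        points = np.random.random((100, 3)).astype(np.float32)
        binary = DracoPy.encode(points, preserve_order=True)  # no faces -> point cloud
        cloud = DracoPy.decode(binary)
        # cloud.points is a (100, 3) float array close to the input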
156 | """ 157 | assert 0 <= compression_level <= 10, "Compression level must be in range [0, 10]" 158 | 159 | # @zeruniverse Draco supports quantization_bits 1 to 30, see following link: 160 | # https://github.com/google/draco/blob/master/src/draco/attributes/attribute_quantization_transform.cc#L107 161 | assert 1 <= quantization_bits <= 30, "Quantization bits must be in range [1, 30]" 162 | 163 | points = format_array(points) 164 | faces = format_array(faces) 165 | colors = format_array(colors) 166 | tex_coord = format_array(tex_coord, col=2) 167 | normals = format_array(normals, col=3) 168 | 169 | integer_mark = 0 170 | 171 | if np.issubdtype(points.dtype, np.signedinteger): 172 | integer_mark = 1 173 | elif np.issubdtype(points.dtype, np.unsignedinteger): 174 | integer_mark = 2 175 | 176 | cdef cnp.ndarray[float, ndim=1] qorigin = np.zeros((3,), dtype=np.float32) 177 | cdef float[:] quant_origin = qorigin 178 | 179 | if quantization_origin is not None: 180 | qorigin[:] = quantization_origin[:] 181 | else: 182 | qorigin[:] = np.min(points, axis=0) 183 | 184 | cdef vector[float] pointsview = points.reshape((points.size,)) 185 | cdef vector[uint32_t] facesview 186 | cdef vector[uint8_t] colorsview 187 | cdef vector[float] texcoordview 188 | cdef vector[float] normalsview 189 | 190 | colors_channel = 0 191 | if colors is not None: 192 | assert np.issubdtype(colors.dtype, np.uint8), "Colors must be uint8" 193 | assert len(colors.shape) == 2, "Colors must be 2D" 194 | colors_channel = colors.shape[1] 195 | assert 1 <= colors_channel <= 127, "Number of color channels must be in range [1, 127]" 196 | colorsview = colors.reshape((colors.size,)) 197 | 198 | tex_coord_channel = 0 199 | if tex_coord is not None: 200 | assert np.issubdtype(tex_coord.dtype, float), "Tex coord must be float" 201 | assert len(tex_coord.shape) == 2, "Tex coord must be 2D" 202 | tex_coord_channel = tex_coord.shape[1] 203 | assert 1 <= tex_coord_channel <= 127, "Number of tex coord channels must be in range [1, 127]" 204 | texcoordview = tex_coord.reshape((tex_coord.size,)) 205 | 206 | 207 | has_normals = 0 208 | if normals is not None: 209 | assert np.issubdtype(normals.dtype, float), "Normals must be float" 210 | assert normals.shape[1] == 3, "Normals must have 3 components" 211 | has_normals = 1 212 | normalsview = normals.reshape((normals.size,)) 213 | 214 | if faces is None: 215 | encoded = DracoPy.encode_point_cloud( 216 | pointsview, quantization_bits, compression_level, 217 | quantization_range, &quant_origin[0], 218 | preserve_order, create_metadata, integer_mark, 219 | colorsview, colors_channel 220 | ) 221 | else: 222 | facesview = faces.reshape((faces.size,)) 223 | encoded = DracoPy.encode_mesh( 224 | pointsview, facesview, 225 | quantization_bits, compression_level, 226 | quantization_range, &quant_origin[0], 227 | preserve_order, create_metadata, integer_mark, 228 | colorsview, colors_channel, texcoordview, tex_coord_channel, 229 | normalsview, has_normals # Add these two parameters 230 | ) 231 | 232 | 233 | if encoded.encode_status == DracoPy.encoding_status.successful_encoding: 234 | return bytes(encoded.buffer) 235 | elif encoded.encode_status == DracoPy.encoding_status.failed_during_encoding: 236 | raise EncodingFailedException('Invalid mesh') 237 | 238 | def raise_decoding_error(decoding_status): 239 | if decoding_status == DracoPy.decoding_status.not_draco_encoded: 240 | raise FileTypeException('Input mesh is not draco encoded') 241 | elif decoding_status == 
DracoPy.decoding_status.failed_during_decoding: 242 | raise TypeError('Failed to decode input mesh. Data might be corrupted') 243 | elif decoding_status == DracoPy.decoding_status.no_position_attribute: 244 | raise ValueError('DracoPy only supports meshes with position attributes') 245 | 246 | def decode(bytes buffer) -> Union[DracoMesh, DracoPointCloud]: 247 | """ 248 | (DracoMesh|DracoPointCloud) decode(bytes buffer) 249 | 250 | Decodes a binary draco file into either a DracoPointCloud 251 | or a DracoMesh. 252 | """ 253 | mesh_struct = DracoPy.decode_buffer(buffer, len(buffer)) 254 | if mesh_struct.decode_status != DracoPy.decoding_status.successful: 255 | raise_decoding_error(mesh_struct.decode_status) 256 | 257 | if len(mesh_struct.faces) > 0: 258 | return DracoMesh(mesh_struct) 259 | return DracoPointCloud(mesh_struct) 260 | 261 | # FOR BACKWARDS COMPATIBILITY 262 | 263 | def encode_mesh_to_buffer(*args, **kwargs) -> bytes: 264 | """Provided for backwards compatibility. Use encode.""" 265 | return encode(*args, **kwargs) 266 | 267 | def encode_point_cloud_to_buffer( 268 | points, quantization_bits=14, compression_level=1, 269 | quantization_range=-1, quantization_origin=None, 270 | create_metadata=False 271 | ) -> bytes: 272 | """Provided for backwards compatibility. Use encode.""" 273 | return encode( 274 | points=points, 275 | faces=None, 276 | quantization_bits=quantization_bits, 277 | compression_level=compression_level, 278 | quantization_range=quantization_range, 279 | quantization_origin=quantization_origin, 280 | create_metadata=create_metadata, 281 | preserve_order=False 282 | ) 283 | 284 | def decode_buffer_to_mesh(buffer) -> Union[DracoMesh, DracoPointCloud]: 285 | """Provided for backwards compatibility. Use decode.""" 286 | return decode(buffer) 287 | 288 | def decode_buffer_to_point_cloud(buffer) -> Union[DracoMesh, DracoPointCloud]: 289 | """Provided for backwards compatibility. 
Use decode.""" 290 | return cast(decode(buffer), DracoPointCloud) 291 | -------------------------------------------------------------------------------- /testdata_files/bunny.drc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/seung-lab/DracoPy/07ff20f087e7c5c8430869417d68dcc4301851ae/testdata_files/bunny.drc -------------------------------------------------------------------------------- /testdata_files/bunny_normals.drc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/seung-lab/DracoPy/07ff20f087e7c5c8430869417d68dcc4301851ae/testdata_files/bunny_normals.drc -------------------------------------------------------------------------------- /testdata_files/point_cloud_bunny.drc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/seung-lab/DracoPy/07ff20f087e7c5c8430869417d68dcc4301851ae/testdata_files/point_cloud_bunny.drc -------------------------------------------------------------------------------- /testdata_files/point_cloud_bunny_test.drc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/seung-lab/DracoPy/07ff20f087e7c5c8430869417d68dcc4301851ae/testdata_files/point_cloud_bunny_test.drc -------------------------------------------------------------------------------- /tests.py: -------------------------------------------------------------------------------- 1 | import os 2 | import DracoPy 3 | import pytest 4 | 5 | import numpy as np 6 | 7 | testdata_directory = "testdata_files" 8 | 9 | EXPECTED_POINTS_BUNNY_MESH = 104502 // 3 10 | EXPECTED_POINTS_BUNNY_PTC = 107841 // 3 11 | EXPECTED_FACES_BUNNY = 208353 // 3 12 | 13 | def test_decoding_and_encoding_mesh_file(): 14 | with open(os.path.join(testdata_directory, "bunny.drc"), "rb") as draco_file: 15 | mesh = DracoPy.decode(draco_file.read()) 16 | 17 | assert len(mesh.points) == EXPECTED_POINTS_BUNNY_MESH 18 | assert len(mesh.faces) == EXPECTED_FACES_BUNNY 19 | assert len(mesh.normals) == 0 20 | 21 | with open(os.path.join(testdata_directory, "bunny_normals.drc"), "rb") as draco_file: 22 | mesh = DracoPy.decode(draco_file.read()) 23 | 24 | assert len(mesh.points) == EXPECTED_POINTS_BUNNY_MESH 25 | assert len(mesh.faces) == EXPECTED_FACES_BUNNY 26 | assert len(mesh.normals) == EXPECTED_POINTS_BUNNY_MESH 27 | 28 | encoding_test1 = DracoPy.encode(mesh.points, mesh.faces) 29 | encoding_test2 = DracoPy.encode(mesh.points.flatten(), mesh.faces) 30 | 31 | assert encoding_test1 == encoding_test2 32 | encoding_test = encoding_test1 33 | 34 | # test preserve_order 35 | np.random.shuffle(mesh.faces) 36 | encoding_test3 = DracoPy.encode(mesh.points, mesh.faces, compression_level=10, 37 | preserve_order=True) 38 | mesh_decode = DracoPy.decode(encoding_test3) 39 | assert np.allclose(mesh.points, mesh_decode.points) 40 | assert np.allclose(mesh.faces, mesh_decode.faces) 41 | # color is None 42 | assert mesh_decode.colors is None, "colors should not present" 43 | 44 | colors = np.random.randint(0, 255, [mesh.points.shape[0], 16]).astype(np.uint8) 45 | tex_coord = np.random.random([mesh.points.shape[0], 2]).astype(float) 46 | 47 | # test extreme quantization 48 | encoding_test4 = DracoPy.encode(mesh.points, mesh.faces, compression_level=10, 49 | quantization_bits=1, colors=colors, 50 | preserve_order=True) 51 | mesh_decode = DracoPy.decode(encoding_test4) 52 | assert np.array_equal(colors, 
mesh_decode.colors), "colors decode result is wrong" 53 | 54 | # Setting quantization_bits 26 here. Larger value causes MemoryError on 32bit systems. 55 | encoding_test5 = DracoPy.encode(mesh.points, mesh.faces, compression_level=1, 56 | quantization_bits=26, colors=colors, tex_coord=tex_coord) 57 | mesh_decode = DracoPy.decode(encoding_test5) 58 | 59 | #this is very slow 60 | # from tqdm import tqdm 61 | # ptmap = {} 62 | # for pt in tqdm(mesh.points): 63 | # for i in range(len(mesh.points)): 64 | # if np.all(np.isclose(pt, mesh.points[i])): 65 | # ptmap[tuple(pt)] = i 66 | # break 67 | 68 | # for i, pt in enumerate(mesh_decode.points): 69 | # pt = tuple(pt) 70 | # assert np.all(np.isclose(mesh.tex_coord[ptmap[pt]], mesh_decode.tex_coord[i])) 71 | 72 | assert mesh_decode.colors is not None, "colors should present" 73 | assert mesh_decode.tex_coord is not None, "tex_coord should present" 74 | 75 | with open(os.path.join(testdata_directory, "bunny_test.drc"), "wb") as test_file: 76 | test_file.write(encoding_test) 77 | 78 | with open(os.path.join(testdata_directory, "bunny_test.drc"), "rb") as test_file: 79 | mesh = DracoPy.decode(test_file.read()) 80 | assert (mesh.encoding_options) is None 81 | assert len(mesh.points) == EXPECTED_POINTS_BUNNY_MESH 82 | assert len(mesh.faces) == EXPECTED_FACES_BUNNY 83 | 84 | with pytest.raises(AssertionError): 85 | invalid_c = np.random.randint(0, 255, [mesh.points.shape[0], 128]).astype(np.uint8) 86 | invalid_m = DracoPy.encode(mesh.points, mesh.faces, compression_level=1, 87 | quantization_bits=26, colors=invalid_c) 88 | 89 | def test_tex_coord_encoding(): 90 | points = np.array([ 91 | [0,0,0], 92 | [1,0,0], 93 | [1,1,0], 94 | [0,1,0], 95 | ]) 96 | faces = np.array([[0,1,2], [0,3,2]]) 97 | # tex_coord = np.random.random([len(points), 2]).astype(float) 98 | tex_coord = np.array([ 99 | [0.0, 0.1], 100 | [0.2, 0.3], 101 | [0.4, 0.5], 102 | [0.6, 0.7], 103 | ]) 104 | 105 | encoded = DracoPy.encode( 106 | points, faces, 107 | compression_level=1, 108 | quantization_bits=26, 109 | # colors=colors, 110 | tex_coord=tex_coord 111 | ) 112 | mesh = DracoPy.decode(encoded) 113 | 114 | ptmap = {} 115 | for pt in mesh.points: 116 | for i in range(len(points)): 117 | if np.all(pt == points[i]): 118 | ptmap[tuple(pt)] = i 119 | break 120 | 121 | for i, pt in enumerate(mesh.points): 122 | pt = tuple(pt) 123 | assert np.all(np.isclose(tex_coord[ptmap[pt]], mesh.tex_coord[i])) 124 | 125 | 126 | def test_decoding_and_encoding_mesh_file_integer_positions(): 127 | with open(os.path.join(testdata_directory, "bunny.drc"), "rb") as draco_file: 128 | file_content = draco_file.read() 129 | mesh_object = DracoPy.decode(file_content) 130 | assert len(mesh_object.points) == EXPECTED_POINTS_BUNNY_MESH 131 | assert len(mesh_object.faces) == EXPECTED_FACES_BUNNY 132 | 133 | points = np.array(mesh_object.points) 134 | points = points.astype(np.float64) 135 | points -= np.min(points, axis=0) 136 | points /= np.max(points, axis=0) 137 | points *= 2 ** 16 138 | points = points.astype(np.uint32) 139 | 140 | encoding_test_uint = DracoPy.encode( 141 | points, mesh_object.faces, 142 | quantization_bits=16, 143 | ) 144 | 145 | encoding_test_int = DracoPy.encode( 146 | points.astype(np.int64), mesh_object.faces, 147 | quantization_bits=16, 148 | ) 149 | 150 | encoding_test_float = DracoPy.encode( 151 | points.astype(np.float32), mesh_object.faces, 152 | quantization_bits=16, 153 | ) 154 | assert encoding_test_uint != encoding_test_float 155 | assert encoding_test_uint != encoding_test_int 156 | 
assert encoding_test_int != encoding_test_float 157 | 158 | encoding_test_uint64 = DracoPy.encode( 159 | points.astype(np.uint64), mesh_object.faces, 160 | quantization_bits=16, 161 | ) 162 | assert encoding_test_uint == encoding_test_uint64 163 | 164 | mesh_object = DracoPy.decode(encoding_test_uint) 165 | 166 | assert len(mesh_object.points) == EXPECTED_POINTS_BUNNY_MESH 167 | assert len(mesh_object.faces) == EXPECTED_FACES_BUNNY 168 | 169 | pts_f = np.array(mesh_object.points) 170 | pts_f = np.sort(pts_f, axis=0) 171 | pts_i = np.sort(np.copy(points), axis=0) 172 | assert np.all(np.isclose(pts_i, pts_f)) 173 | 174 | def test_decoding_improper_file(): 175 | with open(os.path.join(testdata_directory, "bunny.obj"), "rb") as improper_file: 176 | file_content = improper_file.read() 177 | with pytest.raises(DracoPy.FileTypeException): 178 | DracoPy.decode(file_content) 179 | 180 | 181 | def test_metadata(): 182 | with open(os.path.join(testdata_directory, "bunny.drc"), "rb") as draco_file: 183 | file_content = draco_file.read() 184 | mesh_object = DracoPy.decode(file_content) 185 | encoding_options = { 186 | "quantization_bits": 12, 187 | "compression_level": 3, 188 | "quantization_range": 1000, 189 | "quantization_origin": [-100, -100, -100], 190 | "create_metadata": True, 191 | } 192 | encoding_test = DracoPy.encode( 193 | mesh_object.points, mesh_object.faces, **encoding_options 194 | ) 195 | with open( 196 | os.path.join(testdata_directory, "bunny_test.drc"), "wb" 197 | ) as test_file: 198 | test_file.write(encoding_test) 199 | 200 | with open(os.path.join(testdata_directory, "bunny_test.drc"), "rb") as test_file: 201 | file_content = test_file.read() 202 | mesh_object = DracoPy.decode(file_content) 203 | eo = mesh_object.encoding_options 204 | assert (eo) is not None 205 | assert (eo.quantization_bits) == 12 206 | assert (eo.quantization_range) == 1000 207 | assert (eo.quantization_origin) == [-100, -100, -100] 208 | 209 | 210 | def test_decoding_and_encoding_point_cloud_file(): 211 | with open( 212 | os.path.join(testdata_directory, "point_cloud_bunny.drc"), "rb" 213 | ) as draco_file: 214 | file_content = draco_file.read() 215 | point_cloud_object = DracoPy.decode(file_content) 216 | assert len(point_cloud_object.points) == EXPECTED_POINTS_BUNNY_PTC 217 | encoding_test = DracoPy.encode(point_cloud_object.points) 218 | with open( 219 | os.path.join(testdata_directory, "point_cloud_bunny_test.drc"), "wb" 220 | ) as test_file: 221 | test_file.write(encoding_test) 222 | 223 | # test preserve_order 224 | np.random.shuffle(point_cloud_object.points) 225 | colors = np.random.randint(0, 255, [point_cloud_object.points.shape[0], 127]).astype(np.uint8) 226 | encoding_test2 = DracoPy.encode(point_cloud_object.points, compression_level=10, 227 | quantization_bits=26, preserve_order=True, colors=colors) 228 | ptc_decode = DracoPy.decode(encoding_test2) 229 | assert np.allclose(point_cloud_object.points, ptc_decode.points) 230 | assert np.array_equal(colors, ptc_decode.colors) 231 | 232 | # test extreme quantization 233 | encoding_test3 = DracoPy.encode(point_cloud_object.points, compression_level=10, 234 | quantization_bits=1) 235 | ptc_decode = DracoPy.decode(encoding_test3) 236 | assert ptc_decode.colors is None, "colors should not present" 237 | 238 | with open( 239 | os.path.join(testdata_directory, "point_cloud_bunny_test.drc"), "rb" 240 | ) as test_file: 241 | file_content = test_file.read() 242 | point_cloud_object = DracoPy.decode(file_content) 243 | assert 
(point_cloud_object.encoding_options) is None 244 | assert len(point_cloud_object.points) == EXPECTED_POINTS_BUNNY_PTC 245 | 246 | with pytest.raises(AssertionError): 247 | invalid_c = np.random.randint(0, 255, [point_cloud_object.points.shape[0], 128]).astype(np.uint8) 248 | invalid_m = DracoPy.encode(point_cloud_object.points, compression_level=1, 249 | quantization_bits=26, colors=invalid_c) 250 | 251 | 252 | def test_normals_encoding(): 253 | # Read reference mesh 254 | with open(os.path.join(testdata_directory, "bunny.drc"), 'rb') as draco_file: 255 | mesh = DracoPy.decode(draco_file.read()) 256 | 257 | # Create test normal vectors 258 | test_normals = np.array([[1.0, 0.0, 0.0]] * mesh.points.shape[0]) 259 | 260 | # Encode with test normals 261 | binary = DracoPy.encode(mesh.points, mesh.faces, normals=test_normals) 262 | 263 | # Decode and verify normals 264 | decoded_mesh = DracoPy.decode(binary) 265 | assert np.allclose(decoded_mesh.normals, test_normals) --------------------------------------------------------------------------------
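As an illustration of the quantization arithmetic implemented by get_encoded_coordinate in DracoPy.pyx above, the following standalone sketch (plain Python with assumed toy values, not part of the library or its tests) reproduces how a coordinate is snapped to the nearest point of a grid with spacing quantization_range / (2 ** quantization_bits - 1):

    from math import floor

    quantization_bits = 4        # assumed toy value so the snapping is visible
    quantization_range = 10.0    # assumed bounding-cube edge length
    origin = 0.0                 # assumed minimum coordinate on this axis

    # grid spacing between representable coordinate values
    inverse_alpha = quantization_range / ((2 ** quantization_bits) - 1)

    value = 3.14159
    quantized_index = floor(((value - origin) / inverse_alpha) + 0.5)
    reconstructed = origin + quantized_index * inverse_alpha
    print(reconstructed)  # ~3.3333, the grid point nearest to 3.14159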