├── gsconverter ├── __init__.py ├── utils │ ├── __init__.py │ ├── config.py │ ├── utility_functions.py │ ├── data_processing.py │ ├── argument_actions.py │ ├── conversion_functions.py │ ├── format_3dgs.py │ ├── format_parquet.py │ ├── format_cc.py │ ├── utility.py │ └── base_converter.py └── main.py ├── requirements.txt ├── .gitignore ├── setup.py ├── LICENSE └── README.md /gsconverter/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /gsconverter/utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | scikit-learn 2 | plyfile 3 | pandas 4 | pyarrow -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.ply 2 | *.parquet 3 | build 4 | __pycache__ 5 | *.egg-info 6 | *.zip 7 | -------------------------------------------------------------------------------- /gsconverter/utils/config.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3D Gaussian Splatting Converter 3 | Copyright (c) 2023 Francesco Fugazzi 4 | 5 | This software is released under the MIT License. 6 | For more information about the license, please see the LICENSE file. 7 | """ 8 | 9 | DEBUG = False 10 | -------------------------------------------------------------------------------- /gsconverter/utils/utility_functions.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3D Gaussian Splatting Converter 3 | Copyright (c) 2023 Francesco Fugazzi 4 | 5 | This software is released under the MIT License. 
# (tail of the gsconverter/utils/utility_functions.py module docstring)

import signal

from . import config


def init_worker():
    """Initializer for multiprocessing pool workers.

    Makes each worker ignore SIGINT so that Ctrl-C is delivered only to the
    parent process, which can then terminate the pool cleanly.
    """
    signal.signal(signal.SIGINT, signal.SIG_IGN)


def debug_print(message):
    """Print *message* only when debug mode is enabled via config.DEBUG."""
    if config.DEBUG:
        print(message)

# ---------------------------------------------------------------------------
# /setup.py:
# ---------------------------------------------------------------------------
from setuptools import setup, find_packages

# Read the contents of your README file
with open('README.md', encoding='utf-8') as f:
    long_description = f.read()

# Read the contents of the requirements file
with open('requirements.txt') as f:
    requirements = f.read().splitlines()

setup(
    name='gsconverter',
    version='0.2',
    author='Francesco Fugazzi',
    description='3D Gaussian Splatting Converter',
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/francescofugazzi/3dgsconverter',
    packages=find_packages(),
    install_requires=requirements,
    classifiers=[
        'Programming Language :: Python :: 3',
        # BUGFIX: 'License :: MIT License' is not a registered trove
        # classifier; the canonical value recognised by PyPI is:
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
    ],
    python_requires='>=3.8',
    entry_points={
        'console_scripts': [
            '3dgsconverter=gsconverter.main:main',
        ],
    },
)

# ---------------------------------------------------------------------------
# /LICENSE:  (license text continues on the next chunk line)
# ---------------------------------------------------------------------------
# MIT License
#
# Copyright (c) 2023 Francesco Fugazzi
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /gsconverter/utils/data_processing.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3D Gaussian Splatting Converter 3 | Copyright (c) 2023 Francesco Fugazzi 4 | 5 | This software is released under the MIT License. 6 | For more information about the license, please see the LICENSE file. 
# (end of module docstring for gsconverter/utils/data_processing.py)

from .utility_functions import debug_print


def process_data(data_object, bbox=None, apply_density_filter=None, remove_flyers=None):
    """Apply optional pre-processing steps to *data_object* in place.

    Parameters
    ----------
    data_object : object
        Converter exposing ``crop_by_bbox``, ``apply_density_filter`` and
        ``remove_flyers`` (presumably a BaseConverter subclass — confirm).
    bbox : sequence of 6 floats, optional
        (min_x, min_y, min_z, max_x, max_y, max_z) crop volume.
    apply_density_filter : list, optional
        Up to two numbers: voxel_size (default 1.0) and threshold_percentage
        (default 0.32).
    remove_flyers : list, optional
        Up to two numbers: k neighbours (default 25) and threshold_factor
        (default 1.0).
    """
    # Crop the data based on the bounding box if specified.
    if bbox:
        min_x, min_y, min_z, max_x, max_y, max_z = bbox
        data_object.crop_by_bbox(min_x, min_y, min_z, max_x, max_y, max_z)
        debug_print("[DEBUG] Bounding box cropped.")

    # Apply density filter if parameters are provided.
    if apply_density_filter:
        # BUGFIX: pad missing values positionally. The old idiom
        # `(params + [1.0, 0.32])[:2]` used the *voxel_size* default (1.0) as
        # the threshold when exactly one value was supplied.
        params = list(apply_density_filter)
        defaults = [1.0, 0.32]
        voxel_size, threshold_percentage = (params + defaults[len(params):])[:2]
        data_object.apply_density_filter(voxel_size=float(voxel_size),
                                         threshold_percentage=float(threshold_percentage))
        debug_print("[DEBUG] Density filter applied.")

    # Remove flyers if parameters are provided.
    if remove_flyers:
        # Same positional padding fix as above; defaults are k=25,
        # threshold_factor=1.0.
        params = list(remove_flyers)
        defaults = [25, 1.0]
        k, threshold_factor = (params + defaults[len(params):])[:2]
        data_object.remove_flyers(k=int(k), threshold_factor=float(threshold_factor))
        debug_print("[DEBUG] Flyers removed.")

# ---------------------------------------------------------------------------
# /gsconverter/utils/argument_actions.py:  (module docstring continues on the
# next chunk line)
# ---------------------------------------------------------------------------
# (end of module docstring for gsconverter/utils/argument_actions.py)

import argparse


def _coerce_pair(parser, values, defaults, flag, requirement):
    """Shared validation for the two-number filter options.

    Returns a fresh list of *defaults* when no values were given on the
    command line; otherwise requires exactly two values and returns them
    converted to float. Calls parser.error() (which exits) on bad input.
    """
    if not values:
        return list(defaults)
    if len(values) != 2:
        parser.error(f"{flag} requires two numbers: {requirement}.")
    try:
        return [float(v) for v in values]
    except ValueError:
        parser.error(f"Both arguments for {flag} must be numbers.")


class DensityFilterAction(argparse.Action):
    """Parse ``--density_filter [voxel_size threshold_percentage]``.

    With no values supplied, defaults to [1.0, 0.32].
    """

    def __call__(self, parser, args, values, option_string=None):
        setattr(args, self.dest, _coerce_pair(
            parser, values, (1.0, 0.32),
            "--density_filter", "voxel_size and threshold_percentage"))


class RemoveFlyersAction(argparse.Action):
    """Parse ``--remove_flyers [k threshold_factor]``.

    With no values supplied, defaults to [25, 10.5].
    NOTE(review): this default threshold_factor (10.5) differs from the
    fallback used in data_processing.process_data (1.0) — confirm intended.
    """

    def __call__(self, parser, args, values, option_string=None):
        setattr(args, self.dest, _coerce_pair(
            parser, values, (25, 10.5), "--remove_flyers",
            "'k' for the number of neighbors and 'threshold_factor' for the multiplier of the standard deviation"))


class AboutAction(argparse.Action):
    """Print copyright/license information and exit (used by ``--about``)."""

    def __init__(self, option_strings, dest, nargs=0, **kwargs):
        super(AboutAction, self).__init__(option_strings, dest, nargs=0, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        copyright_info = """
3D Gaussian Splatting Converter
Copyright (c) 2023 Francesco Fugazzi

This software is released under the MIT License.
For more information about the license, please see the LICENSE file.
"""
        print(copyright_info)
        parser.exit()  # Exit after displaying the information.
51 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 3D Gaussian Splatting Converter 2 | 3 | A tool for converting 3D Gaussian Splatting `.ply` and `.parquet` files into a format suitable for Cloud Compare and vice-versa. Enhance your point cloud editing with added functionalities like RGB coloring, density filtering, and flyer removal. 4 | 5 | ## Features 6 | 7 | - **Format Conversion**: Seamlessly switch between 3DGS `.ply` and Cloud Compare-friendly `.ply` formats. Now also `.parquet` is supported as input file. 8 | - **RGB Coloring**: Add RGB values to your point cloud for better visualization and editing in Cloud Compare. 9 | - **Density Filtering**: Focus on the dense regions of your point cloud by removing sparse data. 10 | - **Flyer Removal**: Get rid of unwanted outliers or floating points in your dataset. Especially useful when combined with the density filter due to its intensive nature. 11 | - **Bounding box cropping**: command for cropping point clouds to focus on specific regions. 12 | 13 | ## Installation 14 | 15 | There are two ways to install the 3D Gaussian Splatting Converter: 16 | 17 | **1. Direct Installation via pip**: 18 | 19 | Directly install the app from GitHub using pip. This method is straightforward and recommended for most users. 20 | 21 | ```bash 22 | pip install git+https://github.com/francescofugazzi/3dgsconverter.git 23 | ``` 24 | 25 | **2. Installation by Cloning the Repository:**: 26 | 27 | If you prefer to clone the repository and install from the source, follow these steps: 28 | 29 | ```bash 30 | git clone https://github.com/francescofugazzi/3dgsconverter 31 | cd 3dgsconverter 32 | pip install . 33 | ``` 34 | 35 | ## Usage 36 | 37 | Here are some basic examples to get you started: 38 | 39 | **1. 
Conversion from 3DGS to Cloud Compare format with RGB addition**: 40 | 41 | ```bash 42 | 3dgsconverter -i input_3dgs.ply -o output_cc.ply -f cc --rgb 43 | ``` 44 | 45 | **2. Conversion from Cloud Compare format back to 3DGS:**: 46 | 47 | ```bash 48 | 3dgsconverter -i input_cc.ply -o output_3dgs.ply -f 3dgs 49 | ``` 50 | 51 | **3. Applying Density Filter during conversion:**: 52 | 53 | ```bash 54 | 3dgsconverter -i input_3dgs.ply -o output_cc.ply -f cc --density_filter 55 | ``` 56 | 57 | **4. Applying Density Filter and Removing floaters during conversion:**: 58 | 59 | ```bash 60 | 3dgsconverter -i input_3dgs.ply -o output_cc.ply -f cc --density_filter --remove_flyers 61 | ``` 62 | 63 | For a full list of parameters and their descriptions, you can use the `-h` or `--help` argument: 64 | 65 | ```bash 66 | gsconverter -h 67 | ``` 68 | 69 | ## Debug Information 70 | 71 | For detailed insights pass the `--debug` flag (or `-d` for short) when executing the script. 72 | 73 | ## Contribute 74 | 75 | Feel free to open issues or PRs if you have suggestions or improvements for this tool! 76 | -------------------------------------------------------------------------------- /gsconverter/utils/conversion_functions.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3D Gaussian Splatting Converter 3 | Copyright (c) 2023 Francesco Fugazzi 4 | 5 | This software is released under the MIT License. 6 | For more information about the license, please see the LICENSE file. 
# (end of module docstring for gsconverter/utils/conversion_functions.py)

import numpy as np

from .format_3dgs import Format3dgs
from .format_cc import FormatCC
from .format_parquet import FormatParquet
from .utility_functions import debug_print
from .data_processing import process_data


def convert(data, source_format, target_format, **kwargs):
    """Convert a structured vertex array between point-cloud formats.

    Parameters
    ----------
    data : numpy structured array
        Vertex records, already extracted from the PLY/Parquet container
        by the caller (see main.py).
    source_format : str
        One of "3dgs", "cc", "parquet".
    target_format : str
        One of "3dgs", "cc".
    **kwargs
        Recognised processing flags: process_rgb, bbox, density_filter,
        remove_flyers. Other keys are ignored.

    Returns the converted structured array, or None when conversion is
    stopped (e.g. RGB already present). Raises ValueError for unsupported
    formats or conversions.
    """
    debug_print(f"[DEBUG] Starting conversion from {source_format} to {target_format}...")

    if source_format == "3dgs":
        converter = Format3dgs(data)
    elif source_format == "cc":
        converter = FormatCC(data)
    elif source_format == "parquet":
        converter = FormatParquet(data)
    else:
        raise ValueError("Unsupported source format")

    # Optional pre-processing (bbox crop / density filter / flyer removal).
    process_data(converter,
                 bbox=kwargs.get("bbox"),
                 apply_density_filter=kwargs.get("density_filter"),
                 remove_flyers=kwargs.get("remove_flyers"))

    # RGB processing for CC sources.
    if source_format == "cc":
        if kwargs.get("process_rgb", False) and converter.has_rgb():
            print("Error: Source CC file already contains RGB data. Conversion stopped.")
            return None
        debug_print("[DEBUG] Adding or ignoring RGB for CC data...")
        converter.add_or_ignore_rgb(process_rgb=kwargs.get("process_rgb", False))

    # Conversion operations.
    process_rgb_flag = kwargs.get("process_rgb", False)
    if source_format == "3dgs" and target_format == "cc":
        debug_print("[DEBUG] Converting 3DGS to CC...")
        return converter.to_cc(process_rgb=process_rgb_flag)
    elif source_format == "cc" and target_format == "3dgs":
        debug_print("[DEBUG] Converting CC to 3DGS...")
        return converter.to_3dgs()
    elif source_format == "parquet" and target_format == "cc":
        debug_print("[DEBUG] Converting Parquet to CC...")
        return converter.to_cc(process_rgb=process_rgb_flag)
    elif source_format == "parquet" and target_format == "3dgs":
        debug_print("[DEBUG] Converting Parquet to 3DGS...")
        return converter.to_3dgs()
    elif source_format == "3dgs" and target_format == "3dgs":
        debug_print("[DEBUG] Applying operations on 3DGS data...")
        # BUGFIX: only count the real processing flags. The old
        # `any(kwargs.values())` also saw unrelated kwargs (main forwarded a
        # truthy worker pool), so this branch could never trigger.
        flags = ("process_rgb", "bbox", "density_filter", "remove_flyers")
        if not any(kwargs.get(key) for key in flags):
            print("[INFO] No flags provided. The conversion will not happen as the output would be identical to the input.")
            # BUGFIX: *data* is already the vertex array; the old
            # `data['vertex'].data` would raise KeyError on a structured array.
            return data
        else:
            return converter.to_3dgs()
    elif source_format == "cc" and target_format == "cc":
        debug_print("[DEBUG] Applying operations on CC data...")
        converted_data = converter.to_cc()
        if isinstance(converted_data, np.ndarray):
            return converted_data
        else:
            # Fall back to the input array (see BUGFIX note above).
            return data
    else:
        raise ValueError("Unsupported conversion")

# ---------------------------------------------------------------------------
# /gsconverter/utils/format_3dgs.py:  (module header, MIT License,
# Copyright (c) 2023 Francesco Fugazzi; docstring continues on the next
# chunk line)
# ---------------------------------------------------------------------------
# (end of module docstring for gsconverter/utils/format_3dgs.py)

import numpy as np

from .base_converter import BaseConverter
from .utility_functions import debug_print
from .utility import Utility
from . import config


class Format3dgs(BaseConverter):
    """Converter for vertex data that is already in 3DGS layout."""

    def to_cc(self, process_rgb=True):
        """Convert 3DGS vertex data to the Cloud Compare layout.

        When *process_rgb* is true, derive red/green/blue columns from the
        f_dc coefficients; otherwise (or if the computation fails) only the
        prefixed scalar fields are emitted. Returns a new structured array.
        """
        debug_print("[DEBUG] Starting conversion from 3DGS to CC...")

        vertices = self.data
        debug_print(f"[DEBUG] Loaded {len(vertices)} vertices.")

        if process_rgb:
            debug_print("[DEBUG] RGB processing is enabled.")
            rgb_values = Utility.compute_rgb_from_vertex(vertices)

            if rgb_values is not None:
                # define_dtype returns a (dtype_description, prefix) pair.
                new_dtype, prefix = BaseConverter.define_dtype(has_scal=True, has_rgb=True)
                converted_data = np.zeros(vertices.shape, dtype=new_dtype)
                Utility.copy_data_with_prefix_check(vertices, converted_data, [prefix])
                converted_data['red'] = rgb_values[:, 0]
                converted_data['green'] = rgb_values[:, 1]
                converted_data['blue'] = rgb_values[:, 2]
                debug_print("RGB processing completed.")
            else:
                debug_print("[DEBUG] RGB computation failed. Skipping RGB processing.")
                process_rgb = False

        if not process_rgb:
            debug_print("[DEBUG] RGB processing is skipped.")
            new_dtype, prefix = BaseConverter.define_dtype(has_scal=True, has_rgb=False)
            converted_data = np.zeros(vertices.shape, dtype=new_dtype)
            Utility.copy_data_with_prefix_check(vertices, converted_data, [prefix])

        debug_print("[DEBUG] Conversion from 3DGS to CC completed.")
        return converted_data

    def to_3dgs(self):
        """Rewrite the (possibly filtered) data as a clean 3DGS array."""
        debug_print("[DEBUG] Starting conversion from 3DGS to 3DGS...")

        vertices = self.data
        debug_print(f"[DEBUG] Loaded {len(vertices)} vertices.")

        # BUGFIX: define_dtype returns (dtype, prefix) — see to_cc above and
        # FormatCC.add_or_ignore_rgb — but the old code passed the whole
        # tuple to np.zeros as the dtype. Unwrap defensively so either
        # return convention works.
        dtype_3dgs = self.define_dtype(has_scal=False, has_rgb=False)
        if isinstance(dtype_3dgs, tuple):
            dtype_3dgs = dtype_3dgs[0]
        converted_data = np.zeros(vertices.shape, dtype=dtype_3dgs)

        # Strip any CC-style scalar prefixes while copying.
        Utility.copy_data_with_prefix_check(vertices, converted_data, ["", "scal_", "scalar_", "scalar_scal_"])

        debug_print("[DEBUG] Data copying completed.")
        debug_print("[DEBUG] Sample of converted data (first 5 rows):")
        if config.DEBUG:
            # Guard against arrays with fewer than 5 rows.
            for i in range(min(5, len(converted_data))):
                debug_print(converted_data[i])

        debug_print("[DEBUG] Conversion from 3DGS to 3DGS completed.")
        return converted_data

# ---------------------------------------------------------------------------
# /gsconverter/utils/format_parquet.py:  (module header, MIT License,
# Copyright (c) 2023 Francesco Fugazzi; docstring continues on the next
# chunk line)
# ---------------------------------------------------------------------------
# (end of module docstring for gsconverter/utils/format_parquet.py)

import numpy as np

from .base_converter import BaseConverter
from .utility_functions import debug_print
from .utility import Utility
from . import config


class FormatParquet(BaseConverter):
    """Converter for vertex data loaded from a Parquet file.

    NOTE(review): this class mirrors Format3dgs almost line for line —
    consider hoisting the shared logic into BaseConverter.
    """

    def to_cc(self, process_rgb=True):
        """Convert Parquet vertex data to the Cloud Compare layout.

        When *process_rgb* is true, derive red/green/blue columns from the
        f_dc coefficients; otherwise (or if the computation fails) only the
        prefixed scalar fields are emitted. Returns a new structured array.
        """
        debug_print("[DEBUG] Starting conversion from PARQUET to CC...")

        vertices = self.data
        debug_print(f"[DEBUG] Loaded {len(vertices)} vertices.")

        if process_rgb:
            debug_print("[DEBUG] RGB processing is enabled.")
            rgb_values = Utility.compute_rgb_from_vertex(vertices)

            if rgb_values is not None:
                # define_dtype returns a (dtype_description, prefix) pair.
                new_dtype, prefix = BaseConverter.define_dtype(has_scal=True, has_rgb=True)
                converted_data = np.zeros(vertices.shape, dtype=new_dtype)
                Utility.copy_data_with_prefix_check(vertices, converted_data, [prefix])
                converted_data['red'] = rgb_values[:, 0]
                converted_data['green'] = rgb_values[:, 1]
                converted_data['blue'] = rgb_values[:, 2]
                debug_print("RGB processing completed.")
            else:
                debug_print("[DEBUG] RGB computation failed. Skipping RGB processing.")
                process_rgb = False

        if not process_rgb:
            debug_print("[DEBUG] RGB processing is skipped.")
            new_dtype, prefix = BaseConverter.define_dtype(has_scal=True, has_rgb=False)
            converted_data = np.zeros(vertices.shape, dtype=new_dtype)
            Utility.copy_data_with_prefix_check(vertices, converted_data, [prefix])

        debug_print("[DEBUG] Conversion from PARQUET to CC completed.")
        return converted_data

    def to_3dgs(self):
        """Rewrite the (possibly filtered) data as a clean 3DGS array."""
        debug_print("[DEBUG] Starting conversion from PARQUET to 3DGS...")

        vertices = self.data
        debug_print(f"[DEBUG] Loaded {len(vertices)} vertices.")

        # BUGFIX: define_dtype returns (dtype, prefix) — see to_cc above —
        # but the old code passed the whole tuple to np.zeros as the dtype.
        # Unwrap defensively so either return convention works.
        dtype_3dgs = self.define_dtype(has_scal=False, has_rgb=False)
        if isinstance(dtype_3dgs, tuple):
            dtype_3dgs = dtype_3dgs[0]
        converted_data = np.zeros(vertices.shape, dtype=dtype_3dgs)

        # Strip any CC-style scalar prefixes while copying.
        Utility.copy_data_with_prefix_check(vertices, converted_data, ["", "scal_", "scalar_", "scalar_scal_"])

        debug_print("[DEBUG] Data copying completed.")
        debug_print("[DEBUG] Sample of converted data (first 5 rows):")
        if config.DEBUG:
            # Guard against arrays with fewer than 5 rows.
            for i in range(min(5, len(converted_data))):
                debug_print(converted_data[i])

        debug_print("[DEBUG] Conversion from PARQUET to 3DGS completed.")
        return converted_data

# ---------------------------------------------------------------------------
# /gsconverter/utils/format_cc.py:  (module header, MIT License,
# Copyright (c) 2023 Francesco Fugazzi; docstring continues on the next
# chunk line)
# ---------------------------------------------------------------------------
# (end of module docstring for gsconverter/utils/format_cc.py)

import numpy as np

from .base_converter import BaseConverter
from .utility import Utility
from .utility_functions import debug_print
from . import config


class FormatCC(BaseConverter):
    """Converter for vertex data in Cloud Compare layout."""

    def to_3dgs(self):
        """Convert CC vertex data to the 3DGS layout (after any filtering)."""
        debug_print("[DEBUG] Starting conversion from CC to 3DGS...")

        vertices = self.data
        debug_print(f"[DEBUG] Loaded {len(vertices)} vertices.")

        # BUGFIX: define_dtype returns (dtype, prefix) — see
        # add_or_ignore_rgb below — but the old code passed the whole tuple
        # to np.zeros as the dtype. Unwrap defensively so either return
        # convention works.
        dtype_3dgs = self.define_dtype(has_scal=False, has_rgb=False)
        if isinstance(dtype_3dgs, tuple):
            dtype_3dgs = dtype_3dgs[0]
        converted_data = np.zeros(vertices.shape, dtype=dtype_3dgs)

        # Strip any CC-style scalar prefixes while copying.
        Utility.copy_data_with_prefix_check(vertices, converted_data, ["", "scal_", "scalar_", "scalar_scal_"])

        debug_print("[DEBUG] Data copying completed.")
        debug_print("[DEBUG] Sample of converted data (first 5 rows):")
        if config.DEBUG:
            # Guard against arrays with fewer than 5 rows.
            for i in range(min(5, len(converted_data))):
                debug_print(converted_data[i])

        debug_print("[DEBUG] Conversion from CC to 3DGS completed.")
        return converted_data

    def to_cc(self, process_rgb=False):
        """Return the CC data, optionally adding RGB columns first.

        NOTE(review): has_rgb()/add_rgb() are not defined in this file —
        presumably provided by BaseConverter; confirm.
        """
        debug_print("[DEBUG] Processing CC data...")

        if process_rgb and not self.has_rgb():
            self.add_rgb()
            debug_print("[DEBUG] RGB added to data.")
        else:
            debug_print("[DEBUG] RGB processing is skipped or data already has RGB.")

        converted_data = self.data

        debug_print("[DEBUG] CC data processing completed.")
        return converted_data

    def add_or_ignore_rgb(self, process_rgb=True):
        """Add red/green/blue fields derived from f_dc, or leave data as-is.

        Updates self.data in place when RGB is added and returns the
        (possibly new) array either way.
        """
        debug_print("[DEBUG] Checking RGB for CC data...")

        if process_rgb and not self.has_rgb():
            # Compute RGB values from the f_dc spherical-harmonic terms.
            rgb_values = Utility.compute_rgb_from_vertex(self.data)

            # define_dtype returns a (dtype_description, prefix) pair.
            new_dtype_list, _ = BaseConverter.define_dtype(has_scal=True, has_rgb=True)
            new_dtype = np.dtype(new_dtype_list)

            # New structured array with RGB fields, same number of rows.
            converted_data = np.zeros(self.data.shape[0], dtype=new_dtype)

            # Copy the existing fields across.
            # NOTE(review): this assumes every source field name exists
            # verbatim in the new dtype — confirm against define_dtype.
            for name in self.data.dtype.names:
                converted_data[name] = self.data[name]

            converted_data['red'] = rgb_values[:, 0]
            converted_data['green'] = rgb_values[:, 1]
            converted_data['blue'] = rgb_values[:, 2]

            self.data = converted_data  # Update the instance's data.
            debug_print("[DEBUG] RGB added to data.")
        else:
            debug_print("[DEBUG] RGB processing is skipped or data already has RGB.")
            converted_data = self.data

        debug_print("[DEBUG] RGB check for CC data completed.")
        return converted_data

# ---------------------------------------------------------------------------
# /gsconverter/main.py:  (module header, MIT License, Copyright (c) 2023
# Francesco Fugazzi; docstring continues on the next chunk line)
# ---------------------------------------------------------------------------
# (end of module docstring for gsconverter/main.py)

import argparse
import os
import sys
from multiprocessing import Pool

import numpy as np
from plyfile import PlyData, PlyElement

from .utils.utility import Utility
from .utils.conversion_functions import convert
from .utils import config
from .utils.utility_functions import init_worker
from .utils.argument_actions import DensityFilterAction, RemoveFlyersAction, AboutAction
from .utils.base_converter import BaseConverter

__version__ = '0.2'


def main():
    """Command-line entry point: parse arguments, load the input point
    cloud, run the requested conversion and save the result as .ply."""
    print(f"3D Gaussian Splatting Converter: {__version__}")

    parser = argparse.ArgumentParser(description="Convert between standard 3D Gaussian Splat and Cloud Compare formats.")

    # Arguments for input and output
    parser.add_argument("--input", "-i", required=True, help="Path to the source point cloud file.")
    parser.add_argument("--output", "-o", required=True, help="Path to save the converted point cloud file.")
    parser.add_argument("--target_format", "-f", choices=["3dgs", "cc"], required=True, help="Target point cloud format.")
    parser.add_argument("--debug", "-d", action="store_true", help="Enable debug prints.")
    parser.add_argument('--about', action=AboutAction, help='Show copyright and license info')

    # Other flags
    parser.add_argument("--rgb", action="store_true", help="Add RGB values to the output file based on f_dc values (only applicable when converting to Cloud Compare format).")
    parser.add_argument("--bbox", nargs=6, type=float, metavar=('minX', 'minY', 'minZ', 'maxX', 'maxY', 'maxZ'), help="Specify the 3D bounding box to crop the point cloud.")
    parser.add_argument("--density_filter", nargs='*', action=DensityFilterAction, help="Filter the points to keep only regions with higher point density. Optionally provide 'voxel_size' and 'threshold_percentage' as two numbers (e.g., --density_filter 0.5 0.25). If no numbers are provided, defaults of 1.0 and 0.32 are used.")
    parser.add_argument("--remove_flyers", nargs='*', action=RemoveFlyersAction, help="Remove flyers based on k-nearest neighbors. Requires two numbers: 'k' (number of neighbors) and 'threshold_factor'.")

    args = parser.parse_args()

    config.DEBUG = args.debug

    # Check and append ".ply" extension if absent (before the overwrite check).
    if not args.output.lower().endswith('.ply'):
        args.output += '.ply'

    if os.path.exists(args.output):
        user_response = input(f"File {args.output} already exists. Do you want to overwrite it? (y/N): ").lower()
        if user_response != 'y':
            print("Operation aborted by the user.")
            return

    # Detect the format of the input file.
    if args.input.lower().endswith('.parquet'):
        source_format = 'parquet'
    else:
        source_format = Utility.text_based_detect_format(args.input)

    if not source_format:
        print("The provided file is not a recognized 3D Gaussian Splat point cloud format.")
        return

    print(f"Detected source format: {source_format}")

    # --rgb is only meaningful when converting *to* the CC format.
    if args.target_format == "3dgs" and args.rgb:
        if source_format == "3dgs":
            print("Error: --rgb flag is not applicable for 3dgs to 3dgs conversion.")
        elif source_format == "parquet":
            print("Error: --rgb flag is not applicable for parquet to 3dgs conversion.")
        else:
            print("Error: --rgb flag is not applicable for cc to 3dgs conversion.")
        return

    # Read the data from the input file based on the detected format.
    if source_format == 'parquet':
        structured_data = BaseConverter.load_parquet(args.input)
        print(f"Number of vertices: {len(structured_data)}")
    else:
        data = PlyData.read(args.input)
        if isinstance(data, PlyData) and 'vertex' in data:
            print(f"Number of vertices in the header: {len(data['vertex'].data)}")
            structured_data = data['vertex'].data
        else:
            print("Error: Data format is not PlyData with a 'vertex' field.")
            return

    # cc -> cc with --rgb but RGB already present: abort. FIX: reuse the
    # data loaded above instead of re-reading the file, and inspect the
    # public dtype names rather than plyfile's private _property_lookup.
    if source_format == "cc" and args.target_format == "cc" and args.rgb:
        if 'red' in (structured_data.dtype.names or ()):
            print("Error: Source CC file already contains RGB data. Conversion stopped.")
            return

    try:
        with Pool(initializer=init_worker) as pool:
            # If the bbox argument is provided, extract its values.
            bbox_values = args.bbox if args.bbox else None

            # BUGFIX: the pool is no longer forwarded to convert() — it was
            # absorbed into **kwargs, where its truthiness defeated the
            # "no flags provided" detection for 3dgs -> 3dgs.
            converted_data = convert(structured_data, source_format, args.target_format,
                                     process_rgb=args.rgb,
                                     density_filter=args.density_filter,
                                     remove_flyers=args.remove_flyers,
                                     bbox=bbox_values)

    except KeyboardInterrupt:
        print("Caught KeyboardInterrupt, terminating workers")
        pool.terminate()
        pool.join()
        sys.exit(-1)

    # Check if the conversion actually happened and save the result.
    if isinstance(converted_data, np.ndarray):
        PlyData([PlyElement.describe(converted_data, 'vertex')], byte_order='=').write(args.output)
        print(f"Conversion completed and saved to {args.output}.")
    else:
        print("Conversion was skipped.")


if __name__ == "__main__":
    main()

# ---------------------------------------------------------------------------
# (next chunk line: /gsconverter/utils/utility.py)
# ---------------------------------------------------------------------------
class Utility:
    """Stateless helpers shared by the converters: PLY format detection,
    prefix-aware field copying, RGB derivation from the SH DC terms, and
    the voxel/KNN statistics used by the density and flyer filters."""

    @staticmethod
    def text_based_detect_format(file_path):
        """Detect if the given file is in '3dgs' or 'cc' format.

        Returns '3dgs', 'cc', or None when neither header layout is found.
        """
        debug_print("[DEBUG] Executing 'text_based_detect_format' function...")

        # The PLY header is ASCII text at the front of the file; 2 KiB is
        # enough to sniff the property names without reading the payload.
        with open(file_path, 'rb') as file:
            header_bytes = file.read(2048)

        header = header_bytes.decode('utf-8', errors='ignore')

        if "property float f_dc_0" in header:
            debug_print("[DEBUG] Detected format: 3dgs")
            return "3dgs"
        elif "property float scal_f_dc_0" in header or "property float scalar_scal_f_dc_0" in header or "property float scalar_f_dc_0" in header:
            debug_print("[DEBUG] Detected format: cc")
            return "cc"
        else:
            return None

    @staticmethod
    def copy_data_with_prefix_check(source, target, possible_prefixes):
        """Copy fields between two structured arrays, bridging prefix gaps.

        Fields present in both dtypes are copied directly. For a source
        field absent from the target, each prefix in *possible_prefixes*
        is tried both ways — stripped from the source name, or prepended
        to it — and the data is copied under the first matching target
        name. Unmatched fields are silently skipped.
        """
        debug_print("[DEBUG] Executing 'copy_data_with_prefix_check' function...")

        for name in source.dtype.names:
            if name in target.dtype.names:
                target[name] = source[name]
                continue
            for prefix in possible_prefixes:
                if name.startswith(prefix):
                    # Source is prefixed: try the bare name in the target.
                    stripped_name = name[len(prefix):]
                    if stripped_name in target.dtype.names:
                        target[stripped_name] = source[name]
                        break
                else:
                    # Source is bare: try the prefixed name in the target.
                    prefixed_name = prefix + name
                    if prefixed_name in target.dtype.names:
                        debug_print(f"[DEBUG] Copying data from '{name}' to '{prefixed_name}'")
                        target[prefixed_name] = source[name]
                        break

    @staticmethod
    def compute_rgb_from_vertex(vertices):
        """Derive uint8 RGB colors from the SH DC components.

        Uses the plain 'f_dc_*' fields when present (3dgs layout) and
        falls back to the 'scalar_scal_f_dc_*' names otherwise.
        """
        debug_print("[DEBUG] Executing 'compute_rgb_from_vertex' function...")

        # Depending on the available field names, choose the appropriate ones
        if 'f_dc_0' in vertices.dtype.names:
            f_dc = np.column_stack((vertices['f_dc_0'], vertices['f_dc_1'], vertices['f_dc_2']))
        else:
            f_dc = np.column_stack((vertices['scalar_scal_f_dc_0'], vertices['scalar_scal_f_dc_1'], vertices['scalar_scal_f_dc_2']))

        # Map the nominal [-1, 1] DC range onto [0, 255], clamping overshoot.
        colors = (f_dc + 1) * 127.5
        colors = np.clip(colors, 0, 255).astype(np.uint8)

        debug_print("[DEBUG] RGB colors computed.")
        return colors

    @staticmethod
    def parallel_voxel_counting(vertices, voxel_size=1.0):
        """Counts the number of points in each voxel in a parallelized manner.

        Splits *vertices* into per-process chunks, counts voxels in a
        worker pool, then merges the per-chunk dictionaries.
        """
        debug_print("[DEBUG] Executing 'parallel_voxel_counting' function...")

        num_processes = cpu_count()
        # max(1, ...) guards tiny inputs: len(vertices) // num_processes is 0
        # when there are fewer points than CPUs, and a zero step would make
        # range() raise ValueError.
        chunk_size = max(1, len(vertices) // num_processes)
        chunks = [vertices[i:i + chunk_size] for i in range(0, len(vertices), chunk_size)]

        num_cores = max(1, multiprocessing.cpu_count() - 1)  # leave one core free
        with Pool(processes=num_cores, initializer=init_worker) as pool:
            results = pool.starmap(Utility.count_voxels_chunk, [(chunk, voxel_size) for chunk in chunks])

        # Aggregate results from all processes into one global count.
        total_voxel_counts = {}
        for result in results:
            for k, v in result.items():
                total_voxel_counts[k] = total_voxel_counts.get(k, 0) + v

        debug_print(f"[DEBUG] Voxel counting completed with {len(total_voxel_counts)} unique voxels found.")
        return total_voxel_counts

    @staticmethod
    def count_voxels_chunk(vertices_chunk, voxel_size):
        """Count the number of points in each voxel for a chunk of vertices."""
        debug_print("[DEBUG] Executing 'count_voxels_chunk' function for a chunk...")

        voxel_counts = {}
        for vertex in vertices_chunk:
            # NOTE(review): int() truncates toward zero, so the voxel at the
            # origin spans (-voxel_size, voxel_size) — twice the width of the
            # others. Kept as-is because apply_density_filter quantizes the
            # same way; change both together if this is ever revisited.
            voxel_coords = (int(vertex['x'] / voxel_size), int(vertex['y'] / voxel_size), int(vertex['z'] / voxel_size))
            voxel_counts[voxel_coords] = voxel_counts.get(voxel_coords, 0) + 1

        debug_print(f"[DEBUG] Chunk processed with {len(voxel_counts)} voxels counted.")
        return voxel_counts

    @staticmethod
    def get_neighbors(voxel_coords):
        """Get the face-touching neighbors of the given voxel coordinates."""
        debug_print(f"[DEBUG] Getting neighbors for voxel: {voxel_coords}...")

        x, y, z = voxel_coords
        return [
            (x - 1, y, z), (x + 1, y, z),
            (x, y - 1, z), (x, y + 1, z),
            (x, y, z - 1), (x, y, z + 1),
        ]

    @staticmethod
    def knn_worker(args):
        """Utility function for parallel KNN computation.

        *args* is (coords, fitted neighbors tree, k). Returns the mean
        distance to the tree's neighbors, excluding column 0 (the query
        point itself at distance 0).
        """
        debug_print(f"[DEBUG] Executing 'knn_worker' function for vertex: {args[0]}...")

        coords, tree, k = args
        coords = coords.reshape(1, -1)  # kneighbors expects a 2-D array
        distances, _ = tree.kneighbors(coords)
        avg_distance = np.mean(distances[:, 1:])

        debug_print(f"[DEBUG] Average distance computed for vertex: {args[0]} is {avg_distance}.")
        return avg_distance
class BaseConverter:
    """Shared vertex-cloud container with format-agnostic filtering,
    cropping and loading helpers used by the concrete converters."""

    def __init__(self, data):
        # Expected to be a numpy structured array of vertices; the filter
        # methods below enforce this explicitly.
        self.data = data

    @staticmethod
    def extract_vertex_data(vertices, has_scal=True, has_rgb=False):
        """Extract and convert vertex data from a structured numpy array.

        Returns a list of per-vertex tuples in define_dtype field order.
        Declared @staticmethod (it never used self), so class-level calls
        behave exactly as before and instance calls no longer swallow the
        vertices argument as self.
        """
        debug_print("[DEBUG] Executing 'extract_vertex_data' function...")
        converted_data = []

        # NOTE(review): this reader uses 'scal_' while define_dtype writes
        # 'scalar_scal_' — presumably the two prefixes belong to different
        # stages of the CloudCompare round-trip; confirm before unifying.
        prefix = 'scal_' if has_scal else ''
        debug_print(f"[DEBUG] Prefix determined as: {prefix}")

        # Iterate over each vertex and extract the necessary attributes
        for vertex in vertices:
            entry = (
                vertex['x'], vertex['y'], vertex['z'],
                vertex['nx'], vertex['ny'], vertex['nz'],
                vertex[f'{prefix}f_dc_0'], vertex[f'{prefix}f_dc_1'], vertex[f'{prefix}f_dc_2'],
                *[vertex[f'{prefix}f_rest_{i}'] for i in range(45)],
                vertex[f'{prefix}opacity'],
                vertex[f'{prefix}scale_0'], vertex[f'{prefix}scale_1'], vertex[f'{prefix}scale_2'],
                vertex[f'{prefix}rot_0'], vertex[f'{prefix}rot_1'], vertex[f'{prefix}rot_2'], vertex[f'{prefix}rot_3']
            )

            # If the point cloud contains RGB data, append it to the entry
            if has_rgb:
                entry += (vertex['red'], vertex['green'], vertex['blue'])

            converted_data.append(entry)

        debug_print("[DEBUG] 'extract_vertex_data' function completed.")
        return converted_data

    def apply_density_filter(self, voxel_size=1.0, threshold_percentage=0.32):
        """Keep only the largest connected cluster of dense voxels.

        A voxel is "dense" when it holds at least threshold_percentage
        percent of all vertices; clusters grow through face-touching
        neighbors. Updates self.data in place and returns it.
        """
        debug_print("[DEBUG] Executing 'apply_density_filter' function...")
        if not isinstance(self.data, np.ndarray):
            raise TypeError("self.data must be a numpy structured array.")

        vertices = self.data

        # Convert threshold_percentage into a ratio of the total count.
        threshold_ratio = threshold_percentage / 100.0

        # Parallelized voxel counting
        voxel_counts = Utility.parallel_voxel_counting(vertices, voxel_size)

        threshold = int(len(vertices) * threshold_ratio)
        dense_voxels = {k: v for k, v in voxel_counts.items() if v >= threshold}

        # BFS over face-adjacent dense voxels to find the largest cluster.
        visited = set()
        max_cluster = set()
        for voxel in dense_voxels:
            if voxel not in visited:
                current_cluster = set()
                queue = deque([voxel])
                while queue:
                    current_voxel = queue.popleft()
                    visited.add(current_voxel)
                    current_cluster.add(current_voxel)
                    for neighbor in Utility.get_neighbors(current_voxel):
                        if neighbor in dense_voxels and neighbor not in visited:
                            queue.append(neighbor)
                            visited.add(neighbor)
                if len(current_cluster) > len(max_cluster):
                    max_cluster = current_cluster

        # Vectorized retention mask. astype(int) truncates toward zero,
        # matching the per-vertex int() quantization used when counting.
        vx = (vertices['x'] / voxel_size).astype(int)
        vy = (vertices['y'] / voxel_size).astype(int)
        vz = (vertices['z'] / voxel_size).astype(int)
        mask = np.fromiter(
            ((x, y, z) in max_cluster for x, y, z in zip(vx, vy, vz)),
            dtype=bool, count=len(vertices))
        self.data = vertices[mask]

        # Informative print statement
        print(f"After density filter, retained {len(self.data)} out of {len(vertices)} vertices.")
        return self.data

    def remove_flyers(self, k=25, threshold_factor=10.5, chunk_size=50000):
        """Drop isolated points ("flyers") via a chunked KNN distance test.

        Within each chunk, a point is kept when its mean distance to its
        nearest neighbors is below mean + threshold_factor * std of the
        chunk's distances. Updates self.data in place and returns it.
        """
        debug_print("[DEBUG] Executing 'remove_flyers' function...")

        if not isinstance(self.data, np.ndarray):
            raise TypeError("self.data must be a numpy structured array.")

        vertices = self.data
        num_vertices = len(vertices)
        debug_print(f"[DEBUG] Number of input vertices: {num_vertices}")

        # Clamp k between 3 and 1% of the cloud so tiny inputs stay sane.
        k = max(3, min(k, num_vertices // 100))
        debug_print(f"[DEBUG] Adjusted k to: {k}")

        num_chunks = (num_vertices + chunk_size - 1) // chunk_size  # ceiling division
        masks = []

        num_cores = max(1, cpu_count() - 1)  # leave one core free
        with Pool(processes=num_cores, initializer=init_worker) as pool:
            for i in range(num_chunks):
                start_idx = i * chunk_size
                end_idx = min(start_idx + chunk_size, num_vertices)  # avoid going out of bounds
                chunk_coords = np.vstack((vertices['x'][start_idx:end_idx], vertices['y'][start_idx:end_idx], vertices['z'][start_idx:end_idx])).T

                # A short tail chunk may hold fewer than k+1 points; clamp
                # n_neighbors or NearestNeighbors would raise.
                n_neighbors = min(k + 1, len(chunk_coords))
                nbrs = NearestNeighbors(n_neighbors=n_neighbors, algorithm='ball_tree').fit(chunk_coords)
                avg_distances = pool.map(Utility.knn_worker, [(coord, nbrs, k) for coord in chunk_coords])

                # Outlier cutoff from this chunk's own distance statistics.
                threshold = np.mean(avg_distances) + threshold_factor * np.std(avg_distances)

                # Create a mask for points to retain for this chunk
                mask = np.array(avg_distances) < threshold
                masks.append(mask)

        combined_mask = np.concatenate(masks)
        self.data = vertices[combined_mask]

        print(f"After removing flyers, retained {np.count_nonzero(combined_mask)} out of {num_vertices} vertices.")
        return self.data

    @staticmethod
    def define_dtype(has_scal, has_rgb=False):
        """Build the structured dtype for a splat vertex.

        Returns (dtype_list, prefix); prefix is 'scalar_scal_' when
        has_scal is true, '' otherwise.
        """
        debug_print("[DEBUG] Executing 'define_dtype' function...")

        prefix = 'scalar_scal_' if has_scal else ''
        debug_print(f"[DEBUG] Prefix determined as: {prefix}")

        dtype = [
            ('x', 'f4'), ('y', 'f4'), ('z', 'f4'),
            ('nx', 'f4'), ('ny', 'f4'), ('nz', 'f4'),
            (f'{prefix}f_dc_0', 'f4'), (f'{prefix}f_dc_1', 'f4'), (f'{prefix}f_dc_2', 'f4'),
            *[(f'{prefix}f_rest_{i}', 'f4') for i in range(45)],
            (f'{prefix}opacity', 'f4'),
            (f'{prefix}scale_0', 'f4'), (f'{prefix}scale_1', 'f4'), (f'{prefix}scale_2', 'f4'),
            (f'{prefix}rot_0', 'f4'), (f'{prefix}rot_1', 'f4'), (f'{prefix}rot_2', 'f4'), (f'{prefix}rot_3', 'f4')
        ]
        debug_print("[DEBUG] Main dtype constructed.")

        if has_rgb:
            dtype.extend([('red', 'u1'), ('green', 'u1'), ('blue', 'u1')])
            debug_print("[DEBUG] RGB fields added to dtype.")

        debug_print("[DEBUG] 'define_dtype' function completed.")
        return dtype, prefix

    def has_rgb(self):
        """True when the vertex dtype carries red/green/blue fields."""
        names = self.data.dtype.names
        return 'red' in names and 'green' in names and 'blue' in names

    def crop_by_bbox(self, min_x, min_y, min_z, max_x, max_y, max_z):
        """Keep only vertices inside the inclusive axis-aligned bbox."""
        self.data = self.data[
            (self.data['x'] >= min_x) &
            (self.data['x'] <= max_x) &
            (self.data['y'] >= min_y) &
            (self.data['y'] <= max_y) &
            (self.data['z'] >= min_z) &
            (self.data['z'] <= max_z)
        ]
        debug_print(f"[DEBUG] Number of vertices after cropping: {len(self.data)}")

        # Informative print statement
        print(f"After cropping, retained {len(self.data)} vertices.")

        return self.data

    @staticmethod
    def load_parquet(file_path):
        """Load a Parquet splat file into the standard structured array.

        Columns are renamed from the Parquet schema (r_sh*/g_sh*/b_sh*,
        alpha, cov_*) to the PLY-style names from define_dtype; normals
        and any other absent column are zero-filled.
        """
        df = pd.read_parquet(file_path)

        # Parquet column -> structured-array field. Note the quaternion
        # reordering: cov_q3 maps to rot_0.
        column_mapping = {
            'x': 'x', 'y': 'y', 'z': 'z',
            'alpha': 'opacity',
            'cov_s0': 'scale_0', 'cov_s1': 'scale_1', 'cov_s2': 'scale_2',
            'cov_q3': 'rot_0', 'cov_q0': 'rot_1', 'cov_q1': 'rot_2', 'cov_q2': 'rot_3',
        }
        # SH terms: <ch>_sh0 is the DC component; <ch>_sh1..15 pack into
        # f_rest_0..44 in channel-major order (r: 0-14, g: 15-29, b: 30-44).
        for ch_idx, ch in enumerate(('r', 'g', 'b')):
            column_mapping[f'{ch}_sh0'] = f'f_dc_{ch_idx}'
            for band in range(1, 16):
                column_mapping[f'{ch}_sh{band}'] = f'f_rest_{ch_idx * 15 + band - 1}'

        # Normals are not stored in this Parquet layout; synthesize zeros.
        for col in ['nx', 'ny', 'nz']:
            if col not in df.columns:
                df[col] = 0.0

        # Rename the DataFrame columns according to the mapping
        df_renamed = df.rename(columns=column_mapping)

        dtype_list, _ = BaseConverter.define_dtype(has_scal=False, has_rgb=False)
        dtype_structured = np.dtype(dtype_list)

        # Convert DataFrame to a structured array; absent fields default to 0.
        structured_array = np.zeros(df_renamed.shape[0], dtype=dtype_structured)
        for name in dtype_structured.names:
            structured_array[name] = df_renamed[name].values if name in df_renamed.columns else 0

        return structured_array